Columns:
comment: string, length 16 to 8.84k
method_body: string, length 37 to 239k
target_code: string, length 0 to 242
method_body_after: string, length 29 to 239k
context_before: string, length 14 to 424k
context_after: string, length 14 to 284k
I wonder if we need a cleanup registry in the future to close all these connections when we shut down the JVM. (A sketch of such a registry follows this example.)
public void close() throws Exception { try (CloseableResource<JobServiceBlockingStub> jobService = this.jobService) {} }
try (CloseableResource<JobServiceBlockingStub> jobService = this.jobService) {}
public void close() { try (CloseableResource<JobServiceBlockingStub> jobService = this.jobService) { } catch (Exception e) { LOG.warn("Error cleaning up job service", e); } }
class JobServicePipelineResult implements PipelineResult, AutoCloseable { private static final long POLL_INTERVAL_MS = 10 * 1000; private static final Logger LOG = LoggerFactory.getLogger(JobServicePipelineResult.class); private final ByteString jobId; private final CloseableResource<JobServiceBlockingStub> jobService; @Nullable private State terminationState; JobServicePipelineResult(ByteString jobId, CloseableResource<JobServiceBlockingStub> jobService) { this.jobId = jobId; this.jobService = jobService; this.terminationState = null; } @Override public State getState() { if(terminationState != null){ return terminationState; } JobServiceBlockingStub stub = jobService.get(); GetJobStateResponse response = stub.getState(GetJobStateRequest.newBuilder().setJobIdBytes(jobId).build()); return getJavaState(response.getState()); } @Override public State cancel() { JobServiceBlockingStub stub = jobService.get(); CancelJobResponse response = stub.cancel(CancelJobRequest.newBuilder().setJobIdBytes(jobId).build()); return getJavaState(response.getState()); } @Nullable @Override public State waitUntilFinish(Duration duration) { if (duration.compareTo(Duration.millis(1)) < 1) { return waitUntilFinish(); } else { CompletableFuture<State> result = CompletableFuture.supplyAsync(this::waitUntilFinish); try { return result.get(duration.getMillis(), TimeUnit.MILLISECONDS); } catch (TimeoutException e) { return null; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } catch (ExecutionException e) { throw new RuntimeException(e); } } } @Override public State waitUntilFinish() { if(terminationState != null){ return terminationState; } JobServiceBlockingStub stub = jobService.get(); GetJobStateRequest request = GetJobStateRequest.newBuilder().setJobIdBytes(jobId).build(); GetJobStateResponse response = stub.getState(request); State lastState = getJavaState(response.getState()); while (!lastState.isTerminal()) { try { Thread.sleep(POLL_INTERVAL_MS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } response = stub.getState(request); lastState = getJavaState(response.getState()); } terminationState = lastState; return lastState; } @Override public MetricResults metrics() { throw new UnsupportedOperationException("Not yet implemented."); } private static State getJavaState(JobApi.JobState.Enum protoState) { switch (protoState) { case UNSPECIFIED: return State.UNKNOWN; case STOPPED: return State.STOPPED; case RUNNING: return State.RUNNING; case DONE: return State.DONE; case FAILED: return State.FAILED; case CANCELLED: return State.CANCELLED; case UPDATED: return State.UPDATED; case DRAINING: return State.UNKNOWN; case DRAINED: return State.UNKNOWN; case STARTING: return State.RUNNING; case CANCELLING: return State.CANCELLED; default: LOG.warn("Unrecognized state from server: {}", protoState); return State.UNKNOWN; } } }
class JobServicePipelineResult implements PipelineResult, AutoCloseable { private static final long POLL_INTERVAL_MS = 10 * 1000; private static final Logger LOG = LoggerFactory.getLogger(JobServicePipelineResult.class); private final ByteString jobId; private final CloseableResource<JobServiceBlockingStub> jobService; @Nullable private State terminationState; JobServicePipelineResult(ByteString jobId, CloseableResource<JobServiceBlockingStub> jobService) { this.jobId = jobId; this.jobService = jobService; this.terminationState = null; } @Override public State getState() { if (terminationState != null) { return terminationState; } JobServiceBlockingStub stub = jobService.get(); GetJobStateResponse response = stub.getState(GetJobStateRequest.newBuilder().setJobIdBytes(jobId).build()); return getJavaState(response.getState()); } @Override public State cancel() { JobServiceBlockingStub stub = jobService.get(); CancelJobResponse response = stub.cancel(CancelJobRequest.newBuilder().setJobIdBytes(jobId).build()); return getJavaState(response.getState()); } @Nullable @Override public State waitUntilFinish(Duration duration) { if (duration.compareTo(Duration.millis(1)) < 1) { return waitUntilFinish(); } else { CompletableFuture<State> result = CompletableFuture.supplyAsync(this::waitUntilFinish); try { return result.get(duration.getMillis(), TimeUnit.MILLISECONDS); } catch (TimeoutException e) { return null; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } catch (ExecutionException e) { throw new RuntimeException(e); } } } @Override public State waitUntilFinish() { if (terminationState != null) { return terminationState; } JobServiceBlockingStub stub = jobService.get(); GetJobStateRequest request = GetJobStateRequest.newBuilder().setJobIdBytes(jobId).build(); GetJobStateResponse response = stub.getState(request); State lastState = getJavaState(response.getState()); while (!lastState.isTerminal()) { try { Thread.sleep(POLL_INTERVAL_MS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } response = stub.getState(request); lastState = getJavaState(response.getState()); } close(); terminationState = lastState; return lastState; } @Override public MetricResults metrics() { throw new UnsupportedOperationException("Not yet implemented."); } private static State getJavaState(JobApi.JobState.Enum protoState) { switch (protoState) { case UNSPECIFIED: return State.UNKNOWN; case STOPPED: return State.STOPPED; case RUNNING: return State.RUNNING; case DONE: return State.DONE; case FAILED: return State.FAILED; case CANCELLED: return State.CANCELLED; case UPDATED: return State.UPDATED; case DRAINING: return State.UNKNOWN; case DRAINED: return State.UNKNOWN; case STARTING: return State.RUNNING; case CANCELLING: return State.CANCELLED; default: LOG.warn("Unrecognized state from server: {}", protoState); return State.UNKNOWN; } } }
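A minimal sketch of the kind of JVM-shutdown cleanup registry the comment above wonders about, assuming a simple registry of AutoCloseable resources drained by a shutdown hook; CleanupRegistry and register are hypothetical names, not existing Beam API:

import java.util.ArrayDeque;
import java.util.Deque;

public final class CleanupRegistry {
  // Hypothetical registry: resources register themselves and are closed once,
  // in reverse registration order, when the JVM shuts down.
  private static final Deque<AutoCloseable> RESOURCES = new ArrayDeque<>();

  static {
    Runtime.getRuntime()
        .addShutdownHook(
            new Thread(
                () -> {
                  synchronized (RESOURCES) {
                    while (!RESOURCES.isEmpty()) {
                      try {
                        RESOURCES.pop().close();
                      } catch (Exception e) {
                        // Best effort: keep closing the remaining resources.
                        e.printStackTrace();
                      }
                    }
                  }
                }));
  }

  private CleanupRegistry() {}

  /** Registers a resource to be closed at JVM shutdown and returns it unchanged. */
  public static <T extends AutoCloseable> T register(T resource) {
    synchronized (RESOURCES) {
      RESOURCES.push(resource);
    }
    return resource;
  }
}

With something like this in place, the connection could be wrapped as CleanupRegistry.register(jobService) at construction time, so it is closed at JVM exit even if waitUntilFinish() never reaches a terminal state.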
Same as above. Sorry, I forgot to revert this when I reverted the merge of FLINK-21328 and FLINK-21330.
public void returnsIncidentBlockingPartitions() throws Exception { final JobVertex a = ExecutionGraphTestUtils.createNoOpVertex(1); final JobVertex b = ExecutionGraphTestUtils.createNoOpVertex(1); final JobVertex c = ExecutionGraphTestUtils.createNoOpVertex(1); final JobVertex d = ExecutionGraphTestUtils.createNoOpVertex(1); final JobVertex e = ExecutionGraphTestUtils.createNoOpVertex(1); b.connectNewDataSetAsInput(a, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); c.connectNewDataSetAsInput(b, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); d.connectNewDataSetAsInput(b, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); e.connectNewDataSetAsInput(c, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); e.connectNewDataSetAsInput(d, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); final DefaultExecutionGraph simpleTestGraph = ExecutionGraphTestUtils.createSimpleTestGraph(a, b, c, d, e); final DefaultExecutionTopology topology = DefaultExecutionTopology.fromExecutionGraph(simpleTestGraph); final DefaultSchedulingPipelinedRegion firstPipelinedRegion = topology.getPipelinedRegionOfVertex(new ExecutionVertexID(a.getID(), 0)); final DefaultSchedulingPipelinedRegion secondPipelinedRegion = topology.getPipelinedRegionOfVertex(new ExecutionVertexID(e.getID(), 0)); final DefaultExecutionVertex vertexB0 = topology.getVertex(new ExecutionVertexID(b.getID(), 0)); final IntermediateResultPartitionID b0ConsumedResultPartition = Iterables.getOnlyElement(vertexB0.getConsumerPartitionGroups().get(0)); final Set<IntermediateResultPartitionID> secondPipelinedRegionConsumedResults = IterableUtils.toStream(secondPipelinedRegion.getConsumedResults()) .map(DefaultResultPartition::getId) .collect(Collectors.toSet()); assertThat(firstPipelinedRegion.getConsumedResults().iterator().hasNext(), is(false)); assertThat(secondPipelinedRegionConsumedResults, contains(b0ConsumedResultPartition)); }
Iterables.getOnlyElement(vertexB0.getConsumerPartitionGroups().get(0));
public void returnsIncidentBlockingPartitions() throws Exception { final JobVertex a = ExecutionGraphTestUtils.createNoOpVertex(1); final JobVertex b = ExecutionGraphTestUtils.createNoOpVertex(1); final JobVertex c = ExecutionGraphTestUtils.createNoOpVertex(1); final JobVertex d = ExecutionGraphTestUtils.createNoOpVertex(1); final JobVertex e = ExecutionGraphTestUtils.createNoOpVertex(1); b.connectNewDataSetAsInput(a, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); c.connectNewDataSetAsInput(b, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); d.connectNewDataSetAsInput(b, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); e.connectNewDataSetAsInput(c, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); e.connectNewDataSetAsInput(d, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); final DefaultExecutionGraph simpleTestGraph = ExecutionGraphTestUtils.createSimpleTestGraph(a, b, c, d, e); final DefaultExecutionTopology topology = DefaultExecutionTopology.fromExecutionGraph(simpleTestGraph); final DefaultSchedulingPipelinedRegion firstPipelinedRegion = topology.getPipelinedRegionOfVertex(new ExecutionVertexID(a.getID(), 0)); final DefaultSchedulingPipelinedRegion secondPipelinedRegion = topology.getPipelinedRegionOfVertex(new ExecutionVertexID(e.getID(), 0)); final DefaultExecutionVertex vertexB0 = topology.getVertex(new ExecutionVertexID(b.getID(), 0)); final IntermediateResultPartitionID b0ConsumedResultPartition = Iterables.getOnlyElement(vertexB0.getConsumedResults()).getId(); final Set<IntermediateResultPartitionID> secondPipelinedRegionConsumedResults = IterableUtils.toStream(secondPipelinedRegion.getConsumedResults()) .map(DefaultResultPartition::getId) .collect(Collectors.toSet()); assertThat(firstPipelinedRegion.getConsumedResults().iterator().hasNext(), is(false)); assertThat(secondPipelinedRegionConsumedResults, contains(b0ConsumedResultPartition)); }
class DefaultSchedulingPipelinedRegionTest extends TestLogger { @Test public void gettingUnknownVertexThrowsException() { final DefaultSchedulingPipelinedRegion pipelinedRegion = new DefaultSchedulingPipelinedRegion(Collections.emptySet()); final ExecutionVertexID unknownVertexId = new ExecutionVertexID(new JobVertexID(), 0); try { pipelinedRegion.getVertex(unknownVertexId); fail("Expected exception not thrown"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString(unknownVertexId + " not found")); } } @Test public void returnsVertices() { final DefaultExecutionVertex vertex = new DefaultExecutionVertex( new ExecutionVertexID(new JobVertexID(), 0), Collections.emptyList(), () -> ExecutionState.CREATED); final Set<DefaultExecutionVertex> vertices = Collections.singleton(vertex); final DefaultSchedulingPipelinedRegion pipelinedRegion = new DefaultSchedulingPipelinedRegion(vertices); final Iterator<DefaultExecutionVertex> vertexIterator = pipelinedRegion.getVertices().iterator(); assertThat(vertexIterator.hasNext(), is(true)); assertThat(vertexIterator.next(), is(sameInstance(vertex))); assertThat(vertexIterator.hasNext(), is(false)); } /** * Tests if the consumed inputs of the pipelined regions are computed correctly using the Job * graph below. * * <pre> * c * / X * a -+- b e * \ / * d * </pre> * * <p>Pipelined regions: {a}, {b, c, d, e} */ @Test }
class DefaultSchedulingPipelinedRegionTest extends TestLogger { @Test public void gettingUnknownVertexThrowsException() { final DefaultSchedulingPipelinedRegion pipelinedRegion = new DefaultSchedulingPipelinedRegion(Collections.emptySet()); final ExecutionVertexID unknownVertexId = new ExecutionVertexID(new JobVertexID(), 0); try { pipelinedRegion.getVertex(unknownVertexId); fail("Expected exception not thrown"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString(unknownVertexId + " not found")); } } @Test public void returnsVertices() { final DefaultExecutionVertex vertex = new DefaultExecutionVertex( new ExecutionVertexID(new JobVertexID(), 0), Collections.emptyList(), () -> ExecutionState.CREATED); final Set<DefaultExecutionVertex> vertices = Collections.singleton(vertex); final DefaultSchedulingPipelinedRegion pipelinedRegion = new DefaultSchedulingPipelinedRegion(vertices); final Iterator<DefaultExecutionVertex> vertexIterator = pipelinedRegion.getVertices().iterator(); assertThat(vertexIterator.hasNext(), is(true)); assertThat(vertexIterator.next(), is(sameInstance(vertex))); assertThat(vertexIterator.hasNext(), is(false)); } /** * Tests if the consumed inputs of the pipelined regions are computed correctly using the Job * graph below. * * <pre> * c * / X * a -+- b e * \ / * d * </pre> * * <p>Pipelined regions: {a}, {b, c, d, e} */ @Test }
Because the projection includes all join output columns (JoinOrder.java:312), it may contain some unnecessary columns, so we need to prune again. (A simplified sketch of this pruning follows this example.)
public OptExpression rewrite(OptExpression optExpression, ColumnRefSet requiredColumns) { Operator operator = optExpression.getOp(); if (operator.getProjection() != null && operator.getProjection().getColumnRefMap().size() > 1) { Projection projection = operator.getProjection(); List<ColumnRefOperator> outputColumns = Lists.newArrayList(); for (ColumnRefOperator key : projection.getColumnRefMap().keySet()) { if (requiredColumns.contains(key)) { outputColumns.add(key); } } if (outputColumns.size() == 0) { outputColumns.add(Utils.findSmallestColumnRef(projection.getOutputColumns())); } if (outputColumns.size() != projection.getColumnRefMap().size()) { Map<ColumnRefOperator, ScalarOperator> newOutputProjections = Maps.newHashMap(); for (ColumnRefOperator ref : outputColumns) { newOutputProjections.put(ref, projection.getColumnRefMap().get(ref)); } optExpression.getOp().setProjection(new Projection(newOutputProjections)); } for (ScalarOperator value : optExpression.getOp().getProjection().getColumnRefMap().values()) { requiredColumns.union(value.getUsedColumns()); } } return optExpression.getOp().accept(this, optExpression, requiredColumns); }
if (outputColumns.size() != projection.getColumnRefMap().size()) {
public OptExpression rewrite(OptExpression optExpression, ColumnRefSet requiredColumns) { Operator operator = optExpression.getOp(); if (operator.getProjection() != null && operator.getProjection().getColumnRefMap().size() > 1) { Projection projection = operator.getProjection(); List<ColumnRefOperator> outputColumns = Lists.newArrayList(); for (ColumnRefOperator key : projection.getColumnRefMap().keySet()) { if (requiredColumns.contains(key)) { outputColumns.add(key); } } if (outputColumns.size() == 0) { outputColumns.add(Utils.findSmallestColumnRef(projection.getOutputColumns())); } if (outputColumns.size() != projection.getColumnRefMap().size()) { Map<ColumnRefOperator, ScalarOperator> newOutputProjections = Maps.newHashMap(); for (ColumnRefOperator ref : outputColumns) { newOutputProjections.put(ref, projection.getColumnRefMap().get(ref)); } optExpression.getOp().setProjection(new Projection(newOutputProjections)); } for (ScalarOperator value : optExpression.getOp().getProjection().getColumnRefMap().values()) { requiredColumns.union(value.getUsedColumns()); } } return optExpression.getOp().accept(this, optExpression, requiredColumns); }
class OutputColumnsPrune extends OptExpressionVisitor<OptExpression, ColumnRefSet> { private final OptimizerContext optimizerContext; public OutputColumnsPrune(OptimizerContext optimizerContext) { this.optimizerContext = optimizerContext; } @Override public OptExpression visit(OptExpression optExpression, ColumnRefSet pruneOutputColumns) { return optExpression; } @Override public OptExpression visitLogicalJoin(OptExpression optExpression, ColumnRefSet requireColumns) { ColumnRefSet outputColumns = optExpression.getOutputColumns(); ColumnRefSet newOutputColumns = new ColumnRefSet(); for (int id : outputColumns.getColumnIds()) { if (requireColumns.contains(id)) { newOutputColumns.union(id); } } LogicalJoinOperator joinOperator = (LogicalJoinOperator) optExpression.getOp(); if (joinOperator.getProjection() == null && !newOutputColumns.isEmpty()) { joinOperator = new LogicalJoinOperator.Builder() .withOperator((LogicalJoinOperator) optExpression.getOp()) .setProjection(new Projection(newOutputColumns.getStream() .mapToObj(optimizerContext.getColumnRefFactory()::getColumnRef) .collect(Collectors.toMap(Function.identity(), Function.identity())), new HashMap<>())) .build(); } else if (joinOperator.getProjection() != null) { Preconditions.checkState( newOutputColumns.cardinality() >= joinOperator.getProjection().getColumnRefMap().size()); } requireColumns = ((LogicalJoinOperator) optExpression.getOp()).getRequiredChildInputColumns(); requireColumns.union(newOutputColumns); OptExpression left = rewrite(optExpression.inputAt(0), (ColumnRefSet) requireColumns.clone()); OptExpression right = rewrite(optExpression.inputAt(1), (ColumnRefSet) requireColumns.clone()); OptExpression joinOpt = OptExpression.create(joinOperator, Lists.newArrayList(left, right)); joinOpt.deriveLogicalPropertyItself(); ExpressionContext expressionContext = new ExpressionContext(joinOpt); StatisticsCalculator statisticsCalculator = new StatisticsCalculator( expressionContext, optimizerContext.getColumnRefFactory(), optimizerContext); statisticsCalculator.estimatorStats(); joinOpt.setStatistics(expressionContext.getStatistics()); return joinOpt; } }
class OutputColumnsPrune extends OptExpressionVisitor<OptExpression, ColumnRefSet> { private final OptimizerContext optimizerContext; public OutputColumnsPrune(OptimizerContext optimizerContext) { this.optimizerContext = optimizerContext; } @Override public OptExpression visit(OptExpression optExpression, ColumnRefSet pruneOutputColumns) { return optExpression; } @Override public OptExpression visitLogicalJoin(OptExpression optExpression, ColumnRefSet requireColumns) { ColumnRefSet outputColumns = optExpression.getOutputColumns(); ColumnRefSet newOutputColumns = new ColumnRefSet(); for (int id : outputColumns.getColumnIds()) { if (requireColumns.contains(id)) { newOutputColumns.union(id); } } LogicalJoinOperator joinOperator = (LogicalJoinOperator) optExpression.getOp(); if (joinOperator.getProjection() == null && !newOutputColumns.isEmpty()) { joinOperator = new LogicalJoinOperator.Builder() .withOperator((LogicalJoinOperator) optExpression.getOp()) .setProjection(new Projection(newOutputColumns.getStream() .mapToObj(optimizerContext.getColumnRefFactory()::getColumnRef) .collect(Collectors.toMap(Function.identity(), Function.identity())), new HashMap<>())) .build(); } else if (joinOperator.getProjection() != null) { Preconditions.checkState( newOutputColumns.cardinality() >= joinOperator.getProjection().getColumnRefMap().size()); } requireColumns = ((LogicalJoinOperator) optExpression.getOp()).getRequiredChildInputColumns(); requireColumns.union(newOutputColumns); OptExpression left = rewrite(optExpression.inputAt(0), (ColumnRefSet) requireColumns.clone()); OptExpression right = rewrite(optExpression.inputAt(1), (ColumnRefSet) requireColumns.clone()); OptExpression joinOpt = OptExpression.create(joinOperator, Lists.newArrayList(left, right)); joinOpt.deriveLogicalPropertyItself(); ExpressionContext expressionContext = new ExpressionContext(joinOpt); StatisticsCalculator statisticsCalculator = new StatisticsCalculator( expressionContext, optimizerContext.getColumnRefFactory(), optimizerContext); statisticsCalculator.estimatorStats(); joinOpt.setStatistics(expressionContext.getStatistics()); return joinOpt; } }
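A deliberately simplified sketch of the pruning step the comment above describes, using plain string maps instead of StarRocks' ColumnRefOperator and Projection types; every name here is hypothetical:

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public final class ProjectionPruneSketch {
  // Keep only the required columns of a projection map; if nothing is required,
  // keep one arbitrary column so the projection never becomes empty (the real
  // code uses Utils.findSmallestColumnRef for the same purpose).
  static Map<String, String> prune(Map<String, String> projection, Set<String> required) {
    Map<String, String> pruned = new LinkedHashMap<>();
    for (Map.Entry<String, String> entry : projection.entrySet()) {
      if (required.contains(entry.getKey())) {
        pruned.put(entry.getKey(), entry.getValue());
      }
    }
    if (pruned.isEmpty() && !projection.isEmpty()) {
      Map.Entry<String, String> first = projection.entrySet().iterator().next();
      pruned.put(first.getKey(), first.getValue());
    }
    return pruned;
  }

  public static void main(String[] args) {
    Map<String, String> projection = new LinkedHashMap<>();
    projection.put("a", "t1.a");
    projection.put("b", "t2.b"); // unnecessary: not required downstream
    projection.put("c", "t1.c");
    Set<String> required = new HashSet<>(Arrays.asList("a", "c"));
    System.out.println(prune(projection, required)); // prints {a=t1.a, c=t1.c}
  }
}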
This should not be true unless disconnect is being called somewhere. (See the Mockito stubbing sketch after the revised test below.)
public void testGetSizeBytesWhenFileNotFoundBatchRetry() throws Exception { JsonFactory jsonFactory = new JacksonFactory(); String contentBoundary = "batch_foobarbaz"; String contentBoundaryLine = "--" + contentBoundary; String endOfContentBoundaryLine = "--" + contentBoundary + "--"; GenericJson error = new GenericJson().set("error", new GenericJson().set("code", 404)); error.setFactory(jsonFactory); String content = contentBoundaryLine + "\n" + "Content-Type: application/http\n" + "\n" + "HTTP/1.1 404 Not Found\n" + "Content-Length: -1\n" + "\n" + error.toString() + "\n" + "\n" + endOfContentBoundaryLine + "\n"; thrown.expect(FileNotFoundException.class); final LowLevelHttpResponse mockResponse = Mockito.mock(LowLevelHttpResponse.class); when(mockResponse.getContentType()).thenReturn("multipart/mixed; boundary=" + contentBoundary); when(mockResponse.getStatusCode()).thenReturn(429, 200); when(mockResponse.getContent()).thenReturn(toStream(content)); MockHttpTransport mockTransport = new MockHttpTransport.Builder() .setLowLevelHttpRequest( new MockLowLevelHttpRequest() { @Override public LowLevelHttpResponse execute() throws IOException { return mockResponse; } }) .build(); GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); gcsUtil.setStorageClient( new Storage(mockTransport, Transport.getJsonFactory(), new RetryHttpRequestInitializer())); gcsUtil.fileSizes(ImmutableList.of(GcsPath.fromComponents("testbucket", "testobject"))); }
public void testGetSizeBytesWhenFileNotFoundBatchRetry() throws Exception { JsonFactory jsonFactory = new JacksonFactory(); String contentBoundary = "batch_foobarbaz"; String contentBoundaryLine = "--" + contentBoundary; String endOfContentBoundaryLine = "--" + contentBoundary + "--"; GenericJson error = new GenericJson().set("error", new GenericJson().set("code", 404)); error.setFactory(jsonFactory); String content = contentBoundaryLine + "\n" + "Content-Type: application/http\n" + "\n" + "HTTP/1.1 404 Not Found\n" + "Content-Length: -1\n" + "\n" + error.toString() + "\n" + "\n" + endOfContentBoundaryLine + "\n"; thrown.expect(FileNotFoundException.class); final LowLevelHttpResponse[] mockResponses = new LowLevelHttpResponse[] { Mockito.mock(LowLevelHttpResponse.class), Mockito.mock(LowLevelHttpResponse.class), }; when(mockResponses[0].getContentType()).thenReturn("text/plain"); when(mockResponses[1].getContentType()) .thenReturn("multipart/mixed; boundary=" + contentBoundary); when(mockResponses[0].getStatusCode()).thenReturn(429); when(mockResponses[1].getStatusCode()).thenReturn(200); when(mockResponses[0].getContent()).thenReturn(toStream("error")); when(mockResponses[1].getContent()).thenReturn(toStream(content)); MockHttpTransport mockTransport = new MockHttpTransport.Builder() .setLowLevelHttpRequest( new MockLowLevelHttpRequest() { int index = 0; @Override public LowLevelHttpResponse execute() throws IOException { return mockResponses[index++]; } }) .build(); GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); gcsUtil.setStorageClient( new Storage(mockTransport, Transport.getJsonFactory(), new RetryHttpRequestInitializer())); gcsUtil.fileSizes(ImmutableList.of(GcsPath.fromComponents("testbucket", "testobject"))); }
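To make the fix above concrete, here is a small sketch contrasting Mockito's consecutive stubbing on a single mock with the per-response mock array the revised test uses; the Response interface is hypothetical, not the google-http-client API:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.ByteArrayInputStream;
import java.io.InputStream;

public class ConsecutiveStubbingSketch {
  interface Response {
    int statusCode();

    InputStream content();
  }

  public static void main(String[] args) {
    // Consecutive stubbing: one mock answers 429 on the first call, 200 afterwards.
    Response single = mock(Response.class);
    when(single.statusCode()).thenReturn(429, 200);
    // Pitfall: the one stubbed stream is consumed by the first (failed) attempt,
    // so a retry against the same mock reads an already-exhausted stream.
    when(single.content()).thenReturn(new ByteArrayInputStream("body".getBytes()));

    // Safer: one mock per expected HTTP exchange, each with a fresh stream,
    // which is what the revised test's mockResponses array does.
    Response first = mock(Response.class);
    Response second = mock(Response.class);
    when(first.statusCode()).thenReturn(429);
    when(second.statusCode()).thenReturn(200);
    when(first.content()).thenReturn(new ByteArrayInputStream("error".getBytes()));
    when(second.content()).thenReturn(new ByteArrayInputStream("ok".getBytes()));

    System.out.println(single.statusCode()); // 429
    System.out.println(single.statusCode()); // 200
    System.out.println(second.statusCode()); // 200
  }
}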
class GcsUtilTest { @Rule public ExpectedException thrown = ExpectedException.none(); private static GcsOptions gcsOptionsWithTestCredential() { GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class); pipelineOptions.setGcpCredential(new TestCredential()); return pipelineOptions; } @Test public void testCreationWithDefaultOptions() { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); assertNotNull(pipelineOptions.getGcpCredential()); } @Test public void testUploadBufferSizeDefault() { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil util = pipelineOptions.getGcsUtil(); assertNull(util.getUploadBufferSizeBytes()); } @Test public void testUploadBufferSizeUserSpecified() { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); pipelineOptions.setGcsUploadBufferSizeBytes(12345); GcsUtil util = pipelineOptions.getGcsUtil(); assertEquals((Integer) 12345, util.getUploadBufferSizeBytes()); } @Test public void testCreationWithExecutorServiceProvided() { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); pipelineOptions.setExecutorService(Executors.newCachedThreadPool()); assertSame(pipelineOptions.getExecutorService(), pipelineOptions.getGcsUtil().executorService); } @Test public void testCreationWithGcsUtilProvided() { GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class); GcsUtil gcsUtil = Mockito.mock(GcsUtil.class); pipelineOptions.setGcsUtil(gcsUtil); assertSame(gcsUtil, pipelineOptions.getGcsUtil()); } @Test public void testMultipleThreadsCanCompleteOutOfOrderWithDefaultThreadPool() throws Exception { GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class); ExecutorService executorService = pipelineOptions.getExecutorService(); int numThreads = 100; final CountDownLatch[] countDownLatches = new CountDownLatch[numThreads]; for (int i = 0; i < numThreads; i++) { final int currentLatch = i; countDownLatches[i] = new CountDownLatch(1); executorService.execute( () -> { try { countDownLatches[currentLatch].await(); if (currentLatch > 0) { countDownLatches[currentLatch - 1].countDown(); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } }); } countDownLatches[countDownLatches.length - 1].countDown(); executorService.shutdown(); assertTrue( "Expected tasks to complete", executorService.awaitTermination(10, TimeUnit.SECONDS)); } @Test public void testGlobExpansion() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); Storage.Objects.List mockStorageList = Mockito.mock(Storage.Objects.List.class); Objects modelObjects = new Objects(); List<StorageObject> items = new ArrayList<>(); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file1name")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file2name")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file3name")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/otherfile")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/anotherfile")); 
modelObjects.setItems(items); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket", "testdirectory/otherfile")) .thenReturn(mockStorageGet); when(mockStorageObjects.list("testbucket")).thenReturn(mockStorageList); when(mockStorageGet.execute()) .thenReturn(new StorageObject().setBucket("testbucket").setName("testdirectory/otherfile")); when(mockStorageList.execute()).thenReturn(modelObjects); { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of(GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } } @Test public void testRecursiveGlobExpansion() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); Storage.Objects.List mockStorageList = Mockito.mock(Storage.Objects.List.class); Objects modelObjects = new Objects(); List<StorageObject> items = new ArrayList<>(); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/file1.txt")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/file2.txt")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/file3.txt")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/otherfile")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/anotherfile")); items.add(new StorageObject().setBucket("testbucket").setName("test/file4.txt")); modelObjects.setItems(items); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket", "test/directory/otherfile")) .thenReturn(mockStorageGet); when(mockStorageObjects.list("testbucket")).thenReturn(mockStorageList); when(mockStorageGet.execute()) .thenReturn( new StorageObject().setBucket("testbucket").setName("test/directory/otherfile")); when(mockStorageList.execute()).thenReturn(modelObjects); { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } } @Test public void testNonExistentObjectReturnsEmptyResult() throws IOException { GcsOptions pipelineOptions = 
gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); GcsPath pattern = GcsPath.fromUri("gs: GoogleJsonResponseException expectedException = googleJsonResponseException( HttpStatusCodes.STATUS_CODE_NOT_FOUND, "It don't exist", "Nothing here to see"); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get(pattern.getBucket(), pattern.getObject())) .thenReturn(mockStorageGet); when(mockStorageGet.execute()).thenThrow(expectedException); assertEquals(Collections.emptyList(), gcsUtil.expand(pattern)); } @Test public void testAccessDeniedObjectThrowsIOException() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); GcsPath pattern = GcsPath.fromUri("gs: GoogleJsonResponseException expectedException = googleJsonResponseException( HttpStatusCodes.STATUS_CODE_FORBIDDEN, "Waves hand mysteriously", "These aren't the buckets you're looking for"); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get(pattern.getBucket(), pattern.getObject())) .thenReturn(mockStorageGet); when(mockStorageGet.execute()).thenThrow(expectedException); thrown.expect(IOException.class); thrown.expectMessage("Unable to get the file object for path"); gcsUtil.expand(pattern); } @Test public void testFileSizeNonBatch() throws Exception { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket", "testobject")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenReturn(new StorageObject().setSize(BigInteger.valueOf(1000))); assertEquals(1000, gcsUtil.fileSize(GcsPath.fromComponents("testbucket", "testobject"))); } @Test public void testFileSizeWhenFileNotFoundNonBatch() throws Exception { MockLowLevelHttpResponse notFoundResponse = new MockLowLevelHttpResponse(); notFoundResponse.setContent(""); notFoundResponse.setStatusCode(HttpStatusCodes.STATUS_CODE_NOT_FOUND); MockHttpTransport mockTransport = new MockHttpTransport.Builder().setLowLevelHttpResponse(notFoundResponse).build(); GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); gcsUtil.setStorageClient(new Storage(mockTransport, Transport.getJsonFactory(), null)); thrown.expect(FileNotFoundException.class); gcsUtil.fileSize(GcsPath.fromComponents("testbucket", "testobject")); } @Test public void testRetryFileSizeNonBatch() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = 
Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.withMaxRetries(2).backoff()); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket", "testobject")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow(new SocketTimeoutException("SocketException")) .thenThrow(new SocketTimeoutException("SocketException")) .thenReturn(new StorageObject().setSize(BigInteger.valueOf(1000))); assertEquals( 1000, gcsUtil .getObject( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper()) .getSize() .longValue()); assertEquals(BackOff.STOP, mockBackOff.nextBackOffMillis()); } @Test public void testGetSizeBytesWhenFileNotFoundBatch() throws Exception { JsonFactory jsonFactory = new JacksonFactory(); String contentBoundary = "batch_foobarbaz"; String contentBoundaryLine = "--" + contentBoundary; String endOfContentBoundaryLine = "--" + contentBoundary + "--"; GenericJson error = new GenericJson().set("error", new GenericJson().set("code", 404)); error.setFactory(jsonFactory); String content = contentBoundaryLine + "\n" + "Content-Type: application/http\n" + "\n" + "HTTP/1.1 404 Not Found\n" + "Content-Length: -1\n" + "\n" + error.toString() + "\n" + "\n" + endOfContentBoundaryLine + "\n"; thrown.expect(FileNotFoundException.class); MockLowLevelHttpResponse notFoundResponse = new MockLowLevelHttpResponse() .setContentType("multipart/mixed; boundary=" + contentBoundary) .setContent(content) .setStatusCode(HttpStatusCodes.STATUS_CODE_OK); MockHttpTransport mockTransport = new MockHttpTransport.Builder().setLowLevelHttpResponse(notFoundResponse).build(); GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); gcsUtil.setStorageClient(new Storage(mockTransport, Transport.getJsonFactory(), null)); gcsUtil.fileSizes(ImmutableList.of(GcsPath.fromComponents("testbucket", "testobject"))); } @Test public void testRemoveWhenFileNotFound() throws Exception { JsonFactory jsonFactory = new JacksonFactory(); String contentBoundary = "batch_foobarbaz"; String contentBoundaryLine = "--" + contentBoundary; String endOfContentBoundaryLine = "--" + contentBoundary + "--"; GenericJson error = new GenericJson().set("error", new GenericJson().set("code", 404)); error.setFactory(jsonFactory); String content = contentBoundaryLine + "\n" + "Content-Type: application/http\n" + "\n" + "HTTP/1.1 404 Not Found\n" + "Content-Length: -1\n" + "\n" + error.toString() + "\n" + "\n" + endOfContentBoundaryLine + "\n"; final LowLevelHttpResponse mockResponse = Mockito.mock(LowLevelHttpResponse.class); when(mockResponse.getContentType()).thenReturn("multipart/mixed; boundary=" + contentBoundary); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getContent()).thenReturn(toStream(content)); MockLowLevelHttpRequest request = new MockLowLevelHttpRequest() { @Override public LowLevelHttpResponse execute() throws IOException { return mockResponse; } }; MockHttpTransport mockTransport = new MockHttpTransport.Builder().setLowLevelHttpRequest(request).build(); GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); gcsUtil.setStorageClient( new Storage(mockTransport, Transport.getJsonFactory(), new RetryHttpRequestInitializer())); gcsUtil.remove(Arrays.asList("gs: } @Test public void testCreateBucket() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); 
GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets.Insert mockStorageInsert = Mockito.mock(Storage.Buckets.Insert.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.insert(any(String.class), any(Bucket.class))) .thenReturn(mockStorageInsert); when(mockStorageInsert.execute()) .thenThrow(new SocketTimeoutException("SocketException")) .thenReturn(new Bucket()); gcsUtil.createBucket("a", new Bucket(), mockBackOff, new FastNanoClockAndSleeper()); } @Test public void testCreateBucketAccessErrors() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Insert mockStorageInsert = Mockito.mock(Storage.Buckets.Insert.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); GoogleJsonResponseException expectedException = googleJsonResponseException( HttpStatusCodes.STATUS_CODE_FORBIDDEN, "Waves hand mysteriously", "These aren't the buckets you're looking for"); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.insert(any(String.class), any(Bucket.class))) .thenReturn(mockStorageInsert); when(mockStorageInsert.execute()).thenThrow(expectedException); thrown.expect(AccessDeniedException.class); gcsUtil.createBucket("a", new Bucket(), mockBackOff, new FastNanoClockAndSleeper()); } @Test public void testBucketAccessible() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow(new SocketTimeoutException("SocketException")) .thenReturn(new Bucket()); assertTrue( gcsUtil.bucketAccessible( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper())); } @Test public void testBucketDoesNotExistBecauseOfAccessError() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); GoogleJsonResponseException expectedException = googleJsonResponseException( HttpStatusCodes.STATUS_CODE_FORBIDDEN, "Waves hand mysteriously", "These aren't the buckets you're looking for"); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); 
when(mockStorageGet.execute()).thenThrow(expectedException); assertFalse( gcsUtil.bucketAccessible( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper())); } @Test public void testBucketDoesNotExist() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow( googleJsonResponseException( HttpStatusCodes.STATUS_CODE_NOT_FOUND, "It don't exist", "Nothing here to see")); assertFalse( gcsUtil.bucketAccessible( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper())); } @Test public void testGetBucket() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow(new SocketTimeoutException("SocketException")) .thenReturn(new Bucket()); assertNotNull( gcsUtil.getBucket( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper())); } @Test public void testGetBucketNotExists() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow( googleJsonResponseException( HttpStatusCodes.STATUS_CODE_NOT_FOUND, "It don't exist", "Nothing here to see")); thrown.expect(FileNotFoundException.class); thrown.expectMessage("It don't exist"); gcsUtil.getBucket( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper()); } @Test public void testGCSChannelCloseIdempotent() throws IOException { GoogleCloudStorageReadOptions readOptions = GoogleCloudStorageReadOptions.builder().setFastFailOnNotFound(false).build(); SeekableByteChannel channel = new GoogleCloudStorageReadChannel( null, "dummybucket", "dummyobject", null, new ClientRequestHelper<>(), readOptions); channel.close(); channel.close(); } /** Builds a fake GoogleJsonResponseException for testing API error handling. 
*/ private static GoogleJsonResponseException googleJsonResponseException( final int status, final String reason, final String message) throws IOException { final JsonFactory jsonFactory = new JacksonFactory(); HttpTransport transport = new MockHttpTransport() { @Override public LowLevelHttpRequest buildRequest(String method, String url) throws IOException { ErrorInfo errorInfo = new ErrorInfo(); errorInfo.setReason(reason); errorInfo.setMessage(message); errorInfo.setFactory(jsonFactory); GenericJson error = new GenericJson(); error.set("code", status); error.set("errors", Arrays.asList(errorInfo)); error.setFactory(jsonFactory); GenericJson errorResponse = new GenericJson(); errorResponse.set("error", error); errorResponse.setFactory(jsonFactory); return new MockLowLevelHttpRequest() .setResponse( new MockLowLevelHttpResponse() .setContent(errorResponse.toPrettyString()) .setContentType(Json.MEDIA_TYPE) .setStatusCode(status)); } }; HttpRequest request = transport.createRequestFactory().buildGetRequest(HttpTesting.SIMPLE_GENERIC_URL); request.setThrowExceptionOnExecuteError(false); HttpResponse response = request.execute(); return GoogleJsonResponseException.from(jsonFactory, response); } private static List<String> makeStrings(String s, int n) { ImmutableList.Builder<String> ret = ImmutableList.builder(); for (int i = 0; i < n; ++i) { ret.add(String.format("gs: } return ret.build(); } private static List<GcsPath> makeGcsPaths(String s, int n) { ImmutableList.Builder<GcsPath> ret = ImmutableList.builder(); for (int i = 0; i < n; ++i) { ret.add(GcsPath.fromUri(String.format("gs: } return ret.build(); } private static int sumBatchSizes(List<BatchRequest> batches) { int ret = 0; for (BatchRequest b : batches) { ret += b.size(); assertThat(b.size(), greaterThan(0)); } return ret; } @Test public void testMakeRewriteOps() throws IOException { GcsOptions gcsOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = gcsOptions.getGcsUtil(); LinkedList<RewriteOp> rewrites = gcsUtil.makeRewriteOps(makeStrings("s", 1), makeStrings("d", 1)); assertEquals(1, rewrites.size()); RewriteOp rewrite = rewrites.pop(); assertTrue(rewrite.getReadyToEnqueue()); Storage.Objects.Rewrite request = rewrite.rewriteRequest; assertNull(request.getMaxBytesRewrittenPerCall()); assertEquals("bucket", request.getSourceBucket()); assertEquals("s0", request.getSourceObject()); assertEquals("bucket", request.getDestinationBucket()); assertEquals("d0", request.getDestinationObject()); } @Test public void testMakeRewriteOpsWithOptions() throws IOException { GcsOptions gcsOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = gcsOptions.getGcsUtil(); gcsUtil.maxBytesRewrittenPerCall = 1337L; LinkedList<RewriteOp> rewrites = gcsUtil.makeRewriteOps(makeStrings("s", 1), makeStrings("d", 1)); assertEquals(1, rewrites.size()); RewriteOp rewrite = rewrites.pop(); assertTrue(rewrite.getReadyToEnqueue()); Storage.Objects.Rewrite request = rewrite.rewriteRequest; assertEquals(Long.valueOf(1337L), request.getMaxBytesRewrittenPerCall()); } @Test public void testMakeCopyBatches() throws IOException { GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); List<BatchRequest> batches = gcsUtil.makeCopyBatches(gcsUtil.makeRewriteOps(makeStrings("s", 3), makeStrings("d", 3))); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(3)); batches = gcsUtil.makeCopyBatches( gcsUtil.makeRewriteOps(makeStrings("s", 100), makeStrings("d", 100))); assertThat(batches.size(), equalTo(1)); 
assertThat(sumBatchSizes(batches), equalTo(100)); batches = gcsUtil.makeCopyBatches( gcsUtil.makeRewriteOps(makeStrings("s", 501), makeStrings("d", 501))); assertThat(batches.size(), equalTo(6)); assertThat(sumBatchSizes(batches), equalTo(501)); } @Test public void testMakeRewriteOpsInvalid() throws IOException { GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Number of source files 3"); gcsUtil.makeRewriteOps(makeStrings("s", 3), makeStrings("d", 1)); } @Test public void testMakeRemoveBatches() throws IOException { GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); List<BatchRequest> batches = gcsUtil.makeRemoveBatches(makeStrings("s", 3)); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(3)); batches = gcsUtil.makeRemoveBatches(makeStrings("s", 100)); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(100)); batches = gcsUtil.makeRemoveBatches(makeStrings("s", 501)); assertThat(batches.size(), equalTo(6)); assertThat(sumBatchSizes(batches), equalTo(501)); } @Test public void testMakeGetBatches() throws IOException { GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); List<StorageObjectOrIOException[]> results = Lists.newArrayList(); List<BatchRequest> batches = gcsUtil.makeGetBatches(makeGcsPaths("s", 3), results); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(3)); assertEquals(3, results.size()); results = Lists.newArrayList(); batches = gcsUtil.makeGetBatches(makeGcsPaths("s", 100), results); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(100)); assertEquals(100, results.size()); results = Lists.newArrayList(); batches = gcsUtil.makeGetBatches(makeGcsPaths("s", 501), results); assertThat(batches.size(), equalTo(6)); assertThat(sumBatchSizes(batches), equalTo(501)); assertEquals(501, results.size()); } /** A helper to wrap a {@link GenericJson} object in a content stream. */ private static InputStream toStream(String content) throws IOException { return new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); } }
class GcsUtilTest { @Rule public ExpectedException thrown = ExpectedException.none(); private static GcsOptions gcsOptionsWithTestCredential() { GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class); pipelineOptions.setGcpCredential(new TestCredential()); return pipelineOptions; } @Test public void testCreationWithDefaultOptions() { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); assertNotNull(pipelineOptions.getGcpCredential()); } @Test public void testUploadBufferSizeDefault() { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil util = pipelineOptions.getGcsUtil(); assertNull(util.getUploadBufferSizeBytes()); } @Test public void testUploadBufferSizeUserSpecified() { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); pipelineOptions.setGcsUploadBufferSizeBytes(12345); GcsUtil util = pipelineOptions.getGcsUtil(); assertEquals((Integer) 12345, util.getUploadBufferSizeBytes()); } @Test public void testCreationWithExecutorServiceProvided() { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); pipelineOptions.setExecutorService(Executors.newCachedThreadPool()); assertSame(pipelineOptions.getExecutorService(), pipelineOptions.getGcsUtil().executorService); } @Test public void testCreationWithGcsUtilProvided() { GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class); GcsUtil gcsUtil = Mockito.mock(GcsUtil.class); pipelineOptions.setGcsUtil(gcsUtil); assertSame(gcsUtil, pipelineOptions.getGcsUtil()); } @Test public void testMultipleThreadsCanCompleteOutOfOrderWithDefaultThreadPool() throws Exception { GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class); ExecutorService executorService = pipelineOptions.getExecutorService(); int numThreads = 100; final CountDownLatch[] countDownLatches = new CountDownLatch[numThreads]; for (int i = 0; i < numThreads; i++) { final int currentLatch = i; countDownLatches[i] = new CountDownLatch(1); executorService.execute( () -> { try { countDownLatches[currentLatch].await(); if (currentLatch > 0) { countDownLatches[currentLatch - 1].countDown(); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } }); } countDownLatches[countDownLatches.length - 1].countDown(); executorService.shutdown(); assertTrue( "Expected tasks to complete", executorService.awaitTermination(10, TimeUnit.SECONDS)); } @Test public void testGlobExpansion() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); Storage.Objects.List mockStorageList = Mockito.mock(Storage.Objects.List.class); Objects modelObjects = new Objects(); List<StorageObject> items = new ArrayList<>(); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file1name")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file2name")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file3name")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/otherfile")); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/anotherfile")); 
modelObjects.setItems(items); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket", "testdirectory/otherfile")) .thenReturn(mockStorageGet); when(mockStorageObjects.list("testbucket")).thenReturn(mockStorageList); when(mockStorageGet.execute()) .thenReturn(new StorageObject().setBucket("testbucket").setName("testdirectory/otherfile")); when(mockStorageList.execute()).thenReturn(modelObjects); { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of(GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } } @Test public void testRecursiveGlobExpansion() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); Storage.Objects.List mockStorageList = Mockito.mock(Storage.Objects.List.class); Objects modelObjects = new Objects(); List<StorageObject> items = new ArrayList<>(); items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/file1.txt")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/file2.txt")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/file3.txt")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/otherfile")); items.add(new StorageObject().setBucket("testbucket").setName("test/directory/anotherfile")); items.add(new StorageObject().setBucket("testbucket").setName("test/file4.txt")); modelObjects.setItems(items); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket", "test/directory/otherfile")) .thenReturn(mockStorageGet); when(mockStorageObjects.list("testbucket")).thenReturn(mockStorageList); when(mockStorageGet.execute()) .thenReturn( new StorageObject().setBucket("testbucket").setName("test/directory/otherfile")); when(mockStorageList.execute()).thenReturn(modelObjects); { GcsPath pattern = GcsPath.fromUri("gs: List<GcsPath> expectedFiles = ImmutableList.of( GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: GcsPath.fromUri("gs: assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray())); } } @Test public void testNonExistentObjectReturnsEmptyResult() throws IOException { GcsOptions pipelineOptions = 
gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); GcsPath pattern = GcsPath.fromUri("gs: GoogleJsonResponseException expectedException = googleJsonResponseException( HttpStatusCodes.STATUS_CODE_NOT_FOUND, "It don't exist", "Nothing here to see"); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get(pattern.getBucket(), pattern.getObject())) .thenReturn(mockStorageGet); when(mockStorageGet.execute()).thenThrow(expectedException); assertEquals(Collections.emptyList(), gcsUtil.expand(pattern)); } @Test public void testAccessDeniedObjectThrowsIOException() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); GcsPath pattern = GcsPath.fromUri("gs: GoogleJsonResponseException expectedException = googleJsonResponseException( HttpStatusCodes.STATUS_CODE_FORBIDDEN, "Waves hand mysteriously", "These aren't the buckets you're looking for"); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get(pattern.getBucket(), pattern.getObject())) .thenReturn(mockStorageGet); when(mockStorageGet.execute()).thenThrow(expectedException); thrown.expect(IOException.class); thrown.expectMessage("Unable to get the file object for path"); gcsUtil.expand(pattern); } @Test public void testFileSizeNonBatch() throws Exception { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket", "testobject")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenReturn(new StorageObject().setSize(BigInteger.valueOf(1000))); assertEquals(1000, gcsUtil.fileSize(GcsPath.fromComponents("testbucket", "testobject"))); } @Test public void testFileSizeWhenFileNotFoundNonBatch() throws Exception { MockLowLevelHttpResponse notFoundResponse = new MockLowLevelHttpResponse(); notFoundResponse.setContent(""); notFoundResponse.setStatusCode(HttpStatusCodes.STATUS_CODE_NOT_FOUND); MockHttpTransport mockTransport = new MockHttpTransport.Builder().setLowLevelHttpResponse(notFoundResponse).build(); GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); gcsUtil.setStorageClient(new Storage(mockTransport, Transport.getJsonFactory(), null)); thrown.expect(FileNotFoundException.class); gcsUtil.fileSize(GcsPath.fromComponents("testbucket", "testobject")); } @Test public void testRetryFileSizeNonBatch() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Objects mockStorageObjects = 
Mockito.mock(Storage.Objects.class); Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.withMaxRetries(2).backoff()); when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket", "testobject")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow(new SocketTimeoutException("SocketException")) .thenThrow(new SocketTimeoutException("SocketException")) .thenReturn(new StorageObject().setSize(BigInteger.valueOf(1000))); assertEquals( 1000, gcsUtil .getObject( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper()) .getSize() .longValue()); assertEquals(BackOff.STOP, mockBackOff.nextBackOffMillis()); } @Test public void testGetSizeBytesWhenFileNotFoundBatch() throws Exception { JsonFactory jsonFactory = new JacksonFactory(); String contentBoundary = "batch_foobarbaz"; String contentBoundaryLine = "--" + contentBoundary; String endOfContentBoundaryLine = "--" + contentBoundary + "--"; GenericJson error = new GenericJson().set("error", new GenericJson().set("code", 404)); error.setFactory(jsonFactory); String content = contentBoundaryLine + "\n" + "Content-Type: application/http\n" + "\n" + "HTTP/1.1 404 Not Found\n" + "Content-Length: -1\n" + "\n" + error.toString() + "\n" + "\n" + endOfContentBoundaryLine + "\n"; thrown.expect(FileNotFoundException.class); MockLowLevelHttpResponse notFoundResponse = new MockLowLevelHttpResponse() .setContentType("multipart/mixed; boundary=" + contentBoundary) .setContent(content) .setStatusCode(HttpStatusCodes.STATUS_CODE_OK); MockHttpTransport mockTransport = new MockHttpTransport.Builder().setLowLevelHttpResponse(notFoundResponse).build(); GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); gcsUtil.setStorageClient(new Storage(mockTransport, Transport.getJsonFactory(), null)); gcsUtil.fileSizes(ImmutableList.of(GcsPath.fromComponents("testbucket", "testobject"))); } @Test @Test public void testRemoveWhenFileNotFound() throws Exception { JsonFactory jsonFactory = new JacksonFactory(); String contentBoundary = "batch_foobarbaz"; String contentBoundaryLine = "--" + contentBoundary; String endOfContentBoundaryLine = "--" + contentBoundary + "--"; GenericJson error = new GenericJson().set("error", new GenericJson().set("code", 404)); error.setFactory(jsonFactory); String content = contentBoundaryLine + "\n" + "Content-Type: application/http\n" + "\n" + "HTTP/1.1 404 Not Found\n" + "Content-Length: -1\n" + "\n" + error.toString() + "\n" + "\n" + endOfContentBoundaryLine + "\n"; final LowLevelHttpResponse mockResponse = Mockito.mock(LowLevelHttpResponse.class); when(mockResponse.getContentType()).thenReturn("multipart/mixed; boundary=" + contentBoundary); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getContent()).thenReturn(toStream(content)); MockLowLevelHttpRequest request = new MockLowLevelHttpRequest() { @Override public LowLevelHttpResponse execute() throws IOException { return mockResponse; } }; MockHttpTransport mockTransport = new MockHttpTransport.Builder().setLowLevelHttpRequest(request).build(); GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); gcsUtil.setStorageClient( new Storage(mockTransport, Transport.getJsonFactory(), new RetryHttpRequestInitializer())); gcsUtil.remove(Arrays.asList("gs: } @Test public void testCreateBucket() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); 
GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets.Insert mockStorageInsert = Mockito.mock(Storage.Buckets.Insert.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.insert(any(String.class), any(Bucket.class))) .thenReturn(mockStorageInsert); when(mockStorageInsert.execute()) .thenThrow(new SocketTimeoutException("SocketException")) .thenReturn(new Bucket()); gcsUtil.createBucket("a", new Bucket(), mockBackOff, new FastNanoClockAndSleeper()); } @Test public void testCreateBucketAccessErrors() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Insert mockStorageInsert = Mockito.mock(Storage.Buckets.Insert.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); GoogleJsonResponseException expectedException = googleJsonResponseException( HttpStatusCodes.STATUS_CODE_FORBIDDEN, "Waves hand mysteriously", "These aren't the buckets you're looking for"); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.insert(any(String.class), any(Bucket.class))) .thenReturn(mockStorageInsert); when(mockStorageInsert.execute()).thenThrow(expectedException); thrown.expect(AccessDeniedException.class); gcsUtil.createBucket("a", new Bucket(), mockBackOff, new FastNanoClockAndSleeper()); } @Test public void testBucketAccessible() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow(new SocketTimeoutException("SocketException")) .thenReturn(new Bucket()); assertTrue( gcsUtil.bucketAccessible( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper())); } @Test public void testBucketDoesNotExistBecauseOfAccessError() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); GoogleJsonResponseException expectedException = googleJsonResponseException( HttpStatusCodes.STATUS_CODE_FORBIDDEN, "Waves hand mysteriously", "These aren't the buckets you're looking for"); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); 
when(mockStorageGet.execute()).thenThrow(expectedException); assertFalse( gcsUtil.bucketAccessible( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper())); } @Test public void testBucketDoesNotExist() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow( googleJsonResponseException( HttpStatusCodes.STATUS_CODE_NOT_FOUND, "It don't exist", "Nothing here to see")); assertFalse( gcsUtil.bucketAccessible( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper())); } @Test public void testGetBucket() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow(new SocketTimeoutException("SocketException")) .thenReturn(new Bucket()); assertNotNull( gcsUtil.getBucket( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper())); } @Test public void testGetBucketNotExists() throws IOException { GcsOptions pipelineOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = pipelineOptions.getGcsUtil(); Storage mockStorage = Mockito.mock(Storage.class); gcsUtil.setStorageClient(mockStorage); Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class); Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class); BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()); when(mockStorage.buckets()).thenReturn(mockStorageObjects); when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet); when(mockStorageGet.execute()) .thenThrow( googleJsonResponseException( HttpStatusCodes.STATUS_CODE_NOT_FOUND, "It don't exist", "Nothing here to see")); thrown.expect(FileNotFoundException.class); thrown.expectMessage("It don't exist"); gcsUtil.getBucket( GcsPath.fromComponents("testbucket", "testobject"), mockBackOff, new FastNanoClockAndSleeper()); } @Test public void testGCSChannelCloseIdempotent() throws IOException { GoogleCloudStorageReadOptions readOptions = GoogleCloudStorageReadOptions.builder().setFastFailOnNotFound(false).build(); SeekableByteChannel channel = new GoogleCloudStorageReadChannel( null, "dummybucket", "dummyobject", null, new ClientRequestHelper<>(), readOptions); channel.close(); channel.close(); } /** Builds a fake GoogleJsonResponseException for testing API error handling. 
*/ private static GoogleJsonResponseException googleJsonResponseException( final int status, final String reason, final String message) throws IOException { final JsonFactory jsonFactory = new JacksonFactory(); HttpTransport transport = new MockHttpTransport() { @Override public LowLevelHttpRequest buildRequest(String method, String url) throws IOException { ErrorInfo errorInfo = new ErrorInfo(); errorInfo.setReason(reason); errorInfo.setMessage(message); errorInfo.setFactory(jsonFactory); GenericJson error = new GenericJson(); error.set("code", status); error.set("errors", Arrays.asList(errorInfo)); error.setFactory(jsonFactory); GenericJson errorResponse = new GenericJson(); errorResponse.set("error", error); errorResponse.setFactory(jsonFactory); return new MockLowLevelHttpRequest() .setResponse( new MockLowLevelHttpResponse() .setContent(errorResponse.toPrettyString()) .setContentType(Json.MEDIA_TYPE) .setStatusCode(status)); } }; HttpRequest request = transport.createRequestFactory().buildGetRequest(HttpTesting.SIMPLE_GENERIC_URL); request.setThrowExceptionOnExecuteError(false); HttpResponse response = request.execute(); return GoogleJsonResponseException.from(jsonFactory, response); } private static List<String> makeStrings(String s, int n) { ImmutableList.Builder<String> ret = ImmutableList.builder(); for (int i = 0; i < n; ++i) { ret.add(String.format("gs: } return ret.build(); } private static List<GcsPath> makeGcsPaths(String s, int n) { ImmutableList.Builder<GcsPath> ret = ImmutableList.builder(); for (int i = 0; i < n; ++i) { ret.add(GcsPath.fromUri(String.format("gs: } return ret.build(); } private static int sumBatchSizes(List<BatchRequest> batches) { int ret = 0; for (BatchRequest b : batches) { ret += b.size(); assertThat(b.size(), greaterThan(0)); } return ret; } @Test public void testMakeRewriteOps() throws IOException { GcsOptions gcsOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = gcsOptions.getGcsUtil(); LinkedList<RewriteOp> rewrites = gcsUtil.makeRewriteOps(makeStrings("s", 1), makeStrings("d", 1)); assertEquals(1, rewrites.size()); RewriteOp rewrite = rewrites.pop(); assertTrue(rewrite.getReadyToEnqueue()); Storage.Objects.Rewrite request = rewrite.rewriteRequest; assertNull(request.getMaxBytesRewrittenPerCall()); assertEquals("bucket", request.getSourceBucket()); assertEquals("s0", request.getSourceObject()); assertEquals("bucket", request.getDestinationBucket()); assertEquals("d0", request.getDestinationObject()); } @Test public void testMakeRewriteOpsWithOptions() throws IOException { GcsOptions gcsOptions = gcsOptionsWithTestCredential(); GcsUtil gcsUtil = gcsOptions.getGcsUtil(); gcsUtil.maxBytesRewrittenPerCall = 1337L; LinkedList<RewriteOp> rewrites = gcsUtil.makeRewriteOps(makeStrings("s", 1), makeStrings("d", 1)); assertEquals(1, rewrites.size()); RewriteOp rewrite = rewrites.pop(); assertTrue(rewrite.getReadyToEnqueue()); Storage.Objects.Rewrite request = rewrite.rewriteRequest; assertEquals(Long.valueOf(1337L), request.getMaxBytesRewrittenPerCall()); } @Test public void testMakeCopyBatches() throws IOException { GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); List<BatchRequest> batches = gcsUtil.makeCopyBatches(gcsUtil.makeRewriteOps(makeStrings("s", 3), makeStrings("d", 3))); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(3)); batches = gcsUtil.makeCopyBatches( gcsUtil.makeRewriteOps(makeStrings("s", 100), makeStrings("d", 100))); assertThat(batches.size(), equalTo(1)); 
assertThat(sumBatchSizes(batches), equalTo(100)); batches = gcsUtil.makeCopyBatches( gcsUtil.makeRewriteOps(makeStrings("s", 501), makeStrings("d", 501))); assertThat(batches.size(), equalTo(6)); assertThat(sumBatchSizes(batches), equalTo(501)); } @Test public void testMakeRewriteOpsInvalid() throws IOException { GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Number of source files 3"); gcsUtil.makeRewriteOps(makeStrings("s", 3), makeStrings("d", 1)); } @Test public void testMakeRemoveBatches() throws IOException { GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); List<BatchRequest> batches = gcsUtil.makeRemoveBatches(makeStrings("s", 3)); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(3)); batches = gcsUtil.makeRemoveBatches(makeStrings("s", 100)); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(100)); batches = gcsUtil.makeRemoveBatches(makeStrings("s", 501)); assertThat(batches.size(), equalTo(6)); assertThat(sumBatchSizes(batches), equalTo(501)); } @Test public void testMakeGetBatches() throws IOException { GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); List<StorageObjectOrIOException[]> results = Lists.newArrayList(); List<BatchRequest> batches = gcsUtil.makeGetBatches(makeGcsPaths("s", 3), results); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(3)); assertEquals(3, results.size()); results = Lists.newArrayList(); batches = gcsUtil.makeGetBatches(makeGcsPaths("s", 100), results); assertThat(batches.size(), equalTo(1)); assertThat(sumBatchSizes(batches), equalTo(100)); assertEquals(100, results.size()); results = Lists.newArrayList(); batches = gcsUtil.makeGetBatches(makeGcsPaths("s", 501), results); assertThat(batches.size(), equalTo(6)); assertThat(sumBatchSizes(batches), equalTo(501)); assertEquals(501, results.size()); } /** A helper to wrap a {@link GenericJson} object in a content stream. */ private static InputStream toStream(String content) throws IOException { return new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); } }
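The three batching tests above (copy, remove, get) all encode the same arithmetic: up to 100 operations fit in one batch, and 501 operations split into 6. A minimal sketch of that ceiling division, assuming a maximum batch size of 100 (a value inferred from the expected counts; the real constant lives inside GcsUtil):

```java
// Minimal sketch of the batch-count arithmetic the tests above rely on.
// MAX_REQUESTS_PER_BATCH = 100 is an assumption inferred from the expected
// counts (100 ops -> 1 batch, 501 ops -> 6 batches).
public class BatchCountSketch {
    private static final int MAX_REQUESTS_PER_BATCH = 100; // assumed

    static int expectedBatchCount(int numOperations) {
        // Ceiling division: every batch is full except possibly the last one.
        return (numOperations + MAX_REQUESTS_PER_BATCH - 1) / MAX_REQUESTS_PER_BATCH;
    }

    public static void main(String[] args) {
        System.out.println(expectedBatchCount(3));   // 1
        System.out.println(expectedBatchCount(100)); // 1
        System.out.println(expectedBatchCount(501)); // 6
    }
}
```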
I was thinking of placing it after `task.invoke()`, but I guess this works as well.
public void testProcessWithUnAvailableOutput() throws Exception { final long sleepTimeOutsideMail = 42; final long sleepTimeInsideMail = 44; @Nullable WaitingThread waitingThread = null; try (final MockEnvironment environment = setupEnvironment(true, false)) { final int numberOfProcessCalls = 10; final AvailabilityTestInputProcessor inputProcessor = new AvailabilityTestInputProcessor(numberOfProcessCalls); final StreamTask task = new MockStreamTaskBuilder(environment) .setStreamInputProcessor(inputProcessor) .build(); final MailboxExecutor executor = task.mailboxProcessor.getMainMailboxExecutor(); final RunnableWithException completeFutureTask = () -> { assertEquals(1, inputProcessor.currentNumProcessCalls); assertTrue(task.mailboxProcessor.isDefaultActionUnavailable()); environment.getWriter(1).getAvailableFuture().complete(null); }; waitingThread = new WaitingThread( executor, completeFutureTask, sleepTimeInsideMail, sleepTimeOutsideMail); executor.submit( waitingThread::start, "This task will submit another task to execute after processing input once."); long startTs = System.currentTimeMillis(); TaskIOMetricGroup ioMetricGroup = task.getEnvironment().getMetricGroup().getIOMetricGroup(); task.invoke(); long totalDuration = System.currentTimeMillis() - startTs; assertThat( ioMetricGroup.getBackPressuredTimePerSecond().getCount(), Matchers.greaterThanOrEqualTo(sleepTimeOutsideMail)); assertThat( ioMetricGroup.getBackPressuredTimePerSecond().getCount(), Matchers.lessThanOrEqualTo(totalDuration - sleepTimeInsideMail)); assertThat(ioMetricGroup.getIdleTimeMsPerSecond().getCount(), is(0L)); assertEquals(numberOfProcessCalls, inputProcessor.currentNumProcessCalls); } finally { if (waitingThread != null) { waitingThread.join(); } } }
if (waitingThread != null) {
public void testProcessWithUnAvailableOutput() throws Exception { final long sleepTimeOutsideMail = 42; final long sleepTimeInsideMail = 44; @Nullable WaitingThread waitingThread = null; try (final MockEnvironment environment = setupEnvironment(true, false)) { final int numberOfProcessCalls = 10; final AvailabilityTestInputProcessor inputProcessor = new AvailabilityTestInputProcessor(numberOfProcessCalls); final StreamTask task = new MockStreamTaskBuilder(environment) .setStreamInputProcessor(inputProcessor) .build(); final MailboxExecutor executor = task.mailboxProcessor.getMainMailboxExecutor(); final RunnableWithException completeFutureTask = () -> { assertEquals(1, inputProcessor.currentNumProcessCalls); assertTrue(task.mailboxProcessor.isDefaultActionUnavailable()); environment.getWriter(1).getAvailableFuture().complete(null); }; waitingThread = new WaitingThread( executor, completeFutureTask, sleepTimeInsideMail, sleepTimeOutsideMail); executor.submit( waitingThread::start, "This task will submit another task to execute after processing input once."); long startTs = System.currentTimeMillis(); TaskIOMetricGroup ioMetricGroup = task.getEnvironment().getMetricGroup().getIOMetricGroup(); task.invoke(); long totalDuration = System.currentTimeMillis() - startTs; assertThat( ioMetricGroup.getBackPressuredTimePerSecond().getCount(), Matchers.greaterThanOrEqualTo(sleepTimeOutsideMail)); assertThat( ioMetricGroup.getBackPressuredTimePerSecond().getCount(), Matchers.lessThanOrEqualTo(totalDuration - sleepTimeInsideMail)); assertThat(ioMetricGroup.getIdleTimeMsPerSecond().getCount(), is(0L)); assertEquals(numberOfProcessCalls, inputProcessor.currentNumProcessCalls); } finally { if (waitingThread != null) { waitingThread.join(); } } }
class WaitingThread extends Thread { private final MailboxExecutor executor; private final RunnableWithException resumeTask; private final long sleepTimeInsideMail; private final long sleepTimeOutsideMail; @Nullable private Exception asyncException; public WaitingThread( MailboxExecutor executor, RunnableWithException resumeTask, long sleepTimeInsideMail, long sleepTimeOutsideMail) { this.executor = executor; this.resumeTask = resumeTask; this.sleepTimeInsideMail = sleepTimeInsideMail; this.sleepTimeOutsideMail = sleepTimeOutsideMail; } @Override public void run() { try { Thread.sleep(sleepTimeOutsideMail); } catch (InterruptedException e) { asyncException = e; } executor.submit( () -> { if (asyncException != null) { throw asyncException; } Thread.sleep(sleepTimeInsideMail); resumeTask.run(); }, "This task will complete the future to resume process input action."); } }
class WaitingThread extends Thread { private final MailboxExecutor executor; private final RunnableWithException resumeTask; private final long sleepTimeInsideMail; private final long sleepTimeOutsideMail; @Nullable private Exception asyncException; public WaitingThread( MailboxExecutor executor, RunnableWithException resumeTask, long sleepTimeInsideMail, long sleepTimeOutsideMail) { this.executor = executor; this.resumeTask = resumeTask; this.sleepTimeInsideMail = sleepTimeInsideMail; this.sleepTimeOutsideMail = sleepTimeOutsideMail; } @Override public void run() { try { Thread.sleep(sleepTimeOutsideMail); } catch (InterruptedException e) { asyncException = e; } executor.submit( () -> { if (asyncException != null) { throw asyncException; } Thread.sleep(sleepTimeInsideMail); resumeTask.run(); }, "This task will complete the future to resume process input action."); } }
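The review comment above is about where to join the helper thread relative to `task.invoke()`. A minimal standalone sketch of the pattern the test settles on, with the thread declared before the try block so the finally clause can join it even if `invoke()` throws (all names here are illustrative):

```java
// Sketch of the join-in-finally pattern from testProcessWithUnAvailableOutput.
public class JoinInFinallySketch {
    public static void main(String[] args) throws InterruptedException {
        Thread helper = null; // declared outside try so finally can see it
        try {
            helper = new Thread(() -> {
                // stand-in for the WaitingThread work
            });
            helper.start();
            // stand-in for task.invoke(), which may throw
        } finally {
            if (helper != null) {
                helper.join(); // runs whether or not invoke() failed
            }
        }
    }
}
```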
We cannot assert just one file, because `ParallelFiniteTestSource` may spread data across multiple checkpoints.
public void testNonPartition() throws Exception { tEnv().executeSql("CREATE TABLE sink_table (a int, b string, c string) with (" + options() + ")"); tEnv().executeSql("insert into sink_table select * from my_table").await(); List<Row> results = toListAndClose(tEnv().executeSql("select * from sink_table").collect()); results.sort(Comparator.comparingInt(o -> (Integer) o.getField(0))); assertEquals(rows, results); File[] files = new File(URI.create(resultPath)).listFiles( (dir, name) -> name.startsWith("compacted-part-")); assertEquals(Arrays.toString(files), 1, files.length); String fileName = files[0].getName(); assertTrue(fileName, fileName.startsWith("compacted-part-")); }
assertEquals(Arrays.toString(files), 1, files.length);
public void testNonPartition() throws Exception { tEnv().executeSql("CREATE TABLE sink_table (a int, b string, c string) with (" + options() + ")"); tEnv().executeSql("insert into sink_table select * from my_table").await(); assertIterator(tEnv().executeSql("select * from sink_table").collect()); assertFiles(new File(URI.create(resultPath)).listFiles(), false); }
class FileCompactionITCaseBase extends StreamingTestBase { @Rule public Timeout timeoutPerTest = Timeout.seconds(60); private String resultPath; private List<Row> rows; @Before public void init() throws IOException { resultPath = tempFolder().newFolder().toURI().toString(); clear(); env().setParallelism(3); env().enableCheckpointing(100); rows = new ArrayList<>(); for (int i = 0; i < 100; i++) { rows.add(Row.of(i, String.valueOf(i % 10), String.valueOf(i))); } DataStream<Row> stream = new DataStream<>(env().getJavaEnv().addSource( new ParallelFiniteTestSource<>(rows), new RowTypeInfo( new TypeInformation[] {Types.INT, Types.STRING, Types.STRING}, new String[] {"a", "b", "c"}))); tEnv().createTemporaryView("my_table", stream); } @After public void clear() throws IOException { FileUtils.deleteDirectory(new File(URI.create(resultPath))); } protected abstract String format(); @Test @Test public void testPartition() throws Exception { tEnv().executeSql("CREATE TABLE sink_table (a int, b string, c string) partitioned by (b) with (" + options() + ")"); tEnv().executeSql("insert into sink_table select * from my_table").await(); List<Row> results = toListAndClose(tEnv().executeSql("select * from sink_table").collect()); results.sort(Comparator.comparingInt(o -> (Integer) o.getField(0))); assertEquals(rows, results); File path = new File(URI.create(resultPath)); assertEquals(10, path.listFiles().length); for (int i = 0; i < 10; i++) { File partition = new File(path, "b=" + i); File[] files = partition.listFiles(); assertEquals(Arrays.toString(files), 2, files.length); assertEquals(1, partition.list((dir, name) -> name.equals("_SUCCESS")).length); assertEquals(1, partition.list((dir, name) -> name.startsWith("compacted-part-")).length); } } private String options() { return "'connector'='filesystem'," + "'sink.partition-commit.policy.kind'='success-file'," + "'auto-compaction'='true'," + "'compaction.file-size' = '128MB'," + "'sink.rolling-policy.file-size' = '1b'," + kv("format", format()) + "," + kv("path", resultPath); } private String kv(String key, String value) { return String.format("'%s'='%s'", key, value); } private List<Row> toListAndClose(CloseableIterator<Row> iterator) throws Exception { List<Row> rows = CollectionUtil.iteratorToList(iterator); iterator.close(); return rows; } }
class FileCompactionITCaseBase extends StreamingTestBase { @Rule public Timeout timeoutPerTest = Timeout.seconds(60); private String resultPath; private List<Row> expectedRows; @Before public void init() throws IOException { resultPath = tempFolder().newFolder().toURI().toString(); env().setParallelism(3); env().enableCheckpointing(100); List<Row> rows = new ArrayList<>(); for (int i = 0; i < 100; i++) { rows.add(Row.of(i, String.valueOf(i % 10), String.valueOf(i))); } this.expectedRows = new ArrayList<>(); this.expectedRows.addAll(rows); this.expectedRows.addAll(rows); this.expectedRows.sort(Comparator.comparingInt(o -> (Integer) o.getField(0))); DataStream<Row> stream = new DataStream<>(env().getJavaEnv().addSource( new ParallelFiniteTestSource<>(rows), new RowTypeInfo( new TypeInformation[] {Types.INT, Types.STRING, Types.STRING}, new String[] {"a", "b", "c"}))); tEnv().createTemporaryView("my_table", stream); } protected abstract String format(); @Test @Test public void testPartition() throws Exception { tEnv().executeSql("CREATE TABLE sink_table (a int, b string, c string) partitioned by (b) with (" + options() + ")"); tEnv().executeSql("insert into sink_table select * from my_table").await(); assertIterator(tEnv().executeSql("select * from sink_table").collect()); File path = new File(URI.create(resultPath)); assertEquals(10, path.listFiles().length); for (int i = 0; i < 10; i++) { File partition = new File(path, "b=" + i); assertFiles(partition.listFiles(), true); } } private String options() { return "'connector'='filesystem'," + "'sink.partition-commit.policy.kind'='success-file'," + "'auto-compaction'='true'," + "'compaction.file-size' = '128MB'," + "'sink.rolling-policy.file-size' = '1b'," + kv("format", format()) + "," + kv("path", resultPath); } private String kv(String key, String value) { return String.format("'%s'='%s'", key, value); } private void assertIterator(CloseableIterator<Row> iterator) throws Exception { List<Row> result = CollectionUtil.iteratorToList(iterator); iterator.close(); result.sort(Comparator.comparingInt(o -> (Integer) o.getField(0))); assertEquals(expectedRows, result); } private void assertFiles(File[] files, boolean containSuccess) { File successFile = null; for (File file : files) { if (containSuccess && file.getName().equals("_SUCCESS")) { successFile = file; } else { assertTrue(file.getName(), file.getName().startsWith(COMPACTED_PREFIX)); } } if (containSuccess) { Assert.assertNotNull("Should contains success file", successFile); } } }
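The fix replaces the exact single-file assertion with `assertFiles`, since `ParallelFiniteTestSource` may spread the data across multiple checkpoints and thus multiple compacted files. A standalone restatement of that relaxed check, assuming the `COMPACTED_PREFIX` constant equals "compacted-part-" as in the original assertion:

```java
import java.io.File;

// Relaxed assertion: every data file must be a compacted file, but we no
// longer pin the count to exactly one.
public class CompactedFilesCheck {
    private static final String COMPACTED_PREFIX = "compacted-part-"; // assumed

    static void checkFiles(File[] files, boolean expectSuccessFile) {
        boolean sawSuccess = false;
        for (File file : files) {
            if (expectSuccessFile && file.getName().equals("_SUCCESS")) {
                sawSuccess = true;
            } else if (!file.getName().startsWith(COMPACTED_PREFIX)) {
                throw new AssertionError("Unexpected file: " + file.getName());
            }
        }
        if (expectSuccessFile && !sawSuccess) {
            throw new AssertionError("Missing _SUCCESS file");
        }
    }

    public static void main(String[] args) {
        checkFiles(new File[0], false); // trivially passes on an empty listing
    }
}
```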
We can remove the TODO, as the root validity check is gone.
public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); }
Path root = this.getRoot();
public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); }
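The `normalize()` implementation above is stack-based: "." is dropped, and ".." pops the previous element unless there is nothing left to pop. A self-contained sketch of the relative-path branch, assuming "/" as the separator (AzurePath takes it from the parent file system):

```java
import java.util.ArrayDeque;
import java.util.Deque;

// Standalone sketch of the relative-path branch of normalize() above.
public class NormalizeSketch {
    static String normalize(String path) {
        Deque<String> stack = new ArrayDeque<>();
        for (String element : path.split("/")) {
            if (element.isEmpty() || element.equals(".")) {
                continue; // current-directory markers contribute nothing
            } else if (element.equals("..")) {
                if (stack.isEmpty() || stack.peekLast().equals("..")) {
                    stack.addLast(element); // nothing to pop; keep the ".."
                } else {
                    stack.removeLast(); // ".." cancels the previous element
                }
            } else {
                stack.addLast(element);
            }
        }
        return String.join("/", stack);
    }

    public static void main(String[] args) {
        System.out.println(normalize("a/./b/../c")); // a/c
        System.out.println(normalize("../a"));       // ../a
    }
}
```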
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean startsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
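`relativize(Path)` in the class above uses a deque: strip the common prefix of the two normalized paths, then prepend one ".." per element remaining in the base path. A standalone sketch under the same "/" separator assumption:

```java
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;

// Standalone sketch of the relativize() algorithm in AzurePath above.
public class RelativizeSketch {
    static String relativize(String base, String other) {
        String[] baseElements = base.split("/");
        Deque<String> deque = new ArrayDeque<>(Arrays.asList(other.split("/")));
        int i = 0;
        while (i < baseElements.length && !deque.isEmpty()
                && baseElements[i].equals(deque.peekFirst())) {
            deque.removeFirst(); // skip the common prefix
            i++;
        }
        while (i < baseElements.length) {
            deque.addFirst(".."); // climb out of the remaining base elements
            i++;
        }
        return String.join("/", deque);
    }

    public static void main(String[] args) {
        System.out.println(relativize("a/b", "a/c/d")); // ../c/d
    }
}
```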
@pedroigor Right, at the moment no local JWE decryption is possible, so such tokens are technically opaque to Quarkus.
public static boolean isOpaqueToken(String token) { return new StringTokenizer(token, ".").countTokens() != 3; }
return new StringTokenizer(token, ".").countTokens() != 3;
public static boolean isOpaqueToken(String token) { return new StringTokenizer(token, ".").countTokens() != 3; }
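A usage sketch for `isOpaqueToken`: a JWT in JWS compact form has exactly three dot-separated segments, so anything else, including a five-segment JWE (which per the comment cannot be decrypted locally), is treated as opaque. The token values below are shape-only placeholders, not real tokens:

```java
import java.util.StringTokenizer;

// Demonstrates the segment-count check that classifies tokens as opaque.
public class OpaqueTokenCheck {
    static boolean isOpaqueToken(String token) {
        return new StringTokenizer(token, ".").countTokens() != 3;
    }

    public static void main(String[] args) {
        System.out.println(isOpaqueToken("header.payload.signature")); // false: JWS shape
        System.out.println(isOpaqueToken("h.k.iv.ciphertext.tag"));    // true: JWE shape
        System.out.println(isOpaqueToken("0123456789abcdef"));         // true: opaque
    }
}
```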
class OidcUtils { /** * This pattern uses a positive lookahead to split an expression around the forward slashes * ignoring those which are located inside a pair of the double quotes. */ private static final Pattern CLAIM_PATH_PATTERN = Pattern.compile("\\/(?=(?:(?:[^\"]*\"){2})*[^\"]*$)"); private OidcUtils() { } public static JsonObject decodeJwtContent(String jwt) { StringTokenizer tokens = new StringTokenizer(jwt, "."); tokens.nextToken(); if (!tokens.hasMoreTokens()) { return null; } String encodedContent = tokens.nextToken(); if (tokens.countTokens() != 1) { return null; } try { return new JsonObject(new String(Base64.getUrlDecoder().decode(encodedContent), StandardCharsets.UTF_8)); } catch (IllegalArgumentException ex) { return null; } } public static boolean validateClaims(OidcTenantConfig.Token tokenConfig, JsonObject json) { if (tokenConfig.issuer.isPresent()) { String issuer = json.getString(Claims.iss.name()); if (!tokenConfig.issuer.get().equals(issuer)) { throw new OIDCException("Invalid issuer"); } } if (tokenConfig.audience.isPresent()) { Object claimValue = json.getValue(Claims.aud.name()); List<String> audience = Collections.emptyList(); if (claimValue instanceof JsonArray) { audience = convertJsonArrayToList((JsonArray) claimValue); } else if (claimValue != null) { audience = Arrays.asList((String) claimValue); } if (!audience.containsAll(tokenConfig.audience.get())) { throw new OIDCException("Invalid audience"); } } return true; } public static List<String> findRoles(String clientId, OidcTenantConfig.Roles rolesConfig, JsonObject json) { if (rolesConfig.getRoleClaimPath().isPresent()) { return findClaimWithRoles(rolesConfig, rolesConfig.getRoleClaimPath().get(), json, true); } List<String> groups = findClaimWithRoles(rolesConfig, Claims.groups.name(), json, false); if (!groups.isEmpty()) { return groups; } else { List<String> allRoles = new LinkedList<>(); allRoles.addAll(findClaimWithRoles(rolesConfig, "realm_access/roles", json, false)); if (clientId != null) { allRoles.addAll(findClaimWithRoles(rolesConfig, "resource_access/" + clientId + "/roles", json, false)); } return allRoles; } } private static List<String> findClaimWithRoles(OidcTenantConfig.Roles rolesConfig, String claimPath, JsonObject json, boolean mustExist) { Object claimValue = findClaimValue(claimPath, json, splitClaimPath(claimPath), 0, mustExist); if (claimValue instanceof JsonArray) { return convertJsonArrayToList((JsonArray) claimValue); } else if (claimValue != null) { String sep = rolesConfig.getRoleClaimSeparator().isPresent() ? rolesConfig.getRoleClaimSeparator().get() : " "; return Arrays.asList(claimValue.toString().split(sep)); } else { return Collections.emptyList(); } } private static String[] splitClaimPath(String claimPath) { return claimPath.indexOf('/') > 0 ? 
CLAIM_PATH_PATTERN.split(claimPath) : new String[] { claimPath }; } private static Object findClaimValue(String claimPath, JsonObject json, String[] pathArray, int step, boolean mustExist) { Object claimValue = json.getValue(pathArray[step].replace("\"", "")); if (claimValue == null) { if (mustExist) { throw new OIDCException("No claim exists at the path " + claimPath + " at the path segment " + pathArray[step]); } } else if (step + 1 < pathArray.length) { if (claimValue instanceof JsonObject) { int nextStep = step + 1; return findClaimValue(claimPath, (JsonObject) claimValue, pathArray, nextStep, mustExist); } else { throw new OIDCException("Claim value at the path " + claimPath + " is not a json object"); } } return claimValue; } private static List<String> convertJsonArrayToList(JsonArray claimValue) { List<String> list = new ArrayList<>(claimValue.size()); for (int i = 0; i < claimValue.size(); i++) { list.add(claimValue.getString(i)); } return list; } static QuarkusSecurityIdentity validateAndCreateIdentity(TokenCredential credential, OidcTenantConfig config, JsonObject tokenJson) { try { OidcUtils.validateClaims(config.getToken(), tokenJson); } catch (OIDCException e) { throw new AuthenticationFailedException(e); } QuarkusSecurityIdentity.Builder builder = QuarkusSecurityIdentity.builder(); builder.addCredential(credential); JsonWebToken jwtPrincipal; try { JwtClaims jwtClaims = JwtClaims.parse(tokenJson.encode()); jwtClaims.setClaim(Claims.raw_token.name(), credential.getToken()); jwtPrincipal = new OidcJwtCallerPrincipal(jwtClaims, credential, config.token.principalClaim.isPresent() ? config.token.principalClaim.get() : null); } catch (InvalidJwtException e) { throw new AuthenticationFailedException(e); } builder.setPrincipal(jwtPrincipal); try { String clientId = config.getClientId().isPresent() ? config.getClientId().get() : null; for (String role : OidcUtils.findRoles(clientId, config.getRoles(), tokenJson)) { builder.addRole(role); } } catch (Exception e) { throw new ForbiddenException(e); } return builder.build(); } }
class OidcUtils { /** * This pattern uses a positive lookahead to split an expression around the forward slashes * ignoring those which are located inside a pair of the double quotes. */ private static final Pattern CLAIM_PATH_PATTERN = Pattern.compile("\\/(?=(?:(?:[^\"]*\"){2})*[^\"]*$)"); private OidcUtils() { } public static JsonObject decodeJwtContent(String jwt) { StringTokenizer tokens = new StringTokenizer(jwt, "."); tokens.nextToken(); if (!tokens.hasMoreTokens()) { return null; } String encodedContent = tokens.nextToken(); if (tokens.countTokens() != 1) { return null; } try { return new JsonObject(new String(Base64.getUrlDecoder().decode(encodedContent), StandardCharsets.UTF_8)); } catch (IllegalArgumentException ex) { return null; } } public static boolean validateClaims(OidcTenantConfig.Token tokenConfig, JsonObject json) { if (tokenConfig.issuer.isPresent()) { String issuer = json.getString(Claims.iss.name()); if (!tokenConfig.issuer.get().equals(issuer)) { throw new OIDCException("Invalid issuer"); } } if (tokenConfig.audience.isPresent()) { Object claimValue = json.getValue(Claims.aud.name()); List<String> audience = Collections.emptyList(); if (claimValue instanceof JsonArray) { audience = convertJsonArrayToList((JsonArray) claimValue); } else if (claimValue != null) { audience = Arrays.asList((String) claimValue); } if (!audience.containsAll(tokenConfig.audience.get())) { throw new OIDCException("Invalid audience"); } } return true; } public static List<String> findRoles(String clientId, OidcTenantConfig.Roles rolesConfig, JsonObject json) { if (rolesConfig.getRoleClaimPath().isPresent()) { return findClaimWithRoles(rolesConfig, rolesConfig.getRoleClaimPath().get(), json, true); } List<String> groups = findClaimWithRoles(rolesConfig, Claims.groups.name(), json, false); if (!groups.isEmpty()) { return groups; } else { List<String> allRoles = new LinkedList<>(); allRoles.addAll(findClaimWithRoles(rolesConfig, "realm_access/roles", json, false)); if (clientId != null) { allRoles.addAll(findClaimWithRoles(rolesConfig, "resource_access/" + clientId + "/roles", json, false)); } return allRoles; } } private static List<String> findClaimWithRoles(OidcTenantConfig.Roles rolesConfig, String claimPath, JsonObject json, boolean mustExist) { Object claimValue = findClaimValue(claimPath, json, splitClaimPath(claimPath), 0, mustExist); if (claimValue instanceof JsonArray) { return convertJsonArrayToList((JsonArray) claimValue); } else if (claimValue != null) { String sep = rolesConfig.getRoleClaimSeparator().isPresent() ? rolesConfig.getRoleClaimSeparator().get() : " "; return Arrays.asList(claimValue.toString().split(sep)); } else { return Collections.emptyList(); } } private static String[] splitClaimPath(String claimPath) { return claimPath.indexOf('/') > 0 ? 
CLAIM_PATH_PATTERN.split(claimPath) : new String[] { claimPath }; } private static Object findClaimValue(String claimPath, JsonObject json, String[] pathArray, int step, boolean mustExist) { Object claimValue = json.getValue(pathArray[step].replace("\"", "")); if (claimValue == null) { if (mustExist) { throw new OIDCException("No claim exists at the path " + claimPath + " at the path segment " + pathArray[step]); } } else if (step + 1 < pathArray.length) { if (claimValue instanceof JsonObject) { int nextStep = step + 1; return findClaimValue(claimPath, (JsonObject) claimValue, pathArray, nextStep, mustExist); } else { throw new OIDCException("Claim value at the path " + claimPath + " is not a json object"); } } return claimValue; } private static List<String> convertJsonArrayToList(JsonArray claimValue) { List<String> list = new ArrayList<>(claimValue.size()); for (int i = 0; i < claimValue.size(); i++) { list.add(claimValue.getString(i)); } return list; } static QuarkusSecurityIdentity validateAndCreateIdentity(TokenCredential credential, OidcTenantConfig config, JsonObject tokenJson) { try { OidcUtils.validateClaims(config.getToken(), tokenJson); } catch (OIDCException e) { throw new AuthenticationFailedException(e); } QuarkusSecurityIdentity.Builder builder = QuarkusSecurityIdentity.builder(); builder.addCredential(credential); JsonWebToken jwtPrincipal; try { JwtClaims jwtClaims = JwtClaims.parse(tokenJson.encode()); jwtClaims.setClaim(Claims.raw_token.name(), credential.getToken()); jwtPrincipal = new OidcJwtCallerPrincipal(jwtClaims, credential, config.token.principalClaim.isPresent() ? config.token.principalClaim.get() : null); } catch (InvalidJwtException e) { throw new AuthenticationFailedException(e); } builder.setPrincipal(jwtPrincipal); try { String clientId = config.getClientId().isPresent() ? config.getClientId().get() : null; for (String role : OidcUtils.findRoles(clientId, config.getRoles(), tokenJson)) { builder.addRole(role); } } catch (Exception e) { throw new ForbiddenException(e); } return builder.build(); } }
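A minimal sketch (the sample inputs are mine) of why the dot-count check in this record works: a JWS compact token has exactly three dot-separated segments, a compact JWE has five, and a plain reference token usually has none. Since no local JWE decryption is done, anything that is not three segments is treated as opaque and left for remote introspection.

import java.util.StringTokenizer;

public class TokenFormatDemo {
    // Same heuristic as isOpaqueToken in the record above.
    static boolean isOpaqueToken(String token) {
        return new StringTokenizer(token, ".").countTokens() != 3;
    }

    public static void main(String[] args) {
        System.out.println(isOpaqueToken("header.payload.signature")); // false: JWS, decodable locally
        System.out.println(isOpaqueToken("h.key.iv.ciphertext.tag"));  // true: JWE, opaque without decryption
        System.out.println(isOpaqueToken("a-random-reference-token")); // true: plain opaque token
    }
}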
There should probably be some shared abstract code based on what Stuart @stuartwdouglas did, so that the default authorizer and this one do not duplicate logic. What was the problem with only plugging in a custom `HttpSecurityPolicy`?
public CompletionStage<SecurityIdentity> checkPermission(RoutingContext routingContext) { VertxHttpFacade httpFacade = new VertxHttpFacade(routingContext); AuthorizationContext result = delegate.authorize(httpFacade); if (result.isGranted()) { QuarkusHttpUser user = (QuarkusHttpUser) routingContext.user(); if (user == null) { return attemptAnonymousAuthentication(routingContext); } return enhanceSecurityIdentity(user.getSecurityIdentity(), result); } return CompletableFuture.completedFuture(null); }
if (result.isGranted()) {
public CompletionStage<SecurityIdentity> checkPermission(RoutingContext routingContext) { VertxHttpFacade httpFacade = new VertxHttpFacade(routingContext); AuthorizationContext result = delegate.authorize(httpFacade); if (result.isGranted()) { QuarkusHttpUser user = (QuarkusHttpUser) routingContext.user(); if (user == null) { return attemptAnonymousAuthentication(routingContext); } return enhanceSecurityIdentity(user.getSecurityIdentity(), result); } return CompletableFuture.completedFuture(null); }
class KeycloakPolicyEnforcerAuthorizer extends HttpAuthorizer { private KeycloakAdapterPolicyEnforcer delegate; @Override private CompletableFuture<SecurityIdentity> enhanceSecurityIdentity(SecurityIdentity current, AuthorizationContext context) { Map<String, Object> attributes = new HashMap<>(current.getAttributes()); attributes.put("permissions", context.getPermissions()); return CompletableFuture.completedFuture(new QuarkusSecurityIdentity.Builder() .addAttributes(attributes) .setPrincipal(current.getPrincipal()) .addRoles(current.getRoles()) .addCredentials(current.getCredentials()) .addPermissionChecker(new Function<Permission, CompletionStage<Boolean>>() { @Override public CompletionStage<Boolean> apply(Permission permission) { if (context != null) { String scopes = permission.getActions(); if (scopes == null) { return CompletableFuture.completedFuture(context.hasResourcePermission(permission.getName())); } for (String scope : scopes.split(",")) { if (!context.hasPermission(permission.getName(), scope)) { return CompletableFuture.completedFuture(false); } } return CompletableFuture.completedFuture(true); } return CompletableFuture.completedFuture(false); } }).build()); } public void init(OidcConfig oidcConfig, KeycloakPolicyEnforcerConfig config) { AdapterConfig adapterConfig = new AdapterConfig(); String authServerUrl = oidcConfig.getAuthServerUrl(); try { adapterConfig.setRealm(authServerUrl.substring(authServerUrl.lastIndexOf('/') + 1)); adapterConfig.setAuthServerUrl(authServerUrl.substring(0, authServerUrl.lastIndexOf("/realms"))); } catch (Exception cause) { throw new RuntimeException("Failed to parse the realm name.", cause); } adapterConfig.setResource(oidcConfig.getClientId().get()); adapterConfig.setCredentials(getCredentials(oidcConfig)); PolicyEnforcerConfig enforcerConfig = getPolicyEnforcerConfig(config, adapterConfig); if (enforcerConfig == null) { return; } adapterConfig.setPolicyEnforcerConfig(enforcerConfig); this.delegate = new KeycloakAdapterPolicyEnforcer( new PolicyEnforcer(KeycloakDeploymentBuilder.build(adapterConfig), adapterConfig)); } private Map<String, Object> getCredentials(OidcConfig oidcConfig) { Map<String, Object> credentials = new HashMap<>(); Optional<String> clientSecret = oidcConfig.getCredentials().getSecret(); if (clientSecret.isPresent()) { credentials.put("secret", clientSecret.orElse(null)); } return credentials; } private Map<String, Map<String, Object>> getClaimInformationPointConfig( KeycloakPolicyEnforcerConfig.KeycloakConfigPolicyEnforcer.ClaimInformationPointConfig config) { Map<String, Map<String, Object>> cipConfig = new HashMap<>(); for (Map.Entry<String, Map<String, String>> entry : config.simpleConfig.entrySet()) { cipConfig.put(entry.getKey(), new HashMap<>(entry.getValue())); } for (Map.Entry<String, Map<String, Map<String, String>>> entry : config.complexConfig.entrySet()) { cipConfig.computeIfAbsent(entry.getKey(), s -> new HashMap<>()).putAll(new HashMap<>(entry.getValue())); } return cipConfig; } private PolicyEnforcerConfig getPolicyEnforcerConfig(KeycloakPolicyEnforcerConfig config, AdapterConfig adapterConfig) { if (config.policyEnforcer != null && config.policyEnforcer.enable) { PolicyEnforcerConfig enforcerConfig = new PolicyEnforcerConfig(); enforcerConfig.setLazyLoadPaths(config.policyEnforcer.lazyLoadPaths); enforcerConfig.setEnforcementMode( PolicyEnforcerConfig.EnforcementMode.valueOf(config.policyEnforcer.enforcementMode)); enforcerConfig.setHttpMethodAsScope(config.policyEnforcer.httpMethodAsScope); 
enforcerConfig.setOnDenyRedirectTo(config.policyEnforcer.onDenyRedirectTo.orElse(null)); PolicyEnforcerConfig.PathCacheConfig pathCacheConfig = new PolicyEnforcerConfig.PathCacheConfig(); pathCacheConfig.setLifespan(config.policyEnforcer.pathCache.lifespan); pathCacheConfig.setMaxEntries(config.policyEnforcer.pathCache.maxEntries); enforcerConfig.setPathCacheConfig(pathCacheConfig); if (config.policyEnforcer.userManagedAccess) { enforcerConfig.setUserManagedAccess(new PolicyEnforcerConfig.UserManagedAccessConfig()); } enforcerConfig.setClaimInformationPointConfig( getClaimInformationPointConfig(config.policyEnforcer.claimInformationPoint)); enforcerConfig.setPaths(config.policyEnforcer.paths.values().stream().map( pathConfig -> { PolicyEnforcerConfig.PathConfig config1 = new PolicyEnforcerConfig.PathConfig(); config1.setName(pathConfig.name.orElse(null)); config1.setPath(pathConfig.path.orElse(null)); config1.setEnforcementMode(pathConfig.enforcementMode); config1.setMethods(pathConfig.methods.values().stream().map( methodConfig -> { PolicyEnforcerConfig.MethodConfig mConfig = new PolicyEnforcerConfig.MethodConfig(); mConfig.setMethod(methodConfig.method); mConfig.setScopes(methodConfig.scopes); mConfig.setScopesEnforcementMode(methodConfig.scopesEnforcementMode); return mConfig; }).collect(Collectors.toList())); config1.setClaimInformationPointConfig( getClaimInformationPointConfig(pathConfig.claimInformationPoint)); return config1; }).collect(Collectors.toList())); return enforcerConfig; } return null; } }
class KeycloakPolicyEnforcerAuthorizer extends HttpAuthorizer { private KeycloakAdapterPolicyEnforcer delegate; @Override private CompletableFuture<SecurityIdentity> enhanceSecurityIdentity(SecurityIdentity current, AuthorizationContext context) { Map<String, Object> attributes = new HashMap<>(current.getAttributes()); attributes.put("permissions", context.getPermissions()); return CompletableFuture.completedFuture(new QuarkusSecurityIdentity.Builder() .addAttributes(attributes) .setPrincipal(current.getPrincipal()) .addRoles(current.getRoles()) .addCredentials(current.getCredentials()) .addPermissionChecker(new Function<Permission, CompletionStage<Boolean>>() { @Override public CompletionStage<Boolean> apply(Permission permission) { if (context != null) { String scopes = permission.getActions(); if (scopes == null) { return CompletableFuture.completedFuture(context.hasResourcePermission(permission.getName())); } for (String scope : scopes.split(",")) { if (!context.hasPermission(permission.getName(), scope)) { return CompletableFuture.completedFuture(false); } } return CompletableFuture.completedFuture(true); } return CompletableFuture.completedFuture(false); } }).build()); } public void init(OidcConfig oidcConfig, KeycloakPolicyEnforcerConfig config) { AdapterConfig adapterConfig = new AdapterConfig(); String authServerUrl = oidcConfig.getAuthServerUrl(); try { adapterConfig.setRealm(authServerUrl.substring(authServerUrl.lastIndexOf('/') + 1)); adapterConfig.setAuthServerUrl(authServerUrl.substring(0, authServerUrl.lastIndexOf("/realms"))); } catch (Exception cause) { throw new RuntimeException("Failed to parse the realm name.", cause); } adapterConfig.setResource(oidcConfig.getClientId().get()); adapterConfig.setCredentials(getCredentials(oidcConfig)); PolicyEnforcerConfig enforcerConfig = getPolicyEnforcerConfig(config, adapterConfig); if (enforcerConfig == null) { return; } adapterConfig.setPolicyEnforcerConfig(enforcerConfig); this.delegate = new KeycloakAdapterPolicyEnforcer( new PolicyEnforcer(KeycloakDeploymentBuilder.build(adapterConfig), adapterConfig)); } private Map<String, Object> getCredentials(OidcConfig oidcConfig) { Map<String, Object> credentials = new HashMap<>(); Optional<String> clientSecret = oidcConfig.getCredentials().getSecret(); if (clientSecret.isPresent()) { credentials.put("secret", clientSecret.orElse(null)); } return credentials; } private Map<String, Map<String, Object>> getClaimInformationPointConfig( KeycloakPolicyEnforcerConfig.KeycloakConfigPolicyEnforcer.ClaimInformationPointConfig config) { Map<String, Map<String, Object>> cipConfig = new HashMap<>(); for (Map.Entry<String, Map<String, String>> entry : config.simpleConfig.entrySet()) { cipConfig.put(entry.getKey(), new HashMap<>(entry.getValue())); } for (Map.Entry<String, Map<String, Map<String, String>>> entry : config.complexConfig.entrySet()) { cipConfig.computeIfAbsent(entry.getKey(), s -> new HashMap<>()).putAll(new HashMap<>(entry.getValue())); } return cipConfig; } private PolicyEnforcerConfig getPolicyEnforcerConfig(KeycloakPolicyEnforcerConfig config, AdapterConfig adapterConfig) { if (config.policyEnforcer != null && config.policyEnforcer.enable) { PolicyEnforcerConfig enforcerConfig = new PolicyEnforcerConfig(); enforcerConfig.setLazyLoadPaths(config.policyEnforcer.lazyLoadPaths); enforcerConfig.setEnforcementMode( PolicyEnforcerConfig.EnforcementMode.valueOf(config.policyEnforcer.enforcementMode)); enforcerConfig.setHttpMethodAsScope(config.policyEnforcer.httpMethodAsScope); 
PolicyEnforcerConfig.PathCacheConfig pathCacheConfig = new PolicyEnforcerConfig.PathCacheConfig(); pathCacheConfig.setLifespan(config.policyEnforcer.pathCache.lifespan); pathCacheConfig.setMaxEntries(config.policyEnforcer.pathCache.maxEntries); enforcerConfig.setPathCacheConfig(pathCacheConfig); enforcerConfig.setClaimInformationPointConfig( getClaimInformationPointConfig(config.policyEnforcer.claimInformationPoint)); enforcerConfig.setPaths(config.policyEnforcer.paths.values().stream().map( pathConfig -> { PolicyEnforcerConfig.PathConfig config1 = new PolicyEnforcerConfig.PathConfig(); config1.setName(pathConfig.name.orElse(null)); config1.setPath(pathConfig.path.orElse(null)); config1.setEnforcementMode(pathConfig.enforcementMode); config1.setMethods(pathConfig.methods.values().stream().map( methodConfig -> { PolicyEnforcerConfig.MethodConfig mConfig = new PolicyEnforcerConfig.MethodConfig(); mConfig.setMethod(methodConfig.method); mConfig.setScopes(methodConfig.scopes); mConfig.setScopesEnforcementMode(methodConfig.scopesEnforcementMode); return mConfig; }).collect(Collectors.toList())); config1.setClaimInformationPointConfig( getClaimInformationPointConfig(pathConfig.claimInformationPoint)); return config1; }).collect(Collectors.toList())); return enforcerConfig; } return null; } }
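The permission checker registered in enhanceSecurityIdentity above enforces all-of semantics over comma-separated scopes. A hedged standalone sketch of just that logic, with the Keycloak AuthorizationContext replaced by a hypothetical GRANTS map (all names here are illustrative):

import java.util.Map;
import java.util.Set;

public class ScopeCheckDemo {
    // Hypothetical stand-in for AuthorizationContext: resource -> granted scopes.
    static final Map<String, Set<String>> GRANTS = Map.of("orders", Set.of("read", "write"));

    // Mirrors the checker above: no actions means resource-level access is
    // enough; otherwise every listed scope must be granted.
    static boolean hasPermission(String resource, String actions) {
        Set<String> scopes = GRANTS.get(resource);
        if (scopes == null) {
            return false;
        }
        if (actions == null) {
            return true;
        }
        for (String scope : actions.split(",")) {
            if (!scopes.contains(scope)) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(hasPermission("orders", null));          // true
        System.out.println(hasPermission("orders", "read,write"));  // true
        System.out.println(hasPermission("orders", "read,delete")); // false: one missing scope denies all
    }
}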
> Actually with this implementation, synchronization is not required unless conf.write will explode if there is some concurrent modification.

Yeah, thinking about it again: if conf is modified halfway through, the whole writeExternal won't work anyway, so I'm going to remove the synchronized block.
public void writeExternal(ObjectOutput out) throws IOException { String canonicalName; synchronized (this) { canonicalName = conf.getClass().getCanonicalName(); if (confMutated) { confMutated = false; ByteArrayOutputStream baos = new ByteArrayOutputStream(512); try (DataOutputStream dos = new DataOutputStream(baos)) { conf.write(dos); serializationCache = baos.toByteArray(); } } } out.writeUTF(canonicalName); out.write(serializationCache); }
out.writeUTF(canonicalName);
public void writeExternal(ObjectOutput out) throws IOException { if (confMutated || serializationCache == null) { confMutated = false; ByteArrayOutputStream baos = new ByteArrayOutputStream(512); try (DataOutputStream dos = new DataOutputStream(baos)) { conf.write(dos); serializationCache = baos.toByteArray(); } } out.writeUTF(conf.getClass().getCanonicalName()); out.write(serializationCache); }
class SerializableConfiguration implements Externalizable { private static final long serialVersionUID = 0L; private transient Configuration conf; private transient boolean confMutated; private transient byte[] serializationCache; public SerializableConfiguration() {} public SerializableConfiguration(Configuration conf) { if (conf == null) { throw new NullPointerException("Configuration must not be null."); } this.confMutated = true; this.conf = conf; } public Configuration get() { confMutated = true; return conf; } @Override @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { confMutated = true; String className = in.readUTF(); try { conf = Class.forName(className) .asSubclass(Configuration.class) .getDeclaredConstructor() .newInstance(); conf.readFields(in); } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { throw new IOException("Unable to create configuration: " + e); } } /** Returns new configured {@link Job} object. */ public static Job newJob(@Nullable SerializableConfiguration conf) throws IOException { if (conf == null) { return Job.getInstance(); } else { Job job = Job.getInstance(new Configuration(false)); for (Map.Entry<String, String> entry : conf.get()) { job.getConfiguration().set(entry.getKey(), entry.getValue()); } return job; } } /** Returns a new configuration instance using provided flags. */ public static SerializableConfiguration fromMap(Map<String, String> entries) { Configuration hadoopConfiguration = new Configuration(); for (Map.Entry<String, String> entry : entries.entrySet()) { hadoopConfiguration.set(entry.getKey(), entry.getValue()); } return new SerializableConfiguration(hadoopConfiguration); } /** Returns new populated {@link Configuration} object. */ public static Configuration newConfiguration(@Nullable SerializableConfiguration conf) { if (conf == null) { return new Configuration(); } else { return conf.get(); } } }
class SerializableConfiguration implements Externalizable { private static final long serialVersionUID = 0L; private transient Configuration conf; private transient boolean confMutated; private transient byte[] serializationCache; public SerializableConfiguration() {} public SerializableConfiguration(Configuration conf) { if (conf == null) { throw new NullPointerException("Configuration must not be null."); } this.conf = conf; } public Configuration get() { confMutated = true; return conf; } @Override @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { confMutated = true; String className = in.readUTF(); try { conf = Class.forName(className) .asSubclass(Configuration.class) .getDeclaredConstructor() .newInstance(); conf.readFields(in); } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { throw new IOException("Unable to create configuration: " + e); } } /** Returns new configured {@link Job} object. */ public static Job newJob(@Nullable SerializableConfiguration conf) throws IOException { if (conf == null) { return Job.getInstance(); } else { Job job = Job.getInstance(new Configuration(false)); for (Map.Entry<String, String> entry : conf.get()) { job.getConfiguration().set(entry.getKey(), entry.getValue()); } return job; } } /** Returns a new configuration instance using provided flags. */ public static SerializableConfiguration fromMap(Map<String, String> entries) { Configuration hadoopConfiguration = new Configuration(); for (Map.Entry<String, String> entry : entries.entrySet()) { hadoopConfiguration.set(entry.getKey(), entry.getValue()); } return new SerializableConfiguration(hadoopConfiguration); } /** Returns new populated {@link Configuration} object. */ public static Configuration newConfiguration(@Nullable SerializableConfiguration conf) { if (conf == null) { return new Configuration(); } else { return conf.get(); } } }
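The fix in this record replaces the synchronized block with a plain dirty flag: writeExternal re-serializes only when the Configuration may have changed since the last call. A minimal sketch of that caching pattern, with a String standing in for the Hadoop Configuration (the names here are hypothetical):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class DirtyFlagCacheDemo {
    private boolean mutated = true; // set whenever the wrapped state is handed out
    private byte[] cache;
    private String state = "fs.defaultFS=hdfs://demo"; // stand-in for the Configuration

    String get() {
        mutated = true; // the caller may modify the state, so invalidate the cache
        return state;
    }

    // Mirrors writeExternal above: re-serialize only when the state may have
    // changed since the last call, otherwise reuse the cached bytes.
    byte[] serialize() throws IOException {
        if (mutated || cache == null) {
            mutated = false;
            ByteArrayOutputStream baos = new ByteArrayOutputStream(64);
            try (DataOutputStream dos = new DataOutputStream(baos)) {
                dos.writeUTF(state);
            }
            cache = baos.toByteArray();
        }
        return cache;
    }

    public static void main(String[] args) throws IOException {
        DirtyFlagCacheDemo demo = new DirtyFlagCacheDemo();
        byte[] first = demo.serialize();
        System.out.println(first == demo.serialize()); // true: cache hit, same array
        demo.get();                                    // marks dirty
        System.out.println(first == demo.serialize()); // false: re-serialized
    }
}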
Don't do `verify` when the return value of calling the function is adequate verification.
public void testLaunchFnHarnessAndTeardownCleanly() throws Exception { Function<String, String> environmentVariableMock = mock(Function.class); PipelineOptions options = PipelineOptionsFactory.create(); when(environmentVariableMock.apply("HARNESS_ID")).thenReturn("id"); when(environmentVariableMock.apply("PIPELINE_OPTIONS")) .thenReturn(PipelineOptionsTranslation.toJson(options)); List<BeamFnApi.LogEntry> logEntries = new ArrayList<>(); List<BeamFnApi.InstructionResponse> instructionResponses = mock(List.class); BeamFnLoggingGrpc.BeamFnLoggingImplBase loggingService = new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<LogControl> responseObserver) { return TestStreams.withOnNext( (BeamFnApi.LogEntry.List entries) -> logEntries.addAll(entries.getLogEntriesList())) .withOnCompleted(responseObserver::onCompleted) .build(); } }; BeamFnControlGrpc.BeamFnControlImplBase controlService = new BeamFnControlGrpc.BeamFnControlImplBase() { @Override public StreamObserver<InstructionResponse> control( StreamObserver<InstructionRequest> responseObserver) { CountDownLatch waitForResponses = new CountDownLatch(1 /* number of responses expected */); options .as(GcsOptions.class) .getExecutorService() .submit( () -> { responseObserver.onNext(INSTRUCTION_REQUEST); Uninterruptibles.awaitUninterruptibly(waitForResponses); responseObserver.onCompleted(); }); return TestStreams.withOnNext( (InstructionResponse t) -> { instructionResponses.add(t); waitForResponses.countDown(); }) .withOnCompleted(waitForResponses::countDown) .build(); } }; Server loggingServer = ServerBuilder.forPort(0).addService(loggingService).build(); loggingServer.start(); try { Server controlServer = ServerBuilder.forPort(0).addService(controlService).build(); controlServer.start(); try { Endpoints.ApiServiceDescriptor loggingDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl("localhost:" + loggingServer.getPort()) .build(); Endpoints.ApiServiceDescriptor controlDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl("localhost:" + controlServer.getPort()) .build(); when(environmentVariableMock.apply("LOGGING_API_SERVICE_DESCRIPTOR")) .thenReturn(TextFormat.printToString(loggingDescriptor)); when(environmentVariableMock.apply("CONTROL_API_SERVICE_DESCRIPTOR")) .thenReturn(TextFormat.printToString(controlDescriptor)); FnHarness.main(environmentVariableMock); } finally { controlServer.shutdownNow(); } } finally { loggingServer.shutdownNow(); } InOrder inOrder = inOrder(onStartupMock, beforeProcessingMock, environmentVariableMock, instructionResponses); inOrder.verify(onStartupMock).run(); inOrder.verify(environmentVariableMock, atLeastOnce()).apply(any()); inOrder.verify(beforeProcessingMock).accept(any()); inOrder.verify(instructionResponses).add(INSTRUCTION_RESPONSE); }
inOrder.verify(environmentVariableMock, atLeastOnce()).apply(any());
public void testLaunchFnHarnessAndTeardownCleanly() throws Exception { Function<String, String> environmentVariableMock = mock(Function.class); PipelineOptions options = PipelineOptionsFactory.create(); when(environmentVariableMock.apply("HARNESS_ID")).thenReturn("id"); when(environmentVariableMock.apply("PIPELINE_OPTIONS")) .thenReturn(PipelineOptionsTranslation.toJson(options)); List<BeamFnApi.LogEntry> logEntries = new ArrayList<>(); List<BeamFnApi.InstructionResponse> instructionResponses = mock(List.class); BeamFnLoggingGrpc.BeamFnLoggingImplBase loggingService = new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<LogControl> responseObserver) { return TestStreams.withOnNext( (BeamFnApi.LogEntry.List entries) -> logEntries.addAll(entries.getLogEntriesList())) .withOnCompleted(responseObserver::onCompleted) .build(); } }; BeamFnControlGrpc.BeamFnControlImplBase controlService = new BeamFnControlGrpc.BeamFnControlImplBase() { @Override public StreamObserver<InstructionResponse> control( StreamObserver<InstructionRequest> responseObserver) { CountDownLatch waitForResponses = new CountDownLatch(1 /* number of responses expected */); options .as(GcsOptions.class) .getExecutorService() .submit( () -> { responseObserver.onNext(INSTRUCTION_REQUEST); Uninterruptibles.awaitUninterruptibly(waitForResponses); responseObserver.onCompleted(); }); return TestStreams.withOnNext( (InstructionResponse t) -> { instructionResponses.add(t); waitForResponses.countDown(); }) .withOnCompleted(waitForResponses::countDown) .build(); } }; Server loggingServer = ServerBuilder.forPort(0).addService(loggingService).build(); loggingServer.start(); try { Server controlServer = ServerBuilder.forPort(0).addService(controlService).build(); controlServer.start(); try { Endpoints.ApiServiceDescriptor loggingDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl("localhost:" + loggingServer.getPort()) .build(); Endpoints.ApiServiceDescriptor controlDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl("localhost:" + controlServer.getPort()) .build(); when(environmentVariableMock.apply("LOGGING_API_SERVICE_DESCRIPTOR")) .thenReturn(TextFormat.printToString(loggingDescriptor)); when(environmentVariableMock.apply("CONTROL_API_SERVICE_DESCRIPTOR")) .thenReturn(TextFormat.printToString(controlDescriptor)); FnHarness.main(environmentVariableMock); } finally { controlServer.shutdownNow(); } } finally { loggingServer.shutdownNow(); } InOrder inOrder = inOrder(onStartupMock, beforeProcessingMock, environmentVariableMock, instructionResponses); inOrder.verify(onStartupMock).run(); inOrder.verify(environmentVariableMock, atLeastOnce()).apply(any()); inOrder.verify(beforeProcessingMock).accept(any()); inOrder.verify(instructionResponses).add(INSTRUCTION_RESPONSE); }
class FnHarnessTestInitializer extends BeamWorkerInitializer { @Override public void onStartup() { onStartupMock.run(); } @Override public void beforeProcessing(PipelineOptions options) { beforeProcessingMock.accept(options); } }
class FnHarnessTestInitializer implements JvmInitializer { @Override public void onStartup() { onStartupMock.run(); } @Override public void beforeProcessing(PipelineOptions options) { beforeProcessingMock.accept(options); } }
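For the InOrder assertions the test keeps, verify is still justified: relative ordering across several mocks cannot be expressed as a return-value check. A small standalone sketch of that Mockito pattern (the mocks and calls here are illustrative, not from the harness test):

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;

import java.util.function.Consumer;
import org.mockito.InOrder;

public class InOrderDemo {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        Runnable startup = mock(Runnable.class);
        Consumer<String> beforeProcessing = mock(Consumer.class);

        // The code under test would invoke these; simulated directly here.
        startup.run();
        beforeProcessing.accept("options");

        // InOrder checks that startup ran before processing began, which is
        // the property the kept verify calls in the harness test assert.
        InOrder inOrder = inOrder(startup, beforeProcessing);
        inOrder.verify(startup).run();
        inOrder.verify(beforeProcessing, atLeastOnce()).accept(any());
    }
}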
Actually this is a little confusing to me: the QualifiedNameContext is named `db`, but we get the catalog from this `db` variable. How about renaming the QualifiedNameContext variable in ShowDatabasesStatementContext?
public ParseNode visitShowDatabasesStatement(StarRocksParser.ShowDatabasesStatementContext context) { String catalog = null; if (context.db != null) { QualifiedName qualifiedName = getQualifiedName(context.db); catalog = qualifiedName.toString(); } if (catalog == null) { if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); return new ShowDbStmt(stringLiteral.getValue()); } else if (context.expression() != null) { return new ShowDbStmt(null, (Expr) visit(context.expression())); } else { return new ShowDbStmt(null, null, null); } } else { if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); return new ShowDbStmt(stringLiteral.getValue(), catalog); } else if (context.expression() != null) { return new ShowDbStmt(null, (Expr) visit(context.expression()), catalog); } else { return new ShowDbStmt(null, null, catalog); } } }
QualifiedName qualifiedName = getQualifiedName(context.db);
public ParseNode visitShowDatabasesStatement(StarRocksParser.ShowDatabasesStatementContext context) { String catalog = null; if (context.catalog != null) { QualifiedName dbName = getQualifiedName(context.catalog); catalog = dbName.toString(); } if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); return new ShowDbStmt(stringLiteral.getValue(), catalog); } else if (context.expression() != null) { return new ShowDbStmt(null, (Expr) visit(context.expression()), catalog); } else { return new ShowDbStmt(null, null, catalog); } }
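Besides the rename, the rewritten method above removes the duplicated branch structure: the catalog stays nullable and flows through a single set of pattern/expression/default branches. A toy sketch of that de-duplication (all names and the output format are hypothetical):

public class NullableParamDemo {
    // Before: two copies of the pattern/expression/default branches, one per
    // catalog-null case. After: one set of branches, with the nullable catalog
    // threaded through, as in the rewritten visitShowDatabasesStatement.
    static String buildShowDb(String pattern, String expression, String catalog) {
        if (pattern != null) {
            return "SHOW DATABASES LIKE '" + pattern + "'" + in(catalog);
        } else if (expression != null) {
            return "SHOW DATABASES WHERE " + expression + in(catalog);
        } else {
            return "SHOW DATABASES" + in(catalog);
        }
    }

    private static String in(String catalog) {
        return catalog == null ? "" : " FROM " + catalog;
    }

    public static void main(String[] args) {
        System.out.println(buildShowDb("db%", null, null));
        System.out.println(buildShowDb(null, null, "hive_catalog"));
    }
}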
class AstBuilder extends StarRocksBaseVisitor<ParseNode> { private final long sqlMode; public AstBuilder(long sqlMode) { this.sqlMode = sqlMode; } @Override public ParseNode visitSingleStatement(StarRocksParser.SingleStatementContext context) { return visit(context.statement()); } @Override public ParseNode visitCreateTableAsSelectStatement(StarRocksParser.CreateTableAsSelectStatementContext context) { Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } CreateTableStmt createTableStmt = new CreateTableStmt( context.IF() != null, false, qualifiedNameToTableName(getQualifiedName(context.qualifiedName())), null, "olap", null, context.partitionDesc() == null ? null : (PartitionDesc) visit(context.partitionDesc()), context.distributionDesc() == null ? null : (DistributionDesc) visit(context.distributionDesc()), properties, null, context.comment() == null ? null : ((StringLiteral) visit(context.comment().string())).getStringValue()); List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class); return new CreateTableAsSelectStmt( createTableStmt, columns == null ? null : columns.stream().map(Identifier::getValue).collect(toList()), (QueryStatement) visit(context.queryStatement())); } @Override public ParseNode visitAlterTableStatement(StarRocksParser.AlterTableStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); List<AlterClause> alterClauses = visit(context.alterClause(), AlterClause.class); return new AlterTableStmt(targetTableName, alterClauses); } @Override public ParseNode visitDropTableStatement(StarRocksParser.DropTableStatementContext context) { boolean ifExists = context.IF() != null && context.EXISTS() != null; boolean force = context.FORCE() != null; QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); return new DropTableStmt(ifExists, targetTableName, force); } @Override public ParseNode visitShowTableStatement(StarRocksParser.ShowTableStatementContext context) { boolean isVerbose = context.FULL() != null; String database = null; if (context.qualifiedName() != null) { database = getQualifiedName(context.qualifiedName()).toString(); } if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); return new ShowTableStmt(database, isVerbose, stringLiteral.getValue()); } else if (context.expression() != null) { return new ShowTableStmt(database, isVerbose, null, (Expr) visit(context.expression())); } else { return new ShowTableStmt(database, isVerbose, null); } } @Override public ParseNode visitCreateIndexStatement(StarRocksParser.CreateIndexStatementContext context) { String indexName = ((Identifier) visit(context.identifier())).getValue(); List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class); String comment = null; if (context.comment() != null) { comment = ((StringLiteral) visit(context.comment())).getStringValue(); } IndexDef indexDef = new IndexDef(indexName, columnList.stream().map(Identifier::getValue).collect(toList()), IndexDef.IndexType.BITMAP, comment); CreateIndexClause createIndexClause = new CreateIndexClause(null, indexDef, false); QualifiedName 
qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); return new AlterTableStmt(targetTableName, Lists.newArrayList(createIndexClause)); } @Override public ParseNode visitDropIndexStatement(StarRocksParser.DropIndexStatementContext context) { Identifier identifier = (Identifier) visit(context.identifier()); DropIndexClause dropIndexClause = new DropIndexClause(identifier.getValue(), null, false); QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); return new AlterTableStmt(targetTableName, Lists.newArrayList(dropIndexClause)); } @Override public ParseNode visitShowColumnStatement(StarRocksParser.ShowColumnStatementContext context) { QualifiedName tableName = getQualifiedName(context.table); QualifiedName dbName = null; if (context.db != null) { dbName = getQualifiedName(context.db); } String pattern = null; if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); pattern = stringLiteral.getValue(); } Expr where = null; if (context.expression() != null) { where = (Expr) visit(context.expression()); } return new ShowColumnStmt(qualifiedNameToTableName(tableName), dbName == null ? null : dbName.toString(), pattern, context.FULL() != null, where); } @Override public ParseNode visitShowTableStatusStatement(StarRocksParser.ShowTableStatusStatementContext context) { QualifiedName dbName = null; if (context.qualifiedName() != null) { dbName = getQualifiedName(context.db); } String pattern = null; if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); pattern = stringLiteral.getValue(); } Expr where = null; if (context.expression() != null) { where = (Expr) visit(context.expression()); } return new ShowTableStatusStmt(dbName == null ? null : dbName.toString(), pattern, where); } @Override public ParseNode visitCreateViewStatement(StarRocksParser.CreateViewStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); List<ColWithComment> colWithComments = null; if (context.columnNameWithComment().size() > 0) { colWithComments = visit(context.columnNameWithComment(), ColWithComment.class); } return new CreateViewStmt( context.IF() != null, targetTableName, colWithComments, context.comment() == null ? 
null : ((StringLiteral) visit(context.comment())).getStringValue(), (QueryStatement) visit(context.queryStatement())); } @Override public ParseNode visitAlterViewStatement(StarRocksParser.AlterViewStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); List<ColWithComment> colWithComments = null; if (context.columnNameWithComment().size() > 0) { colWithComments = visit(context.columnNameWithComment(), ColWithComment.class); } return new AlterViewStmt(targetTableName, colWithComments, (QueryStatement) visit(context.queryStatement())); } @Override public ParseNode visitDropViewStatement(StarRocksParser.DropViewStatementContext context) { boolean ifExists = context.IF() != null && context.EXISTS() != null; QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); return new DropTableStmt(ifExists, targetTableName, true, false); } @Override public ParseNode visitSubmitTaskStatement(StarRocksParser.SubmitTaskStatementContext context) { QualifiedName qualifiedName = null; if (context.qualifiedName() != null) { qualifiedName = getQualifiedName(context.qualifiedName()); } Map<String, String> properties = new HashMap<>(); if (context.hint() != null) { for (StarRocksParser.HintContext hintContext : context.hint()) { for (StarRocksParser.HintMapContext hintMapContext : hintContext.hintMap()) { String key = hintMapContext.k.getText(); String value = hintMapContext.v.getText(); properties.put(key, value); } } } CreateTableAsSelectStmt createTableAsSelectStmt = (CreateTableAsSelectStmt) visit(context.createTableAsSelectStatement()); if (qualifiedName == null) { return new SubmitTaskStmt(null, null, properties, createTableAsSelectStmt); } else if (qualifiedName.getParts().size() == 1) { return new SubmitTaskStmt(null, qualifiedName.getParts().get(0), properties, createTableAsSelectStmt); } else if (qualifiedName.getParts().size() == 2) { return new SubmitTaskStmt(SystemInfoService.DEFAULT_CLUSTER + ":" + qualifiedName.getParts().get(0), qualifiedName.getParts().get(1), properties, createTableAsSelectStmt); } else { throw new ParsingException("error task name "); } } @Override public ParseNode visitCreateMaterializedViewStatement( StarRocksParser.CreateMaterializedViewStatementContext context) { if (!Config.enable_experimental_mv) { throw new ParsingException("The experimental mv is disabled"); } boolean ifNotExist = context.IF() != null; QualifiedName qualifiedName = getQualifiedName(context.mvName); TableName tableName = qualifiedNameToTableName(qualifiedName); String comment = context.comment() == null ? 
null : ((StringLiteral) visit(context.comment().string())).getStringValue(); QueryStatement queryStatement = (QueryStatement) visit(context.queryStatement()); Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } RefreshSchemeDesc refreshSchemeDesc = null; if (context.refreshSchemeDesc() == null) { refreshSchemeDesc = new SyncRefreshSchemeDesc(); } else { refreshSchemeDesc = ((RefreshSchemeDesc) visit(context.refreshSchemeDesc())); } if (refreshSchemeDesc instanceof SyncRefreshSchemeDesc) { if (context.primaryExpression() != null) { throw new IllegalArgumentException( "Partition by is not supported by SYNC refresh type int materialized view"); } if (context.distributionDesc() != null) { throw new IllegalArgumentException( "Distribution by is not supported by SYNC refresh type in materialized view"); } String sql = AST2SQL.toString(queryStatement); StatementBase statement = SqlParser.parseWithOldParser(sql, sqlMode, 0); if (!(statement instanceof SelectStmt)) { throw new IllegalArgumentException("Materialized view query statement only support select"); } return new CreateMaterializedViewStmt(tableName.getTbl(), (SelectStmt) statement, properties); } ExpressionPartitionDesc expressionPartitionDesc = null; if (context.primaryExpression() != null) { Expr expr = (Expr) visit(context.primaryExpression()); if (expr instanceof SlotRef) { expressionPartitionDesc = new ExpressionPartitionDesc(expr); } else if (expr instanceof FunctionCallExpr) { for (Expr child : expr.getChildren()) { if (child instanceof SlotRef) { expressionPartitionDesc = new ExpressionPartitionDesc(expr); break; } } if (expressionPartitionDesc == null) { throw new IllegalArgumentException( "Partition exp not supports:" + expr.toSql()); } } else { throw new IllegalArgumentException( "Partition exp not supports:" + expr.toSql()); } } DistributionDesc distributionDesc = context.distributionDesc() == null ? 
null : (DistributionDesc) visit(context.distributionDesc()); return new CreateMaterializedViewStatement(tableName, ifNotExist, comment, refreshSchemeDesc, expressionPartitionDesc, distributionDesc, properties, queryStatement); } @Override public ParseNode visitShowMaterializedViewStatement(StarRocksParser.ShowMaterializedViewStatementContext context) { String database = null; if (context.qualifiedName() != null) { database = getQualifiedName(context.qualifiedName()).toString(); } return new ShowMaterializedViewStmt(database); } @Override public ParseNode visitDropMaterializedViewStatement(StarRocksParser.DropMaterializedViewStatementContext context) { QualifiedName mvQualifiedName = getQualifiedName(context.qualifiedName()); TableName mvName = qualifiedNameToTableName(mvQualifiedName); return new DropMaterializedViewStmt(context.IF() != null, mvName); } @Override public ParseNode visitAlterSystemStatement(StarRocksParser.AlterSystemStatementContext context) { return new AlterSystemStmt((AlterClause) visit(context.alterClause())); } @Override public ParseNode visitCreateExternalCatalogStatement( StarRocksParser.CreateExternalCatalogStatementContext context) { Identifier identifier = (Identifier) visit(context.identifierOrString()); String catalogName = identifier.getValue(); String comment = null; if (context.comment() != null) { comment = ((StringLiteral) visit(context.comment())).getStringValue(); } Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } return new CreateCatalogStmt(catalogName, comment, properties); } @Override public ParseNode visitDropExternalCatalogStatement(StarRocksParser.DropExternalCatalogStatementContext context) { Identifier identifier = (Identifier) visit(context.catalogName); String catalogName = identifier.getValue(); return new DropCatalogStmt(catalogName); } @Override public ParseNode visitShowCatalogsStatement(StarRocksParser.ShowCatalogsStatementContext context) { return new ShowCatalogsStmt(); } @Override public ParseNode visitCreateIndexClause(StarRocksParser.CreateIndexClauseContext context) { String indexName = ((Identifier) visit(context.identifier())).getValue(); List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class); String comment = null; if (context.comment() != null) { comment = ((StringLiteral) visit(context.comment())).getStringValue(); } IndexDef indexDef = new IndexDef(indexName, columnList.stream().map(Identifier::getValue).collect(toList()), IndexDef.IndexType.BITMAP, comment); return new CreateIndexClause(null, indexDef, true); } @Override public ParseNode visitDropIndexClause(StarRocksParser.DropIndexClauseContext context) { Identifier identifier = (Identifier) visit(context.identifier()); return new DropIndexClause(identifier.getValue(), null, true); } @Override public ParseNode visitTableRenameClause(StarRocksParser.TableRenameClauseContext context) { Identifier identifier = (Identifier) visit(context.identifier()); return new TableRenameClause(identifier.getValue()); } @Override public ParseNode visitAdminSetReplicaStatus(StarRocksParser.AdminSetReplicaStatusContext context) { Map<String, String> properties = new HashMap<>(); List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), 
property.getValue()); } return new AdminSetReplicaStatusStmt(properties); } @Override public ParseNode visitAddBackendClause(StarRocksParser.AddBackendClauseContext context) { List<String> clusters = context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList()); if (context.TO() != null) { Identifier identifier = (Identifier) visit(context.identifier()); return new AddBackendClause(clusters, identifier.getValue()); } if (context.FREE() != null) { return new AddBackendClause(clusters, true); } return new AddBackendClause(clusters, false); } @Override public ParseNode visitDropBackendClause(StarRocksParser.DropBackendClauseContext context) { List<String> clusters = context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList()); return new DropBackendClause(clusters, context.FORCE() != null); } @Override public ParseNode visitAddFrontendClause(StarRocksParser.AddFrontendClauseContext context) { String cluster = ((StringLiteral) visit(context.string())).getStringValue(); if (context.FOLLOWER() != null) { return new AddFollowerClause(cluster); } else if (context.OBSERVER() != null) { return new AddObserverClause(cluster); } else { Preconditions.checkState(false, "frontend clause error."); return null; } } @Override public ParseNode visitDropFrontendClause(StarRocksParser.DropFrontendClauseContext context) { String cluster = ((StringLiteral) visit(context.string())).getStringValue(); if (context.FOLLOWER() != null) { return new DropFollowerClause(cluster); } else if (context.OBSERVER() != null) { return new DropObserverClause(cluster); } else { Preconditions.checkState(false, "frontend clause error."); return null; } } @Override public ParseNode visitInsertStatement(StarRocksParser.InsertStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); PartitionNames partitionNames = null; if (context.partitionNames() != null) { partitionNames = (PartitionNames) visit(context.partitionNames()); } QueryStatement queryStatement; if (context.VALUES() != null) { List<ValueList> rowValues = visit(context.expressionsWithDefault(), ValueList.class); List<ArrayList<Expr>> rows = rowValues.stream().map(ValueList::getFirstRow).collect(toList()); List<String> colNames = new ArrayList<>(); for (int i = 0; i < rows.get(0).size(); ++i) { colNames.add("column_" + i); } queryStatement = new QueryStatement(new ValuesRelation(rows, colNames)); } else { queryStatement = (QueryStatement) visit(context.queryStatement()); } List<String> targetColumnNames = null; if (context.columnAliases() != null) { List<Identifier> targetColumnNamesIdentifiers = visitIfPresent(context.columnAliases().identifier(), Identifier.class); if (targetColumnNamesIdentifiers != null) { targetColumnNames = targetColumnNamesIdentifiers.stream() .map(Identifier::getValue).map(String::toLowerCase).collect(toList()); } } if (context.explainDesc() != null) { queryStatement.setIsExplain(true, getExplainType(context.explainDesc())); } return new InsertStmt( new InsertTarget(targetTableName, partitionNames), context.label == null ? 
null : context.label.getText(), targetColumnNames, queryStatement, Lists.newArrayList()); } @Override public ParseNode visitUpdateStatement(StarRocksParser.UpdateStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); List<ColumnAssignment> assignments = visit(context.assignmentList().assignment(), ColumnAssignment.class); Expr where = context.where != null ? (Expr) visit(context.where) : null; UpdateStmt ret = new UpdateStmt(targetTableName, assignments, where); if (context.explainDesc() != null) { ret.setIsExplain(true, getExplainType(context.explainDesc())); } return ret; } @Override public ParseNode visitDeleteStatement(StarRocksParser.DeleteStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); PartitionNames partitionNames = null; if (context.partitionNames() != null) { partitionNames = (PartitionNames) visit(context.partitionNames()); } Expr where = context.where != null ? (Expr) visit(context.where) : null; DeleteStmt ret = new DeleteStmt(targetTableName, partitionNames, where); if (context.explainDesc() != null) { ret.setIsExplain(true, getExplainType(context.explainDesc())); } return ret; } @Override public ParseNode visitAnalyzeStatement(StarRocksParser.AnalyzeStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName tableName = qualifiedNameToTableName(qualifiedName); List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class); List<String> columnNames = null; if (columns != null) { columnNames = columns.stream().map(Identifier::getValue).collect(toList()); } Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } return new AnalyzeStmt(tableName, columnNames, properties, context.FULL() != null); } @Override public ParseNode visitCreateAnalyzeStatement(StarRocksParser.CreateAnalyzeStatementContext context) { Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } if (context.DATABASE() != null) { return new CreateAnalyzeJobStmt(((Identifier) visit(context.db)).getValue(), context.FULL() != null, properties); } else if (context.TABLE() != null) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName tableName = qualifiedNameToTableName(qualifiedName); List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class); List<String> columnNames = null; if (columns != null) { columnNames = columns.stream().map(Identifier::getValue).collect(toList()); } return new CreateAnalyzeJobStmt(tableName, columnNames, context.FULL() != null, properties); } else { return new CreateAnalyzeJobStmt(context.FULL() != null, properties); } } @Override public ParseNode visitDropAnalyzeJobStatement(StarRocksParser.DropAnalyzeJobStatementContext context) { return new DropAnalyzeJobStmt(Long.parseLong(context.INTEGER_VALUE().getText())); } @Override public ParseNode visitShowAnalyzeStatement(StarRocksParser.ShowAnalyzeStatementContext 
context) { return new ShowAnalyzeStmt(); } @Override public ParseNode visitCreateWorkGroupStatement(StarRocksParser.CreateWorkGroupStatementContext context) { Identifier identifier = (Identifier) visit(context.identifier()); String name = identifier.getValue(); List<List<Predicate>> predicatesList = new ArrayList<>(); for (StarRocksParser.ClassifierContext classifierContext : context.classifier()) { List<Predicate> p = visit(classifierContext.expression(), Predicate.class); predicatesList.add(p); } Map<String, String> properties = new HashMap<>(); List<Property> propertyList = visit(context.property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } return new CreateWorkGroupStmt(name, context.EXISTS() != null, context.REPLACE() != null, predicatesList, properties); } @Override public ParseNode visitDropWorkGroupStatement(StarRocksParser.DropWorkGroupStatementContext context) { Identifier identifier = (Identifier) visit(context.identifier()); return new DropWorkGroupStmt(identifier.getValue()); } @Override public ParseNode visitAlterWorkGroupStatement(StarRocksParser.AlterWorkGroupStatementContext context) { Identifier identifier = (Identifier) visit(context.identifier()); String name = identifier.getValue(); if (context.ADD() != null) { List<List<Predicate>> predicatesList = new ArrayList<>(); for (StarRocksParser.ClassifierContext classifierContext : context.classifier()) { List<Predicate> p = visit(classifierContext.expression(), Predicate.class); predicatesList.add(p); } return new AlterWorkGroupStmt(name, new AlterWorkGroupStmt.AddClassifiers(predicatesList)); } else if (context.DROP() != null) { if (context.ALL() != null) { return new AlterWorkGroupStmt(name, new AlterWorkGroupStmt.DropAllClassifiers()); } else { return new AlterWorkGroupStmt(name, new AlterWorkGroupStmt.DropClassifiers(context.INTEGER_VALUE() .stream().map(ParseTree::getText).map(Long::parseLong).collect(toList()))); } } else { Map<String, String> properties = new HashMap<>(); List<Property> propertyList = visit(context.property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } return new AlterWorkGroupStmt(name, new AlterWorkGroupStmt.AlterProperties(properties)); } } @Override public ParseNode visitShowWorkGroupStatement(StarRocksParser.ShowWorkGroupStatementContext context) { if (context.RESOURCE_GROUPS() != null) { return new ShowWorkGroupStmt(null, context.ALL() != null); } else { Identifier identifier = (Identifier) visit(context.identifier()); return new ShowWorkGroupStmt(identifier.getValue(), false); } } @Override public ParseNode visitQueryStatement(StarRocksParser.QueryStatementContext context) { QueryRelation queryRelation = (QueryRelation) visit(context.queryBody()); QueryStatement queryStatement = new QueryStatement(queryRelation); if (context.outfile() != null) { queryStatement.setOutFileClause((OutFileClause) visit(context.outfile())); } if (context.explainDesc() != null) { queryStatement.setIsExplain(true, getExplainType(context.explainDesc())); } return queryStatement; } @Override public ParseNode visitQueryBody(StarRocksParser.QueryBodyContext context) { QueryRelation queryRelation = (QueryRelation) visit(context.queryNoWith()); List<CTERelation> withQuery = new ArrayList<>(); if (context.withClause() != null) { withQuery = visit(context.withClause().commonTableExpression(), CTERelation.class); } withQuery.forEach(queryRelation::addCTERelation); return 
queryRelation; } @Override public ParseNode visitCommonTableExpression(StarRocksParser.CommonTableExpressionContext context) { List<Identifier> columns = null; if (context.columnAliases() != null) { columns = visit(context.columnAliases().identifier(), Identifier.class); } List<String> columnNames = null; if (columns != null) { columnNames = columns.stream().map(Identifier::getValue).collect(toList()); } QueryRelation queryRelation = (QueryRelation) visit(context.queryBody()); return new CTERelation( RelationId.of(queryRelation).hashCode(), ((Identifier) visit(context.name)).getValue(), columnNames, new QueryStatement(queryRelation)); } @Override public ParseNode visitQueryNoWith(StarRocksParser.QueryNoWithContext context) { List<OrderByElement> orderByElements = new ArrayList<>(); if (context.ORDER() != null) { orderByElements.addAll(visit(context.sortItem(), OrderByElement.class)); } LimitElement limitElement = null; if (context.limitElement() != null) { limitElement = (LimitElement) visit(context.limitElement()); } QueryRelation term = (QueryRelation) visit(context.queryTerm()); term.setOrderBy(orderByElements); term.setLimit(limitElement); return term; } @Override public ParseNode visitSetOperation(StarRocksParser.SetOperationContext context) { QueryRelation left = (QueryRelation) visit(context.left); QueryRelation right = (QueryRelation) visit(context.right); boolean distinct = true; if (context.setQuantifier() != null) { if (context.setQuantifier().DISTINCT() != null) { distinct = true; } else if (context.setQuantifier().ALL() != null) { distinct = false; } } SetQualifier setQualifier = distinct ? SetQualifier.DISTINCT : SetQualifier.ALL; switch (context.operator.getType()) { case StarRocksLexer.UNION: if (left instanceof UnionRelation && ((UnionRelation) left).getQualifier().equals(setQualifier)) { ((UnionRelation) left).addRelation(right); return left; } else { return new UnionRelation(Lists.newArrayList(left, right), setQualifier); } case StarRocksLexer.INTERSECT: if (left instanceof IntersectRelation && ((IntersectRelation) left).getQualifier().equals(setQualifier)) { ((IntersectRelation) left).addRelation(right); return left; } else { return new IntersectRelation(Lists.newArrayList(left, right), setQualifier); } case StarRocksLexer.EXCEPT: case StarRocksLexer.MINUS: if (left instanceof ExceptRelation && ((ExceptRelation) left).getQualifier().equals(setQualifier)) { ((ExceptRelation) left).addRelation(right); return left; } else { return new ExceptRelation(Lists.newArrayList(left, right), setQualifier); } } throw new IllegalArgumentException("Unsupported set operation: " + context.operator.getText()); } @Override public ParseNode visitQuerySpecification(StarRocksParser.QuerySpecificationContext context) { Relation from = null; List<SelectListItem> selectItems = visit(context.selectItem(), SelectListItem.class); if (context.fromClause() instanceof StarRocksParser.DualContext) { if (selectItems.stream().anyMatch(SelectListItem::isStar)) { ErrorReport.reportSemanticException(ErrorCode.ERR_NO_TABLES_USED); } } else { StarRocksParser.FromContext fromContext = (StarRocksParser.FromContext) context.fromClause(); List<Relation> relations = visit(fromContext.relation(), Relation.class); if (!relations.isEmpty()) { Iterator<Relation> iterator = relations.iterator(); Relation relation = iterator.next(); while (iterator.hasNext()) { relation = new JoinRelation(null, relation, iterator.next(), null, false); } from = relation; } } /* from == null means a statement without from or from dual, 
we add a single row of NULL values so that the semantics stay the same and subsequent query processing is simplified: e.g. select sum(1) or select sum(1) from dual is rewritten to select sum(1) from (values(null)) t, which shares the same logic as select sum(1) from table */ if (from == null) { ArrayList<Expr> row = new ArrayList<>(); List<String> columnNames = new ArrayList<>(); row.add(NullLiteral.create(Type.NULL)); columnNames.add(""); List<ArrayList<Expr>> rows = new ArrayList<>(); rows.add(row); ValuesRelation valuesRelation = new ValuesRelation(rows, columnNames); valuesRelation.setNullValues(true); from = valuesRelation; } boolean isDistinct = context.setQuantifier() != null && context.setQuantifier().DISTINCT() != null; SelectList selectList = new SelectList(selectItems, isDistinct); if (context.hint() != null) { Map<String, String> selectHints = new HashMap<>(); for (StarRocksParser.HintContext hintContext : context.hint()) { for (StarRocksParser.HintMapContext hintMapContext : hintContext.hintMap()) { String key = hintMapContext.k.getText(); String value = hintMapContext.v.getText(); selectHints.put(key, value); } } selectList.setOptHints(selectHints); } return new SelectRelation( selectList, from, (Expr) visitIfPresent(context.where), (GroupByClause) visitIfPresent(context.groupingElement()), (Expr) visitIfPresent(context.having)); } @Override public ParseNode visitSelectSingle(StarRocksParser.SelectSingleContext context) { String alias = null; if (context.identifier() != null) { alias = ((Identifier) visit(context.identifier())).getValue(); } else if (context.string() != null) { alias = ((StringLiteral) visit(context.string())).getStringValue(); } return new SelectListItem((Expr) visit(context.expression()), alias); } @Override public ParseNode visitSelectAll(StarRocksParser.SelectAllContext context) { if (context.qualifiedName() != null) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); return new SelectListItem(qualifiedNameToTableName(qualifiedName)); } return new SelectListItem(null); } @Override public ParseNode visitSingleGroupingSet(StarRocksParser.SingleGroupingSetContext context) { return new GroupByClause(new ArrayList<>(visit(context.expression(), Expr.class)), GroupByClause.GroupingType.GROUP_BY); } @Override public ParseNode visitRollup(StarRocksParser.RollupContext context) { List<Expr> groupingExprs = visit(context.expression(), Expr.class); return new GroupByClause(new ArrayList<>(groupingExprs), GroupByClause.GroupingType.ROLLUP); } @Override public ParseNode visitCube(StarRocksParser.CubeContext context) { List<Expr> groupingExprs = visit(context.expression(), Expr.class); return new GroupByClause(new ArrayList<>(groupingExprs), GroupByClause.GroupingType.CUBE); } @Override public ParseNode visitMultipleGroupingSets(StarRocksParser.MultipleGroupingSetsContext context) { List<ArrayList<Expr>> groupingSets = new ArrayList<>(); for (StarRocksParser.GroupingSetContext groupingSetContext : context.groupingSet()) { List<Expr> l = visit(groupingSetContext.expression(), Expr.class); groupingSets.add(new ArrayList<>(l)); } return new GroupByClause(groupingSets, GroupByClause.GroupingType.GROUPING_SETS); } @Override public ParseNode visitGroupingOperation(StarRocksParser.GroupingOperationContext context) { List<Expr> arguments = visit(context.expression(), Expr.class); return new GroupingFunctionCallExpr("grouping", arguments); } @Override public ParseNode visitWindowFrame(StarRocksParser.WindowFrameContext
context) { if (context.end != null) { return new AnalyticWindow( getFrameType(context.frameType), (AnalyticWindow.Boundary) visit(context.start), (AnalyticWindow.Boundary) visit(context.end)); } else { return new AnalyticWindow( getFrameType(context.frameType), (AnalyticWindow.Boundary) visit(context.start)); } } private static AnalyticWindow.Type getFrameType(Token type) { switch (type.getType()) { case StarRocksLexer.RANGE: return AnalyticWindow.Type.RANGE; case StarRocksLexer.ROWS: return AnalyticWindow.Type.ROWS; } throw new IllegalArgumentException("Unsupported frame type: " + type.getText()); } @Override public ParseNode visitUnboundedFrame(StarRocksParser.UnboundedFrameContext context) { return new AnalyticWindow.Boundary(getUnboundedFrameBoundType(context.boundType), null); } @Override public ParseNode visitBoundedFrame(StarRocksParser.BoundedFrameContext context) { return new AnalyticWindow.Boundary(getBoundedFrameBoundType(context.boundType), (Expr) visit(context.expression())); } @Override public ParseNode visitCurrentRowBound(StarRocksParser.CurrentRowBoundContext context) { return new AnalyticWindow.Boundary(AnalyticWindow.BoundaryType.CURRENT_ROW, null); } private static AnalyticWindow.BoundaryType getBoundedFrameBoundType(Token token) { switch (token.getType()) { case StarRocksLexer.PRECEDING: return AnalyticWindow.BoundaryType.PRECEDING; case StarRocksLexer.FOLLOWING: return AnalyticWindow.BoundaryType.FOLLOWING; } throw new IllegalArgumentException("Unsupported bound type: " + token.getText()); } private static AnalyticWindow.BoundaryType getUnboundedFrameBoundType(Token token) { switch (token.getType()) { case StarRocksLexer.PRECEDING: return AnalyticWindow.BoundaryType.UNBOUNDED_PRECEDING; case StarRocksLexer.FOLLOWING: return AnalyticWindow.BoundaryType.UNBOUNDED_FOLLOWING; } throw new IllegalArgumentException("Unsupported bound type: " + token.getText()); } @Override public ParseNode visitSortItem(StarRocksParser.SortItemContext context) { return new OrderByElement( (Expr) visit(context.expression()), getOrderingType(context.ordering), getNullOrderingType(getOrderingType(context.ordering), context.nullOrdering)); } private boolean getNullOrderingType(boolean isAsc, Token token) { if (token == null) { return (!SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_SORT_NULLS_LAST)) == isAsc; } switch (token.getType()) { case StarRocksLexer.FIRST: return true; case StarRocksLexer.LAST: return false; } throw new IllegalArgumentException("Unsupported ordering: " + token.getText()); } private static boolean getOrderingType(Token token) { if (token == null) { return true; } switch (token.getType()) { case StarRocksLexer.ASC: return true; case StarRocksLexer.DESC: return false; } throw new IllegalArgumentException("Unsupported ordering: " + token.getText()); } @Override public ParseNode visitLimitElement(StarRocksParser.LimitElementContext context) { long limit = Long.parseLong(context.limit.getText()); long offset = 0; if (context.offset != null) { offset = Long.parseLong(context.offset.getText()); } return new LimitElement(offset, limit); } @Override public ParseNode visitParenthesizedRelation(StarRocksParser.ParenthesizedRelationContext context) { return visit(context.relation()); } @Override public ParseNode visitTableName(StarRocksParser.TableNameContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName tableName = qualifiedNameToTableName(qualifiedName); PartitionNames partitionNames = null; if (context.partitionNames() != null) { 
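/* explicit PARTITION (p1, p2) / TEMPORARY PARTITION clause on the table reference; handled by visitPartitionNames below */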
partitionNames = (PartitionNames) visit(context.partitionNames()); } List<Long> tabletIds = Lists.newArrayList(); if (context.tabletList() != null) { tabletIds = context.tabletList().INTEGER_VALUE().stream().map(ParseTree::getText) .map(Long::parseLong).collect(toList()); } TableRelation tableRelation = new TableRelation(tableName, partitionNames, tabletIds); if (context.hint() != null) { for (TerminalNode hint : context.hint().IDENTIFIER()) { if (hint.getText().equalsIgnoreCase("_META_")) { tableRelation.setMetaQuery(true); } } } return tableRelation; } @Override public ParseNode visitAliasedRelation(StarRocksParser.AliasedRelationContext context) { Relation child = (Relation) visit(context.relationPrimary()); if (context.identifier() == null) { return child; } Identifier identifier = (Identifier) visit(context.identifier()); child.setAlias(new TableName(null, identifier.getValue())); return child; } @Override public ParseNode visitJoinRelation(StarRocksParser.JoinRelationContext context) { Relation left = (Relation) visit(context.left); Relation right = (Relation) visit(context.rightRelation); JoinOperator joinType = JoinOperator.INNER_JOIN; if (context.crossOrInnerJoinType() != null) { if (context.crossOrInnerJoinType().CROSS() != null) { joinType = JoinOperator.CROSS_JOIN; } else { joinType = JoinOperator.INNER_JOIN; } } else if (context.outerAndSemiJoinType().LEFT() != null) { if (context.outerAndSemiJoinType().OUTER() != null) { joinType = JoinOperator.LEFT_OUTER_JOIN; } else if (context.outerAndSemiJoinType().SEMI() != null) { joinType = JoinOperator.LEFT_SEMI_JOIN; } else if (context.outerAndSemiJoinType().ANTI() != null) { joinType = JoinOperator.LEFT_ANTI_JOIN; } else { joinType = JoinOperator.LEFT_OUTER_JOIN; } } else if (context.outerAndSemiJoinType().RIGHT() != null) { if (context.outerAndSemiJoinType().OUTER() != null) { joinType = JoinOperator.RIGHT_OUTER_JOIN; } else if (context.outerAndSemiJoinType().SEMI() != null) { joinType = JoinOperator.RIGHT_SEMI_JOIN; } else if (context.outerAndSemiJoinType().ANTI() != null) { joinType = JoinOperator.RIGHT_ANTI_JOIN; } else { joinType = JoinOperator.RIGHT_OUTER_JOIN; } } else if (context.outerAndSemiJoinType().FULL() != null) { joinType = JoinOperator.FULL_OUTER_JOIN; } Expr predicate = null; List<String> usingColNames = null; if (context.joinCriteria() != null) { if (context.joinCriteria().ON() != null) { predicate = (Expr) visit(context.joinCriteria().expression()); } else if (context.joinCriteria().USING() != null) { List<Identifier> criteria = visit(context.joinCriteria().identifier(), Identifier.class); usingColNames = criteria.stream().map(Identifier::getValue).collect(Collectors.toList()); } else { throw new IllegalArgumentException("Unsupported join criteria"); } } JoinRelation joinRelation = new JoinRelation(joinType, left, right, predicate, context.LATERAL() != null); joinRelation.setUsingColNames(usingColNames); if (context.hint() != null) { joinRelation.setJoinHint(context.hint().IDENTIFIER(0).getText()); } return joinRelation; } @Override public ParseNode visitInlineTable(StarRocksParser.InlineTableContext context) { List<ValueList> rowValues = visit(context.rowConstructor(), ValueList.class); List<ArrayList<Expr>> rows = rowValues.stream().map(ValueList::getFirstRow).collect(toList()); List<String> colNames = new ArrayList<>(); for (int i = 0; i < rows.get(0).size(); ++i) { colNames.add("column_" + i); } return new ValuesRelation(rows, colNames); } @Override public ParseNode 
visitTableFunction(StarRocksParser.TableFunctionContext context) { return new TableFunctionRelation(getQualifiedName(context.qualifiedName()).toString(), new FunctionParams(false, visit(context.expression(), Expr.class))); } @Override public ParseNode visitRowConstructor(StarRocksParser.RowConstructorContext context) { ArrayList<Expr> row = new ArrayList<>(visit(context.expression(), Expr.class)); return new ValueList(row); } @Override public ParseNode visitPartitionNames(StarRocksParser.PartitionNamesContext context) { List<Identifier> identifierList = visit(context.identifier(), Identifier.class); return new PartitionNames(context.TEMPORARY() != null, identifierList.stream().map(Identifier::getValue).collect(toList())); } @Override public ParseNode visitSubquery(StarRocksParser.SubqueryContext context) { return new SubqueryRelation(new QueryStatement((QueryRelation) visit(context.queryBody()))); } @Override public ParseNode visitSubqueryPrimary(StarRocksParser.SubqueryPrimaryContext context) { SubqueryRelation subqueryRelation = (SubqueryRelation) visit(context.subquery()); return subqueryRelation.getQueryStatement().getQueryRelation(); } @Override public ParseNode visitSubqueryRelation(StarRocksParser.SubqueryRelationContext context) { return visit(context.subquery()); } @Override public ParseNode visitSubqueryExpression(StarRocksParser.SubqueryExpressionContext context) { SubqueryRelation subqueryRelation = (SubqueryRelation) visit(context.subquery()); return new Subquery(subqueryRelation.getQueryStatement()); } @Override public ParseNode visitInSubquery(StarRocksParser.InSubqueryContext context) { boolean isNotIn = context.NOT() != null; QueryRelation query = (QueryRelation) visit(context.queryBody()); return new InPredicate((Expr) visit(context.value), new Subquery(new QueryStatement(query)), isNotIn); } @Override public ParseNode visitExists(StarRocksParser.ExistsContext context) { QueryRelation query = (QueryRelation) visit(context.queryBody()); return new ExistsPredicate(new Subquery(new QueryStatement(query)), false); } @Override public ParseNode visitScalarSubquery(StarRocksParser.ScalarSubqueryContext context) { BinaryPredicate.Operator op = getComparisonOperator(((TerminalNode) context.comparisonOperator().getChild(0)) .getSymbol()); Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryBody()))); return new BinaryPredicate(op, (Expr) visit(context.booleanExpression()), subquery); } @Override public ParseNode visitUse(StarRocksParser.UseContext context) { Identifier identifier = (Identifier) visit(context.identifier()); return new UseStmt(identifier.getValue()); } @Override public ParseNode visitAdminSetConfig(StarRocksParser.AdminSetConfigContext context) { Map<String, String> configs = new HashMap<>(); Property property = (Property) visitProperty(context.property()); String configKey = property.getKey(); String configValue = property.getValue(); configs.put(configKey, configValue); return new AdminSetConfigStmt(AdminSetConfigStmt.ConfigType.FRONTEND, configs); } @Override public ParseNode visitGrantRole(StarRocksParser.GrantRoleContext context) { UserIdentifier user = (UserIdentifier) visit(context.user()); Identifier identifier = (Identifier) visit(context.identifierOrString()); return new GrantRoleStmt(identifier.getValue(), user.getUserIdentity()); } @Override public ParseNode visitRevokeRole(StarRocksParser.RevokeRoleContext context) { UserIdentifier user = (UserIdentifier) visit(context.user()); Identifier identifier =
(Identifier) visit(context.identifierOrString()); return new RevokeRoleStmt(identifier.getValue(), user.getUserIdentity()); } @Override public ParseNode visitShowVariablesStatement(StarRocksParser.ShowVariablesStatementContext context) { String pattern = null; if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); pattern = stringLiteral.getValue(); } Expr where = null; if (context.expression() != null) { where = (Expr) visit(context.expression()); } return new ShowVariablesStmt(getVariableType(context.varType()), pattern, where); } @Override public ParseNode visitExpressionOrDefault(StarRocksParser.ExpressionOrDefaultContext context) { if (context.DEFAULT() != null) { return new DefaultValueExpr(); } else { return visit(context.expression()); } } @Override public ParseNode visitExpressionsWithDefault(StarRocksParser.ExpressionsWithDefaultContext context) { ArrayList<Expr> row = Lists.newArrayList(); for (int i = 0; i < context.expressionOrDefault().size(); ++i) { row.add((Expr) visit(context.expressionOrDefault(i))); } return new ValueList(row); } @Override public ParseNode visitLogicalNot(StarRocksParser.LogicalNotContext context) { return new CompoundPredicate(CompoundPredicate.Operator.NOT, (Expr) visit(context.expression()), null); } @Override public ParseNode visitLogicalBinary(StarRocksParser.LogicalBinaryContext context) { Expr left = (Expr) visit(context.left); Expr right = (Expr) visit(context.right); if (context.operator.getType() == StarRocksLexer.LOGICAL_OR) { return new CompoundPredicate(CompoundPredicate.Operator.OR, left, right); } else { return new CompoundPredicate(getLogicalBinaryOperator(context.operator), left, right); } } private static CompoundPredicate.Operator getLogicalBinaryOperator(Token token) { switch (token.getType()) { case StarRocksLexer.AND: return CompoundPredicate.Operator.AND; case StarRocksLexer.OR: return CompoundPredicate.Operator.OR; } throw new IllegalArgumentException("Unsupported operator: " + token.getText()); } @Override public ParseNode visitPredicate(StarRocksParser.PredicateContext context) { if (context.predicateOperations() != null) { return visit(context.predicateOperations()); } else { return visit(context.valueExpression()); } } @Override public ParseNode visitIsNull(StarRocksParser.IsNullContext context) { Expr child = (Expr) visit(context.booleanExpression()); if (context.NOT() == null) { return new IsNullPredicate(child, false); } else { return new IsNullPredicate(child, true); } } @Override public ParseNode visitComparison(StarRocksParser.ComparisonContext context) { BinaryPredicate.Operator op = getComparisonOperator(((TerminalNode) context.comparisonOperator().getChild(0)) .getSymbol()); return new BinaryPredicate(op, (Expr) visit(context.left), (Expr) visit(context.right)); } private static BinaryPredicate.Operator getComparisonOperator(Token symbol) { switch (symbol.getType()) { case StarRocksParser.EQ: return BinaryPredicate.Operator.EQ; case StarRocksParser.NEQ: return BinaryPredicate.Operator.NE; case StarRocksParser.LT: return BinaryPredicate.Operator.LT; case StarRocksParser.LTE: return BinaryPredicate.Operator.LE; case StarRocksParser.GT: return BinaryPredicate.Operator.GT; case StarRocksParser.GTE: return BinaryPredicate.Operator.GE; case StarRocksParser.EQ_FOR_NULL: return BinaryPredicate.Operator.EQ_FOR_NULL; } throw new IllegalArgumentException("Unsupported operator: " + symbol.getText()); } @Override public ParseNode visitInList(StarRocksParser.InListContext context) { 
boolean isNotIn = context.NOT() != null; return new InPredicate( (Expr) visit(context.value), visit(context.expression(), Expr.class), isNotIn); } @Override public ParseNode visitBetween(StarRocksParser.BetweenContext context) { boolean isNotBetween = context.NOT() != null; return new BetweenPredicate( (Expr) visit(context.value), (Expr) visit(context.lower), (Expr) visit(context.upper), isNotBetween); } @Override public ParseNode visitLike(StarRocksParser.LikeContext context) { LikePredicate likePredicate; if (context.REGEXP() != null || context.RLIKE() != null) { likePredicate = new LikePredicate(LikePredicate.Operator.REGEXP, (Expr) visit(context.value), (Expr) visit(context.pattern)); } else { likePredicate = new LikePredicate( LikePredicate.Operator.LIKE, (Expr) visit(context.value), (Expr) visit(context.pattern)); } if (context.NOT() != null) { return new CompoundPredicate(CompoundPredicate.Operator.NOT, likePredicate, null); } else { return likePredicate; } } @Override public ParseNode visitSimpleCase(StarRocksParser.SimpleCaseContext context) { return new CaseExpr( (Expr) visit(context.caseExpr), visit(context.whenClause(), CaseWhenClause.class), (Expr) visitIfPresent(context.elseExpression)); } @Override public ParseNode visitSearchedCase(StarRocksParser.SearchedCaseContext context) { return new CaseExpr( null, visit(context.whenClause(), CaseWhenClause.class), (Expr) visitIfPresent(context.elseExpression)); } @Override public ParseNode visitWhenClause(StarRocksParser.WhenClauseContext context) { return new CaseWhenClause((Expr) visit(context.condition), (Expr) visit(context.result)); } @Override public ParseNode visitArithmeticUnary(StarRocksParser.ArithmeticUnaryContext context) { Expr child = (Expr) visit(context.primaryExpression()); switch (context.operator.getType()) { case StarRocksLexer.MINUS_SYMBOL: if (child.isLiteral() && child.getType().isNumericType()) { try { ((LiteralExpr) child).swapSign(); } catch (NotImplementedException e) { throw new ParsingException(e.getMessage()); } return child; } else { return new ArithmeticExpr(ArithmeticExpr.Operator.MULTIPLY, new IntLiteral(-1), child); } case StarRocksLexer.PLUS_SYMBOL: return child; case StarRocksLexer.BITNOT: return new ArithmeticExpr(ArithmeticExpr.Operator.BITNOT, child, null); case StarRocksLexer.LOGICAL_NOT: return new CompoundPredicate(CompoundPredicate.Operator.NOT, child, null); default: throw new UnsupportedOperationException("Unsupported sign: " + context.operator.getText()); } } @Override public ParseNode visitArithmeticBinary(StarRocksParser.ArithmeticBinaryContext context) { Expr left = (Expr) visit(context.left); Expr right = (Expr) visit(context.right); if (left instanceof IntervalLiteral) { return new TimestampArithmeticExpr(getArithmeticBinaryOperator(context.operator), right, ((IntervalLiteral) left).getValue(), ((IntervalLiteral) left).getUnitIdentifier().getDescription(), true); } if (right instanceof IntervalLiteral) { return new TimestampArithmeticExpr(getArithmeticBinaryOperator(context.operator), left, ((IntervalLiteral) right).getValue(), ((IntervalLiteral) right).getUnitIdentifier().getDescription(), false); } return new ArithmeticExpr(getArithmeticBinaryOperator(context.operator), left, right); } private static ArithmeticExpr.Operator getArithmeticBinaryOperator(Token operator) { switch (operator.getType()) { case StarRocksLexer.PLUS_SYMBOL: return ArithmeticExpr.Operator.ADD; case StarRocksLexer.MINUS_SYMBOL: return ArithmeticExpr.Operator.SUBTRACT; case StarRocksLexer.ASTERISK_SYMBOL: 
return ArithmeticExpr.Operator.MULTIPLY; case StarRocksLexer.SLASH_SYMBOL: return ArithmeticExpr.Operator.DIVIDE; case StarRocksLexer.PERCENT_SYMBOL: return ArithmeticExpr.Operator.MOD; case StarRocksLexer.INT_DIV: return ArithmeticExpr.Operator.INT_DIVIDE; case StarRocksLexer.BITAND: return ArithmeticExpr.Operator.BITAND; case StarRocksLexer.BITOR: return ArithmeticExpr.Operator.BITOR; case StarRocksLexer.BITXOR: return ArithmeticExpr.Operator.BITXOR; } throw new UnsupportedOperationException("Unsupported operator: " + operator.getText()); } @Override public ParseNode visitOdbcFunctionCallExpression(StarRocksParser.OdbcFunctionCallExpressionContext context) { FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.functionCall()); OdbcScalarFunctionCall odbcScalarFunctionCall = new OdbcScalarFunctionCall(functionCallExpr); return odbcScalarFunctionCall.mappingFunction(); } private static final List<String> DATE_FUNCTIONS = Lists.newArrayList("DATE_ADD", "ADDDATE", "DAYS_ADD", "DATE_SUB", "SUBDATE", "DAYS_SUB", "DATE_FLOOR"); @Override public ParseNode visitSimpleFunctionCall(StarRocksParser.SimpleFunctionCallContext context) { String functionName = getQualifiedName(context.qualifiedName()).toString(); if (DATE_FUNCTIONS.contains(functionName.toUpperCase())) { if (context.expression().size() != 2) { throw new ParsingException( functionName + " must be in the format " + functionName + "(date, INTERVAL expr unit)"); } Expr e1 = (Expr) visit(context.expression(0)); Expr e2 = (Expr) visit(context.expression(1)); if (!(e2 instanceof IntervalLiteral)) { e2 = new IntervalLiteral(e2, new UnitIdentifier("DAY")); } IntervalLiteral intervalLiteral = (IntervalLiteral) e2; return new TimestampArithmeticExpr(functionName, e1, intervalLiteral.getValue(), intervalLiteral.getUnitIdentifier().getDescription()); } if (functionName.equalsIgnoreCase("isnull")) { List<Expr> params = visit(context.expression(), Expr.class); if (params.size() != 1) { throw new SemanticException("No matching function with signature: %s(%s).", functionName, Joiner.on(", ").join(params.stream().map(p -> p.getType().toSql()).collect(toList()))); } return new IsNullPredicate(params.get(0), false); } FunctionCallExpr functionCallExpr = new FunctionCallExpr(getQualifiedName(context.qualifiedName()).toString(), new FunctionParams(false, visit(context.expression(), Expr.class))); if (context.over() != null) { return buildOverClause(functionCallExpr, context.over()); } return functionCallExpr; } @Override public ParseNode visitAggregationFunctionCall(StarRocksParser.AggregationFunctionCallContext context) { String functionName; if (context.aggregationFunction().COUNT() != null) { functionName = "count"; } else if (context.aggregationFunction().AVG() != null) { functionName = "avg"; } else if (context.aggregationFunction().SUM() != null) { functionName = "sum"; } else if (context.aggregationFunction().MIN() != null) { functionName = "min"; } else if (context.aggregationFunction().MAX() != null) { functionName = "max"; } else { throw new StarRocksPlannerException("Aggregate functions are not being parsed correctly", ErrorType.INTERNAL_ERROR); } FunctionCallExpr functionCallExpr = new FunctionCallExpr(functionName, context.aggregationFunction().ASTERISK_SYMBOL() == null ?
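/* no '*' argument: build a regular parameter list (honoring DISTINCT); COUNT(*) instead gets a star parameter via createStarParam() */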
new FunctionParams(context.aggregationFunction().DISTINCT() != null, visit(context.aggregationFunction().expression(), Expr.class)) : FunctionParams.createStarParam()); if (context.over() != null) { return buildOverClause(functionCallExpr, context.over()); } return functionCallExpr; } @Override public ParseNode visitWindowFunctionCall(StarRocksParser.WindowFunctionCallContext context) { FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.windowFunction()); return buildOverClause(functionCallExpr, context.over()); } public static final ImmutableSet<String> WindowFunctionSet = ImmutableSet.of( "row_number", "rank", "dense_rank", "lead", "lag", "first_value", "last_value"); @Override public ParseNode visitWindowFunction(StarRocksParser.WindowFunctionContext context) { if (WindowFunctionSet.contains(context.name.getText().toLowerCase())) { return new FunctionCallExpr(context.name.getText().toLowerCase(), new FunctionParams(false, visit(context.expression(), Expr.class))); } throw new ParsingException("Unknown window function " + context.name.getText()); } private AnalyticExpr buildOverClause(FunctionCallExpr functionCallExpr, StarRocksParser.OverContext context) { functionCallExpr.setIsAnalyticFnCall(true); List<OrderByElement> orderByElements = new ArrayList<>(); if (context.ORDER() != null) { orderByElements = visit(context.sortItem(), OrderByElement.class); } List<Expr> partitionExprs = visit(context.partition, Expr.class); return new AnalyticExpr(functionCallExpr, partitionExprs, orderByElements, (AnalyticWindow) visitIfPresent(context.windowFrame())); } @Override public ParseNode visitExtract(StarRocksParser.ExtractContext context) { String fieldString = context.identifier().getText(); return new FunctionCallExpr(fieldString, new FunctionParams(Lists.newArrayList((Expr) visit(context.valueExpression())))); } @Override public ParseNode visitCast(StarRocksParser.CastContext context) { return new CastExpr(new TypeDef(getType(context.type())), (Expr) visit(context.expression())); } @Override public ParseNode visitInformationFunctionExpression(StarRocksParser.InformationFunctionExpressionContext context) { if (context.name.getText().equalsIgnoreCase("database") || context.name.getText().equalsIgnoreCase("schema") || context.name.getText().equalsIgnoreCase("user") || context.name.getText().equalsIgnoreCase("current_user") || context.name.getText().equalsIgnoreCase("connection_id")) { return new InformationFunction(context.name.getText().toUpperCase()); } throw new ParsingException("Unknown special function " + context.name.getText()); } @Override public ParseNode visitSpecialFunctionExpression(StarRocksParser.SpecialFunctionExpressionContext context) { if (context.CHAR() != null) { return new FunctionCallExpr("char", visit(context.expression(), Expr.class)); } else if (context.DAY() != null) { return new FunctionCallExpr("day", visit(context.expression(), Expr.class)); } else if (context.HOUR() != null) { return new FunctionCallExpr("hour", visit(context.expression(), Expr.class)); } else if (context.IF() != null) { return new FunctionCallExpr("if", visit(context.expression(), Expr.class)); } else if (context.LEFT() != null) { return new FunctionCallExpr("left", visit(context.expression(), Expr.class)); } else if (context.LIKE() != null) { return new FunctionCallExpr("like", visit(context.expression(), Expr.class)); } else if (context.MINUTE() != null) { return new FunctionCallExpr("minute", visit(context.expression(), Expr.class)); } else if (context.MOD() != null) { return 
new FunctionCallExpr("mod", visit(context.expression(), Expr.class)); } else if (context.MONTH() != null) { return new FunctionCallExpr("month", visit(context.expression(), Expr.class)); } else if (context.QUARTER() != null) { return new FunctionCallExpr("quarter", visit(context.expression(), Expr.class)); } else if (context.REGEXP() != null) { return new FunctionCallExpr("regexp", visit(context.expression(), Expr.class)); } else if (context.REPLACE() != null) { return new FunctionCallExpr("replace", visit(context.expression(), Expr.class)); } else if (context.RIGHT() != null) { return new FunctionCallExpr("right", visit(context.expression(), Expr.class)); } else if (context.RLIKE() != null) { return new FunctionCallExpr("regexp", visit(context.expression(), Expr.class)); } else if (context.SECOND() != null) { return new FunctionCallExpr("second", visit(context.expression(), Expr.class)); } else if (context.YEAR() != null) { return new FunctionCallExpr("year", visit(context.expression(), Expr.class)); } else if (context.PASSWORD() != null) { return new StringLiteral(new String(MysqlPassword.makeScrambledPassword(context.string().getText()))); } if (context.TIMESTAMPADD() != null || context.TIMESTAMPDIFF() != null) { String functionName = context.TIMESTAMPADD() != null ? "TIMESTAMPADD" : "TIMESTAMPDIFF"; UnitIdentifier e1 = (UnitIdentifier) visit(context.unitIdentifier()); Expr e2 = (Expr) visit(context.expression(0)); Expr e3 = (Expr) visit(context.expression(1)); return new TimestampArithmeticExpr(functionName, e3, e2, e1.getDescription()); } throw new ParsingException("No matching function with signature: %s(%s).", context.getText(), visit(context.expression(), Expr.class)); } @Override public ParseNode visitConcat(StarRocksParser.ConcatContext context) { Expr left = (Expr) visit(context.left); Expr right = (Expr) visit(context.right); return new FunctionCallExpr("concat", new FunctionParams(Lists.newArrayList(left, right))); } @Override public ParseNode visitNullLiteral(StarRocksParser.NullLiteralContext context) { return new NullLiteral(); } @Override public ParseNode visitBooleanLiteral(StarRocksParser.BooleanLiteralContext context) { try { return new BoolLiteral(context.getText()); } catch (AnalysisException e) { throw new ParsingException("Invalid boolean literal: " + context.getText()); } } @Override public ParseNode visitNumericLiteral(StarRocksParser.NumericLiteralContext context) { return visit(context.number()); } private static final BigInteger LONG_MAX = new BigInteger("9223372036854775807"); private static final BigInteger LARGEINT_MAX_ABS = new BigInteger("170141183460469231731687303715884105728"); @Override public ParseNode visitIntegerValue(StarRocksParser.IntegerValueContext context) { try { BigInteger intLiteral = new BigInteger(context.getText()); if (intLiteral.compareTo(LONG_MAX) <= 0) { return new IntLiteral(intLiteral.longValue()); } else if (intLiteral.compareTo(LARGEINT_MAX_ABS) <= 0) { return new LargeIntLiteral(intLiteral.toString()); } else { throw new ParsingException("Numeric overflow " + intLiteral); } } catch (NumberFormatException | AnalysisException e) { throw new ParsingException("Invalid numeric literal: " + context.getText()); } } @Override public ParseNode visitDoubleValue(StarRocksParser.DoubleValueContext context) { try { BigDecimal decimal = new BigDecimal(context.getText()); int precision = DecimalLiteral.getRealPrecision(decimal); int scale = DecimalLiteral.getRealScale(decimal); int integerPartWidth = precision - scale; if (integerPartWidth > 
38) { return new FloatLiteral(context.getText()); } return new DecimalLiteral(decimal); } catch (AnalysisException | NumberFormatException e) { throw new ParsingException(e.getMessage()); } } @Override public ParseNode visitDecimalValue(StarRocksParser.DecimalValueContext context) { try { return new DecimalLiteral(context.getText()); } catch (AnalysisException e) { throw new ParsingException(e.getMessage()); } } @Override public ParseNode visitString(StarRocksParser.StringContext context) { String quotedString; if (context.SINGLE_QUOTED_TEXT() != null) { quotedString = context.SINGLE_QUOTED_TEXT().getText(); return new StringLiteral(escapeBackSlash(quotedString.substring(1, quotedString.length() - 1))); } else { quotedString = context.DOUBLE_QUOTED_TEXT().getText(); return new StringLiteral(escapeBackSlash(quotedString.substring(1, quotedString.length() - 1)) .replace("\"\"", "\"")); } } private static String escapeBackSlash(String str) { StringWriter writer = new StringWriter(); int strLen = str.length(); for (int i = 0; i < strLen; ++i) { char c = str.charAt(i); if (c == '\\' && (i + 1) < strLen) { switch (str.charAt(i + 1)) { case 'n': writer.append('\n'); break; case 't': writer.append('\t'); break; case 'r': writer.append('\r'); break; case 'b': writer.append('\b'); break; case '0': writer.append('\0'); break; case 'Z': writer.append('\032'); break; case '_': case '%': writer.append('\\'); /* Fall through */ default: writer.append(str.charAt(i + 1)); break; } i++; } else { writer.append(c); } } return writer.toString(); } @Override public ParseNode visitArrayConstructor(StarRocksParser.ArrayConstructorContext context) { if (context.arrayType() != null) { return new ArrayExpr( new ArrayType(getType(context.arrayType().type())), visit(context.expression(), Expr.class)); } return new ArrayExpr(null, visit(context.expression(), Expr.class)); } @Override public ParseNode visitArraySubscript(StarRocksParser.ArraySubscriptContext context) { Expr value = (Expr) visit(context.value); Expr index = (Expr) visit(context.index); return new ArrayElementExpr(value, index); } @Override public ParseNode visitArraySlice(StarRocksParser.ArraySliceContext context) { throw new ParsingException("Array slice is not currently supported"); /* Expr expr = (Expr) visit(context.primaryExpression()); IntLiteral lowerBound; if (context.start != null) { lowerBound = new IntLiteral(Long.parseLong(context.start.getText())); } else { lowerBound = new IntLiteral(0); } IntLiteral upperBound; if (context.end != null) { upperBound = new IntLiteral(Long.parseLong(context.end.getText())); } else { upperBound = new IntLiteral(-1); } return new ArraySliceExpr(expr, lowerBound, upperBound); */ } @Override public ParseNode visitInterval(StarRocksParser.IntervalContext context) { return new IntervalLiteral((Expr) visit(context.value), (UnitIdentifier) visit(context.from)); } @Override public ParseNode visitUnitIdentifier(StarRocksParser.UnitIdentifierContext context) { return new UnitIdentifier(context.getText()); } @Override public ParseNode visitTypeConstructor(StarRocksParser.TypeConstructorContext context) { String value = ((StringLiteral) visit(context.string())).getValue(); try { if (context.DATE() != null) { return new DateLiteral(value, Type.DATE); } if (context.DATETIME() != null) { return new DateLiteral(value, Type.DATETIME); } } catch (AnalysisException e) { throw new ParsingException(e.getMessage()); } throw new ParsingException("Parse Error : unknown type " + context.getText()); } @Override public ParseNode 
visitColumnReference(StarRocksParser.ColumnReferenceContext context) { if (context.identifier() != null) { Identifier identifier = (Identifier) visit(context.identifier()); return new SlotRef(null, identifier.getValue(), identifier.getValue()); } else { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); if (qualifiedName.getParts().size() == 3) { return new SlotRef(new TableName(qualifiedName.getParts().get(0), qualifiedName.getParts().get(1)), qualifiedName.getParts().get(2), qualifiedName.getParts().get(2)); } else if (qualifiedName.getParts().size() == 2) { return new SlotRef(new TableName(null, qualifiedName.getParts().get(0)), qualifiedName.getParts().get(1), qualifiedName.getParts().get(1)); } else { throw new ParsingException("Unqualified column reference " + qualifiedName); } } } @Override public ParseNode visitArrowExpression(StarRocksParser.ArrowExpressionContext context) { Expr expr = (Expr) visit(context.primaryExpression()); StringLiteral stringLiteral = (StringLiteral) visit(context.string()); return new ArrowExpr(expr, stringLiteral); } @Override public ParseNode visitVariable(StarRocksParser.VariableContext context) { SetType setType = SetType.DEFAULT; if (context.GLOBAL() != null) { setType = SetType.GLOBAL; } else if (context.LOCAL() != null || context.SESSION() != null) { setType = SetType.SESSION; } return new SysVariableDesc(context.identifier().getText(), setType); } @Override public ParseNode visitCollate(StarRocksParser.CollateContext context) { return visit(context.primaryExpression()); } @Override public ParseNode visitParenthesizedExpression(StarRocksParser.ParenthesizedExpressionContext context) { return visit(context.expression()); } @Override public ParseNode visitUnquotedIdentifier(StarRocksParser.UnquotedIdentifierContext context) { return new Identifier(context.getText()); } @Override public ParseNode visitBackQuotedIdentifier(StarRocksParser.BackQuotedIdentifierContext context) { return new Identifier(context.getText().replace("`", "")); } @Override public ParseNode visitDigitIdentifier(StarRocksParser.DigitIdentifierContext context) { return new Identifier(context.getText()); } private static StatementBase.ExplainLevel getExplainType(StarRocksParser.ExplainDescContext context) { StatementBase.ExplainLevel explainLevel = StatementBase.ExplainLevel.NORMAL; if (context.LOGICAL() != null) { explainLevel = StatementBase.ExplainLevel.LOGICAL; } else if (context.VERBOSE() != null) { explainLevel = StatementBase.ExplainLevel.VERBOSE; } else if (context.COSTS() != null) { explainLevel = StatementBase.ExplainLevel.COST; } return explainLevel; } public static SetType getVariableType(StarRocksParser.VarTypeContext context) { if (context == null) { return SetType.DEFAULT; } if (context.GLOBAL() != null) { return SetType.GLOBAL; } else if (context.LOCAL() != null || context.SESSION() != null) { return SetType.SESSION; } else { return SetType.DEFAULT; } } @Override public ParseNode visitAssignment(StarRocksParser.AssignmentContext context) { String column = ((Identifier) visit(context.identifier())).getValue(); Expr expr = (Expr) visit(context.expressionOrDefault()); return new ColumnAssignment(column, expr); } @Override public ParseNode visitPartitionDesc(StarRocksParser.PartitionDescContext context) { List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class); List<PartitionDesc> partitionDesc = visit(context.rangePartitionDesc(), PartitionDesc.class); return new RangePartitionDesc( 
identifierList.stream().map(Identifier::getValue).collect(toList()), partitionDesc); } @Override public ParseNode visitSingleRangePartition(StarRocksParser.SingleRangePartitionContext context) { PartitionKeyDesc partitionKeyDesc = (PartitionKeyDesc) visit(context.partitionKeyDesc()); return new SingleRangePartitionDesc(false, ((Identifier) visit(context.identifier())).getValue(), partitionKeyDesc, null); } @Override public ParseNode visitMultiRangePartition(StarRocksParser.MultiRangePartitionContext context) { if (context.interval() != null) { IntervalLiteral intervalLiteral = (IntervalLiteral) visit(context.interval()); Expr expr = intervalLiteral.getValue(); long intervalVal; if (expr instanceof IntLiteral) { intervalVal = ((IntLiteral) expr).getLongValue(); } else { throw new IllegalArgumentException("Unsupported interval expr: " + expr); } return new MultiRangePartitionDesc( ((StringLiteral) visit(context.string(0))).getStringValue(), ((StringLiteral) visit(context.string(1))).getStringValue(), intervalVal, intervalLiteral.getUnitIdentifier().getDescription()); } else { return new MultiRangePartitionDesc( ((StringLiteral) visit(context.string(0))).getStringValue(), ((StringLiteral) visit(context.string(1))).getStringValue(), Long.parseLong(context.INTEGER_VALUE().getText())); } } @Override public ParseNode visitPartitionKeyDesc(StarRocksParser.PartitionKeyDescContext context) { PartitionKeyDesc partitionKeyDesc; if (context.LESS() != null) { List<PartitionValue> partitionValueList = visit(context.partitionValueList().get(0).partitionValue(), PartitionValue.class); partitionKeyDesc = new PartitionKeyDesc(partitionValueList); } else { List<PartitionValue> lowerPartitionValueList = visit(context.partitionValueList().get(0).partitionValue(), PartitionValue.class); List<PartitionValue> upperPartitionValueList = visit(context.partitionValueList().get(1).partitionValue(), PartitionValue.class); partitionKeyDesc = new PartitionKeyDesc(lowerPartitionValueList, upperPartitionValueList); } return partitionKeyDesc; } @Override public ParseNode visitPartitionValue(StarRocksParser.PartitionValueContext context) { if (context.MAXVALUE() != null) { return PartitionValue.MAX_VALUE; } else { return new PartitionValue(((StringLiteral) visit(context.string())).getStringValue()); } } @Override public ParseNode visitDistributionDesc(StarRocksParser.DistributionDescContext context) { int buckets = 10; if (context.INTEGER_VALUE() != null) { buckets = Integer.parseInt(context.INTEGER_VALUE().getText()); } List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class); return new HashDistributionDesc(buckets, identifierList.stream().map(Identifier::getValue).collect(toList())); } @Override public ParseNode visitRefreshSchemeDesc(StarRocksParser.RefreshSchemeDescContext context) { LocalDateTime startTime = LocalDateTime.now(); IntervalLiteral intervalLiteral = null; if (context.ASYNC() != null) { if (context.START() != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.string()); DateTimeFormatter dateTimeFormatter = null; try { dateTimeFormatter = DateUtils.probeFormat(stringLiteral.getStringValue()); LocalDateTime tempStartTime = DateUtils. 
parseStringWithDefaultHSM(stringLiteral.getStringValue(), dateTimeFormatter); if (tempStartTime.isBefore(LocalDateTime.now())) { throw new IllegalArgumentException("Refresh start time must be after the current time"); } startTime = tempStartTime; } catch (AnalysisException e) { throw new IllegalArgumentException( "Refresh start time " + stringLiteral.getStringValue() + " is invalid"); } } intervalLiteral = (IntervalLiteral) visit(context.interval()); if (!(intervalLiteral.getValue() instanceof IntLiteral)) { throw new IllegalArgumentException( "Refresh interval " + intervalLiteral.getValue() + " must be an integer literal"); } return new AsyncRefreshSchemeDesc(startTime, intervalLiteral); } else if (context.SYNC() != null) { return new SyncRefreshSchemeDesc(); } else if (context.MANUAL() != null) { return new ManualRefreshSchemeDesc(); } return null; } @Override public ParseNode visitProperty(StarRocksParser.PropertyContext context) { return new Property( ((StringLiteral) visit(context.key)).getStringValue(), ((StringLiteral) visit(context.value)).getStringValue()); } @Override public ParseNode visitOutfile(StarRocksParser.OutfileContext context) { Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } String format = null; if (context.fileFormat() != null) { if (context.fileFormat().identifier() != null) { format = ((Identifier) visit(context.fileFormat().identifier())).getValue(); } else if (context.fileFormat().string() != null) { format = ((StringLiteral) visit(context.fileFormat().string())).getStringValue(); } } return new OutFileClause( ((StringLiteral) visit(context.file)).getStringValue(), format, properties); } @Override public ParseNode visitColumnNameWithComment(StarRocksParser.ColumnNameWithCommentContext context) { String comment = null; if (context.comment() != null) { comment = ((StringLiteral) visit(context.comment())).getStringValue(); } return new ColWithComment(((Identifier) visit(context.identifier())).getValue(), comment); } @Override public ParseNode visitIdentifierOrString(StarRocksParser.IdentifierOrStringContext context) { String s = null; if (context.identifier() != null) { s = ((Identifier) visit(context.identifier())).getValue(); } else if (context.string() != null) { s = ((StringLiteral) visit(context.string())).getStringValue(); } return new Identifier(s); } @Override public ParseNode visitUserWithHostAndBlanket(StarRocksParser.UserWithHostAndBlanketContext context) { Identifier user = (Identifier) visit(context.identifierOrString(0)); Identifier host = (Identifier) visit(context.identifierOrString(1)); return new UserIdentifier(user.getValue(), host.getValue(), true); } @Override public ParseNode visitUserWithHost(StarRocksParser.UserWithHostContext context) { Identifier user = (Identifier) visit(context.identifierOrString(0)); Identifier host = (Identifier) visit(context.identifierOrString(1)); return new UserIdentifier(user.getValue(), host.getValue(), false); } @Override public ParseNode visitUserWithoutHost(StarRocksParser.UserWithoutHostContext context) { Identifier user = (Identifier) visit(context.identifierOrString()); return new UserIdentifier(user.getValue(), "%", false); } private <T> List<T> visit(List<?
extends ParserRuleContext> contexts, Class<T> clazz) { return contexts.stream() .map(this::visit) .map(clazz::cast) .collect(toList()); } private <T> List<T> visitIfPresent(List<? extends ParserRuleContext> contexts, Class<T> clazz) { if (contexts != null && contexts.size() != 0) { return contexts.stream() .map(this::visit) .map(clazz::cast) .collect(toList()); } else { return null; } } private ParseNode visitIfPresent(ParserRuleContext context) { if (context != null) { return visit(context); } else { return null; } } private QualifiedName getQualifiedName(StarRocksParser.QualifiedNameContext context) { List<String> parts = visit(context.identifier(), Identifier.class).stream() .map(Identifier::getValue) .collect(Collectors.toList()); return QualifiedName.of(parts); } private TableName qualifiedNameToTableName(QualifiedName qualifiedName) { if (qualifiedName.getParts().size() == 2) { return new TableName(qualifiedName.getParts().get(0), qualifiedName.getParts().get(1)); } else if (qualifiedName.getParts().size() == 1) { return new TableName(null, qualifiedName.getParts().get(0)); } else { throw new ParsingException("Invalid table name: " + qualifiedName); } } private Type getType(StarRocksParser.TypeContext type) { Integer length = null; Integer precision = null; Integer scale = null; if (type.baseType() != null) { if (type.baseType().typeParameter() != null) { length = Integer.parseInt(type.baseType().typeParameter().INTEGER_VALUE().toString()); } return ScalarType.createTypeFromParser(type.baseType().getText(), length, precision, scale); } else if (type.decimalType() != null) { if (type.precision != null) { precision = Integer.parseInt(type.precision.getText()); scale = type.scale == null ? ScalarType.DEFAULT_SCALE : Integer.parseInt(type.scale.getText()); } return ScalarType.createTypeFromParser(type.decimalType().getText(), length, precision, scale); } else if (type.arrayType() != null) { StarRocksParser.ArrayTypeContext arrayTypeContext = type.arrayType(); return new ArrayType(getType(arrayTypeContext.type())); } throw new IllegalArgumentException("Unsupported type specification: " + type.getText()); } }
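For orientation, a minimal sketch of how a visitor like the AstBuilder above is typically driven. It assumes the ANTLR-4-generated StarRocksLexer and StarRocksParser plus a singleStatement entry rule (suggested by visitSingleStatement); the ParseDriver class and parseSql helper are hypothetical names introduced here for illustration, not part of the source:

import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;

public class ParseDriver {
    // Hypothetical helper: lex and parse a SQL string, then build the AST with AstBuilder.
    public static ParseNode parseSql(String sql, long sqlMode) {
        StarRocksLexer lexer = new StarRocksLexer(CharStreams.fromString(sql));
        StarRocksParser parser = new StarRocksParser(new CommonTokenStream(lexer));
        // singleStatement() is assumed to be the grammar's entry rule, per visitSingleStatement.
        return new AstBuilder(sqlMode).visit(parser.singleStatement());
    }
}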
class AstBuilder extends StarRocksBaseVisitor<ParseNode> { private final long sqlMode; public AstBuilder(long sqlMode) { this.sqlMode = sqlMode; } @Override public ParseNode visitSingleStatement(StarRocksParser.SingleStatementContext context) { return visit(context.statement()); } @Override public ParseNode visitCreateTableAsSelectStatement(StarRocksParser.CreateTableAsSelectStatementContext context) { Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } CreateTableStmt createTableStmt = new CreateTableStmt( context.IF() != null, false, qualifiedNameToTableName(getQualifiedName(context.qualifiedName())), null, "olap", null, context.partitionDesc() == null ? null : (PartitionDesc) visit(context.partitionDesc()), context.distributionDesc() == null ? null : (DistributionDesc) visit(context.distributionDesc()), properties, null, context.comment() == null ? null : ((StringLiteral) visit(context.comment().string())).getStringValue()); List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class); return new CreateTableAsSelectStmt( createTableStmt, columns == null ? null : columns.stream().map(Identifier::getValue).collect(toList()), (QueryStatement) visit(context.queryStatement())); } @Override public ParseNode visitAlterTableStatement(StarRocksParser.AlterTableStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); List<AlterClause> alterClauses = visit(context.alterClause(), AlterClause.class); return new AlterTableStmt(targetTableName, alterClauses); } @Override public ParseNode visitDropTableStatement(StarRocksParser.DropTableStatementContext context) { boolean ifExists = context.IF() != null && context.EXISTS() != null; boolean force = context.FORCE() != null; QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); return new DropTableStmt(ifExists, targetTableName, force); } @Override public ParseNode visitShowTableStatement(StarRocksParser.ShowTableStatementContext context) { boolean isVerbose = context.FULL() != null; String database = null; if (context.qualifiedName() != null) { database = getQualifiedName(context.qualifiedName()).toString(); } if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); return new ShowTableStmt(database, isVerbose, stringLiteral.getValue()); } else if (context.expression() != null) { return new ShowTableStmt(database, isVerbose, null, (Expr) visit(context.expression())); } else { return new ShowTableStmt(database, isVerbose, null); } } @Override public ParseNode visitCreateIndexStatement(StarRocksParser.CreateIndexStatementContext context) { String indexName = ((Identifier) visit(context.identifier())).getValue(); List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class); String comment = null; if (context.comment() != null) { comment = ((StringLiteral) visit(context.comment())).getStringValue(); } IndexDef indexDef = new IndexDef(indexName, columnList.stream().map(Identifier::getValue).collect(toList()), IndexDef.IndexType.BITMAP, comment); CreateIndexClause createIndexClause = new CreateIndexClause(null, indexDef, false); QualifiedName 
qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); return new AlterTableStmt(targetTableName, Lists.newArrayList(createIndexClause)); } @Override public ParseNode visitDropIndexStatement(StarRocksParser.DropIndexStatementContext context) { Identifier identifier = (Identifier) visit(context.identifier()); DropIndexClause dropIndexClause = new DropIndexClause(identifier.getValue(), null, false); QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); return new AlterTableStmt(targetTableName, Lists.newArrayList(dropIndexClause)); } @Override public ParseNode visitShowColumnStatement(StarRocksParser.ShowColumnStatementContext context) { QualifiedName tableName = getQualifiedName(context.table); QualifiedName dbName = null; if (context.db != null) { dbName = getQualifiedName(context.db); } String pattern = null; if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); pattern = stringLiteral.getValue(); } Expr where = null; if (context.expression() != null) { where = (Expr) visit(context.expression()); } return new ShowColumnStmt(qualifiedNameToTableName(tableName), dbName == null ? null : dbName.toString(), pattern, context.FULL() != null, where); } @Override public ParseNode visitShowTableStatusStatement(StarRocksParser.ShowTableStatusStatementContext context) { QualifiedName dbName = null; if (context.qualifiedName() != null) { dbName = getQualifiedName(context.db); } String pattern = null; if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); pattern = stringLiteral.getValue(); } Expr where = null; if (context.expression() != null) { where = (Expr) visit(context.expression()); } return new ShowTableStatusStmt(dbName == null ? null : dbName.toString(), pattern, where); } @Override public ParseNode visitCreateViewStatement(StarRocksParser.CreateViewStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); List<ColWithComment> colWithComments = null; if (context.columnNameWithComment().size() > 0) { colWithComments = visit(context.columnNameWithComment(), ColWithComment.class); } return new CreateViewStmt( context.IF() != null, targetTableName, colWithComments, context.comment() == null ? 
null : ((StringLiteral) visit(context.comment())).getStringValue(), (QueryStatement) visit(context.queryStatement())); } @Override public ParseNode visitAlterViewStatement(StarRocksParser.AlterViewStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); List<ColWithComment> colWithComments = null; if (context.columnNameWithComment().size() > 0) { colWithComments = visit(context.columnNameWithComment(), ColWithComment.class); } return new AlterViewStmt(targetTableName, colWithComments, (QueryStatement) visit(context.queryStatement())); } @Override public ParseNode visitDropViewStatement(StarRocksParser.DropViewStatementContext context) { boolean ifExists = context.IF() != null && context.EXISTS() != null; QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); return new DropTableStmt(ifExists, targetTableName, true, false); } @Override public ParseNode visitSubmitTaskStatement(StarRocksParser.SubmitTaskStatementContext context) { QualifiedName qualifiedName = null; if (context.qualifiedName() != null) { qualifiedName = getQualifiedName(context.qualifiedName()); } Map<String, String> properties = new HashMap<>(); if (context.hint() != null) { for (StarRocksParser.HintContext hintContext : context.hint()) { for (StarRocksParser.HintMapContext hintMapContext : hintContext.hintMap()) { String key = hintMapContext.k.getText(); String value = hintMapContext.v.getText(); properties.put(key, value); } } } CreateTableAsSelectStmt createTableAsSelectStmt = (CreateTableAsSelectStmt) visit(context.createTableAsSelectStatement()); if (qualifiedName == null) { return new SubmitTaskStmt(null, null, properties, createTableAsSelectStmt); } else if (qualifiedName.getParts().size() == 1) { return new SubmitTaskStmt(null, qualifiedName.getParts().get(0), properties, createTableAsSelectStmt); } else if (qualifiedName.getParts().size() == 2) { return new SubmitTaskStmt(SystemInfoService.DEFAULT_CLUSTER + ":" + qualifiedName.getParts().get(0), qualifiedName.getParts().get(1), properties, createTableAsSelectStmt); } else { throw new ParsingException("Invalid task name: " + qualifiedName); } } @Override public ParseNode visitCreateMaterializedViewStatement( StarRocksParser.CreateMaterializedViewStatementContext context) { if (!Config.enable_experimental_mv) { throw new ParsingException("The experimental mv is disabled"); } boolean ifNotExist = context.IF() != null; QualifiedName qualifiedName = getQualifiedName(context.mvName); TableName tableName = qualifiedNameToTableName(qualifiedName); String comment = context.comment() == null ?
null : ((StringLiteral) visit(context.comment().string())).getStringValue(); QueryStatement queryStatement = (QueryStatement) visit(context.queryStatement()); Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } RefreshSchemeDesc refreshSchemeDesc = null; if (context.refreshSchemeDesc() == null) { refreshSchemeDesc = new SyncRefreshSchemeDesc(); } else { refreshSchemeDesc = ((RefreshSchemeDesc) visit(context.refreshSchemeDesc())); } if (refreshSchemeDesc instanceof SyncRefreshSchemeDesc) { if (context.primaryExpression() != null) { throw new IllegalArgumentException( "Partition by is not supported by SYNC refresh type in materialized view"); } if (context.distributionDesc() != null) { throw new IllegalArgumentException( "Distribution by is not supported by SYNC refresh type in materialized view"); } String sql = AST2SQL.toString(queryStatement); StatementBase statement = SqlParser.parseWithOldParser(sql, sqlMode, 0); if (!(statement instanceof SelectStmt)) { throw new IllegalArgumentException("Materialized view query statement only supports select"); } return new CreateMaterializedViewStmt(tableName.getTbl(), (SelectStmt) statement, properties); } ExpressionPartitionDesc expressionPartitionDesc = null; if (context.primaryExpression() != null) { Expr expr = (Expr) visit(context.primaryExpression()); if (expr instanceof SlotRef) { expressionPartitionDesc = new ExpressionPartitionDesc(expr); } else if (expr instanceof FunctionCallExpr) { for (Expr child : expr.getChildren()) { if (child instanceof SlotRef) { expressionPartitionDesc = new ExpressionPartitionDesc(expr); break; } } if (expressionPartitionDesc == null) { throw new IllegalArgumentException( "Partition expression is not supported: " + expr.toSql()); } } else { throw new IllegalArgumentException( "Partition expression is not supported: " + expr.toSql()); } } DistributionDesc distributionDesc = context.distributionDesc() == null ?
null : (DistributionDesc) visit(context.distributionDesc()); return new CreateMaterializedViewStatement(tableName, ifNotExist, comment, refreshSchemeDesc, expressionPartitionDesc, distributionDesc, properties, queryStatement); } @Override public ParseNode visitShowMaterializedViewStatement(StarRocksParser.ShowMaterializedViewStatementContext context) { String database = null; if (context.qualifiedName() != null) { database = getQualifiedName(context.qualifiedName()).toString(); } return new ShowMaterializedViewStmt(database); } @Override public ParseNode visitDropMaterializedViewStatement(StarRocksParser.DropMaterializedViewStatementContext context) { QualifiedName mvQualifiedName = getQualifiedName(context.qualifiedName()); TableName mvName = qualifiedNameToTableName(mvQualifiedName); return new DropMaterializedViewStmt(context.IF() != null, mvName); } @Override public ParseNode visitAlterSystemStatement(StarRocksParser.AlterSystemStatementContext context) { return new AlterSystemStmt((AlterClause) visit(context.alterClause())); } @Override public ParseNode visitCreateExternalCatalogStatement( StarRocksParser.CreateExternalCatalogStatementContext context) { Identifier identifier = (Identifier) visit(context.identifierOrString()); String catalogName = identifier.getValue(); String comment = null; if (context.comment() != null) { comment = ((StringLiteral) visit(context.comment())).getStringValue(); } Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } return new CreateCatalogStmt(catalogName, comment, properties); } @Override public ParseNode visitDropExternalCatalogStatement(StarRocksParser.DropExternalCatalogStatementContext context) { Identifier identifier = (Identifier) visit(context.catalogName); String catalogName = identifier.getValue(); return new DropCatalogStmt(catalogName); } @Override public ParseNode visitShowCatalogsStatement(StarRocksParser.ShowCatalogsStatementContext context) { return new ShowCatalogsStmt(); } @Override public ParseNode visitCreateIndexClause(StarRocksParser.CreateIndexClauseContext context) { String indexName = ((Identifier) visit(context.identifier())).getValue(); List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class); String comment = null; if (context.comment() != null) { comment = ((StringLiteral) visit(context.comment())).getStringValue(); } IndexDef indexDef = new IndexDef(indexName, columnList.stream().map(Identifier::getValue).collect(toList()), IndexDef.IndexType.BITMAP, comment); return new CreateIndexClause(null, indexDef, true); } @Override public ParseNode visitDropIndexClause(StarRocksParser.DropIndexClauseContext context) { Identifier identifier = (Identifier) visit(context.identifier()); return new DropIndexClause(identifier.getValue(), null, true); } @Override public ParseNode visitTableRenameClause(StarRocksParser.TableRenameClauseContext context) { Identifier identifier = (Identifier) visit(context.identifier()); return new TableRenameClause(identifier.getValue()); } @Override public ParseNode visitAdminSetReplicaStatus(StarRocksParser.AdminSetReplicaStatusContext context) { Map<String, String> properties = new HashMap<>(); List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), 
property.getValue()); } return new AdminSetReplicaStatusStmt(properties); } @Override public ParseNode visitAddBackendClause(StarRocksParser.AddBackendClauseContext context) { List<String> clusters = context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList()); if (context.TO() != null) { Identifier identifier = (Identifier) visit(context.identifier()); return new AddBackendClause(clusters, identifier.getValue()); } if (context.FREE() != null) { return new AddBackendClause(clusters, true); } return new AddBackendClause(clusters, false); } @Override public ParseNode visitDropBackendClause(StarRocksParser.DropBackendClauseContext context) { List<String> clusters = context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList()); return new DropBackendClause(clusters, context.FORCE() != null); } @Override public ParseNode visitAddFrontendClause(StarRocksParser.AddFrontendClauseContext context) { String cluster = ((StringLiteral) visit(context.string())).getStringValue(); if (context.FOLLOWER() != null) { return new AddFollowerClause(cluster); } else if (context.OBSERVER() != null) { return new AddObserverClause(cluster); } else { Preconditions.checkState(false, "frontend clause error."); return null; } } @Override public ParseNode visitDropFrontendClause(StarRocksParser.DropFrontendClauseContext context) { String cluster = ((StringLiteral) visit(context.string())).getStringValue(); if (context.FOLLOWER() != null) { return new DropFollowerClause(cluster); } else if (context.OBSERVER() != null) { return new DropObserverClause(cluster); } else { Preconditions.checkState(false, "frontend clause error."); return null; } } @Override public ParseNode visitInsertStatement(StarRocksParser.InsertStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); PartitionNames partitionNames = null; if (context.partitionNames() != null) { partitionNames = (PartitionNames) visit(context.partitionNames()); } QueryStatement queryStatement; if (context.VALUES() != null) { List<ValueList> rowValues = visit(context.expressionsWithDefault(), ValueList.class); List<ArrayList<Expr>> rows = rowValues.stream().map(ValueList::getFirstRow).collect(toList()); List<String> colNames = new ArrayList<>(); for (int i = 0; i < rows.get(0).size(); ++i) { colNames.add("column_" + i); } queryStatement = new QueryStatement(new ValuesRelation(rows, colNames)); } else { queryStatement = (QueryStatement) visit(context.queryStatement()); } List<String> targetColumnNames = null; if (context.columnAliases() != null) { List<Identifier> targetColumnNamesIdentifiers = visitIfPresent(context.columnAliases().identifier(), Identifier.class); if (targetColumnNamesIdentifiers != null) { targetColumnNames = targetColumnNamesIdentifiers.stream() .map(Identifier::getValue).map(String::toLowerCase).collect(toList()); } } if (context.explainDesc() != null) { queryStatement.setIsExplain(true, getExplainType(context.explainDesc())); } return new InsertStmt( new InsertTarget(targetTableName, partitionNames), context.label == null ? 
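// Optional load label on the INSERT, e.g. INSERT INTO t WITH LABEL lbl1 ... (illustrative example, not from this file).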
null : ((Identifier) visit(context.label)).getValue(), targetColumnNames, queryStatement, Lists.newArrayList()); } @Override public ParseNode visitUpdateStatement(StarRocksParser.UpdateStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); List<ColumnAssignment> assignments = visit(context.assignmentList().assignment(), ColumnAssignment.class); Expr where = context.where != null ? (Expr) visit(context.where) : null; UpdateStmt ret = new UpdateStmt(targetTableName, assignments, where); if (context.explainDesc() != null) { ret.setIsExplain(true, getExplainType(context.explainDesc())); } return ret; } @Override public ParseNode visitDeleteStatement(StarRocksParser.DeleteStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName targetTableName = qualifiedNameToTableName(qualifiedName); PartitionNames partitionNames = null; if (context.partitionNames() != null) { partitionNames = (PartitionNames) visit(context.partitionNames()); } Expr where = context.where != null ? (Expr) visit(context.where) : null; DeleteStmt ret = new DeleteStmt(targetTableName, partitionNames, where); if (context.explainDesc() != null) { ret.setIsExplain(true, getExplainType(context.explainDesc())); } return ret; } @Override public ParseNode visitAnalyzeStatement(StarRocksParser.AnalyzeStatementContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName tableName = qualifiedNameToTableName(qualifiedName); List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class); List<String> columnNames = null; if (columns != null) { columnNames = columns.stream().map(Identifier::getValue).collect(toList()); } Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } return new AnalyzeStmt(tableName, columnNames, properties, context.FULL() == null); } @Override public ParseNode visitCreateAnalyzeStatement(StarRocksParser.CreateAnalyzeStatementContext context) { Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } if (context.DATABASE() != null) { return new CreateAnalyzeJobStmt(((Identifier) visit(context.db)).getValue(), context.FULL() == null, properties); } else if (context.TABLE() != null) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName tableName = qualifiedNameToTableName(qualifiedName); List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class); List<String> columnNames = null; if (columns != null) { columnNames = columns.stream().map(Identifier::getValue).collect(toList()); } return new CreateAnalyzeJobStmt(tableName, columnNames, context.FULL() == null, properties); } else { return new CreateAnalyzeJobStmt(context.FULL() == null, properties); } } @Override public ParseNode visitDropAnalyzeJobStatement(StarRocksParser.DropAnalyzeJobStatementContext context) { return new DropAnalyzeJobStmt(Long.parseLong(context.INTEGER_VALUE().getText())); } @Override public ParseNode 
visitShowAnalyzeStatement(StarRocksParser.ShowAnalyzeStatementContext context) { return new ShowAnalyzeStmt(); } @Override public ParseNode visitCreateWorkGroupStatement(StarRocksParser.CreateWorkGroupStatementContext context) { Identifier identifier = (Identifier) visit(context.identifier()); String name = identifier.getValue(); List<List<Predicate>> predicatesList = new ArrayList<>(); for (StarRocksParser.ClassifierContext classifierContext : context.classifier()) { List<Predicate> p = visit(classifierContext.expression(), Predicate.class); predicatesList.add(p); } Map<String, String> properties = new HashMap<>(); List<Property> propertyList = visit(context.property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } return new CreateWorkGroupStmt(name, context.EXISTS() != null, context.REPLACE() != null, predicatesList, properties); } @Override public ParseNode visitDropWorkGroupStatement(StarRocksParser.DropWorkGroupStatementContext context) { Identifier identifier = (Identifier) visit(context.identifier()); return new DropWorkGroupStmt(identifier.getValue()); } @Override public ParseNode visitAlterWorkGroupStatement(StarRocksParser.AlterWorkGroupStatementContext context) { Identifier identifier = (Identifier) visit(context.identifier()); String name = identifier.getValue(); if (context.ADD() != null) { List<List<Predicate>> predicatesList = new ArrayList<>(); for (StarRocksParser.ClassifierContext classifierContext : context.classifier()) { List<Predicate> p = visit(classifierContext.expression(), Predicate.class); predicatesList.add(p); } return new AlterWorkGroupStmt(name, new AlterWorkGroupStmt.AddClassifiers(predicatesList)); } else if (context.DROP() != null) { if (context.ALL() != null) { return new AlterWorkGroupStmt(name, new AlterWorkGroupStmt.DropAllClassifiers()); } else { return new AlterWorkGroupStmt(name, new AlterWorkGroupStmt.DropClassifiers(context.INTEGER_VALUE() .stream().map(ParseTree::getText).map(Long::parseLong).collect(toList()))); } } else { Map<String, String> properties = new HashMap<>(); List<Property> propertyList = visit(context.property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } return new AlterWorkGroupStmt(name, new AlterWorkGroupStmt.AlterProperties(properties)); } } @Override public ParseNode visitShowWorkGroupStatement(StarRocksParser.ShowWorkGroupStatementContext context) { if (context.GROUPS() != null) { return new ShowWorkGroupStmt(null, context.ALL() != null); } else { Identifier identifier = (Identifier) visit(context.identifier()); return new ShowWorkGroupStmt(identifier.getValue(), false); } } @Override public ParseNode visitQueryStatement(StarRocksParser.QueryStatementContext context) { QueryRelation queryRelation = (QueryRelation) visit(context.queryBody()); QueryStatement queryStatement = new QueryStatement(queryRelation); if (context.outfile() != null) { queryStatement.setOutFileClause((OutFileClause) visit(context.outfile())); } if (context.explainDesc() != null) { queryStatement.setIsExplain(true, getExplainType(context.explainDesc())); } return queryStatement; } @Override public ParseNode visitQueryBody(StarRocksParser.QueryBodyContext context) { QueryRelation queryRelation = (QueryRelation) visit(context.queryNoWith()); List<CTERelation> withQuery = new ArrayList<>(); if (context.withClause() != null) { withQuery = visit(context.withClause().commonTableExpression(), CTERelation.class); } 
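// Attach every parsed WITH-clause CTE to the query relation so later analysis can resolve references to it.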
withQuery.forEach(queryRelation::addCTERelation); return queryRelation; } @Override public ParseNode visitCommonTableExpression(StarRocksParser.CommonTableExpressionContext context) { List<Identifier> columns = null; if (context.columnAliases() != null) { columns = visit(context.columnAliases().identifier(), Identifier.class); } List<String> columnNames = null; if (columns != null) { columnNames = columns.stream().map(Identifier::getValue).collect(toList()); } QueryRelation queryRelation = (QueryRelation) visit(context.queryBody()); return new CTERelation( RelationId.of(queryRelation).hashCode(), ((Identifier) visit(context.name)).getValue(), columnNames, new QueryStatement(queryRelation)); } @Override public ParseNode visitQueryNoWith(StarRocksParser.QueryNoWithContext context) { List<OrderByElement> orderByElements = new ArrayList<>(); if (context.ORDER() != null) { orderByElements.addAll(visit(context.sortItem(), OrderByElement.class)); } LimitElement limitElement = null; if (context.limitElement() != null) { limitElement = (LimitElement) visit(context.limitElement()); } QueryRelation term = (QueryRelation) visit(context.queryTerm()); term.setOrderBy(orderByElements); term.setLimit(limitElement); return term; } @Override public ParseNode visitSetOperation(StarRocksParser.SetOperationContext context) { QueryRelation left = (QueryRelation) visit(context.left); QueryRelation right = (QueryRelation) visit(context.right); boolean distinct = true; if (context.setQuantifier() != null) { if (context.setQuantifier().DISTINCT() != null) { distinct = true; } else if (context.setQuantifier().ALL() != null) { distinct = false; } } SetQualifier setQualifier = distinct ? SetQualifier.DISTINCT : SetQualifier.ALL; switch (context.operator.getType()) { case StarRocksLexer.UNION: if (left instanceof UnionRelation && ((UnionRelation) left).getQualifier().equals(setQualifier)) { ((UnionRelation) left).addRelation(right); return left; } else { return new UnionRelation(Lists.newArrayList(left, right), setQualifier); } case StarRocksLexer.INTERSECT: if (left instanceof IntersectRelation && ((IntersectRelation) left).getQualifier().equals(setQualifier)) { ((IntersectRelation) left).addRelation(right); return left; } else { return new IntersectRelation(Lists.newArrayList(left, right), setQualifier); } case StarRocksLexer.EXCEPT: case StarRocksLexer.MINUS: if (left instanceof ExceptRelation && ((ExceptRelation) left).getQualifier().equals(setQualifier)) { ((ExceptRelation) left).addRelation(right); return left; } else { return new ExceptRelation(Lists.newArrayList(left, right), setQualifier); } } throw new IllegalArgumentException("Unsupported set operation: " + context.operator.getText()); } @Override public ParseNode visitQuerySpecification(StarRocksParser.QuerySpecificationContext context) { Relation from = null; List<SelectListItem> selectItems = visit(context.selectItem(), SelectListItem.class); if (context.fromClause() instanceof StarRocksParser.DualContext) { if (selectItems.stream().anyMatch(SelectListItem::isStar)) { ErrorReport.reportSemanticException(ErrorCode.ERR_NO_TABLES_USED); } } else { StarRocksParser.FromContext fromContext = (StarRocksParser.FromContext) context.fromClause(); List<Relation> relations = visit(fromContext.relation(), Relation.class); if (!relations.isEmpty()) { Iterator<Relation> iterator = relations.iterator(); Relation relation = iterator.next(); while (iterator.hasNext()) { relation = new JoinRelation(null, relation, iterator.next(), null, false); } from = relation; } } /* 
from == null means the statement has no FROM clause (or uses FROM DUAL). A single row of null values is added here so that the semantics stay the same and subsequent query processing is simplified. For example, select sum(1) and select sum(1) from dual are converted to select sum(1) from (values(null)) t, which shares the same logic as select sum(1) from table. */ if (from == null) { ArrayList<Expr> row = new ArrayList<>(); List<String> columnNames = new ArrayList<>(); row.add(NullLiteral.create(Type.NULL)); columnNames.add(""); List<ArrayList<Expr>> rows = new ArrayList<>(); rows.add(row); ValuesRelation valuesRelation = new ValuesRelation(rows, columnNames); valuesRelation.setNullValues(true); from = valuesRelation; } boolean isDistinct = context.setQuantifier() != null && context.setQuantifier().DISTINCT() != null; SelectList selectList = new SelectList(selectItems, isDistinct); if (context.hint() != null) { Map<String, String> selectHints = new HashMap<>(); for (StarRocksParser.HintContext hintContext : context.hint()) { for (StarRocksParser.HintMapContext hintMapContext : hintContext.hintMap()) { String key = hintMapContext.k.getText(); String value = hintMapContext.v.getText(); selectHints.put(key, value); } } selectList.setOptHints(selectHints); } return new SelectRelation( selectList, from, (Expr) visitIfPresent(context.where), (GroupByClause) visitIfPresent(context.groupingElement()), (Expr) visitIfPresent(context.having)); } @Override public ParseNode visitSelectSingle(StarRocksParser.SelectSingleContext context) { String alias = null; if (context.identifier() != null) { alias = ((Identifier) visit(context.identifier())).getValue(); } else if (context.string() != null) { alias = ((StringLiteral) visit(context.string())).getStringValue(); } return new SelectListItem((Expr) visit(context.expression()), alias); } @Override public ParseNode visitSelectAll(StarRocksParser.SelectAllContext context) { if (context.qualifiedName() != null) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); return new SelectListItem(qualifiedNameToTableName(qualifiedName)); } return new SelectListItem(null); } @Override public ParseNode visitSingleGroupingSet(StarRocksParser.SingleGroupingSetContext context) { return new GroupByClause(new ArrayList<>(visit(context.expression(), Expr.class)), GroupByClause.GroupingType.GROUP_BY); } @Override public ParseNode visitRollup(StarRocksParser.RollupContext context) { List<Expr> groupingExprs = visit(context.expression(), Expr.class); return new GroupByClause(new ArrayList<>(groupingExprs), GroupByClause.GroupingType.ROLLUP); } @Override public ParseNode visitCube(StarRocksParser.CubeContext context) { List<Expr> groupingExprs = visit(context.expression(), Expr.class); return new GroupByClause(new ArrayList<>(groupingExprs), GroupByClause.GroupingType.CUBE); } @Override public ParseNode visitMultipleGroupingSets(StarRocksParser.MultipleGroupingSetsContext context) { List<ArrayList<Expr>> groupingSets = new ArrayList<>(); for (StarRocksParser.GroupingSetContext groupingSetContext : context.groupingSet()) { List<Expr> l = visit(groupingSetContext.expression(), Expr.class); groupingSets.add(new ArrayList<>(l)); } return new GroupByClause(groupingSets, GroupByClause.GroupingType.GROUPING_SETS); } @Override public ParseNode visitGroupingOperation(StarRocksParser.GroupingOperationContext context) { List<Expr> arguments = visit(context.expression(), Expr.class); return new GroupingFunctionCallExpr("grouping", arguments); } @Override public ParseNode 
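// Window frames: a start bound is always present, the end bound is optional,
// e.g. ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW (illustrative example).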
visitWindowFrame(StarRocksParser.WindowFrameContext context) { if (context.end != null) { return new AnalyticWindow( getFrameType(context.frameType), (AnalyticWindow.Boundary) visit(context.start), (AnalyticWindow.Boundary) visit(context.end)); } else { return new AnalyticWindow( getFrameType(context.frameType), (AnalyticWindow.Boundary) visit(context.start)); } } private static AnalyticWindow.Type getFrameType(Token type) { switch (type.getType()) { case StarRocksLexer.RANGE: return AnalyticWindow.Type.RANGE; case StarRocksLexer.ROWS: return AnalyticWindow.Type.ROWS; } throw new IllegalArgumentException("Unsupported frame type: " + type.getText()); } @Override public ParseNode visitUnboundedFrame(StarRocksParser.UnboundedFrameContext context) { return new AnalyticWindow.Boundary(getUnboundedFrameBoundType(context.boundType), null); } @Override public ParseNode visitBoundedFrame(StarRocksParser.BoundedFrameContext context) { return new AnalyticWindow.Boundary(getBoundedFrameBoundType(context.boundType), (Expr) visit(context.expression())); } @Override public ParseNode visitCurrentRowBound(StarRocksParser.CurrentRowBoundContext context) { return new AnalyticWindow.Boundary(AnalyticWindow.BoundaryType.CURRENT_ROW, null); } private static AnalyticWindow.BoundaryType getBoundedFrameBoundType(Token token) { switch (token.getType()) { case StarRocksLexer.PRECEDING: return AnalyticWindow.BoundaryType.PRECEDING; case StarRocksLexer.FOLLOWING: return AnalyticWindow.BoundaryType.FOLLOWING; } throw new IllegalArgumentException("Unsupported bound type: " + token.getText()); } private static AnalyticWindow.BoundaryType getUnboundedFrameBoundType(Token token) { switch (token.getType()) { case StarRocksLexer.PRECEDING: return AnalyticWindow.BoundaryType.UNBOUNDED_PRECEDING; case StarRocksLexer.FOLLOWING: return AnalyticWindow.BoundaryType.UNBOUNDED_FOLLOWING; } throw new IllegalArgumentException("Unsupported bound type: " + token.getText()); } @Override public ParseNode visitSortItem(StarRocksParser.SortItemContext context) { return new OrderByElement( (Expr) visit(context.expression()), getOrderingType(context.ordering), getNullOrderingType(getOrderingType(context.ordering), context.nullOrdering)); } private boolean getNullOrderingType(boolean isAsc, Token token) { if (token == null) { return (!SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_SORT_NULLS_LAST)) == isAsc; } switch (token.getType()) { case StarRocksLexer.FIRST: return true; case StarRocksLexer.LAST: return false; } throw new IllegalArgumentException("Unsupported ordering: " + token.getText()); } private static boolean getOrderingType(Token token) { if (token == null) { return true; } switch (token.getType()) { case StarRocksLexer.ASC: return true; case StarRocksLexer.DESC: return false; } throw new IllegalArgumentException("Unsupported ordering: " + token.getText()); } @Override public ParseNode visitLimitElement(StarRocksParser.LimitElementContext context) { long limit = Long.parseLong(context.limit.getText()); long offset = 0; if (context.offset != null) { offset = Long.parseLong(context.offset.getText()); } return new LimitElement(offset, limit); } @Override public ParseNode visitParenthesizedRelation(StarRocksParser.ParenthesizedRelationContext context) { return visit(context.relation()); } @Override public ParseNode visitTableName(StarRocksParser.TableNameContext context) { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); TableName tableName = qualifiedNameToTableName(qualifiedName); PartitionNames 
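// Optional [TEMPORARY] PARTITION (...) and TABLET (...) clauses on a table reference are handled next.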
partitionNames = null; if (context.partitionNames() != null) { partitionNames = (PartitionNames) visit(context.partitionNames()); } List<Long> tabletIds = Lists.newArrayList(); if (context.tabletList() != null) { tabletIds = context.tabletList().INTEGER_VALUE().stream().map(ParseTree::getText) .map(Long::parseLong).collect(toList()); } TableRelation tableRelation = new TableRelation(tableName, partitionNames, tabletIds); if (context.hint() != null) { for (TerminalNode hint : context.hint().IDENTIFIER()) { if (hint.getText().equalsIgnoreCase("_META_")) { tableRelation.setMetaQuery(true); } } } return tableRelation; } @Override public ParseNode visitAliasedRelation(StarRocksParser.AliasedRelationContext context) { Relation child = (Relation) visit(context.relationPrimary()); if (context.identifier() == null) { return child; } Identifier identifier = (Identifier) visit(context.identifier()); child.setAlias(new TableName(null, identifier.getValue())); return child; } @Override public ParseNode visitJoinRelation(StarRocksParser.JoinRelationContext context) { Relation left = (Relation) visit(context.left); Relation right = (Relation) visit(context.rightRelation); JoinOperator joinType = JoinOperator.INNER_JOIN; if (context.crossOrInnerJoinType() != null) { if (context.crossOrInnerJoinType().CROSS() != null) { joinType = JoinOperator.CROSS_JOIN; } else { joinType = JoinOperator.INNER_JOIN; } } else if (context.outerAndSemiJoinType().LEFT() != null) { if (context.outerAndSemiJoinType().OUTER() != null) { joinType = JoinOperator.LEFT_OUTER_JOIN; } else if (context.outerAndSemiJoinType().SEMI() != null) { joinType = JoinOperator.LEFT_SEMI_JOIN; } else if (context.outerAndSemiJoinType().ANTI() != null) { joinType = JoinOperator.LEFT_ANTI_JOIN; } else { joinType = JoinOperator.LEFT_OUTER_JOIN; } } else if (context.outerAndSemiJoinType().RIGHT() != null) { if (context.outerAndSemiJoinType().OUTER() != null) { joinType = JoinOperator.RIGHT_OUTER_JOIN; } else if (context.outerAndSemiJoinType().SEMI() != null) { joinType = JoinOperator.RIGHT_SEMI_JOIN; } else if (context.outerAndSemiJoinType().ANTI() != null) { joinType = JoinOperator.RIGHT_ANTI_JOIN; } else { joinType = JoinOperator.RIGHT_OUTER_JOIN; } } else if (context.outerAndSemiJoinType().FULL() != null) { joinType = JoinOperator.FULL_OUTER_JOIN; } Expr predicate = null; List<String> usingColNames = null; if (context.joinCriteria() != null) { if (context.joinCriteria().ON() != null) { predicate = (Expr) visit(context.joinCriteria().expression()); } else if (context.joinCriteria().USING() != null) { List<Identifier> criteria = visit(context.joinCriteria().identifier(), Identifier.class); usingColNames = criteria.stream().map(Identifier::getValue).collect(Collectors.toList()); } else { throw new IllegalArgumentException("Unsupported join criteria"); } } JoinRelation joinRelation = new JoinRelation(joinType, left, right, predicate, context.LATERAL() != null); joinRelation.setUsingColNames(usingColNames); if (context.hint() != null) { joinRelation.setJoinHint(context.hint().IDENTIFIER(0).getText()); } return joinRelation; } @Override public ParseNode visitInlineTable(StarRocksParser.InlineTableContext context) { List<ValueList> rowValues = visit(context.rowConstructor(), ValueList.class); List<ArrayList<Expr>> rows = rowValues.stream().map(ValueList::getFirstRow).collect(toList()); List<String> colNames = new ArrayList<>(); for (int i = 0; i < rows.get(0).size(); ++i) { colNames.add("column_" + i); } return new ValuesRelation(rows, colNames); } 
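// A table function in FROM (for example unnest(...)) becomes a TableFunctionRelation;
// 'unnest' is only an illustrative name, not taken from this file.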
@Override public ParseNode visitTableFunction(StarRocksParser.TableFunctionContext context) { return new TableFunctionRelation(getQualifiedName(context.qualifiedName()).toString(), new FunctionParams(false, visit(context.expression(), Expr.class))); } @Override public ParseNode visitRowConstructor(StarRocksParser.RowConstructorContext context) { ArrayList<Expr> row = new ArrayList<>(visit(context.expression(), Expr.class)); return new ValueList(row); } @Override public ParseNode visitPartitionNames(StarRocksParser.PartitionNamesContext context) { List<Identifier> identifierList = visit(context.identifier(), Identifier.class); return new PartitionNames(context.TEMPORARY() != null, identifierList.stream().map(Identifier::getValue).collect(toList())); } @Override public ParseNode visitSubquery(StarRocksParser.SubqueryContext context) { return new SubqueryRelation(new QueryStatement((QueryRelation) visit(context.queryBody()))); } @Override public ParseNode visitSubqueryPrimary(StarRocksParser.SubqueryPrimaryContext context) { SubqueryRelation subqueryRelation = (SubqueryRelation) visit(context.subquery()); return subqueryRelation.getQueryStatement().getQueryRelation(); } @Override public ParseNode visitSubqueryRelation(StarRocksParser.SubqueryRelationContext context) { return visit(context.subquery()); } @Override public ParseNode visitSubqueryExpression(StarRocksParser.SubqueryExpressionContext context) { SubqueryRelation subqueryRelation = (SubqueryRelation) visit(context.subquery()); return new Subquery(subqueryRelation.getQueryStatement()); } @Override public ParseNode visitInSubquery(StarRocksParser.InSubqueryContext context) { boolean isNotIn = context.NOT() != null; QueryRelation query = (QueryRelation) visit(context.queryBody()); return new InPredicate((Expr) visit(context.value), new Subquery(new QueryStatement(query)), isNotIn); } @Override public ParseNode visitExists(StarRocksParser.ExistsContext context) { QueryRelation query = (QueryRelation) visit(context.queryBody()); return new ExistsPredicate(new Subquery(new QueryStatement(query)), false); } @Override public ParseNode visitScalarSubquery(StarRocksParser.ScalarSubqueryContext context) { BinaryPredicate.Operator op = getComparisonOperator(((TerminalNode) context.comparisonOperator().getChild(0)) .getSymbol()); Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryBody()))); return new BinaryPredicate(op, (Expr) visit(context.booleanExpression()), subquery); } @Override public ParseNode visitUse(StarRocksParser.UseContext context) { Identifier identifier = (Identifier) visit(context.identifier()); return new UseStmt(identifier.getValue()); } @Override public ParseNode visitAdminSetConfig(StarRocksParser.AdminSetConfigContext context) { Map<String, String> configs = new HashMap<>(); Property property = (Property) visitProperty(context.property()); String configKey = property.getKey(); String configValue = property.getValue(); configs.put(configKey, configValue); return new AdminSetConfigStmt(AdminSetConfigStmt.ConfigType.FRONTEND, configs); } @Override public ParseNode visitGrantRole(StarRocksParser.GrantRoleContext context) { UserIdentifier user = (UserIdentifier) visit(context.user()); Identifier identifier = (Identifier) visit(context.identifierOrString()); return new GrantRoleStmt(identifier.getValue(), user.getUserIdentity()); } @Override public ParseNode visitRevokeRole(StarRocksParser.RevokeRoleContext context) { UserIdentifier user = (UserIdentifier) visit(context.user()); 
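// The role name may be written as a plain identifier or a quoted string (identifierOrString).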
Identifier identifier = (Identifier) visit(context.identifierOrString()); return new RevokeRoleStmt(identifier.getValue(), user.getUserIdentity()); } @Override public ParseNode visitGrantImpersonate(StarRocksParser.GrantImpersonateContext context) { UserIdentity securedUser = ((UserIdentifier) visit(context.user(0))).getUserIdentity(); UserIdentity authorizedUser = ((UserIdentifier) visit(context.user(1))).getUserIdentity(); return new GrantImpersonateStmt(authorizedUser, securedUser); } @Override public ParseNode visitRevokeImpersonate(StarRocksParser.RevokeImpersonateContext context) { UserIdentity securedUser = ((UserIdentifier) visit(context.user(0))).getUserIdentity(); UserIdentity authorizedUser = ((UserIdentifier) visit(context.user(1))).getUserIdentity(); return new RevokeImpersonateStmt(authorizedUser, securedUser); } @Override public ParseNode visitExecuteAs(StarRocksParser.ExecuteAsContext context) { UserIdentity toUser = ((UserIdentifier) visit(context.user())).getUserIdentity(); boolean allowRevert = context.WITH() == null; return new ExecuteAsStmt(toUser, allowRevert); } @Override public ParseNode visitShowVariablesStatement(StarRocksParser.ShowVariablesStatementContext context) { String pattern = null; if (context.pattern != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.pattern); pattern = stringLiteral.getValue(); } Expr where = null; if (context.expression() != null) { where = (Expr) visit(context.expression()); } return new ShowVariablesStmt(getVariableType(context.varType()), pattern, where); } @Override public ParseNode visitExpressionOrDefault(StarRocksParser.ExpressionOrDefaultContext context) { if (context.DEFAULT() != null) { return new DefaultValueExpr(); } else { return visit(context.expression()); } } @Override public ParseNode visitExpressionsWithDefault(StarRocksParser.ExpressionsWithDefaultContext context) { ArrayList<Expr> row = Lists.newArrayList(); for (int i = 0; i < context.expressionOrDefault().size(); ++i) { row.add((Expr) visit(context.expressionOrDefault(i))); } return new ValueList(row); } @Override public ParseNode visitLogicalNot(StarRocksParser.LogicalNotContext context) { return new CompoundPredicate(CompoundPredicate.Operator.NOT, (Expr) visit(context.expression()), null); } @Override public ParseNode visitLogicalBinary(StarRocksParser.LogicalBinaryContext context) { Expr left = (Expr) visit(context.left); Expr right = (Expr) visit(context.right); if (context.operator.getType() == StarRocksLexer.LOGICAL_OR) { return new CompoundPredicate(CompoundPredicate.Operator.OR, left, right); } else { return new CompoundPredicate(getLogicalBinaryOperator(context.operator), left, right); } } private static CompoundPredicate.Operator getLogicalBinaryOperator(Token token) { switch (token.getType()) { case StarRocksLexer.AND: return CompoundPredicate.Operator.AND; case StarRocksLexer.OR: return CompoundPredicate.Operator.OR; } throw new IllegalArgumentException("Unsupported operator: " + token.getText()); } @Override public ParseNode visitPredicate(StarRocksParser.PredicateContext context) { if (context.predicateOperations() != null) { return visit(context.predicateOperations()); } else { return visit(context.valueExpression()); } } @Override public ParseNode visitIsNull(StarRocksParser.IsNullContext context) { Expr child = (Expr) visit(context.booleanExpression()); if (context.NOT() == null) { return new IsNullPredicate(child, false); } else { return new IsNullPredicate(child, true); } } @Override public ParseNode 
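// Comparison predicates (=, <=> null-safe equal, !=, <, <=, >, >=) are mapped to BinaryPredicate operators below.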
visitComparison(StarRocksParser.ComparisonContext context) { BinaryPredicate.Operator op = getComparisonOperator(((TerminalNode) context.comparisonOperator().getChild(0)) .getSymbol()); return new BinaryPredicate(op, (Expr) visit(context.left), (Expr) visit(context.right)); } private static BinaryPredicate.Operator getComparisonOperator(Token symbol) { switch (symbol.getType()) { case StarRocksParser.EQ: return BinaryPredicate.Operator.EQ; case StarRocksParser.NEQ: return BinaryPredicate.Operator.NE; case StarRocksParser.LT: return BinaryPredicate.Operator.LT; case StarRocksParser.LTE: return BinaryPredicate.Operator.LE; case StarRocksParser.GT: return BinaryPredicate.Operator.GT; case StarRocksParser.GTE: return BinaryPredicate.Operator.GE; case StarRocksParser.EQ_FOR_NULL: return BinaryPredicate.Operator.EQ_FOR_NULL; } throw new IllegalArgumentException("Unsupported operator: " + symbol.getText()); } @Override public ParseNode visitInList(StarRocksParser.InListContext context) { boolean isNotIn = context.NOT() != null; return new InPredicate( (Expr) visit(context.value), visit(context.expression(), Expr.class), isNotIn); } @Override public ParseNode visitBetween(StarRocksParser.BetweenContext context) { boolean isNotBetween = context.NOT() != null; return new BetweenPredicate( (Expr) visit(context.value), (Expr) visit(context.lower), (Expr) visit(context.upper), isNotBetween); } @Override public ParseNode visitLike(StarRocksParser.LikeContext context) { LikePredicate likePredicate; if (context.REGEXP() != null || context.RLIKE() != null) { likePredicate = new LikePredicate(LikePredicate.Operator.REGEXP, (Expr) visit(context.value), (Expr) visit(context.pattern)); } else { likePredicate = new LikePredicate( LikePredicate.Operator.LIKE, (Expr) visit(context.value), (Expr) visit(context.pattern)); } if (context.NOT() != null) { return new CompoundPredicate(CompoundPredicate.Operator.NOT, likePredicate, null); } else { return likePredicate; } } @Override public ParseNode visitSimpleCase(StarRocksParser.SimpleCaseContext context) { return new CaseExpr( (Expr) visit(context.caseExpr), visit(context.whenClause(), CaseWhenClause.class), (Expr) visitIfPresent(context.elseExpression)); } @Override public ParseNode visitSearchedCase(StarRocksParser.SearchedCaseContext context) { return new CaseExpr( null, visit(context.whenClause(), CaseWhenClause.class), (Expr) visitIfPresent(context.elseExpression)); } @Override public ParseNode visitWhenClause(StarRocksParser.WhenClauseContext context) { return new CaseWhenClause((Expr) visit(context.condition), (Expr) visit(context.result)); } @Override public ParseNode visitArithmeticUnary(StarRocksParser.ArithmeticUnaryContext context) { Expr child = (Expr) visit(context.primaryExpression()); switch (context.operator.getType()) { case StarRocksLexer.MINUS_SYMBOL: if (child.isLiteral() && child.getType().isNumericType()) { try { ((LiteralExpr) child).swapSign(); } catch (NotImplementedException e) { throw new ParsingException(e.getMessage()); } return child; } else { return new ArithmeticExpr(ArithmeticExpr.Operator.MULTIPLY, new IntLiteral(-1), child); } case StarRocksLexer.PLUS_SYMBOL: return child; case StarRocksLexer.BITNOT: return new ArithmeticExpr(ArithmeticExpr.Operator.BITNOT, child, null); case StarRocksLexer.LOGICAL_NOT: return new CompoundPredicate(CompoundPredicate.Operator.NOT, child, null); default: throw new UnsupportedOperationException("Unsupported sign: " + context.operator.getText()); } } @Override public ParseNode 
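// When either operand of +/- is an INTERVAL literal, the expression is rewritten to a
// TimestampArithmeticExpr, e.g. dt + INTERVAL 1 DAY (illustrative example).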
visitArithmeticBinary(StarRocksParser.ArithmeticBinaryContext context) { Expr left = (Expr) visit(context.left); Expr right = (Expr) visit(context.right); if (left instanceof IntervalLiteral) { return new TimestampArithmeticExpr(getArithmeticBinaryOperator(context.operator), right, ((IntervalLiteral) left).getValue(), ((IntervalLiteral) left).getUnitIdentifier().getDescription(), true); } if (right instanceof IntervalLiteral) { return new TimestampArithmeticExpr(getArithmeticBinaryOperator(context.operator), left, ((IntervalLiteral) right).getValue(), ((IntervalLiteral) right).getUnitIdentifier().getDescription(), false); } return new ArithmeticExpr(getArithmeticBinaryOperator(context.operator), left, right); } private static ArithmeticExpr.Operator getArithmeticBinaryOperator(Token operator) { switch (operator.getType()) { case StarRocksLexer.PLUS_SYMBOL: return ArithmeticExpr.Operator.ADD; case StarRocksLexer.MINUS_SYMBOL: return ArithmeticExpr.Operator.SUBTRACT; case StarRocksLexer.ASTERISK_SYMBOL: return ArithmeticExpr.Operator.MULTIPLY; case StarRocksLexer.SLASH_SYMBOL: return ArithmeticExpr.Operator.DIVIDE; case StarRocksLexer.PERCENT_SYMBOL: return ArithmeticExpr.Operator.MOD; case StarRocksLexer.INT_DIV: return ArithmeticExpr.Operator.INT_DIVIDE; case StarRocksLexer.BITAND: return ArithmeticExpr.Operator.BITAND; case StarRocksLexer.BITOR: return ArithmeticExpr.Operator.BITOR; case StarRocksLexer.BITXOR: return ArithmeticExpr.Operator.BITXOR; } throw new UnsupportedOperationException("Unsupported operator: " + operator.getText()); } @Override public ParseNode visitOdbcFunctionCallExpression(StarRocksParser.OdbcFunctionCallExpressionContext context) { FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.functionCall()); OdbcScalarFunctionCall odbcScalarFunctionCall = new OdbcScalarFunctionCall(functionCallExpr); return odbcScalarFunctionCall.mappingFunction(); } private static final List<String> DATE_FUNCTIONS = Lists.newArrayList("DATE_ADD", "ADDDATE", "DAYS_ADD", "DATE_SUB", "SUBDATE", "DAYS_SUB", "DATE_FLOOR"); @Override public ParseNode visitSimpleFunctionCall(StarRocksParser.SimpleFunctionCallContext context) { String functionName = getQualifiedName(context.qualifiedName()).toString(); if (DATE_FUNCTIONS.contains(functionName.toUpperCase())) { if (context.expression().size() != 2) { throw new ParsingException( functionName + " must use the format " + functionName + "(date, INTERVAL expr unit)"); } Expr e1 = (Expr) visit(context.expression(0)); Expr e2 = (Expr) visit(context.expression(1)); if (!(e2 instanceof IntervalLiteral)) { e2 = new IntervalLiteral(e2, new UnitIdentifier("DAY")); } IntervalLiteral intervalLiteral = (IntervalLiteral) e2; return new TimestampArithmeticExpr(functionName, e1, intervalLiteral.getValue(), intervalLiteral.getUnitIdentifier().getDescription()); } if (functionName.equalsIgnoreCase("isnull")) { List<Expr> params = visit(context.expression(), Expr.class); if (params.size() != 1) { throw new SemanticException("No matching function with signature: %s(%s).", functionName, Joiner.on(", ").join(params.stream().map(p -> p.getType().toSql()).collect(toList()))); } return new IsNullPredicate(params.get(0), false); } FunctionCallExpr functionCallExpr = new FunctionCallExpr(getQualifiedName(context.qualifiedName()).toString(), new FunctionParams(false, visit(context.expression(), Expr.class))); if (context.over() != null) { return buildOverClause(functionCallExpr, context.over()); } return functionCallExpr; } @Override public ParseNode 
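// COUNT/AVG/SUM/MIN/MAX: COUNT(*) uses a star FunctionParams, and DISTINCT is recorded on the params.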
visitAggregationFunctionCall(StarRocksParser.AggregationFunctionCallContext context) { String functionName; if (context.aggregationFunction().COUNT() != null) { functionName = "count"; } else if (context.aggregationFunction().AVG() != null) { functionName = "avg"; } else if (context.aggregationFunction().SUM() != null) { functionName = "sum"; } else if (context.aggregationFunction().MIN() != null) { functionName = "min"; } else if (context.aggregationFunction().MAX() != null) { functionName = "max"; } else { throw new StarRocksPlannerException("Aggregate functions are not being parsed correctly", ErrorType.INTERNAL_ERROR); } FunctionCallExpr functionCallExpr = new FunctionCallExpr(functionName, context.aggregationFunction().ASTERISK_SYMBOL() == null ? new FunctionParams(context.aggregationFunction().DISTINCT() != null, visit(context.aggregationFunction().expression(), Expr.class)) : FunctionParams.createStarParam()); if (context.over() != null) { return buildOverClause(functionCallExpr, context.over()); } return functionCallExpr; } @Override public ParseNode visitWindowFunctionCall(StarRocksParser.WindowFunctionCallContext context) { FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.windowFunction()); return buildOverClause(functionCallExpr, context.over()); } public static final ImmutableSet<String> WindowFunctionSet = ImmutableSet.of( "row_number", "rank", "dense_rank", "ntile", "lead", "lag", "first_value", "last_value"); @Override public ParseNode visitWindowFunction(StarRocksParser.WindowFunctionContext context) { if (WindowFunctionSet.contains(context.name.getText().toLowerCase())) { return new FunctionCallExpr(context.name.getText().toLowerCase(), new FunctionParams(false, visit(context.expression(), Expr.class))); } throw new ParsingException("Unknown window function " + context.name.getText()); } private AnalyticExpr buildOverClause(FunctionCallExpr functionCallExpr, StarRocksParser.OverContext context) { functionCallExpr.setIsAnalyticFnCall(true); List<OrderByElement> orderByElements = new ArrayList<>(); if (context.ORDER() != null) { orderByElements = visit(context.sortItem(), OrderByElement.class); } List<Expr> partitionExprs = visit(context.partition, Expr.class); return new AnalyticExpr(functionCallExpr, partitionExprs, orderByElements, (AnalyticWindow) visitIfPresent(context.windowFrame())); } @Override public ParseNode visitExtract(StarRocksParser.ExtractContext context) { String fieldString = context.identifier().getText(); return new FunctionCallExpr(fieldString, new FunctionParams(Lists.newArrayList((Expr) visit(context.valueExpression())))); } @Override public ParseNode visitCast(StarRocksParser.CastContext context) { return new CastExpr(new TypeDef(getType(context.type())), (Expr) visit(context.expression())); } @Override public ParseNode visitInformationFunctionExpression(StarRocksParser.InformationFunctionExpressionContext context) { if (context.name.getText().equalsIgnoreCase("database") || context.name.getText().equalsIgnoreCase("schema") || context.name.getText().equalsIgnoreCase("user") || context.name.getText().equalsIgnoreCase("current_user") || context.name.getText().equalsIgnoreCase("connection_id")) { return new InformationFunction(context.name.getText().toUpperCase()); } throw new ParsingException("Unknown special function " + context.name.getText()); } @Override public ParseNode visitSpecialFunctionExpression(StarRocksParser.SpecialFunctionExpressionContext context) { if (context.CHAR() != null) { return new FunctionCallExpr("char", 
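// Keyword-named builtins (CHAR, DAY, HOUR, IF, LEFT, MOD, ...) are lowered to ordinary function calls by name.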
visit(context.expression(), Expr.class)); } else if (context.CURRENT_TIMESTAMP() != null) { return new FunctionCallExpr("current_timestamp", Lists.newArrayList()); } else if (context.DAY() != null) { return new FunctionCallExpr("day", visit(context.expression(), Expr.class)); } else if (context.HOUR() != null) { return new FunctionCallExpr("hour", visit(context.expression(), Expr.class)); } else if (context.IF() != null) { return new FunctionCallExpr("if", visit(context.expression(), Expr.class)); } else if (context.LEFT() != null) { return new FunctionCallExpr("left", visit(context.expression(), Expr.class)); } else if (context.LIKE() != null) { return new FunctionCallExpr("like", visit(context.expression(), Expr.class)); } else if (context.MINUTE() != null) { return new FunctionCallExpr("minute", visit(context.expression(), Expr.class)); } else if (context.MOD() != null) { return new FunctionCallExpr("mod", visit(context.expression(), Expr.class)); } else if (context.MONTH() != null) { return new FunctionCallExpr("month", visit(context.expression(), Expr.class)); } else if (context.QUARTER() != null) { return new FunctionCallExpr("quarter", visit(context.expression(), Expr.class)); } else if (context.REGEXP() != null) { return new FunctionCallExpr("regexp", visit(context.expression(), Expr.class)); } else if (context.REPLACE() != null) { return new FunctionCallExpr("replace", visit(context.expression(), Expr.class)); } else if (context.RIGHT() != null) { return new FunctionCallExpr("right", visit(context.expression(), Expr.class)); } else if (context.RLIKE() != null) { return new FunctionCallExpr("regexp", visit(context.expression(), Expr.class)); } else if (context.SECOND() != null) { return new FunctionCallExpr("second", visit(context.expression(), Expr.class)); } else if (context.YEAR() != null) { return new FunctionCallExpr("year", visit(context.expression(), Expr.class)); } else if (context.PASSWORD() != null) { return new StringLiteral(new String(MysqlPassword.makeScrambledPassword(context.string().getText()))); } if (context.TIMESTAMPADD() != null || context.TIMESTAMPDIFF() != null) { String functionName = context.TIMESTAMPADD() != null ? 
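// TIMESTAMPADD(unit, n, dt) and TIMESTAMPDIFF(unit, dt1, dt2) are both rewritten to TimestampArithmeticExpr.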
"TIMESTAMPADD" : "TIMESTAMPDIFF"; UnitIdentifier e1 = (UnitIdentifier) visit(context.unitIdentifier()); Expr e2 = (Expr) visit(context.expression(0)); Expr e3 = (Expr) visit(context.expression(1)); return new TimestampArithmeticExpr(functionName, e3, e2, e1.getDescription()); } throw new ParsingException("No matching function with signature: %s(%s).", context.getText(), visit(context.expression(), Expr.class)); } @Override public ParseNode visitConcat(StarRocksParser.ConcatContext context) { Expr left = (Expr) visit(context.left); Expr right = (Expr) visit(context.right); return new FunctionCallExpr("concat", new FunctionParams(Lists.newArrayList(left, right))); } @Override public ParseNode visitNullLiteral(StarRocksParser.NullLiteralContext context) { return new NullLiteral(); } @Override public ParseNode visitBooleanLiteral(StarRocksParser.BooleanLiteralContext context) { try { return new BoolLiteral(context.getText()); } catch (AnalysisException e) { throw new ParsingException("Invalid boolean literal: " + context.getText()); } } @Override public ParseNode visitNumericLiteral(StarRocksParser.NumericLiteralContext context) { return visit(context.number()); } private static final BigInteger LONG_MAX = new BigInteger("9223372036854775807"); private static final BigInteger LARGEINT_MAX_ABS = new BigInteger("170141183460469231731687303715884105728"); @Override public ParseNode visitIntegerValue(StarRocksParser.IntegerValueContext context) { try { BigInteger intLiteral = new BigInteger(context.getText()); if (intLiteral.compareTo(LONG_MAX) <= 0) { return new IntLiteral(intLiteral.longValue()); } else if (intLiteral.compareTo(LARGEINT_MAX_ABS) <= 0) { return new LargeIntLiteral(intLiteral.toString()); } else { throw new ParsingException("Numeric overflow " + intLiteral); } } catch (NumberFormatException | AnalysisException e) { throw new ParsingException("Invalid numeric literal: " + context.getText()); } } @Override public ParseNode visitDoubleValue(StarRocksParser.DoubleValueContext context) { try { BigDecimal decimal = new BigDecimal(context.getText()); int precision = DecimalLiteral.getRealPrecision(decimal); int scale = DecimalLiteral.getRealScale(decimal); int integerPartWidth = precision - scale; if (integerPartWidth > 38) { return new FloatLiteral(context.getText()); } return new DecimalLiteral(decimal); } catch (AnalysisException | NumberFormatException e) { throw new ParsingException(e.getMessage()); } } @Override public ParseNode visitDecimalValue(StarRocksParser.DecimalValueContext context) { try { return new DecimalLiteral(context.getText()); } catch (AnalysisException e) { throw new ParsingException(e.getMessage()); } } @Override public ParseNode visitString(StarRocksParser.StringContext context) { String quotedString; if (context.SINGLE_QUOTED_TEXT() != null) { quotedString = context.SINGLE_QUOTED_TEXT().getText(); return new StringLiteral(escapeBackSlash(quotedString.substring(1, quotedString.length() - 1))); } else { quotedString = context.DOUBLE_QUOTED_TEXT().getText(); return new StringLiteral(escapeBackSlash(quotedString.substring(1, quotedString.length() - 1)) .replace("\"\"", "\"")); } } private static String escapeBackSlash(String str) { StringWriter writer = new StringWriter(); int strLen = str.length(); for (int i = 0; i < strLen; ++i) { char c = str.charAt(i); if (c == '\\' && (i + 1) < strLen) { switch (str.charAt(i + 1)) { case 'n': writer.append('\n'); break; case 't': writer.append('\t'); break; case 'r': writer.append('\r'); break; case 'b': writer.append('\b'); 
break; case '0': writer.append('\0'); break; case 'Z': writer.append('\032'); break; case '_': case '%': writer.append('\\'); /* Fall through */ default: writer.append(str.charAt(i + 1)); break; } i++; } else { writer.append(c); } } return writer.toString(); } @Override public ParseNode visitArrayConstructor(StarRocksParser.ArrayConstructorContext context) { if (context.arrayType() != null) { return new ArrayExpr( new ArrayType(getType(context.arrayType().type())), visit(context.expression(), Expr.class)); } return new ArrayExpr(null, visit(context.expression(), Expr.class)); } @Override public ParseNode visitArraySubscript(StarRocksParser.ArraySubscriptContext context) { Expr value = (Expr) visit(context.value); Expr index = (Expr) visit(context.index); return new ArrayElementExpr(value, index); } @Override public ParseNode visitArraySlice(StarRocksParser.ArraySliceContext context) { throw new ParsingException("Array slice is not currently supported"); /* Expr expr = (Expr) visit(context.primaryExpression()); IntLiteral lowerBound; if (context.start != null) { lowerBound = new IntLiteral(Long.parseLong(context.start.getText())); } else { lowerBound = new IntLiteral(0); } IntLiteral upperBound; if (context.end != null) { upperBound = new IntLiteral(Long.parseLong(context.end.getText())); } else { upperBound = new IntLiteral(-1); } return new ArraySliceExpr(expr, lowerBound, upperBound); */ } @Override public ParseNode visitInterval(StarRocksParser.IntervalContext context) { return new IntervalLiteral((Expr) visit(context.value), (UnitIdentifier) visit(context.from)); } @Override public ParseNode visitUnitIdentifier(StarRocksParser.UnitIdentifierContext context) { return new UnitIdentifier(context.getText()); } @Override public ParseNode visitTypeConstructor(StarRocksParser.TypeConstructorContext context) { String value = ((StringLiteral) visit(context.string())).getValue(); try { if (context.DATE() != null) { return new DateLiteral(value, Type.DATE); } if (context.DATETIME() != null) { return new DateLiteral(value, Type.DATETIME); } } catch (AnalysisException e) { throw new ParsingException(e.getMessage()); } throw new ParsingException("Parse Error : unknown type " + context.getText()); } @Override public ParseNode visitColumnReference(StarRocksParser.ColumnReferenceContext context) { if (context.identifier() != null) { Identifier identifier = (Identifier) visit(context.identifier()); return new SlotRef(null, identifier.getValue(), identifier.getValue()); } else { QualifiedName qualifiedName = getQualifiedName(context.qualifiedName()); if (qualifiedName.getParts().size() == 3) { return new SlotRef(new TableName(qualifiedName.getParts().get(0), qualifiedName.getParts().get(1)), qualifiedName.getParts().get(2), qualifiedName.getParts().get(2)); } else if (qualifiedName.getParts().size() == 2) { return new SlotRef(new TableName(null, qualifiedName.getParts().get(0)), qualifiedName.getParts().get(1), qualifiedName.getParts().get(1)); } else { throw new ParsingException("Unqualified column reference " + qualifiedName); } } } @Override public ParseNode visitArrowExpression(StarRocksParser.ArrowExpressionContext context) { Expr expr = (Expr) visit(context.primaryExpression()); StringLiteral stringLiteral = (StringLiteral) visit(context.string()); return new ArrowExpr(expr, stringLiteral); } @Override public ParseNode visitVariable(StarRocksParser.VariableContext context) { SetType setType = SetType.DEFAULT; if (context.GLOBAL() != null) { setType = SetType.GLOBAL; } else if (context.LOCAL() != 
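// Variable scope mapping: @@GLOBAL.x -> GLOBAL; @@SESSION.x and @@LOCAL.x -> SESSION; bare @@x -> DEFAULT.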
null || context.SESSION() != null) { setType = SetType.SESSION; } return new SysVariableDesc(((Identifier) visit(context.identifier())).getValue(), setType); } @Override public ParseNode visitCollate(StarRocksParser.CollateContext context) { return visit(context.primaryExpression()); } @Override public ParseNode visitParenthesizedExpression(StarRocksParser.ParenthesizedExpressionContext context) { return visit(context.expression()); } @Override public ParseNode visitUnquotedIdentifier(StarRocksParser.UnquotedIdentifierContext context) { return new Identifier(context.getText()); } @Override public ParseNode visitBackQuotedIdentifier(StarRocksParser.BackQuotedIdentifierContext context) { return new Identifier(context.getText().replace("`", "")); } @Override public ParseNode visitDigitIdentifier(StarRocksParser.DigitIdentifierContext context) { return new Identifier(context.getText()); } private static StatementBase.ExplainLevel getExplainType(StarRocksParser.ExplainDescContext context) { StatementBase.ExplainLevel explainLevel = StatementBase.ExplainLevel.NORMAL; if (context.LOGICAL() != null) { explainLevel = StatementBase.ExplainLevel.LOGICAL; } else if (context.VERBOSE() != null) { explainLevel = StatementBase.ExplainLevel.VERBOSE; } else if (context.COSTS() != null) { explainLevel = StatementBase.ExplainLevel.COST; } return explainLevel; } public static SetType getVariableType(StarRocksParser.VarTypeContext context) { if (context == null) { return SetType.DEFAULT; } if (context.GLOBAL() != null) { return SetType.GLOBAL; } else if (context.LOCAL() != null || context.SESSION() != null) { return SetType.SESSION; } else { return SetType.DEFAULT; } } @Override public ParseNode visitAssignment(StarRocksParser.AssignmentContext context) { String column = ((Identifier) visit(context.identifier())).getValue(); Expr expr = (Expr) visit(context.expressionOrDefault()); return new ColumnAssignment(column, expr); } @Override public ParseNode visitPartitionDesc(StarRocksParser.PartitionDescContext context) { List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class); List<PartitionDesc> partitionDesc = visit(context.rangePartitionDesc(), PartitionDesc.class); return new RangePartitionDesc( identifierList.stream().map(Identifier::getValue).collect(toList()), partitionDesc); } @Override public ParseNode visitSingleRangePartition(StarRocksParser.SingleRangePartitionContext context) { PartitionKeyDesc partitionKeyDesc = (PartitionKeyDesc) visit(context.partitionKeyDesc()); return new SingleRangePartitionDesc(false, ((Identifier) visit(context.identifier())).getValue(), partitionKeyDesc, null); } @Override public ParseNode visitMultiRangePartition(StarRocksParser.MultiRangePartitionContext context) { if (context.interval() != null) { IntervalLiteral intervalLiteral = (IntervalLiteral) visit(context.interval()); Expr expr = intervalLiteral.getValue(); long intervalVal; if (expr instanceof IntLiteral) { intervalVal = ((IntLiteral) expr).getLongValue(); } else { throw new IllegalArgumentException("Unsupported interval expr: " + expr); } return new MultiRangePartitionDesc( ((StringLiteral) visit(context.string(0))).getStringValue(), ((StringLiteral) visit(context.string(1))).getStringValue(), intervalVal, intervalLiteral.getUnitIdentifier().getDescription()); } else { return new MultiRangePartitionDesc( ((StringLiteral) visit(context.string(0))).getStringValue(), ((StringLiteral) visit(context.string(1))).getStringValue(), Long.parseLong(context.INTEGER_VALUE().getText())); 
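// The branch above handles EVERY (n) with a plain integer step instead of an INTERVAL step.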
} } @Override public ParseNode visitPartitionKeyDesc(StarRocksParser.PartitionKeyDescContext context) { PartitionKeyDesc partitionKeyDesc; if (context.LESS() != null) { List<PartitionValue> partitionValueList = visit(context.partitionValueList().get(0).partitionValue(), PartitionValue.class); partitionKeyDesc = new PartitionKeyDesc(partitionValueList); } else { List<PartitionValue> lowerPartitionValueList = visit(context.partitionValueList().get(0).partitionValue(), PartitionValue.class); List<PartitionValue> upperPartitionValueList = visit(context.partitionValueList().get(1).partitionValue(), PartitionValue.class); partitionKeyDesc = new PartitionKeyDesc(lowerPartitionValueList, upperPartitionValueList); } return partitionKeyDesc; } @Override public ParseNode visitPartitionValue(StarRocksParser.PartitionValueContext context) { if (context.MAXVALUE() != null) { return PartitionValue.MAX_VALUE; } else { return new PartitionValue(((StringLiteral) visit(context.string())).getStringValue()); } } @Override public ParseNode visitDistributionDesc(StarRocksParser.DistributionDescContext context) { int buckets = 10; if (context.INTEGER_VALUE() != null) { buckets = Integer.parseInt(context.INTEGER_VALUE().getText()); } List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class); return new HashDistributionDesc(buckets, identifierList.stream().map(Identifier::getValue).collect(toList())); } @Override public ParseNode visitRefreshSchemeDesc(StarRocksParser.RefreshSchemeDescContext context) { LocalDateTime startTime = LocalDateTime.now(); IntervalLiteral intervalLiteral = null; if (context.ASYNC() != null) { if (context.START() != null) { StringLiteral stringLiteral = (StringLiteral) visit(context.string()); DateTimeFormatter dateTimeFormatter = null; try { dateTimeFormatter = DateUtils.probeFormat(stringLiteral.getStringValue()); LocalDateTime tempStartTime = DateUtils. 
parseStringWithDefaultHSM(stringLiteral.getStringValue(), dateTimeFormatter); if (tempStartTime.isBefore(LocalDateTime.now())) { throw new IllegalArgumentException("Refresh start must be after current time"); } startTime = tempStartTime; } catch (AnalysisException e) { throw new IllegalArgumentException( "Refresh start " + stringLiteral.getStringValue() + " is incorrect"); } } intervalLiteral = (IntervalLiteral) visit(context.interval()); if (!(intervalLiteral.getValue() instanceof IntLiteral)) { throw new IllegalArgumentException( "Refresh every " + intervalLiteral.getValue() + " must be IntLiteral"); } return new AsyncRefreshSchemeDesc(startTime, intervalLiteral); } else if (context.SYNC() != null) { return new SyncRefreshSchemeDesc(); } else if (context.MANUAL() != null) { return new ManualRefreshSchemeDesc(); } return null; } @Override public ParseNode visitProperty(StarRocksParser.PropertyContext context) { return new Property( ((StringLiteral) visit(context.key)).getStringValue(), ((StringLiteral) visit(context.value)).getStringValue()); } @Override public ParseNode visitOutfile(StarRocksParser.OutfileContext context) { Map<String, String> properties = new HashMap<>(); if (context.properties() != null) { List<Property> propertyList = visit(context.properties().property(), Property.class); for (Property property : propertyList) { properties.put(property.getKey(), property.getValue()); } } String format = null; if (context.fileFormat() != null) { if (context.fileFormat().identifier() != null) { format = ((Identifier) visit(context.fileFormat().identifier())).getValue(); } else if (context.fileFormat().string() != null) { format = ((StringLiteral) visit(context.fileFormat().string())).getStringValue(); } } return new OutFileClause( ((StringLiteral) visit(context.file)).getStringValue(), format, properties); } @Override public ParseNode visitColumnNameWithComment(StarRocksParser.ColumnNameWithCommentContext context) { String comment = null; if (context.comment() != null) { comment = ((StringLiteral) visit(context.comment())).getStringValue(); } return new ColWithComment(((Identifier) visit(context.identifier())).getValue(), comment); } @Override public ParseNode visitIdentifierOrString(StarRocksParser.IdentifierOrStringContext context) { String s = null; if (context.identifier() != null) { s = ((Identifier) visit(context.identifier())).getValue(); } else if (context.string() != null) { s = ((StringLiteral) visit(context.string())).getStringValue(); } return new Identifier(s); } @Override public ParseNode visitUserWithHostAndBlanket(StarRocksParser.UserWithHostAndBlanketContext context) { Identifier user = (Identifier) visit(context.identifierOrString(0)); Identifier host = (Identifier) visit(context.identifierOrString(1)); return new UserIdentifier(user.getValue(), host.getValue(), true); } @Override public ParseNode visitUserWithHost(StarRocksParser.UserWithHostContext context) { Identifier user = (Identifier) visit(context.identifierOrString(0)); Identifier host = (Identifier) visit(context.identifierOrString(1)); return new UserIdentifier(user.getValue(), host.getValue(), false); } @Override public ParseNode visitUserWithoutHost(StarRocksParser.UserWithoutHostContext context) { Identifier user = (Identifier) visit(context.identifierOrString()); return new UserIdentifier(user.getValue(), "%", false); } private <T> List<T> visit(List<? 
extends ParserRuleContext> contexts, Class<T> clazz) { return contexts.stream() .map(this::visit) .map(clazz::cast) .collect(toList()); } private <T> List<T> visitIfPresent(List<? extends ParserRuleContext> contexts, Class<T> clazz) { if (contexts != null && contexts.size() != 0) { return contexts.stream() .map(this::visit) .map(clazz::cast) .collect(toList()); } else { return null; } } private ParseNode visitIfPresent(ParserRuleContext context) { if (context != null) { return visit(context); } else { return null; } } private QualifiedName getQualifiedName(StarRocksParser.QualifiedNameContext context) { List<String> parts = visit(context.identifier(), Identifier.class).stream() .map(Identifier::getValue) .collect(Collectors.toList()); return QualifiedName.of(parts); } private TableName qualifiedNameToTableName(QualifiedName qualifiedName) { List<String> parts = qualifiedName.getParts(); if (parts.size() == 3) { return new TableName(parts.get(0), parts.get(1), parts.get(2)); } else if (parts.size() == 2) { return new TableName(null, qualifiedName.getParts().get(0), qualifiedName.getParts().get(1)); } else if (parts.size() == 1) { return new TableName(null, null, qualifiedName.getParts().get(0)); } else { throw new ParsingException("error table name "); } } private Type getType(StarRocksParser.TypeContext type) { Integer length = null; Integer precision = null; Integer scale = null; if (type.baseType() != null) { if (type.baseType().typeParameter() != null) { length = Integer.parseInt(type.baseType().typeParameter().INTEGER_VALUE().toString()); } return ScalarType.createTypeFromParser(type.baseType().getText(), length, precision, scale); } else if (type.decimalType() != null) { if (type.precision != null) { precision = Integer.parseInt(type.precision.getText()); scale = type.scale == null ? ScalarType.DEFAULT_SCALE : Integer.parseInt(type.scale.getText()); } return ScalarType.createTypeFromParser(type.decimalType().getText(), length, precision, scale); } else if (type.arrayType() != null) { StarRocksParser.ArrayTypeContext arrayTypeContext = type.arrayType(); return new ArrayType(getType(arrayTypeContext.type())); } throw new IllegalArgumentException("Unsupported type specification: " + type.getText()); } }
It does (if the argument is a deployment id, the config server will be used)
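A minimal sketch of the dispatch this comment describes, assuming the Controller, Flags, and TesterCloud types visible in the surrounding context; the helper name testerReadyViaPreferredPath is hypothetical, and the body mirrors testerContainersAreUp and useConfigServerForTesterAPI from this record:

    // Hypothetical helper illustrating the two readiness paths: when the zone-scoped
    // flag is set, the DeploymentId overload of testerReady() is called and the config
    // server answers the check; otherwise the tester's container endpoints are probed
    // directly for 100 consecutive 200 OK responses on /status.html.
    private boolean testerReadyViaPreferredPath(ApplicationId id, ZoneId zoneId, DualLogger logger) {
        BooleanFlag flag = Flags.USE_CONFIG_SERVER_FOR_TESTER_API_CALLS.bindTo(controller.flagSource());
        boolean viaConfigServer = flag.with(FetchVector.Dimension.ZONE_ID, zoneId.value()).value();
        if (viaConfigServer) // argument is a deployment id -> the config server is used
            return controller.jobController().cloud().testerReady(new DeploymentId(id, zoneId));
        return containersAreUp(id, zoneId, logger); // legacy path: per-endpoint checks
    }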
private boolean testerContainersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) { if (useConfigServerForTesterAPI(zoneId)) { DeploymentId deploymentId = new DeploymentId(id, zoneId); if (controller.jobController().cloud().testerReady(deploymentId)) { return true; } else { logger.log("Failed to get 100 consecutive OKs from tester container for " + deploymentId); return false; } } else { return containersAreUp(id, zoneId, logger); } }
if (controller.jobController().cloud().testerReady(deploymentId)) {
private boolean testerContainersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) { if (useConfigServerForTesterAPI(zoneId)) { DeploymentId deploymentId = new DeploymentId(id, zoneId); if (controller.jobController().cloud().testerReady(deploymentId)) { return true; } else { logger.log("Failed to get 100 consecutive OKs from tester container for " + deploymentId); return false; } } else { return containersAreUp(id, zoneId, logger); } }
class InternalStepRunner implements StepRunner { private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName()); private static final NodeResources DEFAULT_TESTER_RESOURCES = new NodeResources(1, 4, 50, 0.3, NodeResources.DiskSpeed.any); private static final NodeResources DEFAULT_TESTER_RESOURCES_AWS = new NodeResources(2, 8, 50, 0.3, NodeResources.DiskSpeed.any); static final Duration endpointTimeout = Duration.ofMinutes(15); static final Duration testerTimeout = Duration.ofMinutes(30); static final Duration installationTimeout = Duration.ofMinutes(60); static final Duration certificateTimeout = Duration.ofMinutes(300); private final Controller controller; private final TestConfigSerializer testConfigSerializer; private final DeploymentFailureMails mails; public InternalStepRunner(Controller controller) { this.controller = controller; this.testConfigSerializer = new TestConfigSerializer(controller.system()); this.mails = new DeploymentFailureMails(controller.zoneRegistry()); } @Override public Optional<RunStatus> run(LockedStep step, RunId id) { DualLogger logger = new DualLogger(id, step.get()); try { switch (step.get()) { case deployTester: return deployTester(id, logger); case deployInitialReal: return deployInitialReal(id, logger); case installInitialReal: return installInitialReal(id, logger); case deployReal: return deployReal(id, logger); case installTester: return installTester(id, logger); case installReal: return installReal(id, logger); case startStagingSetup: return startTests(id, true, logger); case endStagingSetup: return endTests(id, logger); case startTests: return startTests(id, false, logger); case endTests: return endTests(id, logger); case copyVespaLogs: return copyVespaLogs(id, logger); case deactivateReal: return deactivateReal(id, logger); case deactivateTester: return deactivateTester(id, logger); case report: return report(id, logger); default: throw new AssertionError("Unknown step '" + step + "'!"); } } catch (UncheckedIOException e) { logger.logWithInternalException(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e), e); return Optional.empty(); } catch (RuntimeException e) { logger.log(WARNING, "Unexpected exception running " + id, e); if (JobProfile.of(id.type()).alwaysRun().contains(step.get())) { logger.log("Will keep trying, as this is a cleanup step."); return Optional.empty(); } return Optional.of(error); } } private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) { Versions versions = controller.jobController().run(id).get().versions(); logger.log("Deploying platform version " + versions.sourcePlatform().orElse(versions.targetPlatform()) + " and application version " + versions.sourceApplication().orElse(versions.targetApplication()).id() + " ..."); return deployReal(id, true, versions, logger); } private Optional<RunStatus> deployReal(RunId id, DualLogger logger) { Versions versions = controller.jobController().run(id).get().versions(); logger.log("Deploying platform version " + versions.targetPlatform() + " and application version " + versions.targetApplication().id() + " ..."); return deployReal(id, false, versions, logger); } private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, Versions versions, DualLogger logger) { Optional<ApplicationPackage> applicationPackage = id.type().environment().isManuallyDeployed() ? 
Optional.of(new ApplicationPackage(controller.applications().applicationStore() .getDev(id.application(), id.type().zone(controller.system())))) : Optional.empty(); Optional<Version> vespaVersion = id.type().environment().isManuallyDeployed() ? Optional.of(versions.targetPlatform()) : Optional.empty(); return deploy(id.application(), id.type(), () -> controller.applications().deploy(id.application(), id.type().zone(controller.system()), applicationPackage, new DeployOptions(false, vespaVersion, false, setTheStage)), controller.jobController().run(id).get() .stepInfo(setTheStage ? deployInitialReal : deployReal).get() .startTime().get(), logger); } private Optional<RunStatus> deployTester(RunId id, DualLogger logger) { Version platform = controller.systemVersion(); logger.log("Deploying the tester container on platform " + platform + " ..."); return deploy(id.tester().id(), id.type(), () -> controller.applications().deployTester(id.tester(), testerPackage(id), id.type().zone(controller.system()), new DeployOptions(true, Optional.of(platform), false, false)), controller.jobController().run(id).get() .stepInfo(deployTester).get() .startTime().get(), logger); } private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, Instant startTime, DualLogger logger) { try { PrepareResponse prepareResponse = deployment.get().prepareResponse(); if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) { List<String> messages = new ArrayList<>(); messages.add("Deploy failed due to non-compatible changes that require re-feed."); messages.add("Your options are:"); messages.add("1. Revert the incompatible changes."); messages.add("2. If you think it is safe in your case, you can override this validation, see"); messages.add(" http: messages.add("3. Deploy as a new application under a different name."); messages.add("Illegal actions:"); prepareResponse.configChangeActions.refeedActions.stream() .filter(action -> ! action.allowed) .flatMap(action -> action.messages.stream()) .forEach(messages::add); messages.add("Details:"); prepareResponse.log.stream() .map(entry -> entry.message) .forEach(messages::add); logger.log(messages); return Optional.of(deploymentFailed); } if (prepareResponse.configChangeActions.restartActions.isEmpty()) logger.log("No services requiring restart."); else prepareResponse.configChangeActions.restartActions.stream() .flatMap(action -> action.services.stream()) .map(service -> service.hostName) .sorted().distinct() .map(Hostname::new) .forEach(hostname -> { controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname)); logger.log("Restarting services on host " + hostname.id() + "."); }); logger.log("Deployment successful."); if (prepareResponse.message != null) logger.log(prepareResponse.message); return Optional.of(running); } catch (ConfigServerException e) { Optional<RunStatus> result = startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? 
Optional.of(deploymentFailed) : Optional.empty(); switch (e.getErrorCode()) { case ACTIVATION_CONFLICT: case APPLICATION_LOCK_FAILURE: case CERTIFICATE_NOT_READY: logger.log("Deployment failed with possibly transient error " + e.getErrorCode() + ", will retry: " + e.getMessage()); return result; case LOAD_BALANCER_NOT_READY: case PARENT_HOST_NOT_READY: logger.log(e.getServerMessage()); return result; case OUT_OF_CAPACITY: logger.log(e.getServerMessage()); return Optional.of(outOfCapacity); case INVALID_APPLICATION_PACKAGE: case BAD_REQUEST: logger.log(e.getMessage()); return Optional.of(deploymentFailed); } throw e; } } private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) { return installReal(id, true, logger); } private Optional<RunStatus> installReal(RunId id, DualLogger logger) { return installReal(id, false, logger); } private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) { Optional<Deployment> deployment = deployment(id.application(), id.type()); if (deployment.isEmpty()) { logger.log(INFO, "Deployment expired before installation was successful."); return Optional.of(installationFailed); } Versions versions = controller.jobController().run(id).get().versions(); Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform(); Run run = controller.jobController().run(id).get(); Optional<ServiceConvergence> services = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(id.application(), id.type().zone(controller.system())), Optional.of(platform)); if (services.isEmpty()) { logger.log("Config status not currently available -- will retry."); Step step = setTheStage ? installInitialReal : installReal; return run.stepInfo(step).get().startTime().get().isBefore(controller.clock().instant().minus(Duration.ofMinutes(5))) ? Optional.of(error) : Optional.empty(); } List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(id.type().zone(controller.system()), id.application(), ImmutableSet.of(active, reserved)); List<Node> parents = controller.serviceRegistry().configServer().nodeRepository().list(id.type().zone(controller.system()), nodes.stream().map(node -> node.parentHostname().get()).collect(toList())); NodeList nodeList = NodeList.of(nodes, parents, services.get()); boolean firstTick = run.convergenceSummary().isEmpty(); if (firstTick) { logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); } ConvergenceSummary summary = nodeList.summary(); if (summary.converged()) { controller.jobController().locked(id, lockedRun -> lockedRun.withSummary(null)); if (endpointsAvailable(id.application(), id.type().zone(controller.system()), logger)) { if (containersAreUp(id.application(), id.type().zone(controller.system()), logger)) { logger.log("Installation succeeded!"); return Optional.of(running); } } else if (timedOut(id, deployment.get(), endpointTimeout)) { logger.log(WARNING, "Endpoints failed to show up within " + endpointTimeout.toMinutes() + " minutes!"); return Optional.of(error); } } boolean failed = false; NodeList suspendedTooLong = nodeList.suspendedSince(controller.clock().instant().minus(installationTimeout)); if ( ! 
suspendedTooLong.isEmpty()) { logger.log(INFO, "Some nodes have been suspended for more than " + installationTimeout.toMinutes() + " minutes."); failed = true; } if (run.noNodesDownSince() .map(since -> since.isBefore(controller.clock().instant().minus(installationTimeout))) .orElse(false)) { if (summary.needPlatformUpgrade() > 0 || summary.needReboot() > 0 || summary.needRestart() > 0) logger.log(INFO, "No nodes allowed to suspend to progress installation for " + installationTimeout.toMinutes() + " minutes."); else logger.log(INFO, "Nodes not able to start with new application package."); failed = true; } Duration timeout = JobRunner.jobTimeout.minusHours(1); if (timedOut(id, deployment.get(), timeout)) { logger.log(INFO, "Installation failed to complete within " + timeout.toHours() + " hours!"); failed = true; } if (failed) { logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); return Optional.of(installationFailed); } if ( ! firstTick) logger.log(nodeList.allowedDown().asList().stream() .flatMap(node -> nodeDetails(node, false)) .collect(toList())); controller.jobController().locked(id, lockedRun -> { Instant noNodesDownSince = summary.down() == 0 ? lockedRun.noNodesDownSince().orElse(controller.clock().instant()) : null; return lockedRun.noNodesDownSince(noNodesDownSince).withSummary(summary); }); return Optional.empty(); } private Optional<RunStatus> installTester(RunId id, DualLogger logger) { Run run = controller.jobController().run(id).get(); Version platform = controller.systemVersion(); ZoneId zone = id.type().zone(controller.system()); ApplicationId testerId = id.tester().id(); Optional<ServiceConvergence> services = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(testerId, zone), Optional.of(platform)); if (services.isEmpty()) { logger.log("Config status not currently available -- will retry."); return run.stepInfo(installTester).get().startTime().get().isBefore(controller.clock().instant().minus(Duration.ofMinutes(5))) ? Optional.of(error) : Optional.empty(); } List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(zone, testerId, ImmutableSet.of(active, reserved)); List<Node> parents = controller.serviceRegistry().configServer().nodeRepository().list(zone, nodes.stream().map(node -> node.parentHostname().get()).collect(toList())); NodeList nodeList = NodeList.of(nodes, parents, services.get()); logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, false)) .collect(toList())); if (nodeList.summary().converged()) { if (endpointsAvailable(testerId, zone, logger)) { if (testerContainersAreUp(testerId, zone, logger)) { logger.log("Tester container successfully installed!"); return Optional.of(running); } } else if (run.stepInfo(installTester).get().startTime().get().plus(endpointTimeout).isBefore(controller.clock().instant())) { logger.log(WARNING, "Tester failed to show up within " + endpointTimeout.toMinutes() + " minutes!"); return Optional.of(error); } } if (run.stepInfo(installTester).get().startTime().get().plus(testerTimeout).isBefore(controller.clock().instant())) { logger.log(WARNING, "Installation of tester failed to complete within " + testerTimeout.toMinutes() + " minutes!"); return Optional.of(error); } return Optional.empty(); } /** Returns true iff all containers in the deployment give 100 consecutive 200 OK responses on /status.html. 
*/ private boolean containersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) { var endpoints = controller.applications().clusterEndpoints(Set.of(new DeploymentId(id, zoneId))); if ( ! endpoints.containsKey(zoneId)) return false; for (URI endpoint : endpoints.get(zoneId).values()) { boolean ready = id.instance().isTester() ? controller.jobController().cloud().testerReady(endpoint) : controller.jobController().cloud().ready(endpoint); if (!ready) { logger.log("Failed to get 100 consecutive OKs from " + endpoint); return false; } } return true; } /** Returns true iff the deployment's endpoints exist and DNS lookups of them yield IP addresses. */ private boolean endpointsAvailable(ApplicationId id, ZoneId zone, DualLogger logger) { var endpoints = controller.applications().clusterEndpoints(Set.of(new DeploymentId(id, zone))); if ( ! endpoints.containsKey(zone)) { logger.log("Endpoints not yet ready."); return false; } for (var endpoint : endpoints.get(zone).values()) if ( ! controller.jobController().cloud().exists(endpoint)) { logger.log(INFO, "DNS lookup yielded no IP address for '" + endpoint + "'."); return false; } logEndpoints(endpoints, logger); return true; } private void logEndpoints(Map<ZoneId, Map<ClusterSpec.Id, URI>> endpoints, DualLogger logger) { List<String> messages = new ArrayList<>(); messages.add("Found endpoints:"); endpoints.forEach((zone, uris) -> { messages.add("- " + zone); uris.forEach((cluster, uri) -> messages.add(" |-- " + uri + " (" + cluster + ")")); }); logger.log(messages); } private Stream<String> nodeDetails(NodeWithServices node, boolean printAllServices) { return Stream.concat(Stream.of(node.node().hostname() + ": " + humanize(node.node().serviceState()), "--- platform " + node.node().wantedVersion() + (node.needsPlatformUpgrade() ? " <-- " + (node.node().currentVersion().isEmpty() ? "not booted" : node.node().currentVersion()) : "") + (node.needsOsUpgrade() && node.isAllowedDown() ? ", upgrading OS (" + node.node().wantedOsVersion() + " <-- " + node.node().currentOsVersion() + ")" : "") + (node.needsFirmwareUpgrade() && node.isAllowedDown() ? ", upgrading firmware" : "") + (node.needsRestart() ? ", restart pending (" + node.node().wantedRestartGeneration() + " <-- " + node.node().restartGeneration() + ")" : "") + (node.needsReboot() ? ", reboot pending (" + node.node().wantedRebootGeneration() + " <-- " + node.node().rebootGeneration() + ")" : "")), node.services().stream() .filter(service -> printAllServices || node.needsNewConfig()) .map(service -> "--- " + service.type() + " on port " + service.port() + (service.currentGeneration() == -1 ? 
" has not started " : " has config generation " + service.currentGeneration() + ", wanted is " + node.wantedConfigGeneration()))); } private String humanize(Node.ServiceState state) { switch (state) { case allowedDown: return "allowed to be DOWN"; case expectedUp: return "expected to be UP"; case unorchestrated: return "unorchestrated"; default: return state.name(); } } private Optional<RunStatus> startTests(RunId id, boolean isSetup, DualLogger logger) { Optional<Deployment> deployment = deployment(id.application(), id.type()); if (deployment.isEmpty()) { logger.log(INFO, "Deployment expired before tests could start."); return Optional.of(error); } var deployments = controller.applications().requireInstance(id.application()) .productionDeployments().keySet().stream() .map(zone -> new DeploymentId(id.application(), zone)) .collect(Collectors.toSet()); ZoneId zoneId = id.type().zone(controller.system()); deployments.add(new DeploymentId(id.application(), zoneId)); logger.log("Attempting to find endpoints ..."); var endpoints = controller.applications().clusterEndpoints(deployments); if ( ! endpoints.containsKey(zoneId)) { logger.log(WARNING, "Endpoints for the deployment to test vanished again, while it was still active!"); return Optional.of(error); } logEndpoints(endpoints, logger); Optional<URI> testerEndpoint = controller.jobController().testerEndpoint(id); if (useConfigServerForTesterAPI(zoneId)) { if ( ! controller.serviceRegistry().configServer().isTesterReady(getTesterDeploymentId(id))) { logger.log(WARNING, "Tester container went bad!"); return Optional.of(error); } } else { if (testerEndpoint.isEmpty()) { logger.log(WARNING, "Endpoints for the tester container vanished again, while it was still active!"); return Optional.of(error); } if ( ! 
controller.jobController().cloud().testerReady(testerEndpoint.get())) { logger.log(WARNING, "Tester container went bad!"); return Optional.of(error); } } logger.log("Starting tests ..."); TesterCloud.Suite suite = TesterCloud.Suite.of(id.type(), isSetup); byte[] config = testConfigSerializer.configJson(id.application(), id.type(), true, endpoints, controller.applications().contentClustersByZone(deployments)); if (useConfigServerForTesterAPI(zoneId)) { controller.serviceRegistry().configServer().startTests(getTesterDeploymentId(id), suite, config); } else { controller.jobController().cloud().startTests(testerEndpoint.get(), suite, config); } return Optional.of(running); } private boolean testerReady(RunId id, URI testerEndpoint) { if (useConfigServerForTesterAPI(id.type().zone(controller.system()))) { return controller.serviceRegistry().configServer().isTesterReady(getTesterDeploymentId(id)); } else { return controller.jobController().cloud().testerReady(testerEndpoint); } } private Optional<RunStatus> endTests(RunId id, DualLogger logger) { if (deployment(id.application(), id.type()).isEmpty()) { logger.log(INFO, "Deployment expired before tests could complete."); return Optional.of(aborted); } Optional<X509Certificate> testerCertificate = controller.jobController().run(id).get().testerCertificate(); if (testerCertificate.isPresent()) { try { testerCertificate.get().checkValidity(Date.from(controller.clock().instant())); } catch (CertificateExpiredException | CertificateNotYetValidException e) { logger.log(INFO, "Tester certificate expired before tests could complete."); return Optional.of(aborted); } } controller.jobController().updateTestLog(id); TesterCloud.Status testStatus; if (useConfigServerForTesterAPI(id.type().zone(controller.system()))) { testStatus = controller.serviceRegistry().configServer().getTesterStatus(getTesterDeploymentId(id)); } else { Optional<URI> testerEndpoint = controller.jobController().testerEndpoint(id); if (testerEndpoint.isEmpty()) { logger.log("Endpoints for tester not found -- trying again later."); return Optional.empty(); } testStatus = controller.jobController().cloud().getStatus(testerEndpoint.get()); } switch (testStatus) { case NOT_STARTED: throw new IllegalStateException("Tester reports tests not started, even though they should have!"); case RUNNING: return Optional.empty(); case FAILURE: logger.log("Tests failed."); return Optional.of(testFailure); case ERROR: logger.log(INFO, "Tester failed running its tests!"); return Optional.of(error); case SUCCESS: logger.log("Tests completed successfully."); return Optional.of(running); default: throw new IllegalStateException("Unknown status '" + testStatus + "'!"); } } private Optional<RunStatus> copyVespaLogs(RunId id, DualLogger logger) { if (deployment(id.application(), id.type()).isPresent()) try { controller.jobController().updateVespaLog(id); } catch (Exception e) { logger.log(INFO, "Failure getting vespa logs for " + id, e); return Optional.of(error); } return Optional.of(running); } private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) { try { logger.log("Deactivating deployment of " + id.application() + " in " + id.type().zone(controller.system()) + " ..."); controller.applications().deactivate(id.application(), id.type().zone(controller.system())); return Optional.of(running); } catch (RuntimeException e) { logger.log(WARNING, "Failed deleting application " + id.application(), e); Instant startTime = 
controller.jobController().run(id).get().stepInfo(deactivateReal).get().startTime().get(); return startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? Optional.of(error) : Optional.empty(); } } private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) { try { logger.log("Deactivating tester of " + id.application() + " in " + id.type().zone(controller.system()) + " ..."); controller.jobController().deactivateTester(id.tester(), id.type()); return Optional.of(running); } catch (RuntimeException e) { logger.log(WARNING, "Failed deleting tester of " + id.application(), e); Instant startTime = controller.jobController().run(id).get().stepInfo(deactivateTester).get().startTime().get(); return startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? Optional.of(error) : Optional.empty(); } } private Optional<RunStatus> report(RunId id, DualLogger logger) { try { controller.jobController().active(id).ifPresent(run -> { if (run.hasFailed()) sendNotification(run, logger); }); } catch (IllegalStateException e) { logger.log(INFO, "Job '" + id.type() + "' no longer supposed to run?", e); return Optional.of(error); } return Optional.of(running); } /** Sends a mail with a notification of a failed run, if one should be sent. */ private void sendNotification(Run run, DualLogger logger) { Application application = controller.applications().requireApplication(TenantAndApplicationId.from(run.id().application())); Notifications notifications = application.deploymentSpec().requireInstance(run.id().application().instance()).notifications(); boolean newCommit = application.require(run.id().application().instance()).change().application() .map(run.versions().targetApplication()::equals) .orElse(false); When when = newCommit ? failingCommit : failing; List<String> recipients = new ArrayList<>(notifications.emailAddressesFor(when)); if (notifications.emailRolesFor(when).contains(author)) run.versions().targetApplication().authorEmail().ifPresent(recipients::add); if (recipients.isEmpty()) return; try { if (run.status() == outOfCapacity && run.id().type().isProduction()) controller.serviceRegistry().mailer().send(mails.outOfCapacity(run.id(), recipients)); if (run.status() == deploymentFailed) controller.serviceRegistry().mailer().send(mails.deploymentFailure(run.id(), recipients)); if (run.status() == installationFailed) controller.serviceRegistry().mailer().send(mails.installationFailure(run.id(), recipients)); if (run.status() == testFailure) controller.serviceRegistry().mailer().send(mails.testFailure(run.id(), recipients)); if (run.status() == error) controller.serviceRegistry().mailer().send(mails.systemError(run.id(), recipients)); } catch (RuntimeException e) { logger.log(INFO, "Exception trying to send mail for " + run.id(), e); } } /** Returns the deployment of the real application in the zone of the given job, if it exists. */ private Optional<Deployment> deployment(ApplicationId id, JobType type) { return Optional.ofNullable(application(id).deployments().get(type.zone(controller.system()))); } /** Returns the real application with the given id. */ private Instance application(ApplicationId id) { controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), __ -> { }); return controller.applications().requireInstance(id); } /** * Returns whether the time since deployment is more than the zone deployment expiry, or the given timeout. 
* * We time out the job before the deployment expires, for zones where deployments are not persistent, * to be able to collect the Vespa log from the deployment. Thus, the lower of the zone's deployment expiry, * and the given default installation timeout, minus one minute, is used as a timeout threshold. */ private boolean timedOut(RunId id, Deployment deployment, Duration defaultTimeout) { Run run = controller.jobController().run(id).get(); if ( ! controller.system().isCd() && run.start().isAfter(deployment.at())) return false; Duration timeout = controller.zoneRegistry().getDeploymentTimeToLive(deployment.zone()) .filter(zoneTimeout -> zoneTimeout.compareTo(defaultTimeout) < 0) .orElse(defaultTimeout); return deployment.at().isBefore(controller.clock().instant().minus(timeout.minus(Duration.ofMinutes(1)))); } /** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */ private ApplicationPackage testerPackage(RunId id) { ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication(); DeploymentSpec spec = controller.applications().requireApplication(TenantAndApplicationId.from(id.application())).deploymentSpec(); ZoneId zone = id.type().zone(controller.system()); boolean useTesterCertificate = controller.system().isPublic() && id.type().environment().isTest(); byte[] servicesXml = servicesXml(controller.zoneRegistry().accessControlDomain(), ! controller.system().isPublic(), useTesterCertificate, testerFlavorFor(id, spec) .map(NodeResources::fromLegacyName) .orElse(zone.region().value().contains("aws-") ? DEFAULT_TESTER_RESOURCES_AWS : DEFAULT_TESTER_RESOURCES)); byte[] testPackage = controller.applications().applicationStore().getTester(id.application().tenant(), id.application().application(), version); byte[] deploymentXml = deploymentXml(id.tester(), spec.athenzDomain(), spec.requireInstance(id.application().instance()).athenzService(zone.environment(), zone.region())); try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) { zipBuilder.add(testPackage); zipBuilder.add("services.xml", servicesXml); zipBuilder.add("deployment.xml", deploymentXml); if (useTesterCertificate) appendAndStoreCertificate(zipBuilder, id); zipBuilder.close(); return new ApplicationPackage(zipBuilder.toByteArray()); } } private void appendAndStoreCertificate(ZipBuilder zipBuilder, RunId id) { KeyPair keyPair = KeyUtils.generateKeypair(KeyAlgorithm.RSA, 2048); X500Principal subject = new X500Principal("CN=" + id.tester().id().toFullString() + "." + id.type() + "." 
+ id.number()); X509Certificate certificate = X509CertificateBuilder.fromKeypair(keyPair, subject, controller.clock().instant(), controller.clock().instant().plus(certificateTimeout), SignatureAlgorithm.SHA512_WITH_RSA, BigInteger.valueOf(1)) .build(); controller.jobController().storeTesterCertificate(id, certificate); zipBuilder.add("artifacts/key", KeyUtils.toPem(keyPair.getPrivate()).getBytes(UTF_8)); zipBuilder.add("artifacts/cert", X509CertificateUtils.toPem(certificate).getBytes(UTF_8)); } private DeploymentId getTesterDeploymentId(RunId runId) { ZoneId zoneId = runId.type().zone(controller.system()); return new DeploymentId(runId.tester().id(), zoneId); } private boolean useConfigServerForTesterAPI(ZoneId zoneId) { BooleanFlag useConfigServerForTesterAPI = Flags.USE_CONFIG_SERVER_FOR_TESTER_API_CALLS.bindTo(controller.flagSource()); boolean useConfigServer = useConfigServerForTesterAPI.with(FetchVector.Dimension.ZONE_ID, zoneId.value()).value(); InternalStepRunner.logger.log(LogLevel.INFO, Flags.USE_CONFIG_SERVER_FOR_TESTER_API_CALLS.id().toString() + " has value " + useConfigServer + " in zone " + zoneId.value()); return useConfigServer; } private static Optional<String> testerFlavorFor(RunId id, DeploymentSpec spec) { for (DeploymentSpec.Step step : spec.steps()) if (step.concerns(id.type().environment())) return step.zones().get(0).testerFlavor(); return Optional.empty(); } /** Returns the generated services.xml content for the tester application. */ static byte[] servicesXml(AthenzDomain domain, boolean systemUsesAthenz, boolean useTesterCertificate, NodeResources resources) { int jdiscMemoryGb = 2; int jdiscMemoryPct = (int) Math.ceil(100 * jdiscMemoryGb / resources.memoryGb()); int testMemoryMb = (int) (1024 * (resources.memoryGb() - jdiscMemoryGb) / 2); String resourceString = String.format(Locale.ENGLISH, "<resources vcpu=\"%.2f\" memory=\"%.2fGb\" disk=\"%.2fGb\" disk-speed=\"%s\" storage-type=\"%s\"/>", resources.vcpu(), resources.memoryGb(), resources.diskGb(), resources.diskSpeed().name(), resources.storageType().name()); AthenzDomain idDomain = ("vespa.vespa.cd".equals(domain.value()) ? AthenzDomain.from("vespa.vespa") : domain); String servicesXml = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<services xmlns:deploy='vespa' version='1.0'>\n" + " <container version='1.0' id='tester'>\n" + "\n" + " <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" + " <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" + " <artifactsPath>artifacts</artifactsPath>\n" + " <surefireMemoryMb>" + testMemoryMb + "</surefireMemoryMb>\n" + " <useAthenzCredentials>" + systemUsesAthenz + "</useAthenzCredentials>\n" + " <useTesterCertificate>" + useTesterCertificate + "</useTesterCertificate>\n" + " </config>\n" + " </component>\n" + "\n" + " <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" + " <binding>http: " </handler>\n" + "\n" + " <http>\n" + " <!-- Make sure 4080 is the first port. This will be used by the config server. 
-->\n" + " <server id='default' port='4080'/>\n" + " <server id='testertls4443' port='4443'>\n" + " <config name=\"jdisc.http.connector\">\n" + " <tlsClientAuthEnforcer>\n" + " <enable>true</enable>\n" + " <pathWhitelist>\n" + " <item>/status.html</item>\n" + " <item>/state/v1/config</item>\n" + " </pathWhitelist>\n" + " </tlsClientAuthEnforcer>\n" + " </config>\n" + " <ssl>\n" + " <private-key-file>/var/lib/sia/keys/" + idDomain.value() + ".tenant.key.pem</private-key-file>\n" + " <certificate-file>/var/lib/sia/certs/" + idDomain.value() + ".tenant.cert.pem</certificate-file>\n" + " <ca-certificates-file>/opt/yahoo/share/ssl/certs/athenz_certificate_bundle.pem</ca-certificates-file>\n" + " <client-authentication>want</client-authentication>\n" + " </ssl>\n" + " </server>\n" + " <filtering>\n" + (systemUsesAthenz ? " <access-control domain='" + domain.value() + "'>\n" + " <exclude>\n" + " <binding>http: " </exclude>\n" + " </access-control>\n" : "") + " <request-chain id=\"testrunner-api\">\n" + " <filter id='authz-filter' class='com.yahoo.jdisc.http.filter.security.athenz.AthenzAuthorizationFilter' bundle=\"jdisc-security-filters\">\n" + " <config name=\"jdisc.http.filter.security.athenz.athenz-authorization-filter\">\n" + " <credentialsToVerify>TOKEN_ONLY</credentialsToVerify>\n" + " <roleTokenHeaderName>Yahoo-Role-Auth</roleTokenHeaderName>\n" + " </config>\n" + " <component id=\"com.yahoo.jdisc.http.filter.security.athenz.StaticRequestResourceMapper\" bundle=\"jdisc-security-filters\">\n" + " <config name=\"jdisc.http.filter.security.athenz.static-request-resource-mapper\">\n" + " <resourceName>" + domain.value() + ":tester-application</resourceName>\n" + " <action>deploy</action>\n" + " </config>\n" + " </component>\n" + " </filter>\n" + " </request-chain>\n" + " </filtering>\n" + " </http>\n" + "\n" + " <nodes count=\"1\" allocated-memory=\"" + jdiscMemoryPct + "%\">\n" + " " + resourceString + "\n" + " </nodes>\n" + " </container>\n" + "</services>\n"; return servicesXml.getBytes(UTF_8); } /** Returns a dummy deployment xml which sets up the service identity for the tester, if present. */ private static byte[] deploymentXml(TesterId id, Optional<AthenzDomain> athenzDomain, Optional<AthenzService> athenzService) { String deploymentSpec = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<deployment version=\"1.0\" " + athenzDomain.map(domain -> "athenz-domain=\"" + domain.value() + "\" ").orElse("") + athenzService.map(service -> "athenz-service=\"" + service.value() + "\" ").orElse("") + ">" + " <instance id=\"" + id.id().instance().value() + "\" />" + "</deployment>"; return deploymentSpec.getBytes(UTF_8); } /** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */ private class DualLogger { private final RunId id; private final Step step; private DualLogger(RunId id, Step step) { this.id = id; this.step = step; } private void log(String... 
messages) { log(List.of(messages)); } private void log(List<String> messages) { controller.jobController().log(id, step, INFO, messages); } private void log(Level level, String message) { log(level, message, null); } private void logWithInternalException(Level level, String message, Throwable thrown) { logger.log(level, id + " at " + step + ": " + message, thrown); controller.jobController().log(id, step, level, message); } private void log(Level level, String message, Throwable thrown) { logger.log(level, id + " at " + step + ": " + message, thrown); if (thrown != null) { ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream(); thrown.printStackTrace(new PrintStream(traceBuffer)); message += "\n" + traceBuffer; } controller.jobController().log(id, step, level, message); } } }
class InternalStepRunner implements StepRunner { private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName()); private static final NodeResources DEFAULT_TESTER_RESOURCES = new NodeResources(1, 4, 50, 0.3, NodeResources.DiskSpeed.any); private static final NodeResources DEFAULT_TESTER_RESOURCES_AWS = new NodeResources(2, 8, 50, 0.3, NodeResources.DiskSpeed.any); static final Duration endpointTimeout = Duration.ofMinutes(15); static final Duration testerTimeout = Duration.ofMinutes(30); static final Duration installationTimeout = Duration.ofMinutes(60); static final Duration certificateTimeout = Duration.ofMinutes(300); private final Controller controller; private final TestConfigSerializer testConfigSerializer; private final DeploymentFailureMails mails; public InternalStepRunner(Controller controller) { this.controller = controller; this.testConfigSerializer = new TestConfigSerializer(controller.system()); this.mails = new DeploymentFailureMails(controller.zoneRegistry()); } @Override public Optional<RunStatus> run(LockedStep step, RunId id) { DualLogger logger = new DualLogger(id, step.get()); try { switch (step.get()) { case deployTester: return deployTester(id, logger); case deployInitialReal: return deployInitialReal(id, logger); case installInitialReal: return installInitialReal(id, logger); case deployReal: return deployReal(id, logger); case installTester: return installTester(id, logger); case installReal: return installReal(id, logger); case startStagingSetup: return startTests(id, true, logger); case endStagingSetup: return endTests(id, logger); case startTests: return startTests(id, false, logger); case endTests: return endTests(id, logger); case copyVespaLogs: return copyVespaLogs(id, logger); case deactivateReal: return deactivateReal(id, logger); case deactivateTester: return deactivateTester(id, logger); case report: return report(id, logger); default: throw new AssertionError("Unknown step '" + step + "'!"); } } catch (UncheckedIOException e) { logger.logWithInternalException(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e), e); return Optional.empty(); } catch (RuntimeException e) { logger.log(WARNING, "Unexpected exception running " + id, e); if (JobProfile.of(id.type()).alwaysRun().contains(step.get())) { logger.log("Will keep trying, as this is a cleanup step."); return Optional.empty(); } return Optional.of(error); } } private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) { Versions versions = controller.jobController().run(id).get().versions(); logger.log("Deploying platform version " + versions.sourcePlatform().orElse(versions.targetPlatform()) + " and application version " + versions.sourceApplication().orElse(versions.targetApplication()).id() + " ..."); return deployReal(id, true, versions, logger); } private Optional<RunStatus> deployReal(RunId id, DualLogger logger) { Versions versions = controller.jobController().run(id).get().versions(); logger.log("Deploying platform version " + versions.targetPlatform() + " and application version " + versions.targetApplication().id() + " ..."); return deployReal(id, false, versions, logger); } private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, Versions versions, DualLogger logger) { Optional<ApplicationPackage> applicationPackage = id.type().environment().isManuallyDeployed() ? 
Optional.of(new ApplicationPackage(controller.applications().applicationStore() .getDev(id.application(), id.type().zone(controller.system())))) : Optional.empty(); Optional<Version> vespaVersion = id.type().environment().isManuallyDeployed() ? Optional.of(versions.targetPlatform()) : Optional.empty(); return deploy(id.application(), id.type(), () -> controller.applications().deploy(id.application(), id.type().zone(controller.system()), applicationPackage, new DeployOptions(false, vespaVersion, false, setTheStage)), controller.jobController().run(id).get() .stepInfo(setTheStage ? deployInitialReal : deployReal).get() .startTime().get(), logger); } private Optional<RunStatus> deployTester(RunId id, DualLogger logger) { Version platform = controller.systemVersion(); logger.log("Deploying the tester container on platform " + platform + " ..."); return deploy(id.tester().id(), id.type(), () -> controller.applications().deployTester(id.tester(), testerPackage(id), id.type().zone(controller.system()), new DeployOptions(true, Optional.of(platform), false, false)), controller.jobController().run(id).get() .stepInfo(deployTester).get() .startTime().get(), logger); } private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, Instant startTime, DualLogger logger) { try { PrepareResponse prepareResponse = deployment.get().prepareResponse(); if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) { List<String> messages = new ArrayList<>(); messages.add("Deploy failed due to non-compatible changes that require re-feed."); messages.add("Your options are:"); messages.add("1. Revert the incompatible changes."); messages.add("2. If you think it is safe in your case, you can override this validation, see"); messages.add(" http: messages.add("3. Deploy as a new application under a different name."); messages.add("Illegal actions:"); prepareResponse.configChangeActions.refeedActions.stream() .filter(action -> ! action.allowed) .flatMap(action -> action.messages.stream()) .forEach(messages::add); messages.add("Details:"); prepareResponse.log.stream() .map(entry -> entry.message) .forEach(messages::add); logger.log(messages); return Optional.of(deploymentFailed); } if (prepareResponse.configChangeActions.restartActions.isEmpty()) logger.log("No services requiring restart."); else prepareResponse.configChangeActions.restartActions.stream() .flatMap(action -> action.services.stream()) .map(service -> service.hostName) .sorted().distinct() .map(Hostname::new) .forEach(hostname -> { controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname)); logger.log("Restarting services on host " + hostname.id() + "."); }); logger.log("Deployment successful."); if (prepareResponse.message != null) logger.log(prepareResponse.message); return Optional.of(running); } catch (ConfigServerException e) { Optional<RunStatus> result = startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? 
Optional.of(deploymentFailed) : Optional.empty(); switch (e.getErrorCode()) { case ACTIVATION_CONFLICT: case APPLICATION_LOCK_FAILURE: case CERTIFICATE_NOT_READY: logger.log("Deployment failed with possibly transient error " + e.getErrorCode() + ", will retry: " + e.getMessage()); return result; case LOAD_BALANCER_NOT_READY: case PARENT_HOST_NOT_READY: logger.log(e.getServerMessage()); return result; case OUT_OF_CAPACITY: logger.log(e.getServerMessage()); return Optional.of(outOfCapacity); case INVALID_APPLICATION_PACKAGE: case BAD_REQUEST: logger.log(e.getMessage()); return Optional.of(deploymentFailed); } throw e; } } private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) { return installReal(id, true, logger); } private Optional<RunStatus> installReal(RunId id, DualLogger logger) { return installReal(id, false, logger); } private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) { Optional<Deployment> deployment = deployment(id.application(), id.type()); if (deployment.isEmpty()) { logger.log(INFO, "Deployment expired before installation was successful."); return Optional.of(installationFailed); } Versions versions = controller.jobController().run(id).get().versions(); Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform(); Run run = controller.jobController().run(id).get(); Optional<ServiceConvergence> services = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(id.application(), id.type().zone(controller.system())), Optional.of(platform)); if (services.isEmpty()) { logger.log("Config status not currently available -- will retry."); Step step = setTheStage ? installInitialReal : installReal; return run.stepInfo(step).get().startTime().get().isBefore(controller.clock().instant().minus(Duration.ofMinutes(5))) ? Optional.of(error) : Optional.empty(); } List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(id.type().zone(controller.system()), id.application(), ImmutableSet.of(active, reserved)); List<Node> parents = controller.serviceRegistry().configServer().nodeRepository().list(id.type().zone(controller.system()), nodes.stream().map(node -> node.parentHostname().get()).collect(toList())); NodeList nodeList = NodeList.of(nodes, parents, services.get()); boolean firstTick = run.convergenceSummary().isEmpty(); if (firstTick) { logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); } ConvergenceSummary summary = nodeList.summary(); if (summary.converged()) { controller.jobController().locked(id, lockedRun -> lockedRun.withSummary(null)); if (endpointsAvailable(id.application(), id.type().zone(controller.system()), logger)) { if (containersAreUp(id.application(), id.type().zone(controller.system()), logger)) { logger.log("Installation succeeded!"); return Optional.of(running); } } else if (timedOut(id, deployment.get(), endpointTimeout)) { logger.log(WARNING, "Endpoints failed to show up within " + endpointTimeout.toMinutes() + " minutes!"); return Optional.of(error); } } boolean failed = false; NodeList suspendedTooLong = nodeList.suspendedSince(controller.clock().instant().minus(installationTimeout)); if ( ! 
suspendedTooLong.isEmpty()) { logger.log(INFO, "Some nodes have been suspended for more than " + installationTimeout.toMinutes() + " minutes."); failed = true; } if (run.noNodesDownSince() .map(since -> since.isBefore(controller.clock().instant().minus(installationTimeout))) .orElse(false)) { if (summary.needPlatformUpgrade() > 0 || summary.needReboot() > 0 || summary.needRestart() > 0) logger.log(INFO, "No nodes allowed to suspend to progress installation for " + installationTimeout.toMinutes() + " minutes."); else logger.log(INFO, "Nodes not able to start with new application package."); failed = true; } Duration timeout = JobRunner.jobTimeout.minusHours(1); if (timedOut(id, deployment.get(), timeout)) { logger.log(INFO, "Installation failed to complete within " + timeout.toHours() + " hours!"); failed = true; } if (failed) { logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); return Optional.of(installationFailed); } if ( ! firstTick) logger.log(nodeList.allowedDown().asList().stream() .flatMap(node -> nodeDetails(node, false)) .collect(toList())); controller.jobController().locked(id, lockedRun -> { Instant noNodesDownSince = summary.down() == 0 ? lockedRun.noNodesDownSince().orElse(controller.clock().instant()) : null; return lockedRun.noNodesDownSince(noNodesDownSince).withSummary(summary); }); return Optional.empty(); } private Optional<RunStatus> installTester(RunId id, DualLogger logger) { Run run = controller.jobController().run(id).get(); Version platform = controller.systemVersion(); ZoneId zone = id.type().zone(controller.system()); ApplicationId testerId = id.tester().id(); Optional<ServiceConvergence> services = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(testerId, zone), Optional.of(platform)); if (services.isEmpty()) { logger.log("Config status not currently available -- will retry."); return run.stepInfo(installTester).get().startTime().get().isBefore(controller.clock().instant().minus(Duration.ofMinutes(5))) ? Optional.of(error) : Optional.empty(); } List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(zone, testerId, ImmutableSet.of(active, reserved)); List<Node> parents = controller.serviceRegistry().configServer().nodeRepository().list(zone, nodes.stream().map(node -> node.parentHostname().get()).collect(toList())); NodeList nodeList = NodeList.of(nodes, parents, services.get()); logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, false)) .collect(toList())); if (nodeList.summary().converged()) { if (endpointsAvailable(testerId, zone, logger)) { if (testerContainersAreUp(testerId, zone, logger)) { logger.log("Tester container successfully installed!"); return Optional.of(running); } } else if (run.stepInfo(installTester).get().startTime().get().plus(endpointTimeout).isBefore(controller.clock().instant())) { logger.log(WARNING, "Tester failed to show up within " + endpointTimeout.toMinutes() + " minutes!"); return Optional.of(error); } } if (run.stepInfo(installTester).get().startTime().get().plus(testerTimeout).isBefore(controller.clock().instant())) { logger.log(WARNING, "Installation of tester failed to complete within " + testerTimeout.toMinutes() + " minutes!"); return Optional.of(error); } return Optional.empty(); } /** Returns true iff all containers in the deployment give 100 consecutive 200 OK responses on /status.html. 
*/ private boolean containersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) { var endpoints = controller.applications().clusterEndpoints(Set.of(new DeploymentId(id, zoneId))); if ( ! endpoints.containsKey(zoneId)) return false; for (URI endpoint : endpoints.get(zoneId).values()) { boolean ready = id.instance().isTester() ? controller.jobController().cloud().testerReady(endpoint) : controller.jobController().cloud().ready(endpoint); if (!ready) { logger.log("Failed to get 100 consecutive OKs from " + endpoint); return false; } } return true; } /** Returns true iff the deployment's endpoints exist and DNS lookups of them yield IP addresses. */ private boolean endpointsAvailable(ApplicationId id, ZoneId zone, DualLogger logger) { var endpoints = controller.applications().clusterEndpoints(Set.of(new DeploymentId(id, zone))); if ( ! endpoints.containsKey(zone)) { logger.log("Endpoints not yet ready."); return false; } for (var endpoint : endpoints.get(zone).values()) if ( ! controller.jobController().cloud().exists(endpoint)) { logger.log(INFO, "DNS lookup yielded no IP address for '" + endpoint + "'."); return false; } logEndpoints(endpoints, logger); return true; } private void logEndpoints(Map<ZoneId, Map<ClusterSpec.Id, URI>> endpoints, DualLogger logger) { List<String> messages = new ArrayList<>(); messages.add("Found endpoints:"); endpoints.forEach((zone, uris) -> { messages.add("- " + zone); uris.forEach((cluster, uri) -> messages.add(" |-- " + uri + " (" + cluster + ")")); }); logger.log(messages); } private Stream<String> nodeDetails(NodeWithServices node, boolean printAllServices) { return Stream.concat(Stream.of(node.node().hostname() + ": " + humanize(node.node().serviceState()), "--- platform " + node.node().wantedVersion() + (node.needsPlatformUpgrade() ? " <-- " + (node.node().currentVersion().isEmpty() ? "not booted" : node.node().currentVersion()) : "") + (node.needsOsUpgrade() && node.isAllowedDown() ? ", upgrading OS (" + node.node().wantedOsVersion() + " <-- " + node.node().currentOsVersion() + ")" : "") + (node.needsFirmwareUpgrade() && node.isAllowedDown() ? ", upgrading firmware" : "") + (node.needsRestart() ? ", restart pending (" + node.node().wantedRestartGeneration() + " <-- " + node.node().restartGeneration() + ")" : "") + (node.needsReboot() ? ", reboot pending (" + node.node().wantedRebootGeneration() + " <-- " + node.node().rebootGeneration() + ")" : "")), node.services().stream() .filter(service -> printAllServices || node.needsNewConfig()) .map(service -> "--- " + service.type() + " on port " + service.port() + (service.currentGeneration() == -1 ? 
" has not started " : " has config generation " + service.currentGeneration() + ", wanted is " + node.wantedConfigGeneration()))); } private String humanize(Node.ServiceState state) { switch (state) { case allowedDown: return "allowed to be DOWN"; case expectedUp: return "expected to be UP"; case unorchestrated: return "unorchestrated"; default: return state.name(); } } private Optional<RunStatus> startTests(RunId id, boolean isSetup, DualLogger logger) { Optional<Deployment> deployment = deployment(id.application(), id.type()); if (deployment.isEmpty()) { logger.log(INFO, "Deployment expired before tests could start."); return Optional.of(error); } var deployments = controller.applications().requireInstance(id.application()) .productionDeployments().keySet().stream() .map(zone -> new DeploymentId(id.application(), zone)) .collect(Collectors.toSet()); ZoneId zoneId = id.type().zone(controller.system()); deployments.add(new DeploymentId(id.application(), zoneId)); logger.log("Attempting to find endpoints ..."); var endpoints = controller.applications().clusterEndpoints(deployments); if ( ! endpoints.containsKey(zoneId)) { logger.log(WARNING, "Endpoints for the deployment to test vanished again, while it was still active!"); return Optional.of(error); } logEndpoints(endpoints, logger); Optional<URI> testerEndpoint = controller.jobController().testerEndpoint(id); if (useConfigServerForTesterAPI(zoneId)) { if ( ! controller.serviceRegistry().configServer().isTesterReady(getTesterDeploymentId(id))) { logger.log(WARNING, "Tester container went bad!"); return Optional.of(error); } } else { if (testerEndpoint.isEmpty()) { logger.log(WARNING, "Endpoints for the tester container vanished again, while it was still active!"); return Optional.of(error); } if ( ! 
controller.jobController().cloud().testerReady(testerEndpoint.get())) { logger.log(WARNING, "Tester container went bad!"); return Optional.of(error); } } logger.log("Starting tests ..."); TesterCloud.Suite suite = TesterCloud.Suite.of(id.type(), isSetup); byte[] config = testConfigSerializer.configJson(id.application(), id.type(), true, endpoints, controller.applications().contentClustersByZone(deployments)); if (useConfigServerForTesterAPI(zoneId)) { controller.serviceRegistry().configServer().startTests(getTesterDeploymentId(id), suite, config); } else { controller.jobController().cloud().startTests(testerEndpoint.get(), suite, config); } return Optional.of(running); } private boolean testerReady(RunId id, URI testerEndpoint) { if (useConfigServerForTesterAPI(id.type().zone(controller.system()))) { return controller.serviceRegistry().configServer().isTesterReady(getTesterDeploymentId(id)); } else { return controller.jobController().cloud().testerReady(testerEndpoint); } } private Optional<RunStatus> endTests(RunId id, DualLogger logger) { if (deployment(id.application(), id.type()).isEmpty()) { logger.log(INFO, "Deployment expired before tests could complete."); return Optional.of(aborted); } Optional<X509Certificate> testerCertificate = controller.jobController().run(id).get().testerCertificate(); if (testerCertificate.isPresent()) { try { testerCertificate.get().checkValidity(Date.from(controller.clock().instant())); } catch (CertificateExpiredException | CertificateNotYetValidException e) { logger.log(INFO, "Tester certificate expired before tests could complete."); return Optional.of(aborted); } } controller.jobController().updateTestLog(id); TesterCloud.Status testStatus; if (useConfigServerForTesterAPI(id.type().zone(controller.system()))) { testStatus = controller.serviceRegistry().configServer().getTesterStatus(getTesterDeploymentId(id)); } else { Optional<URI> testerEndpoint = controller.jobController().testerEndpoint(id); if (testerEndpoint.isEmpty()) { logger.log("Endpoints for tester not found -- trying again later."); return Optional.empty(); } testStatus = controller.jobController().cloud().getStatus(testerEndpoint.get()); } switch (testStatus) { case NOT_STARTED: throw new IllegalStateException("Tester reports tests not started, even though they should have!"); case RUNNING: return Optional.empty(); case FAILURE: logger.log("Tests failed."); return Optional.of(testFailure); case ERROR: logger.log(INFO, "Tester failed running its tests!"); return Optional.of(error); case SUCCESS: logger.log("Tests completed successfully."); return Optional.of(running); default: throw new IllegalStateException("Unknown status '" + testStatus + "'!"); } } private Optional<RunStatus> copyVespaLogs(RunId id, DualLogger logger) { if (deployment(id.application(), id.type()).isPresent()) try { controller.jobController().updateVespaLog(id); } catch (Exception e) { logger.log(INFO, "Failure getting vespa logs for " + id, e); return Optional.of(error); } return Optional.of(running); } private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) { try { logger.log("Deactivating deployment of " + id.application() + " in " + id.type().zone(controller.system()) + " ..."); controller.applications().deactivate(id.application(), id.type().zone(controller.system())); return Optional.of(running); } catch (RuntimeException e) { logger.log(WARNING, "Failed deleting application " + id.application(), e); Instant startTime = 
controller.jobController().run(id).get().stepInfo(deactivateReal).get().startTime().get(); return startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? Optional.of(error) : Optional.empty(); } } private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) { try { logger.log("Deactivating tester of " + id.application() + " in " + id.type().zone(controller.system()) + " ..."); controller.jobController().deactivateTester(id.tester(), id.type()); return Optional.of(running); } catch (RuntimeException e) { logger.log(WARNING, "Failed deleting tester of " + id.application(), e); Instant startTime = controller.jobController().run(id).get().stepInfo(deactivateTester).get().startTime().get(); return startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? Optional.of(error) : Optional.empty(); } } private Optional<RunStatus> report(RunId id, DualLogger logger) { try { controller.jobController().active(id).ifPresent(run -> { if (run.hasFailed()) sendNotification(run, logger); }); } catch (IllegalStateException e) { logger.log(INFO, "Job '" + id.type() + "' no longer supposed to run?", e); return Optional.of(error); } return Optional.of(running); } /** Sends a mail with a notification of a failed run, if one should be sent. */ private void sendNotification(Run run, DualLogger logger) { Application application = controller.applications().requireApplication(TenantAndApplicationId.from(run.id().application())); Notifications notifications = application.deploymentSpec().requireInstance(run.id().application().instance()).notifications(); boolean newCommit = application.require(run.id().application().instance()).change().application() .map(run.versions().targetApplication()::equals) .orElse(false); When when = newCommit ? failingCommit : failing; List<String> recipients = new ArrayList<>(notifications.emailAddressesFor(when)); if (notifications.emailRolesFor(when).contains(author)) run.versions().targetApplication().authorEmail().ifPresent(recipients::add); if (recipients.isEmpty()) return; try { if (run.status() == outOfCapacity && run.id().type().isProduction()) controller.serviceRegistry().mailer().send(mails.outOfCapacity(run.id(), recipients)); if (run.status() == deploymentFailed) controller.serviceRegistry().mailer().send(mails.deploymentFailure(run.id(), recipients)); if (run.status() == installationFailed) controller.serviceRegistry().mailer().send(mails.installationFailure(run.id(), recipients)); if (run.status() == testFailure) controller.serviceRegistry().mailer().send(mails.testFailure(run.id(), recipients)); if (run.status() == error) controller.serviceRegistry().mailer().send(mails.systemError(run.id(), recipients)); } catch (RuntimeException e) { logger.log(INFO, "Exception trying to send mail for " + run.id(), e); } } /** Returns the deployment of the real application in the zone of the given job, if it exists. */ private Optional<Deployment> deployment(ApplicationId id, JobType type) { return Optional.ofNullable(application(id).deployments().get(type.zone(controller.system()))); } /** Returns the real application with the given id. */ private Instance application(ApplicationId id) { controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), __ -> { }); return controller.applications().requireInstance(id); } /** * Returns whether the time since deployment is more than the zone deployment expiry, or the given timeout. 
* * We time out the job before the deployment expires, for zones where deployments are not persistent, * to be able to collect the Vespa log from the deployment. Thus, the lower of the zone's deployment expiry, * and the given default installation timeout, minus one minute, is used as a timeout threshold. */ private boolean timedOut(RunId id, Deployment deployment, Duration defaultTimeout) { Run run = controller.jobController().run(id).get(); if ( ! controller.system().isCd() && run.start().isAfter(deployment.at())) return false; Duration timeout = controller.zoneRegistry().getDeploymentTimeToLive(deployment.zone()) .filter(zoneTimeout -> zoneTimeout.compareTo(defaultTimeout) < 0) .orElse(defaultTimeout); return deployment.at().isBefore(controller.clock().instant().minus(timeout.minus(Duration.ofMinutes(1)))); } /** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */ private ApplicationPackage testerPackage(RunId id) { ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication(); DeploymentSpec spec = controller.applications().requireApplication(TenantAndApplicationId.from(id.application())).deploymentSpec(); ZoneId zone = id.type().zone(controller.system()); boolean useTesterCertificate = controller.system().isPublic() && id.type().environment().isTest(); byte[] servicesXml = servicesXml(controller.zoneRegistry().accessControlDomain(), ! controller.system().isPublic(), useTesterCertificate, testerFlavorFor(id, spec) .map(NodeResources::fromLegacyName) .orElse(zone.region().value().contains("aws-") ? DEFAULT_TESTER_RESOURCES_AWS : DEFAULT_TESTER_RESOURCES)); byte[] testPackage = controller.applications().applicationStore().getTester(id.application().tenant(), id.application().application(), version); byte[] deploymentXml = deploymentXml(id.tester(), spec.athenzDomain(), spec.requireInstance(id.application().instance()).athenzService(zone.environment(), zone.region())); try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) { zipBuilder.add(testPackage); zipBuilder.add("services.xml", servicesXml); zipBuilder.add("deployment.xml", deploymentXml); if (useTesterCertificate) appendAndStoreCertificate(zipBuilder, id); zipBuilder.close(); return new ApplicationPackage(zipBuilder.toByteArray()); } } private void appendAndStoreCertificate(ZipBuilder zipBuilder, RunId id) { KeyPair keyPair = KeyUtils.generateKeypair(KeyAlgorithm.RSA, 2048); X500Principal subject = new X500Principal("CN=" + id.tester().id().toFullString() + "." + id.type() + "." 
+ id.number()); X509Certificate certificate = X509CertificateBuilder.fromKeypair(keyPair, subject, controller.clock().instant(), controller.clock().instant().plus(certificateTimeout), SignatureAlgorithm.SHA512_WITH_RSA, BigInteger.valueOf(1)) .build(); controller.jobController().storeTesterCertificate(id, certificate); zipBuilder.add("artifacts/key", KeyUtils.toPem(keyPair.getPrivate()).getBytes(UTF_8)); zipBuilder.add("artifacts/cert", X509CertificateUtils.toPem(certificate).getBytes(UTF_8)); } private DeploymentId getTesterDeploymentId(RunId runId) { ZoneId zoneId = runId.type().zone(controller.system()); return new DeploymentId(runId.tester().id(), zoneId); } private boolean useConfigServerForTesterAPI(ZoneId zoneId) { BooleanFlag useConfigServerForTesterAPI = Flags.USE_CONFIG_SERVER_FOR_TESTER_API_CALLS.bindTo(controller.flagSource()); boolean useConfigServer = useConfigServerForTesterAPI.with(FetchVector.Dimension.ZONE_ID, zoneId.value()).value(); InternalStepRunner.logger.log(LogLevel.INFO, Flags.USE_CONFIG_SERVER_FOR_TESTER_API_CALLS.id().toString() + " has value " + useConfigServer + " in zone " + zoneId.value()); return useConfigServer; } private static Optional<String> testerFlavorFor(RunId id, DeploymentSpec spec) { for (DeploymentSpec.Step step : spec.steps()) if (step.concerns(id.type().environment())) return step.zones().get(0).testerFlavor(); return Optional.empty(); } /** Returns the generated services.xml content for the tester application. */ static byte[] servicesXml(AthenzDomain domain, boolean systemUsesAthenz, boolean useTesterCertificate, NodeResources resources) { int jdiscMemoryGb = 2; int jdiscMemoryPct = (int) Math.ceil(100 * jdiscMemoryGb / resources.memoryGb()); int testMemoryMb = (int) (1024 * (resources.memoryGb() - jdiscMemoryGb) / 2); String resourceString = String.format(Locale.ENGLISH, "<resources vcpu=\"%.2f\" memory=\"%.2fGb\" disk=\"%.2fGb\" disk-speed=\"%s\" storage-type=\"%s\"/>", resources.vcpu(), resources.memoryGb(), resources.diskGb(), resources.diskSpeed().name(), resources.storageType().name()); AthenzDomain idDomain = ("vespa.vespa.cd".equals(domain.value()) ? AthenzDomain.from("vespa.vespa") : domain); String servicesXml = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<services xmlns:deploy='vespa' version='1.0'>\n" + " <container version='1.0' id='tester'>\n" + "\n" + " <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" + " <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" + " <artifactsPath>artifacts</artifactsPath>\n" + " <surefireMemoryMb>" + testMemoryMb + "</surefireMemoryMb>\n" + " <useAthenzCredentials>" + systemUsesAthenz + "</useAthenzCredentials>\n" + " <useTesterCertificate>" + useTesterCertificate + "</useTesterCertificate>\n" + " </config>\n" + " </component>\n" + "\n" + " <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" + " <binding>http: " </handler>\n" + "\n" + " <http>\n" + " <!-- Make sure 4080 is the first port. This will be used by the config server. 
-->\n" + " <server id='default' port='4080'/>\n" + " <server id='testertls4443' port='4443'>\n" + " <config name=\"jdisc.http.connector\">\n" + " <tlsClientAuthEnforcer>\n" + " <enable>true</enable>\n" + " <pathWhitelist>\n" + " <item>/status.html</item>\n" + " <item>/state/v1/config</item>\n" + " </pathWhitelist>\n" + " </tlsClientAuthEnforcer>\n" + " </config>\n" + " <ssl>\n" + " <private-key-file>/var/lib/sia/keys/" + idDomain.value() + ".tenant.key.pem</private-key-file>\n" + " <certificate-file>/var/lib/sia/certs/" + idDomain.value() + ".tenant.cert.pem</certificate-file>\n" + " <ca-certificates-file>/opt/yahoo/share/ssl/certs/athenz_certificate_bundle.pem</ca-certificates-file>\n" + " <client-authentication>want</client-authentication>\n" + " </ssl>\n" + " </server>\n" + " <filtering>\n" + (systemUsesAthenz ? " <access-control domain='" + domain.value() + "'>\n" + " <exclude>\n" + " <binding>http: " </exclude>\n" + " </access-control>\n" : "") + " <request-chain id=\"testrunner-api\">\n" + " <filter id='authz-filter' class='com.yahoo.jdisc.http.filter.security.athenz.AthenzAuthorizationFilter' bundle=\"jdisc-security-filters\">\n" + " <config name=\"jdisc.http.filter.security.athenz.athenz-authorization-filter\">\n" + " <credentialsToVerify>TOKEN_ONLY</credentialsToVerify>\n" + " <roleTokenHeaderName>Yahoo-Role-Auth</roleTokenHeaderName>\n" + " </config>\n" + " <component id=\"com.yahoo.jdisc.http.filter.security.athenz.StaticRequestResourceMapper\" bundle=\"jdisc-security-filters\">\n" + " <config name=\"jdisc.http.filter.security.athenz.static-request-resource-mapper\">\n" + " <resourceName>" + domain.value() + ":tester-application</resourceName>\n" + " <action>deploy</action>\n" + " </config>\n" + " </component>\n" + " </filter>\n" + " </request-chain>\n" + " </filtering>\n" + " </http>\n" + "\n" + " <nodes count=\"1\" allocated-memory=\"" + jdiscMemoryPct + "%\">\n" + " " + resourceString + "\n" + " </nodes>\n" + " </container>\n" + "</services>\n"; return servicesXml.getBytes(UTF_8); } /** Returns a dummy deployment xml which sets up the service identity for the tester, if present. */ private static byte[] deploymentXml(TesterId id, Optional<AthenzDomain> athenzDomain, Optional<AthenzService> athenzService) { String deploymentSpec = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<deployment version=\"1.0\" " + athenzDomain.map(domain -> "athenz-domain=\"" + domain.value() + "\" ").orElse("") + athenzService.map(service -> "athenz-service=\"" + service.value() + "\" ").orElse("") + ">" + " <instance id=\"" + id.id().instance().value() + "\" />" + "</deployment>"; return deploymentSpec.getBytes(UTF_8); } /** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */ private class DualLogger { private final RunId id; private final Step step; private DualLogger(RunId id, Step step) { this.id = id; this.step = step; } private void log(String... 
messages) { log(List.of(messages)); } private void log(List<String> messages) { controller.jobController().log(id, step, INFO, messages); } private void log(Level level, String message) { log(level, message, null); } private void logWithInternalException(Level level, String message, Throwable thrown) { logger.log(level, id + " at " + step + ": " + message, thrown); controller.jobController().log(id, step, level, message); } private void log(Level level, String message, Throwable thrown) { logger.log(level, id + " at " + step + ": " + message, thrown); if (thrown != null) { ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream(); thrown.printStackTrace(new PrintStream(traceBuffer)); message += "\n" + traceBuffer; } controller.jobController().log(id, step, level, message); } } }
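The `timedOut` helper in the record above encodes a subtle rule worth calling out: the effective threshold is the lower of the zone's deployment time-to-live and the given default timeout, minus one minute, so the Vespa log can still be collected before a non-persistent deployment expires. A minimal stand-alone sketch of just that rule; everything here except the JDK types is hypothetical:

```java
import java.time.Duration;
import java.time.Instant;
import java.util.Optional;

// Hypothetical illustration of the threshold rule used by timedOut(...):
// prefer the zone TTL when it is shorter than the default timeout, then
// subtract one minute to leave room for collecting logs.
public class TimeoutRule {
    static boolean timedOut(Instant deployedAt, Instant now,
                            Optional<Duration> zoneTtl, Duration defaultTimeout) {
        Duration timeout = zoneTtl.filter(ttl -> ttl.compareTo(defaultTimeout) < 0)
                                  .orElse(defaultTimeout);
        return deployedAt.isBefore(now.minus(timeout.minus(Duration.ofMinutes(1))));
    }

    public static void main(String[] args) {
        Instant now = Instant.now();
        Instant deployedAt = now.minus(Duration.ofMinutes(50));
        // The zone TTL (45 min) is shorter than the default (2 h), so it wins.
        boolean out = timedOut(deployedAt, now,
                               Optional.of(Duration.ofMinutes(45)), Duration.ofHours(2));
        System.out.println(out); // true: 50 min is past the 44 min threshold
    }
}
```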
```suggestion .setDescription("The temperature unit to use. Infer this from the user's location."); ```
private static FunctionParameters getCurrentWeatherFunctionParameters() { FunctionProperties location = new FunctionProperties() .setType("string") .setDescription("The city and state, e.g. San Francisco, CA"); FunctionProperties unit = new FunctionProperties() .setType("string") .setEnumString(Arrays.asList("celsius", "fahrenheit")) .setDescription("The temperature unit to use. Infer this from the users location."); Map<String, FunctionProperties> props = new HashMap<>(); props.put("location", location); props.put("unit", unit); return new FunctionParameters() .setType("object") .setRequiredPropertyNames(Arrays.asList("location", "unit")) .setProperties(props); }
.setDescription("The temperature unit to use. Infer this from the users location.");
private static FunctionParameters getCurrentWeatherFunctionParameters() { FunctionProperties location = new FunctionProperties() .setType("string") .setDescription("The city and state, e.g. San Francisco, CA"); FunctionProperties unit = new FunctionProperties() .setType("string") .setEnumString(Arrays.asList("celsius", "fahrenheit")) .setDescription("The temperature unit to use. Infer this from the user's location."); Map<String, FunctionProperties> props = new HashMap<>(); props.put("location", location); props.put("unit", unit); return new FunctionParameters() .setType("object") .setRequiredPropertyNames(Arrays.asList("location", "unit")) .setProperties(props); }
class ChatCompletionsFunctionCall { /** * Runs the sample algorithm and demonstrates how to get chat completions using function call. * * @param args Unused. Arguments to the program. */ public static void main(String[] args) { String azureOpenaiKey = "{azure-open-ai-key}"; String endpoint = "{azure-open-ai-endpoint}"; String deploymentOrModelId = "{azure-open-ai-deployment-model-id}"; OpenAIClient client = new OpenAIClientBuilder() .endpoint(endpoint) .credential(new AzureKeyCredential(azureOpenaiKey)) .buildClient(); List<FunctionDefinition> functions = Arrays.asList( new FunctionDefinition("getCurrentWeather") .setDescription("Get the current weather") .setParameters(BinaryData.fromObject(getCurrentWeatherFunctionParameters())) ); List<ChatRequestMessage> chatRequestMessages = new ArrayList<>(); chatRequestMessages.add(new ChatRequestUserMessage("What should I wear in Boston depending on the weather?")); ChatCompletions chatCompletions = client.getChatCompletions(deploymentOrModelId, new ChatCompletionsOptions(chatRequestMessages) .setFunctionCall(FunctionCallConfig.AUTO) .setFunctions(functions)); chatRequestMessages = handleFunctionCallResponse(chatCompletions.getChoices(), chatRequestMessages); ChatCompletions chatCompletionsAnswer = client.getChatCompletions(deploymentOrModelId, new ChatCompletionsOptions(chatRequestMessages)); System.out.printf("Message: %s.%n", chatCompletionsAnswer.getChoices().get(0).getMessage().getContent()); } private static List<ChatRequestMessage> handleFunctionCallResponse(List<ChatChoice> choices, List<ChatRequestMessage> chatMessages) { for (ChatChoice choice : choices) { ChatResponseMessage choiceMessage = choice.getMessage(); FunctionCall functionCall = choiceMessage.getFunctionCall(); if (CompletionsFinishReason.FUNCTION_CALL.equals(choice.getFinishReason())) { System.out.printf("Function name: %s, arguments: %s.%n", functionCall.getName(), functionCall.getArguments()); WeatherLocation weatherLocation = BinaryData.fromString(functionCall.getArguments()) .toObject(WeatherLocation.class); int currentWeather = getCurrentWeather(weatherLocation); chatMessages.add(new ChatRequestUserMessage(String.format("The weather in %s is %d degrees %s.", weatherLocation.getLocation(), currentWeather, weatherLocation.getUnit()))); } else { ChatRequestAssistantMessage messageHistory = new ChatRequestAssistantMessage(choiceMessage.getContent()); messageHistory.setFunctionCall(choiceMessage.getFunctionCall()); chatMessages.add(messageHistory); } } return chatMessages; } private static int getCurrentWeather(WeatherLocation weatherLocation) { return 35; } private static class WeatherLocation { @JsonProperty(value = "unit") String unit; @JsonProperty(value = "location") String location; @JsonCreator WeatherLocation(@JsonProperty(value = "unit") String unit, @JsonProperty(value = "location") String location) { this.unit = unit; this.location = location; } public String getUnit() { return unit; } public String getLocation() { return location; } } }
class ChatCompletionsFunctionCall { /** * Runs the sample algorithm and demonstrates how to get chat completions using function call. * * @param args Unused. Arguments to the program. */ public static void main(String[] args) { String azureOpenaiKey = "{azure-open-ai-key}"; String endpoint = "{azure-open-ai-endpoint}"; String deploymentOrModelId = "{azure-open-ai-deployment-model-id}"; OpenAIClient client = new OpenAIClientBuilder() .endpoint(endpoint) .credential(new AzureKeyCredential(azureOpenaiKey)) .buildClient(); List<FunctionDefinition> functions = Arrays.asList( new FunctionDefinition("getCurrentWeather") .setDescription("Get the current weather") .setParameters(BinaryData.fromObject(getCurrentWeatherFunctionParameters())) ); List<ChatRequestMessage> chatRequestMessages = new ArrayList<>(); chatRequestMessages.add(new ChatRequestUserMessage("What should I wear in Boston depending on the weather?")); ChatCompletions chatCompletions = client.getChatCompletions(deploymentOrModelId, new ChatCompletionsOptions(chatRequestMessages) .setFunctionCall(FunctionCallConfig.AUTO) .setFunctions(functions)); chatRequestMessages = handleFunctionCallResponse(chatCompletions.getChoices(), chatRequestMessages); ChatCompletions chatCompletionsAnswer = client.getChatCompletions(deploymentOrModelId, new ChatCompletionsOptions(chatRequestMessages)); System.out.printf("Message: %s.%n", chatCompletionsAnswer.getChoices().get(0).getMessage().getContent()); } private static List<ChatRequestMessage> handleFunctionCallResponse(List<ChatChoice> choices, List<ChatRequestMessage> chatMessages) { for (ChatChoice choice : choices) { ChatResponseMessage choiceMessage = choice.getMessage(); FunctionCall functionCall = choiceMessage.getFunctionCall(); if (CompletionsFinishReason.FUNCTION_CALL.equals(choice.getFinishReason())) { System.out.printf("Function name: %s, arguments: %s.%n", functionCall.getName(), functionCall.getArguments()); WeatherLocation weatherLocation = BinaryData.fromString(functionCall.getArguments()) .toObject(WeatherLocation.class); int currentWeather = getCurrentWeather(weatherLocation); chatMessages.add(new ChatRequestUserMessage(String.format("The weather in %s is %d degrees %s.", weatherLocation.getLocation(), currentWeather, weatherLocation.getUnit()))); } else { ChatRequestAssistantMessage messageHistory = new ChatRequestAssistantMessage(choiceMessage.getContent()); messageHistory.setFunctionCall(choiceMessage.getFunctionCall()); chatMessages.add(messageHistory); } } return chatMessages; } private static int getCurrentWeather(WeatherLocation weatherLocation) { return 35; } private static class WeatherLocation { @JsonProperty(value = "unit") String unit; @JsonProperty(value = "location") String location; @JsonCreator WeatherLocation(@JsonProperty(value = "unit") String unit, @JsonProperty(value = "location") String location) { this.unit = unit; this.location = location; } public String getUnit() { return unit; } public String getLocation() { return location; } } }
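In the sample above, the model's function-call arguments arrive as a plain JSON string and are bound to a small holder type via `BinaryData.toObject`. The same deserialization step, sketched with plain Jackson for reference (assumes Jackson 2.12+ and Java 16+ records; the payload is a made-up example of what the model might return):

```java
import com.fasterxml.jackson.databind.ObjectMapper;

// Minimal sketch of binding function-call arguments to a holder type.
public class ArgsDemo {
    // Jackson 2.12+ can bind JSON directly to a record's components.
    record WeatherLocation(String location, String unit) {}

    public static void main(String[] args) throws Exception {
        String json = "{\"location\":\"Boston, MA\",\"unit\":\"celsius\"}"; // example payload
        WeatherLocation loc = new ObjectMapper().readValue(json, WeatherLocation.class);
        System.out.println(loc.location() + " / " + loc.unit());
    }
}
```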
The test itself can easily time out, because the deadline duration is too short.
public void testDiscardOnTimeout() throws Exception { int timeoutMs = 5; int numAttempts = 7; int successfulAttempt = numAttempts - 1; List<Integer> completed = new CopyOnWriteArrayList<>(); List<Integer> discarded = new CopyOnWriteArrayList<>(); AtomicBoolean executionBlocked = new AtomicBoolean(true); Deadline deadline = Deadline.fromNow(Duration.ofMillis(timeoutMs * numAttempts * 2)); try (RetryingExecutor executor = new RetryingExecutor( numAttempts, createUnregisteredChangelogStorageMetricGroup().getAttemptsPerUpload())) { executor.execute( RetryPolicy.fixed(numAttempts, timeoutMs, 0), new RetriableAction<Integer>() { private final AtomicInteger attemptsCounter = new AtomicInteger(0); @Override public Integer tryExecute() throws Exception { int attempt = attemptsCounter.getAndIncrement(); if (attempt < successfulAttempt) { while (executionBlocked.get()) { Thread.sleep(10); } } return attempt; } @Override public void completeWithResult(Integer result) { completed.add(result); } @Override public void discardResult(Integer result) { discarded.add(result); } @Override public void handleFailure(Throwable throwable) {} }); while (completed.isEmpty() && deadline.hasTimeLeft()) { Thread.sleep(10); } executionBlocked.set(false); while (discarded.size() < successfulAttempt && deadline.hasTimeLeft()) { Thread.sleep(10); } } assertEquals(singletonList(successfulAttempt), completed); assertEquals( IntStream.range(0, successfulAttempt).boxed().collect(toList()), discarded.stream().sorted().collect(toList())); }
Deadline deadline = Deadline.fromNow(Duration.ofMillis(timeoutMs * numAttempts * 2));
public void testDiscardOnTimeout() throws Exception { int timeoutMs = 5; int numAttempts = 7; int successfulAttempt = numAttempts - 1; List<Integer> completed = new CopyOnWriteArrayList<>(); List<Integer> discarded = new CopyOnWriteArrayList<>(); AtomicBoolean executionBlocked = new AtomicBoolean(true); Deadline deadline = Deadline.fromNow(Duration.ofMinutes(5)); try (RetryingExecutor executor = new RetryingExecutor( numAttempts, createUnregisteredChangelogStorageMetricGroup().getAttemptsPerUpload())) { executor.execute( RetryPolicy.fixed(numAttempts, timeoutMs, 0), new RetriableAction<Integer>() { private final AtomicInteger attemptsCounter = new AtomicInteger(0); @Override public Integer tryExecute() throws Exception { int attempt = attemptsCounter.getAndIncrement(); if (attempt < successfulAttempt) { while (executionBlocked.get()) { Thread.sleep(10); } } return attempt; } @Override public void completeWithResult(Integer result) { completed.add(result); } @Override public void discardResult(Integer result) { discarded.add(result); } @Override public void handleFailure(Throwable throwable) {} }); while (completed.isEmpty() && deadline.hasTimeLeft()) { Thread.sleep(10); } executionBlocked.set(false); while (discarded.size() < successfulAttempt && deadline.hasTimeLeft()) { Thread.sleep(10); } } assertEquals(singletonList(successfulAttempt), completed); assertEquals( IntStream.range(0, successfulAttempt).boxed().collect(toList()), discarded.stream().sorted().collect(toList())); }
class RetryingExecutorTest { private static final ThrowingConsumer<Integer, Exception> FAILING_TASK = attempt -> { throw new IOException(); }; @Test public void testNoRetries() throws Exception { testPolicy(1, RetryPolicy.NONE, FAILING_TASK); } @Test public void testFixedRetryLimit() throws Exception { testPolicy(5, RetryPolicy.fixed(5, 0, 0), FAILING_TASK); } @Test public void testFixedRetrySuccess() throws Exception { int successfulAttempt = 3; int maxAttempts = successfulAttempt * 2; testPolicy( successfulAttempt, RetryPolicy.fixed(maxAttempts, 0, 0), attempt -> { if (attempt < successfulAttempt) { throw new IOException(); } }); } @Test public void testNonRetryableException() throws Exception { testPolicy( 1, RetryPolicy.fixed(Integer.MAX_VALUE, 0, 0), ignored -> { throw new RuntimeException(); }); } @Test public void testRetryDelay() throws Exception { int delayAfterFailure = 123; int numAttempts = 2; testPolicy( numAttempts, RetryPolicy.fixed(Integer.MAX_VALUE, 0, delayAfterFailure), a -> { if (a < numAttempts) { throw new IOException(); } }, new DirectScheduledExecutorService() { @Override public ScheduledFuture<?> schedule( Runnable command, long delay, TimeUnit unit) { assertEquals(delayAfterFailure, delay); command.run(); return CompletedScheduledFuture.create(null); } }); } @Test public void testNoRetryDelayIfTimeout() throws Exception { int delayAfterFailure = 123; int numAttempts = 2; testPolicy( numAttempts, RetryPolicy.fixed(Integer.MAX_VALUE, 0, delayAfterFailure), a -> { if (a < numAttempts) { throw new TimeoutException(); } }, new DirectScheduledExecutorService() { @Override public ScheduledFuture<?> schedule( Runnable command, long delay, TimeUnit unit) { fail("task should be executed directly without delay after timeout"); return CompletedScheduledFuture.create(null); } }); } @Test public void testTimeout() throws Exception { int numAttempts = 2; int timeout = 500; CompletableFuture<Long> firstStart = new CompletableFuture<>(); CompletableFuture<Long> secondStart = new CompletableFuture<>(); testPolicy( numAttempts, RetryPolicy.fixed(Integer.MAX_VALUE, timeout, 0), a -> { long now = System.nanoTime(); if (a < numAttempts) { firstStart.complete(now); secondStart.get(); } else { secondStart.complete(now); } }, Executors.newScheduledThreadPool(2)); assertEquals( timeout, ((double) secondStart.get() - firstStart.get()) / 1_000_000, timeout * 0.75d /* future completion can be delayed arbitrarily causing start delta be less than timeout */); } private void testPolicy( int expectedAttempts, RetryPolicy policy, ThrowingConsumer<Integer, Exception> task) throws Exception { testPolicy(expectedAttempts, policy, task, new DirectScheduledExecutorService()); } private void testPolicy( int expectedAttempts, RetryPolicy policy, ThrowingConsumer<Integer, Exception> task, ScheduledExecutorService scheduler) throws Exception { AtomicInteger attemptsMade = new AtomicInteger(0); CountDownLatch firstAttemptCompletedLatch = new CountDownLatch(1); try (RetryingExecutor executor = new RetryingExecutor( scheduler, createUnregisteredChangelogStorageMetricGroup().getAttemptsPerUpload())) { executor.execute( policy, runnableToAction( () -> { try { task.accept(attemptsMade.incrementAndGet()); } finally { firstAttemptCompletedLatch.countDown(); } })); firstAttemptCompletedLatch.await(); } assertEquals(expectedAttempts, attemptsMade.get()); } private static RetriableAction<?> runnableToAction(RunnableWithException action) { return new RetriableAction<Object>() { @Override public Object
tryExecute() throws Exception { action.run(); return null; } @Override public void completeWithResult(Object o) {} @Override public void discardResult(Object o) {} @Override public void handleFailure(Throwable throwable) {} }; } }
class RetryingExecutorTest { private static final ThrowingConsumer<Integer, Exception> FAILING_TASK = attempt -> { throw new IOException(); }; @Test public void testNoRetries() throws Exception { testPolicy(1, RetryPolicy.NONE, FAILING_TASK); } @Test public void testFixedRetryLimit() throws Exception { testPolicy(5, RetryPolicy.fixed(5, 0, 0), FAILING_TASK); } @Test public void testFixedRetrySuccess() throws Exception { int successfulAttempt = 3; int maxAttempts = successfulAttempt * 2; testPolicy( successfulAttempt, RetryPolicy.fixed(maxAttempts, 0, 0), attempt -> { if (attempt < successfulAttempt) { throw new IOException(); } }); } @Test public void testNonRetryableException() throws Exception { testPolicy( 1, RetryPolicy.fixed(Integer.MAX_VALUE, 0, 0), ignored -> { throw new RuntimeException(); }); } @Test public void testRetryDelay() throws Exception { int delayAfterFailure = 123; int numAttempts = 2; testPolicy( numAttempts, RetryPolicy.fixed(Integer.MAX_VALUE, 0, delayAfterFailure), a -> { if (a < numAttempts) { throw new IOException(); } }, new DirectScheduledExecutorService() { @Override public ScheduledFuture<?> schedule( Runnable command, long delay, TimeUnit unit) { assertEquals(delayAfterFailure, delay); command.run(); return CompletedScheduledFuture.create(null); } }); } @Test public void testNoRetryDelayIfTimeout() throws Exception { int delayAfterFailure = 123; int numAttempts = 2; testPolicy( numAttempts, RetryPolicy.fixed(Integer.MAX_VALUE, 0, delayAfterFailure), a -> { if (a < numAttempts) { throw new TimeoutException(); } }, new DirectScheduledExecutorService() { @Override public ScheduledFuture<?> schedule( Runnable command, long delay, TimeUnit unit) { fail("task should be executed directly without delay after timeout"); return CompletedScheduledFuture.create(null); } }); } @Test public void testTimeout() throws Exception { int numAttempts = 2; int timeout = 500; CompletableFuture<Long> firstStart = new CompletableFuture<>(); CompletableFuture<Long> secondStart = new CompletableFuture<>(); testPolicy( numAttempts, RetryPolicy.fixed(Integer.MAX_VALUE, timeout, 0), a -> { long now = System.nanoTime(); if (a < numAttempts) { firstStart.complete(now); secondStart.get(); } else { secondStart.complete(now); } }, Executors.newScheduledThreadPool(2)); assertEquals( timeout, ((double) secondStart.get() - firstStart.get()) / 1_000_000, timeout * 0.75d /* future completion can be delayed arbitrarily causing start delta be less than timeout */); } private void testPolicy( int expectedAttempts, RetryPolicy policy, ThrowingConsumer<Integer, Exception> task) throws Exception { testPolicy(expectedAttempts, policy, task, new DirectScheduledExecutorService()); } private void testPolicy( int expectedAttempts, RetryPolicy policy, ThrowingConsumer<Integer, Exception> task, ScheduledExecutorService scheduler) throws Exception { AtomicInteger attemptsMade = new AtomicInteger(0); CountDownLatch firstAttemptCompletedLatch = new CountDownLatch(1); try (RetryingExecutor executor = new RetryingExecutor( scheduler, createUnregisteredChangelogStorageMetricGroup().getAttemptsPerUpload())) { executor.execute( policy, runnableToAction( () -> { try { task.accept(attemptsMade.incrementAndGet()); } finally { firstAttemptCompletedLatch.countDown(); } })); firstAttemptCompletedLatch.await(); } assertEquals(expectedAttempts, attemptsMade.get()); } private static RetriableAction<?> runnableToAction(RunnableWithException action) { return new RetriableAction<Object>() { @Override public Object
tryExecute() throws Exception { action.run(); return null; } @Override public void completeWithResult(Object o) {} @Override public void discardResult(Object o) {} @Override public void handleFailure(Throwable throwable) {} }; } }
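The fix in this record replaces a deadline of `timeoutMs * numAttempts * 2` (70 ms in total) with a generous five minutes: the deadline only bounds the polling loops, while the assertions after the loops decide the outcome, so a slow CI machine cannot turn a passing test flaky. A self-contained sketch of that pattern using only the JDK (all names hypothetical):

```java
import java.time.Duration;
import java.time.Instant;
import java.util.concurrent.atomic.AtomicBoolean;

// Sketch of the "generous deadline" polling pattern: poll cheaply, bound
// the wait with a deadline far larger than the expected duration, and let
// the assertion (not the deadline) determine pass/fail.
public class PollUntil {
    public static void main(String[] args) throws InterruptedException {
        AtomicBoolean done = new AtomicBoolean(false);
        new Thread(() -> { sleepQuietly(50); done.set(true); }).start();

        Instant deadline = Instant.now().plus(Duration.ofMinutes(5)); // generous bound
        while (!done.get() && Instant.now().isBefore(deadline)) {
            Thread.sleep(10); // cheap poll
        }
        if (!done.get()) throw new AssertionError("condition not met in time");
        System.out.println("condition met");
    }

    private static void sleepQuietly(long ms) {
        try { Thread.sleep(ms); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
    }
}
```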
consider calling drain here to avoid returning a single frame even if multiple frames could be decoded
public int read(ByteBuffer dst) throws IOException { verifyHandshakeCompleted(); int bytesUnwrapped = applicationDataUnwrap(dst); if (bytesUnwrapped > 0) return bytesUnwrapped; int bytesRead = channelRead(); if (bytesRead == 0) return 0; return drain(dst); }
int bytesUnwrapped = applicationDataUnwrap(dst);
public int read(ByteBuffer dst) throws IOException { verifyHandshakeCompleted(); int bytesUnwrapped = drain(dst); if (bytesUnwrapped > 0) return bytesUnwrapped; int bytesRead = channelRead(); if (bytesRead == 0) return 0; return drain(dst); }
class TlsCryptoSocket implements CryptoSocket { private static final ByteBuffer NULL_BUFFER = ByteBuffer.allocate(0); private static final Logger log = Logger.getLogger(TlsCryptoSocket.class.getName()); private enum HandshakeState { NOT_STARTED, NEED_READ, NEED_WRITE, COMPLETED } private final SocketChannel channel; private final SSLEngine sslEngine; private final Buffer wrapBuffer; private final Buffer unwrapBuffer; private int sessionPacketBufferSize; private int sessionApplicationBufferSize; private ByteBuffer handshakeDummyBuffer; private HandshakeState handshakeState; public TlsCryptoSocket(SocketChannel channel, SSLEngine sslEngine) { this.channel = channel; this.sslEngine = sslEngine; SSLSession nullSession = sslEngine.getSession(); this.wrapBuffer = new Buffer(nullSession.getPacketBufferSize() * 2); this.unwrapBuffer = new Buffer(nullSession.getPacketBufferSize() * 2); this.handshakeDummyBuffer = ByteBuffer.allocate(nullSession.getApplicationBufferSize()); this.handshakeState = HandshakeState.NOT_STARTED; } @Override public SocketChannel channel() { return channel; } @Override public HandshakeResult handshake() throws IOException { HandshakeState newHandshakeState = processHandshakeState(this.handshakeState); log.fine(() -> String.format("Handshake state '%s -> %s'", this.handshakeState, newHandshakeState)); this.handshakeState = newHandshakeState; return toHandshakeResult(newHandshakeState); } private HandshakeState processHandshakeState(HandshakeState state) throws IOException { switch (state) { case NOT_STARTED: sslEngine.beginHandshake(); break; case NEED_WRITE: channelWrite(); break; case NEED_READ: channelRead(); break; case COMPLETED: return HandshakeState.COMPLETED; default: throw unhandledStateException(state); } while (true) { switch (sslEngine.getHandshakeStatus()) { case NOT_HANDSHAKING: if (wrapBuffer.bytes() > 0) return HandshakeState.NEED_WRITE; sslEngine.setEnableSessionCreation(false); handshakeDummyBuffer = null; SSLSession session = sslEngine.getSession(); sessionApplicationBufferSize = session.getApplicationBufferSize(); sessionPacketBufferSize = session.getPacketBufferSize(); return HandshakeState.COMPLETED; case NEED_TASK: sslEngine.getDelegatedTask().run(); break; case NEED_UNWRAP: if (wrapBuffer.bytes() > 0) return HandshakeState.NEED_WRITE; if (!handshakeUnwrap()) return HandshakeState.NEED_READ; break; case NEED_WRAP: if (!handshakeWrap()) return HandshakeState.NEED_WRITE; break; default: throw new IllegalStateException("Unexpected handshake status: " + sslEngine.getHandshakeStatus()); } } } private static HandshakeResult toHandshakeResult(HandshakeState state) { switch (state) { case NEED_READ: return HandshakeResult.NEED_READ; case NEED_WRITE: return HandshakeResult.NEED_WRITE; case COMPLETED: return HandshakeResult.DONE; default: throw unhandledStateException(state); } } @Override public int getMinimumReadBufferSize() { return sessionApplicationBufferSize; } @Override public int drain(ByteBuffer dst) throws IOException { verifyHandshakeCompleted(); int totalBytesUnwrapped = 0; int bytesUnwrapped; do { bytesUnwrapped = applicationDataUnwrap(dst); totalBytesUnwrapped += bytesUnwrapped; } while (bytesUnwrapped > 0); return totalBytesUnwrapped; } @Override public int write(ByteBuffer src) throws IOException { if (flush() == FlushResult.NEED_WRITE) return 0; int totalBytesWrapped = 0; int bytesWrapped; do { bytesWrapped = applicationDataWrap(src); totalBytesWrapped += bytesWrapped; } while (bytesWrapped > 0 && wrapBuffer.bytes() <
sessionPacketBufferSize); return totalBytesWrapped; } @Override public FlushResult flush() throws IOException { channelWrite(); return wrapBuffer.bytes() > 0 ? FlushResult.NEED_WRITE : FlushResult.DONE; } private boolean handshakeWrap() throws IOException { SSLEngineResult result = sslEngineWrap(NULL_BUFFER); switch (result.getStatus()) { case OK: return true; case BUFFER_OVERFLOW: return false; default: throw unexpectedStatusException(result.getStatus()); } } private int applicationDataWrap(ByteBuffer src) throws IOException { SSLEngineResult result = sslEngineWrap(src); if (result.getHandshakeStatus() != HandshakeStatus.NOT_HANDSHAKING) throw new SSLException("Renegotiation detected"); switch (result.getStatus()) { case OK: return result.bytesConsumed(); case BUFFER_OVERFLOW: return 0; default: throw unexpectedStatusException(result.getStatus()); } } private SSLEngineResult sslEngineWrap(ByteBuffer src) throws IOException { SSLEngineResult result = sslEngine.wrap(src, wrapBuffer.getWritable(sessionPacketBufferSize)); if (result.getStatus() == Status.CLOSED) throw new ClosedChannelException(); return result; } private boolean handshakeUnwrap() throws IOException { SSLEngineResult result = sslEngineUnwrap(handshakeDummyBuffer); switch (result.getStatus()) { case OK: if (result.bytesProduced() > 0) throw new SSLException("Got application data in handshake unwrap"); return true; case BUFFER_UNDERFLOW: return false; default: throw unexpectedStatusException(result.getStatus()); } } private int applicationDataUnwrap(ByteBuffer dst) throws IOException { SSLEngineResult result = sslEngineUnwrap(dst); if (result.getHandshakeStatus() != HandshakeStatus.NOT_HANDSHAKING) throw new SSLException("Renegotiation detected"); switch (result.getStatus()) { case OK: return result.bytesProduced(); case BUFFER_OVERFLOW: case BUFFER_UNDERFLOW: return 0; default: throw unexpectedStatusException(result.getStatus()); } } private SSLEngineResult sslEngineUnwrap(ByteBuffer dst) throws IOException { SSLEngineResult result = sslEngine.unwrap(unwrapBuffer.getReadable(), dst); if (result.getStatus() == Status.CLOSED) throw new ClosedChannelException(); return result; } private int channelRead() throws IOException { int read = channel.read(unwrapBuffer.getWritable(sessionPacketBufferSize)); if (read == -1) throw new ClosedChannelException(); return read; } private int channelWrite() throws IOException { return channel.write(wrapBuffer.getReadable()); } private static IllegalStateException unhandledStateException(HandshakeState state) { return new IllegalStateException("Unhandled state: " + state); } private static IllegalStateException unexpectedStatusException(Status status) { return new IllegalStateException("Unexpected status: " + status); } private void verifyHandshakeCompleted() throws SSLException { if (handshakeState != HandshakeState.COMPLETED) throw new SSLException("Handshake not completed: handshakeState=" + handshakeState); } }
class TlsCryptoSocket implements CryptoSocket { private static final ByteBuffer NULL_BUFFER = ByteBuffer.allocate(0); private static final Logger log = Logger.getLogger(TlsCryptoSocket.class.getName()); private enum HandshakeState { NOT_STARTED, NEED_READ, NEED_WRITE, COMPLETED } private final SocketChannel channel; private final SSLEngine sslEngine; private final Buffer wrapBuffer; private final Buffer unwrapBuffer; private int sessionPacketBufferSize; private int sessionApplicationBufferSize; private ByteBuffer handshakeDummyBuffer; private HandshakeState handshakeState; public TlsCryptoSocket(SocketChannel channel, SSLEngine sslEngine) { this.channel = channel; this.sslEngine = sslEngine; SSLSession nullSession = sslEngine.getSession(); this.wrapBuffer = new Buffer(nullSession.getPacketBufferSize() * 2); this.unwrapBuffer = new Buffer(nullSession.getPacketBufferSize() * 2); this.handshakeDummyBuffer = ByteBuffer.allocate(nullSession.getApplicationBufferSize()); this.handshakeState = HandshakeState.NOT_STARTED; } @Override public SocketChannel channel() { return channel; } @Override public HandshakeResult handshake() throws IOException { HandshakeState newHandshakeState = processHandshakeState(this.handshakeState); log.fine(() -> String.format("Handshake state '%s -> %s'", this.handshakeState, newHandshakeState)); this.handshakeState = newHandshakeState; return toHandshakeResult(newHandshakeState); } private HandshakeState processHandshakeState(HandshakeState state) throws IOException { switch (state) { case NOT_STARTED: sslEngine.beginHandshake(); break; case NEED_WRITE: channelWrite(); break; case NEED_READ: channelRead(); break; case COMPLETED: return HandshakeState.COMPLETED; default: throw unhandledStateException(state); } while (true) { switch (sslEngine.getHandshakeStatus()) { case NOT_HANDSHAKING: if (wrapBuffer.bytes() > 0) return HandshakeState.NEED_WRITE; sslEngine.setEnableSessionCreation(false); handshakeDummyBuffer = null; SSLSession session = sslEngine.getSession(); sessionApplicationBufferSize = session.getApplicationBufferSize(); sessionPacketBufferSize = session.getPacketBufferSize(); return HandshakeState.COMPLETED; case NEED_TASK: sslEngine.getDelegatedTask().run(); break; case NEED_UNWRAP: if (wrapBuffer.bytes() > 0) return HandshakeState.NEED_WRITE; if (!handshakeUnwrap()) return HandshakeState.NEED_READ; break; case NEED_WRAP: if (!handshakeWrap()) return HandshakeState.NEED_WRITE; break; default: throw new IllegalStateException("Unexpected handshake status: " + sslEngine.getHandshakeStatus()); } } } private static HandshakeResult toHandshakeResult(HandshakeState state) { switch (state) { case NEED_READ: return HandshakeResult.NEED_READ; case NEED_WRITE: return HandshakeResult.NEED_WRITE; case COMPLETED: return HandshakeResult.DONE; default: throw unhandledStateException(state); } } @Override public int getMinimumReadBufferSize() { return sessionApplicationBufferSize; } @Override public int drain(ByteBuffer dst) throws IOException { verifyHandshakeCompleted(); int totalBytesUnwrapped = 0; int bytesUnwrapped; do { bytesUnwrapped = applicationDataUnwrap(dst); totalBytesUnwrapped += bytesUnwrapped; } while (bytesUnwrapped > 0); return totalBytesUnwrapped; } @Override public int write(ByteBuffer src) throws IOException { if (flush() == FlushResult.NEED_WRITE) return 0; int totalBytesWrapped = 0; int bytesWrapped; do { bytesWrapped = applicationDataWrap(src); totalBytesWrapped += bytesWrapped; } while (bytesWrapped > 0 && wrapBuffer.bytes() <
sessionPacketBufferSize); return totalBytesWrapped; } @Override public FlushResult flush() throws IOException { channelWrite(); return wrapBuffer.bytes() > 0 ? FlushResult.NEED_WRITE : FlushResult.DONE; } private boolean handshakeWrap() throws IOException { SSLEngineResult result = sslEngineWrap(NULL_BUFFER); switch (result.getStatus()) { case OK: return true; case BUFFER_OVERFLOW: return false; default: throw unexpectedStatusException(result.getStatus()); } } private int applicationDataWrap(ByteBuffer src) throws IOException { SSLEngineResult result = sslEngineWrap(src); if (result.getHandshakeStatus() != HandshakeStatus.NOT_HANDSHAKING) throw new SSLException("Renegotiation detected"); switch (result.getStatus()) { case OK: return result.bytesConsumed(); case BUFFER_OVERFLOW: return 0; default: throw unexpectedStatusException(result.getStatus()); } } private SSLEngineResult sslEngineWrap(ByteBuffer src) throws IOException { SSLEngineResult result = sslEngine.wrap(src, wrapBuffer.getWritable(sessionPacketBufferSize)); if (result.getStatus() == Status.CLOSED) throw new ClosedChannelException(); return result; } private boolean handshakeUnwrap() throws IOException { SSLEngineResult result = sslEngineUnwrap(handshakeDummyBuffer); switch (result.getStatus()) { case OK: if (result.bytesProduced() > 0) throw new SSLException("Got application data in handshake unwrap"); return true; case BUFFER_UNDERFLOW: return false; default: throw unexpectedStatusException(result.getStatus()); } } private int applicationDataUnwrap(ByteBuffer dst) throws IOException { SSLEngineResult result = sslEngineUnwrap(dst); if (result.getHandshakeStatus() != HandshakeStatus.NOT_HANDSHAKING) throw new SSLException("Renegotiation detected"); switch (result.getStatus()) { case OK: return result.bytesProduced(); case BUFFER_OVERFLOW: case BUFFER_UNDERFLOW: return 0; default: throw unexpectedStatusException(result.getStatus()); } } private SSLEngineResult sslEngineUnwrap(ByteBuffer dst) throws IOException { SSLEngineResult result = sslEngine.unwrap(unwrapBuffer.getReadable(), dst); if (result.getStatus() == Status.CLOSED) throw new ClosedChannelException(); return result; } private int channelRead() throws IOException { int read = channel.read(unwrapBuffer.getWritable(sessionPacketBufferSize)); if (read == -1) throw new ClosedChannelException(); return read; } private int channelWrite() throws IOException { return channel.write(wrapBuffer.getReadable()); } private static IllegalStateException unhandledStateException(HandshakeState state) { return new IllegalStateException("Unhandled state: " + state); } private static IllegalStateException unexpectedStatusException(Status status) { return new IllegalStateException("Unexpected status: " + status); } private void verifyHandshakeCompleted() throws SSLException { if (handshakeState != HandshakeState.COMPLETED) throw new SSLException("Handshake not completed: handshakeState=" + handshakeState); } }
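Reduced to a generic shape, the suggestion in this record is: `read` should first drain every frame already sitting in the unwrap buffer and only touch the channel when that buffer is empty, so a single socket read can surface several decoded frames. A hypothetical sketch of the pattern; `decodeOneFrame` stands in for one `SSLEngine.unwrap` call and `fillFromChannel` for `channel.read`:

```java
import java.io.IOException;
import java.nio.ByteBuffer;

// Generic "drain before read" sketch, independent of TLS specifics.
abstract class FramedReader {
    // Decode at most one buffered frame into dst; return 0 if nothing is buffered.
    abstract int decodeOneFrame(ByteBuffer dst) throws IOException;
    // Pull more raw bytes from the channel; return 0 if nothing is available.
    abstract int fillFromChannel() throws IOException;

    final int drain(ByteBuffer dst) throws IOException {
        int total = 0;
        int n;
        while ((n = decodeOneFrame(dst)) > 0) total += n; // keep decoding buffered frames
        return total;
    }

    final int read(ByteBuffer dst) throws IOException {
        int drained = drain(dst);         // use what is already buffered first,
        if (drained > 0) return drained;  // possibly several frames at once
        if (fillFromChannel() == 0) return 0;
        return drain(dst);                // decode everything the read produced
    }
}
```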
Move this line to `else` block
public void createFunction(CreateFunctionStmt stmt) throws UserException { FunctionName name = stmt.getFunctionName(); InternalCatalog internalCatalog = getInternalCatalog(); if (SetType.GLOBAL.equals(stmt.getType())) { globalFunctionMgr.addFunction(stmt.getFunction(), stmt.isIfNotExists()); } else { Database db = internalCatalog.getDbOrDdlException(name.getDb()); db.addFunction(stmt.getFunction(), stmt.isIfNotExists()); } }
InternalCatalog internalCatalog = getInternalCatalog();
public void createFunction(CreateFunctionStmt stmt) throws UserException { if (SetType.GLOBAL.equals(stmt.getType())) { globalFunctionMgr.addFunction(stmt.getFunction(), stmt.isIfNotExists()); } else { Database db = getInternalCatalog().getDbOrDdlException(stmt.getFunctionName().getDb()); db.addFunction(stmt.getFunction(), stmt.isIfNotExists()); } }
class SingletonHolder { private static final Env INSTANCE = new Env(); }
class SingletonHolder { private static final Env INSTANCE = new Env(); }
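The review point here generalizes to scoping a lookup to the only branch that needs it, instead of performing it up front on every path. A tiny hypothetical sketch, with a `Supplier` making the laziness explicit:

```java
import java.util.function.Supplier;

// Hypothetical sketch: resolve the catalog only on the branch that uses it.
public class LazyBranch {
    static String handle(boolean global, Supplier<String> catalogLookup) {
        if (global) {
            return "global registry";             // catalogLookup never runs here
        } else {
            String catalog = catalogLookup.get(); // evaluated only when needed
            return "db from " + catalog;
        }
    }

    public static void main(String[] args) {
        // The throwing supplier proves the global branch never touches the catalog.
        System.out.println(handle(true, () -> { throw new IllegalStateException(); }));
        System.out.println(handle(false, () -> "internal-catalog"));
    }
}
```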
@SupunS No switch case means `( )` is considered to be `NIL_LITERAL` kind and `NULL` is considered to be `NULL_KEYWORD` kind, and they will have separate `parseNilLiteral()`, `parseNullKeyword()` methods?
private STNode parseNilLiteral() { STToken token = peek(); switch (token.kind) { case NULL_KEYWORD: STNode nullKeyword = consume(); STNode emptyNode = STNodeFactory.createEmptyNode(); return STNodeFactory.createNilLiteral(nullKeyword, emptyNode); case OPEN_PAREN_TOKEN: startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteral(openParenthesisToken, closeParenthesisToken); default: Solution sol = recover(token, ParserRuleContext.NIL_LITERAL); return sol.recoveredNode; } }
}
private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); }
class BallerinaParser { private final BallerinaParserErrorHandler errorHandler; private final AbstractTokenReader tokenReader; private ParserRuleContext currentParamKind = ParserRuleContext.REQUIRED_PARAM; protected BallerinaParser(AbstractTokenReader tokenReader) { this.tokenReader = tokenReader; this.errorHandler = new BallerinaParserErrorHandler(tokenReader, this); } /** * Start parsing the given input. * * @return Parsed node */ public STNode parse() { return parseCompUnit(); } /** * Resume the parsing from the given context. * * @param context Context to resume parsing * @param args Arguments that requires to continue parsing from the given parser context * @return Parsed node */ public STNode resumeParsing(ParserRuleContext context, Object... args) { switch (context) { case COMP_UNIT: return parseCompUnit(); case EXTERNAL_FUNC_BODY: return parseExternalFunctionBody(); case FUNC_BODY: return parseFunctionBody(); case OPEN_BRACE: return parseOpenBrace(); case CLOSE_BRACE: return parseCloseBrace(); case FUNC_NAME: return parseFunctionName(); case OPEN_PARENTHESIS: return parseOpenParenthesis(); case PARAM_LIST: return parseParamList(); case RETURN_TYPE_DESCRIPTOR: return parseReturnTypeDescriptor(); case SIMPLE_TYPE_DESCRIPTOR: return parseTypeDescriptor(); case ASSIGN_OP: return parseAssignOp(); case EXTERNAL_KEYWORD: return parseExternalKeyword(); case FUNC_BODY_BLOCK: return parseFunctionBodyBlock(); case SEMICOLON: return parseSemicolon(); case CLOSE_PARENTHESIS: return parseCloseParenthesis(); case VARIABLE_NAME: return parseVariableName(); case EXPRESSION: return parseExpression(); case STATEMENT: return parseStatement(); case STATEMENT_WITHOUT_ANNOTS: return parseStatement((STNode) args[0]); case ASSIGNMENT_STMT: return parseAssignmentStmt(); case EXPRESSION_RHS: return parseExpressionRhs((STNode) args[0]); case PARAMETER: return parseParameter((STNode) args[0], (int) args[1]); case PARAMETER_WITHOUT_ANNOTS: return parseParamGivenAnnots((STNode) args[0], (STNode) args[1], (int) args[2]); case AFTER_PARAMETER_TYPE: return parseAfterParamType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case PARAMETER_RHS: return parseParameterRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case TOP_LEVEL_NODE: return parseTopLevelNode(); case TOP_LEVEL_NODE_WITHOUT_METADATA: return parseTopLevelNode((STNode) args[0]); case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return parseTopLevelNode((STNode) args[0], (STNode) args[1]); case STATEMENT_START_IDENTIFIER: return parseStatementStartIdentifier(); case VAR_DECL_STMT_RHS: return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (boolean) args[4]); case ASSIGNMENT_OR_VAR_DECL_STMT: return parseAssignmentOrVarDecl(); case ASSIGNMENT_OR_VAR_DECL_STMT_RHS: return parseAssignmentOrVarDeclRhs((STNode) args[0]); case TYPE_REFERENCE: return parseTypeReference(); case FIELD_DESCRIPTOR_RHS: return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]); case NAMED_OR_POSITIONAL_ARG_RHS: return parseNamedOrPositionalArg((STNode) args[0]); case RECORD_BODY_END: return parseRecordBodyCloseDelimiter(); case RECORD_BODY_START: return parseRecordBodyStartDelimiter(); case TYPE_DESCRIPTOR: return parseTypeDescriptor(); case OBJECT_MEMBER: return parseObjectMember(); case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]); case OBJECT_FIELD_RHS: return parseObjectFieldRhs((STNode) 
args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case OBJECT_TYPE_FIRST_QUALIFIER: return parseObjectTypeQualifiers(); case OBJECT_TYPE_SECOND_QUALIFIER: return parseObjectTypeSecondQualifier((STNode) args[0]); case OBJECT_KEYWORD: return parseObjectKeyword(); case TYPE_NAME: return parseTypeName(); case IF_KEYWORD: return parseIfKeyword(); case ELSE_KEYWORD: return parseElseKeyword(); case ELSE_BODY: return parseElseBody(); case WHILE_KEYWORD: return parseWhileKeyword(); case BOOLEAN_LITERAL: return parseBooleanLiteral(); case PANIC_KEYWORD: return parsePanicKeyword(); case MAJOR_VERSION: return parseMajorVersion(); case IMPORT_DECL_RHS: return parseImportDecl((STNode) args[0], (STNode) args[1]); case IMPORT_PREFIX: return parseImportPrefix(); case IMPORT_MODULE_NAME: case IMPORT_ORG_OR_MODULE_NAME: case VARIABLE_REF: case FIELD_OR_FUNC_NAME: case SERVICE_NAME: return parseIdentifier(context); case IMPORT_KEYWORD: return parseImportKeyword(); case SLASH: return parseSlashToken(); case DOT: return parseDotToken(); case IMPORT_VERSION_DECL: return parseVersion(); case VERSION_KEYWORD: return parseVersionKeywrod(); case VERSION_NUMBER: return parseVersionNumber(); case DECIMAL_INTEGER_LITERAL: return parseDecimalIntLiteral(context); case IMPORT_SUB_VERSION: return parseSubVersion(context); case IMPORT_PREFIX_DECL: return parseImportPrefixDecl(); case AS_KEYWORD: return parseAsKeyword(); case CONTINUE_KEYWORD: return parseContinueKeyword(); case BREAK_KEYWORD: return parseBreakKeyword(); case RETURN_KEYWORD: return parseReturnKeyword(); case MAPPING_FIELD: return parseMappingField((STNode) args[0]); case SPECIFIC_FIELD_RHS: return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]); case STRING_LITERAL: return parseStringLiteral(); case COLON: return parseColon(); case OPEN_BRACKET: return parseOpenBracket(); case RESOURCE_DEF: return parseResource(); case OPTIONAL_SERVICE_NAME: return parseServiceName(); case SERVICE_KEYWORD: return parseServiceKeyword(); case ON_KEYWORD: return parseOnKeyword(); case RESOURCE_KEYWORD: return parseResourceKeyword(); case LISTENER_KEYWORD: return parseListenerKeyword(); case NIL_TYPE_DESCRIPTOR: return parseNilTypeDescriptor(); case COMPOUND_ASSIGNMENT_STMT: return parseCompoundAssignmentStmt(); case TYPEOF_KEYWORD: return parseTypeofKeyword(); case ANNOT_REFERENCE: return parseIdentifier(context); case IS_KEYWORD: return parseIsKeyword(); case NIL_LITERAL: return parseNilLiteral(); default: throw new IllegalStateException("Cannot re-parse rule: " + context); } } /** * Start parsing the input from a given context. 
Supported starting points are: * <ul> * <li>Module part (a file)</li> * <li>Top level node</li> * <li>Statement</li> * <li>Expression</li> * </ul> * * @param context Context to start parsing * @return Parsed node */ public STNode parse(ParserRuleContext context) { switch (context) { case COMP_UNIT: return parseCompUnit(); case TOP_LEVEL_NODE: startContext(ParserRuleContext.COMP_UNIT); return parseTopLevelNode(); case STATEMENT: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); return parseStatement(); case EXPRESSION: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); startContext(ParserRuleContext.STATEMENT); return parseExpression(); default: throw new UnsupportedOperationException("Cannot start parsing from: " + context); } } /* * Private methods */ private STToken peek() { return this.tokenReader.peek(); } private STToken peek(int k) { return this.tokenReader.peek(k); } private STToken consume() { return this.tokenReader.read(); } private Solution recover(STToken token, ParserRuleContext currentCtx, Object... parsedNodes) { return this.errorHandler.recover(currentCtx, token, parsedNodes); } private void startContext(ParserRuleContext context) { this.errorHandler.startContext(context); } private void endContext() { this.errorHandler.endContext(); } /** * Switch the current context to the provided one. This will replace the * existing context. * * @param context Context to switch to. */ private void switchContext(ParserRuleContext context) { this.errorHandler.switchContext(context); } /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. * * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); STToken token = peek(); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(token.kind); if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { otherDecls.add(decl); this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations"); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePart(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. 
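 * <p>
 * Like most <code>parseX</code> methods in this class, it peeks at the next token kind,
 * dispatches on it, and delegates anything unexpected to the error handler. The shared shape,
 * sketched with an illustrative rule name (<code>SOME_RULE</code> is not a real context):
 * </p>
 * <code>
 * switch (tokenKind) {<br/>
 * // one case per token kind that can start the construct<br/>
 * default:<br/>
 * Solution solution = recover(peek(), ParserRuleContext.SOME_RULE);<br/>
 * if (solution.action == Action.REMOVE) { return solution.recoveredNode; }<br/>
 * return parseSomeRule(solution.tokenKind); // retry with the recovered token kind<br/>
 * }<br/>
 * </code>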
* * @return Parsed node */ private STNode parseTopLevelNode() { STToken token = peek(); return parseTopLevelNode(token.kind); } protected STNode parseTopLevelNode(SyntaxKind tokenKind) { STNode metadata; switch (tokenKind) { case EOF_TOKEN: return consume(); case HASH_TOKEN: case AT_TOKEN: metadata = parseMetaData(tokenKind); return parseTopLevelNode(metadata); case IMPORT_KEYWORD: case SERVICE_KEYWORD: case FINAL_KEYWORD: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case SIMPLE_TYPE: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: metadata = createEmptyMetadata(); break; case IDENTIFIER_TOKEN: if (isVarDeclStart(1)) { return parseModuleVarDecl(createEmptyMetadata(), null); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind); } return parseTopLevelNode(tokenKind, metadata); } /** * Parse top level node having an optional modifier preceding it, given the parsed metadata. * * @param metadata Metadata that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata) { STToken nextToken = peek(); return parseTopLevelNode(nextToken.kind, metadata); } private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) { STNode qualifier = null; switch (tokenKind) { case EOF_TOKEN: if (metadata != null) { this.errorHandler.reportInvalidNode(null, "invalid metadata"); } return consume(); case PUBLIC_KEYWORD: qualifier = parseQualifier(); tokenKind = peek().kind; break; case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case IMPORT_KEYWORD: case SIMPLE_TYPE: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case SERVICE_KEYWORD: case OPEN_PAREN_TOKEN: break; case IDENTIFIER_TOKEN: if (isVarDeclStart(1)) { return parseModuleVarDecl(metadata, null); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind, metadata); } return parseTopLevelNode(tokenKind, metadata, qualifier); } /** * Check whether the cursor is at the start of a module level var-decl. * * @param lookahead Offset of the token to check * @return <code>true</code> if the cursor is at the start of a module level var-decl. * <code>false</code> otherwise. */ private boolean isVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case QUESTION_MARK_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } default: return false; } } /** * Parse import declaration.
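 * For example, under the grammar below, <code>import myorg/foo.bar version 1.2.3 as fb;</code>
 * (hypothetical names) yields org-name <code>myorg</code>, module-name <code>foo.bar</code>,
 * version <code>1.2.3</code> and import-prefix <code>fb</code>.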
* <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.switchMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STToken token = peek(); STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier); this.tokenReader.resetMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD); return sol.recoveredNode; } } /** * Parse identifier. * * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl. * * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); return parseImportDecl(nextToken.kind, importKeyword, identifier); } private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) { STNode orgName; STNode moduleName; STNode version; STNode alias; switch (tokenKind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgName(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportDecl(solution.tokenKind, importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclaration(importKeyword, orgName, moduleName, version, alias, semicolon); } /** * parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SLASH); return sol.recoveredNode; } } /** * Parse dot token. 
* * @return Parsed node */ private STNode parseDotToken() { STToken nextToken = peek(); return parseDotToken(nextToken.kind); } private STNode parseDotToken(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.DOT_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.DOT); return sol.recoveredNode; } } /** * Parse the module name of an import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(peek().kind, moduleNameStart); } /** * Parse the module name of an import declaration, given the module name start identifier. * * @param moduleNameStart Starting identifier of the module name * @return Parsed node */ private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); STNode identifier; STNode dotToken; while (!isEndOfImportModuleName(nextTokenKind)) { dotToken = parseDotToken(); identifier = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); STNode moduleNamePart = STNodeFactory.createSubModuleName(dotToken, identifier); moduleNameParts.add(moduleNamePart); nextTokenKind = peek().kind; } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) { return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: return true; default: return false; } } /** * Parse the version component of an import declaration. * <p> * <code>version-decl := version sem-ver</code> * * @return Parsed node */ private STNode parseVersion() { STToken nextToken = peek(); return parseVersion(nextToken.kind); } private STNode parseVersion(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case VERSION_KEYWORD: STNode versionKeyword = parseVersionKeywrod(); STNode versionNumber = parseVersionNumber(); return STNodeFactory.createImportVersion(versionKeyword, versionNumber); case AS_KEYWORD: case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersion(solution.tokenKind); } } /** * Parse the version keyword. * * @return Parsed node */ private STNode parseVersionKeywrod() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD); return sol.recoveredNode; } } /** * Parse version number. * <p> * <code>sem-ver := major-num [. minor-num [.
patch-num]] * <br/> * major-num := DecimalNumber * <br/> * minor-num := DecimalNumber * <br/> * patch-num := DecimalNumber * </code> * * @return Parsed node */ private STNode parseVersionNumber() { STToken nextToken = peek(); return parseVersionNumber(nextToken.kind); } private STNode parseVersionNumber(SyntaxKind nextTokenKind) { STNode majorVersion; switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL: majorVersion = parseMajorVersion(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersionNumber(solution.tokenKind); } List<STNode> versionParts = new ArrayList<>(); versionParts.add(majorVersion); STNode minorVersion = parseMinorVersion(); if (minorVersion != null) { versionParts.add(minorVersion); STNode patchVersion = parsePatchVersion(); if (patchVersion != null) { versionParts.add(patchVersion); } } return STNodeFactory.createNodeList(versionParts); } private STNode parseMajorVersion() { return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION); } private STNode parseMinorVersion() { return parseSubVersion(ParserRuleContext.MINOR_VERSION); } private STNode parsePatchVersion() { return parseSubVersion(ParserRuleContext.PATCH_VERSION); } /** * Parse decimal literal. * * @param context Context in which the decimal literal is used. * @return Parsed node */ private STNode parseDecimalIntLiteral(ParserRuleContext context) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) { return consume(); } else { Solution sol = recover(peek(), context); return sol.recoveredNode; } } /** * Parse sub version. i.e: minor-version/patch-version. * * @param context Context indicating what kind of sub-version is being parsed. * @return Parsed node */ private STNode parseSubVersion(ParserRuleContext context) { STToken nextToken = peek(); return parseSubVersion(nextToken.kind, context); } private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) { switch (nextTokenKind) { case AS_KEYWORD: case SEMICOLON_TOKEN: return null; case DOT_TOKEN: STNode leadingDot = parseDotToken(); STNode versionNumber = parseDecimalIntLiteral(context); return STNodeFactory.createImportSubVersion(leadingDot, versionNumber); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSubVersion(solution.tokenKind, context); } } /** * Parse import prefix declaration. * <p> * <code>import-prefix-decl := as import-prefix * <br/> * import-prefix := a identifier | _ * </code> * * @return Parsed node */ private STNode parseImportPrefixDecl() { STToken token = peek(); return parseImportPrefixDecl(token.kind); } private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case AS_KEYWORD: STNode asKeyword = parseAsKeyword(); STNode prefix = parseImportPrefix(); return STNodeFactory.createImportPrefix(asKeyword, prefix); case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportPrefixDecl(solution.tokenKind); } } /** * Parse <code>as</code> keyword. 
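 * <p>
 * This follows the parser's standard single-token shape: if the next token is already
 * <code>as</code> it is consumed, otherwise the error handler decides whether to insert a
 * missing token or to remove the unexpected one and retry.
 * </p>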
* * @return Parsed node */ private STNode parseAsKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AS_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD); return sol.recoveredNode; } } /** * Parse import prefix. * * @return Parsed node */ private STNode parseImportPrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX); return sol.recoveredNode; } } /** * Parse top level node, given the modifier that precedes it. * * @param qualifier Qualifier that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata, STNode qualifier) { STToken token = peek(); return parseTopLevelNode(token.kind, metadata, qualifier); } /** * Parse top level node given the next token kind and the modifier that precedes it. * * @param tokenKind Next token kind * @param qualifier Qualifier that precedes the top level node * @return Parsed top-level node */ private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) { switch (tokenKind) { case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, getQualifier(qualifier)); case TYPE_KEYWORD: return parseModuleTypeDefinition(metadata, getQualifier(qualifier)); case LISTENER_KEYWORD: return parseListenerDeclaration(metadata, getQualifier(qualifier)); case CONST_KEYWORD: return parseConstantDeclaration(metadata, getQualifier(qualifier)); case IMPORT_KEYWORD: reportInvalidQualifier(qualifier); return parseImportDecl(); case FINAL_KEYWORD: reportInvalidQualifier(qualifier); STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(metadata, finalKeyword, true); case SERVICE_KEYWORD: if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) { reportInvalidQualifier(qualifier); return parseServiceDecl(metadata); } return parseModuleVarDecl(metadata, qualifier); case SIMPLE_TYPE: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: return parseModuleVarDecl(metadata, qualifier); case IDENTIFIER_TOKEN: if (isVarDeclStart(1)) { return parseModuleVarDecl(metadata, qualifier); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind, metadata, qualifier); } } private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) { reportInvalidQualifier(qualifier); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(metadata, finalKeyword, true); } private STNode getQualifier(STNode qualifier) { return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier; } private void reportInvalidQualifier(STNode qualifier) { if (qualifier != null && qualifier.kind != SyntaxKind.NONE) { this.errorHandler.reportInvalidNode((STToken) qualifier, "invalid qualifier '" + qualifier.toString().trim() + "'"); } } /** * Parse access modifiers. * * @return Parsed node */ private STNode parseQualifier() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse function definition. A function definition has the following structure. 
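 * For instance, in <code>public function greet(string name) returns string { ... }</code>
 * (a hypothetical source function) the pieces map to the visibility qualifier, the
 * <code>function</code> keyword, the name, the parenthesized parameter list, the
 * return-type-descriptor and the body.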
* </p> * <code> * function-defn := FUNCTION identifier function-signature function-body * </code> * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @return Parsed node */ private STNode parseFunctionDefinition(STNode metadata, STNode visibilityQualifier) { startContext(ParserRuleContext.FUNC_DEFINITION); STNode functionKeyword = parseFunctionKeyword(); STNode name = parseFunctionName(); STNode openParenthesis = parseOpenParenthesis(); STNode parameters = parseParamList(); STNode closeParenthesis = parseCloseParenthesis(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode body = parseFunctionBody(); endContext(); return STNodeFactory.createFunctionDefinition(metadata, visibilityQualifier, functionKeyword, name, openParenthesis, parameters, closeParenthesis, returnTypeDesc, body); } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD); return sol.recoveredNode; } } /** * Parse function name. * * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNC_NAME); return sol.recoveredNode; } } /** * Parse open parenthesis. * * @return Parsed node */ private STNode parseOpenParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_PARENTHESIS); return sol.recoveredNode; } } /** * Parse close parenthesis. * * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return sol.recoveredNode; } } /** * <p> * Parse parameter list. * </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... [param-name] * <br/><br/> * param-name := identifier * </code> * * @return Parsed node */ private STNode parseParamList() { startContext(ParserRuleContext.PARAM_LIST); ArrayList<STNode> paramsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } STNode startingComma = STNodeFactory.createEmptyNode(); this.currentParamKind = ParserRuleContext.REQUIRED_PARAM; paramsList.add(parseParameter(startingComma)); token = peek(); while (!isEndOfParametersList(token.kind)) { STNode leadingComma = parseComma(); STNode param = parseParameter(leadingComma); paramsList.add(param); token = peek(); } STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } /** * Parse a single parameter. 
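 * (For example, given a hypothetical signature <code>function f(int a, int b = 0, int... c)</code>,
 * this method is invoked once per parameter: for <code>a</code>, <code>b</code> and
 * <code>c</code> in turn.)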
Parameter can be a required parameter, a defaultable * parameter, or a rest parameter. * * @param leadingComma Comma that occurs before the param * @return Parsed node */ private STNode parseParameter(STNode leadingComma) { STToken token = peek(); if (this.currentParamKind == ParserRuleContext.REST_PARAM) { this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter"); startContext(ParserRuleContext.REQUIRED_PARAM); } else { startContext(this.currentParamKind); } return parseParameter(token.kind, leadingComma, 1); } private STNode parseParameter(STNode leadingComma, int nextTokenOffset) { return parseParameter(peek().kind, leadingComma, nextTokenOffset); } private STNode parseParameter(SyntaxKind nextTokenKind, STNode leadingComma, int nextTokenOffset) { STNode annots; switch (nextTokenKind) { case AT_TOKEN: annots = parseAnnotations(nextTokenKind); nextTokenKind = peek().kind; break; case PUBLIC_KEYWORD: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: annots = STNodeFactory.createNodeList(new ArrayList<>()); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); STNode qualifier = STNodeFactory.createEmptyNode(); return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER, leadingComma, nextTokenOffset); if (solution.action == Action.KEEP) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameter(solution.tokenKind, leadingComma, 0); } return parseParamGivenAnnots(nextTokenKind, leadingComma, annots, 1); } private STNode parseParamGivenAnnots(STNode leadingComma, STNode annots, int nextNextTokenOffset) { return parseParamGivenAnnots(peek().kind, leadingComma, annots, nextNextTokenOffset); } private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, STNode leadingComma, STNode annots, int nextTokenOffset) { STNode qualifier; switch (nextTokenKind) { case PUBLIC_KEYWORD: qualifier = parseQualifier(); break; case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: qualifier = STNodeFactory.createEmptyNode(); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { qualifier = STNodeFactory.createEmptyNode(); break; } case AT_TOKEN: default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, leadingComma, annots, nextTokenOffset); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParamGivenAnnots(solution.tokenKind, leadingComma, annots, 0); } return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } private STNode parseParamGivenAnnotsAndQualifier(STNode leadingComma, STNode annots, STNode qualifier) { STNode type = parseTypeDescriptor(); STNode param = parseAfterParamType(leadingComma, annots, qualifier, type); endContext(); return param; } /** * Check whether the cursor is at the start of a parameter that doesn't have annotations. * * @param tokenOffset Offset of the token to check * @return <code>true</code> if the cursor is at the start of a parameter. <code>false</code> otherwise. 
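 * <p>
 * For example, with the cursor on <code>Person</code> in <code>(Person p)</code>, the
 * identifier <code>p</code> that follows marks a parameter start, so this returns
 * <code>true</code>; a leading <code>@</code> would instead be handled via annotation parsing.
 * </p>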
*/ private boolean isParamWithoutAnnotStart(int tokenOffset) { STToken nextToken = peek(tokenOffset + 1); switch (nextToken.kind) { case PUBLIC_KEYWORD: return isParamWithoutAnnotStart(tokenOffset + 1); case ELLIPSIS_TOKEN: return true; case IDENTIFIER_TOKEN: return true; default: return false; } } private STNode parseAfterParamType(STNode leadingComma, STNode annots, STNode qualifier, STNode type) { STToken token = peek(); return parseAfterParamType(token.kind, leadingComma, annots, qualifier, type); } private STNode parseAfterParamType(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type) { switch (tokenKind) { case ELLIPSIS_TOKEN: this.currentParamKind = ParserRuleContext.REST_PARAM; switchContext(ParserRuleContext.REST_PARAM); reportInvalidQualifier(qualifier); STNode ellipsis = parseEllipsis(); STNode paramName = parseVariableName(); return STNodeFactory.createRestParameter(leadingComma, annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(leadingComma, annots, qualifier, type, paramName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, leadingComma, annots, qualifier, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAfterParamType(solution.tokenKind, leadingComma, annots, qualifier, type); } } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELLIPSIS); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a required/defaultable parameter. * </p> * <code>parameter-rhs := [= expression]</code> * * @param leadingComma Comma that precedes this parameter * @param annots Annotations attached to the parameter * @param qualifier Visibility qualifier * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { STToken token = peek(); return parseParameterRhs(token.kind, leadingComma, annots, qualifier, type, paramName); } private STNode parseParameterRhs(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { if (isEndOfParameter(tokenKind)) { if (this.currentParamKind == ParserRuleContext.DEFAULTABLE_PARAM) { this.errorHandler.reportInvalidNode(peek(), "cannot have a required parameter after a defaultable parameter"); } return STNodeFactory.createRequiredParameter(leadingComma, annots, qualifier, type, paramName); } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) { if (this.currentParamKind == ParserRuleContext.REQUIRED_PARAM) { this.currentParamKind = ParserRuleContext.DEFAULTABLE_PARAM; switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameter(leadingComma, annots, qualifier, type, paramName, equal, expr); } else { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_RHS, leadingComma, annots, qualifier, type, paramName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameterRhs(solution.tokenKind, leadingComma, annots, qualifier, type, paramName); } } /** * Parse comma. 
* * @return Parsed node */ private STNode parseComma() { STToken token = peek(); if (token.kind == SyntaxKind.COMMA_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMMA); return sol.recoveredNode; } } /** * Check whether the given token is an end of a parameter. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise */ private boolean isEndOfParameter(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case AT_TOKEN: return true; default: return false; } } /** * Check whether the given token is an end of a parameter-list. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise */ private boolean isEndOfParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case OPEN_BRACE_TOKEN: return true; default: return false; } } /** * Parse return type descriptor of a function. A return type descriptor has the following structure. * * <code>return-type-descriptor := [ returns annots type-descriptor ]</code> * * @return Parsed node */ private STNode parseReturnTypeDescriptor() { startContext(ParserRuleContext.RETURN_TYPE_DESCRIPTOR); STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { endContext(); return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseAnnotations(); STNode type = parseTypeDescriptor(); endContext(); return STNodeFactory.createReturnTypeDescriptor(returnsKeyword, annot, type); } /** * <p> * Parse a type descriptor. A type descriptor has the following structure. * </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor() { STToken token = peek(); STNode type = parseTypeDescriptor(token.kind); STToken nextToken = peek(); switch (nextToken.kind) { case QUESTION_MARK_TOKEN: return parseOptionalTypeDescriptor(type); default: return type; } } /** * <p> * Parse a type descriptor, given the next token kind. 
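 * For example, <code>record</code> starts a record type, <code>object</code> (optionally
 * preceded by <code>abstract</code> or <code>client</code>) starts an object type, an open
 * parenthesis starts the nil type <code>()</code>, and a plain identifier is taken as a
 * type reference.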
* </p> * If the preceding token is <code>?</code> then it is an optional type descriptor * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseTypeDescriptor(SyntaxKind tokenKind) { switch (tokenKind) { case SIMPLE_TYPE: case SERVICE_KEYWORD: return parseSimpleTypeDescriptor(); case IDENTIFIER_TOKEN: return parseTypeReference(); case RECORD_KEYWORD: return parseRecordTypeDescriptor(); case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: return parseObjectTypeDescriptor(); case OPEN_PAREN_TOKEN: return parseNilTypeDescriptor(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTypeDescriptor(solution.tokenKind); } } /** * Parse simple type descriptor. * * @return Parsed node */ private STNode parseSimpleTypeDescriptor() { STToken node = peek(); switch (node.kind) { case SIMPLE_TYPE: case SERVICE_KEYWORD: return consume(); default: Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR); return sol.recoveredNode; } } /** * <p> * Parse function body. A function body has the following structure. * </p> * <code> * function-body := function-body-block | external-function-body * external-function-body := = annots external ; * function-body-block := { [default-worker-init, named-worker-decl+] default-worker } * </code> * * @return Parsed node */ private STNode parseFunctionBody() { STToken token = peek(); return parseFunctionBody(token.kind); } /** * Parse function body, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ protected STNode parseFunctionBody(SyntaxKind tokenKind) { switch (tokenKind) { case EQUAL_TOKEN: return parseExternalFunctionBody(); case OPEN_BRACE_TOKEN: return parseFunctionBodyBlock(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNC_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.tokenKind == SyntaxKind.NONE) { return STNodeFactory.createMissingToken(solution.tokenKind); } return parseFunctionBody(solution.tokenKind); } } /** * <p> * Parse function body block. A function body block has the following structure. * </p> * * <code> * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/> * default-worker-init := sequence-stmt<br/> * default-worker := sequence-stmt<br/> * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/> * worker-name := identifier<br/> * </code> * * @return Parsed node */ private STNode parseFunctionBodyBlock() { startContext(ParserRuleContext.FUNC_BODY_BLOCK); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatement(openBrace, stmts, closeBrace); } /** * Check whether the given token is an end of a block. * * @param tokenKind STToken to check * @return <code>true</code> if the token represents an end of a block. 
<code>false</code> otherwise */ private boolean isEndOfBlockNode(SyntaxKind tokenKind) { return isEndOfBlockNode(tokenKind, 1); } private boolean isEndOfBlockNode(SyntaxKind tokenKind, int lookahead) { switch (tokenKind) { case EOF_TOKEN: case HASH_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case FUNCTION_KEYWORD: case IMPORT_KEYWORD: case ELSE_KEYWORD: case RESOURCE_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.STATEMENT, lookahead); case AT_TOKEN: lookahead = getNumberOfTokensToAnnotsEnd(); return isEndOfBlockNode(peek(lookahead).kind, lookahead); default: return false; } } private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) { STToken nexNextToken = peek(2); switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 1); default: switch (nexNextToken.kind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 2); default: return false; } } } private boolean isEndOfObjectTypeNode(SyntaxKind tokenKind, SyntaxKind nextNextTokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1); default: switch (nextNextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 2); default: return false; } } } /** * Parse type reference or variable reference. * * @return Parsed node */ private STNode parseStatementStartIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName() { STToken token = peek(); return parseVariableName(token.kind); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME); return sol.recoveredNode; } } /** * Parse open brace. * * @return Parsed node */ private STNode parseOpenBrace() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACE); return sol.recoveredNode; } } /** * Parse close brace. * * @return Parsed node */ private STNode parseCloseBrace() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE); return sol.recoveredNode; } } /** * <p> * Parse external function body. An external function body has the following structure. 
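 * For example, <code>function toUpper(string s) returns string = external;</code> is an
 * external function definition whose body consists only of <code>= external;</code>,
 * optionally with annotations between the two tokens, as the grammar below shows.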
* </p> * <code> * external-function-body := = annots external ; * </code> * * @return Parsed node */ private STNode parseExternalFunctionBody() { startContext(ParserRuleContext.EXTERNAL_FUNC_BODY); STNode assign = parseAssignOp(); STNode annotation = parseAnnotations(); STNode externalKeyword = parseExternalKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExternalFunctionBody(assign, annotation, externalKeyword, semicolon); } /** * Parse semicolon. * * @return Parsed node */ private STNode parseSemicolon() { STToken token = peek(); if (token.kind == SyntaxKind.SEMICOLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SEMICOLON); return sol.recoveredNode; } } /** * Parse <code>external</code> keyword. * * @return Parsed node */ private STNode parseExternalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD); return sol.recoveredNode; } } /* * Operators */ /** * Parse assign operator. * * @return Parsed node */ private STNode parseAssignOp() { STToken token = peek(); if (token.kind == SyntaxKind.EQUAL_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ASSIGN_OP); return sol.recoveredNode; } } /** * Parse binary operator. * * @return Parsed node */ private STNode parseBinaryOperator() { STToken token = peek(); if (isBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case EQUAL_GT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: return true; default: return false; } } /** * Get the precedence of a given operator. 
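 * <p>
 * For example, in <code>a + b * c</code> the multiplicative <code>*</code> has higher
 * precedence than the additive <code>+</code>, so the expression is grouped as
 * <code>a + (b * c)</code>; <code>parseExpression</code> compares these levels to decide
 * how far the right-hand side of an operator may extend.
 * </p>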
* * @param binaryOpKind Operator kind * @return Precedence of the given operator */ private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) { switch (binaryOpKind) { case ASTERISK_TOKEN: case SLASH_TOKEN: return OperatorPrecedence.MULTIPLICATIVE; case PLUS_TOKEN: case MINUS_TOKEN: return OperatorPrecedence.ADDITIVE; case GT_TOKEN: case LT_TOKEN: case GT_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case IS_KEYWORD: return OperatorPrecedence.BINARY_COMPARE; case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case OPEN_PAREN_TOKEN: return OperatorPrecedence.MEMBER_ACCESS; case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: return OperatorPrecedence.EQUALITY; case BITWISE_AND_TOKEN: return OperatorPrecedence.BITWISE_AND; case BITWISE_XOR_TOKEN: return OperatorPrecedence.BITWISE_XOR; case PIPE_TOKEN: return OperatorPrecedence.BITWISE_OR; case LOGICAL_AND_TOKEN: return OperatorPrecedence.LOGICAL_AND; case LOGICAL_OR_TOKEN: return OperatorPrecedence.LOGICAL_OR; default: throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'"); } } /** * <p> * Get the operator kind to insert during recovery, given the precedence level. * </p> * * @param opPrecedenceLevel Precedence of the given operator * @return Kind of the operator to insert */ private SyntaxKind getOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) { switch (opPrecedenceLevel) { case MULTIPLICATIVE: return SyntaxKind.ASTERISK_TOKEN; case ADDITIVE: return SyntaxKind.PLUS_TOKEN; case BINARY_COMPARE: return SyntaxKind.LT_TOKEN; case EQUALITY: return SyntaxKind.DOUBLE_EQUAL_TOKEN; case BITWISE_AND: return SyntaxKind.BITWISE_AND_TOKEN; case BITWISE_XOR: return SyntaxKind.BITWISE_XOR_TOKEN; case BITWISE_OR: return SyntaxKind.PIPE_TOKEN; case LOGICAL_AND: return SyntaxKind.LOGICAL_AND_TOKEN; case LOGICAL_OR: return SyntaxKind.LOGICAL_OR_TOKEN; default: throw new UnsupportedOperationException( "Unsupported operator precedence level'" + opPrecedenceLevel + "'"); } } /** * <p> * Parse a module type definition. * </p> * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code> * * @param metadata Metadata * @param qualifier Visibility qualifier * @return Parsed node */ private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_TYPE_DEFINITION); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse type keyword. * * @return Parsed node */ private STNode parseTypeKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD); return sol.recoveredNode; } } /** * Parse type name. * * @return Parsed node */ private STNode parseTypeName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_NAME); return sol.recoveredNode; } } /** * <p> * Parse record type descriptor. A record type descriptor body has the following structure. 
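 * An inclusive (open) record uses <code>{ }</code> delimiters, e.g.
 * <code>record { int x; }</code>, while an exclusive (closed) record uses <code>{| |}</code>,
 * e.g. <code>record {| int x; |}</code>; the opening delimiter that is parsed first decides
 * which form the body is treated as.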
* </p> * * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* } * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |} * </code> * * @return Parsed node */ private STNode parseRecordTypeDescriptor() { startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR); STNode recordKeyword = parseRecordKeyword(); STNode bodyStartDelimiter = parseRecordBodyStartDelimiter(); boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN; STNode fields = parseFieldDescriptors(isInclusive); STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(); endContext(); return STNodeFactory.createRecordTypeDescriptor(recordKeyword, bodyStartDelimiter, fields, bodyEndDelimiter); } /** * Parse record body start delimiter. * * @return Parsed node */ private STNode parseRecordBodyStartDelimiter() { STToken token = peek(); return parseRecordBodyStartDelimiter(token.kind); } private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) { switch (kind) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyStart(); case OPEN_BRACE_TOKEN: return parseOpenBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyStartDelimiter(solution.tokenKind); } } /** * Parse closed-record body start delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyStart() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START); return sol.recoveredNode; } } /** * Parse record body close delimiter. * * @return Parsed node */ private STNode parseRecordBodyCloseDelimiter() { STToken token = peek(); return parseRecordBodyCloseDelimiter(token.kind); } private STNode parseRecordBodyCloseDelimiter(SyntaxKind kind) { switch (kind) { case CLOSE_BRACE_PIPE_TOKEN: return parseClosedRecordBodyEnd(); case CLOSE_BRACE_TOKEN: return parseCloseBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyCloseDelimiter(solution.tokenKind); } } /** * Parse closed-record body end delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyEnd() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END); return sol.recoveredNode; } } /** * Parse record keyword. * * @return Parsed node */ private STNode parseRecordKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RECORD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse field descriptors. 
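 * Fields are read until the record body ends; once a record-rest-descriptor such as
 * <code>string...;</code> has been parsed, any further field descriptors are consumed only to
 * report them as invalid, since the rest descriptor must be the last entry.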
* </p> * * @return Parsed node */ private STNode parseFieldDescriptors(boolean isInclusive) { ArrayList<STNode> recordFields = new ArrayList<>(); STToken token = peek(); while (!isEndOfRecordTypeNode(token.kind)) { STNode field = parseFieldOrRestDescriptor(isInclusive); recordFields.add(field); token = peek(); if (field.kind == SyntaxKind.RECORD_REST_TYPE) { break; } } while (!isEndOfRecordTypeNode(token.kind)) { parseFieldOrRestDescriptor(isInclusive); this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor"); token = peek(); } return STNodeFactory.createNodeList(recordFields); } /** * <p> * Parse field descriptor or rest descriptor. * </p> * * <code> * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ; * <br/><br/>field-name := identifier * <br/><br/>default-value := = expression * <br/><br/>record-type-reference := * type-reference ; * <br/><br/>record-rest-descriptor := type-descriptor ... ; * </code> * * @return Parsed node */ private STNode parseFieldOrRestDescriptor(boolean isInclusive) { startContext(ParserRuleContext.RECORD_FIELD); STToken token = peek(); if (token.kind == SyntaxKind.ASTERISK_TOKEN) { STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createTypeReference(asterisk, type, semicolonToken); } STNode metadata = parseMetaData(); STNode type = parseTypeDescriptor(); STNode fieldOrRestDesc; if (isInclusive) { STNode fieldName = parseVariableName(); fieldOrRestDesc = parseFieldDescriptorRhs(metadata, type, fieldName); } else { fieldOrRestDesc = parseFieldOrRestDescriptorRhs(metadata, type); } endContext(); return fieldOrRestDesc; } /** * Parse type reference. * <code>type-reference := identifier | qualified-identifier</code> * * @return Type reference node */ private STNode parseTypeReference() { return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE); } /** * Parse identifier or qualified identifier. * * @return Identifier node */ private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode typeRefOrPkgRef = consume(); return parseQualifiedIdentifier(typeRefOrPkgRef); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse identifier or qualified identifier, given the starting identifier. * * @param identifier Starting identifier * @return Parse node */ private STNode parseQualifiedIdentifier(STNode identifier) { STToken nextToken = peek(1); if (nextToken.kind != SyntaxKind.COLON_TOKEN) { return identifier; } STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STToken colon = consume(); STToken varOrFuncName = consume(); return STNodeFactory.createQualifiedIdentifier(identifier, colon, varOrFuncName); } else { this.errorHandler.removeInvalidToken(); return parseQualifiedIdentifier(identifier); } } /** * Parse RHS of a field or rest type descriptor. 
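 * <p>
 * The next token decides the shape: an ellipsis closes the type as a record-rest-descriptor
 * (e.g. <code>int...;</code>), while an identifier is taken as a field name and parsing
 * continues as an individual field descriptor (e.g. <code>int age;</code>).
 * </p>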
* * @param metadata Metadata * @param type Type descriptor * @return Parsed node */ private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) { STToken token = peek(); return parseFieldOrRestDescriptorRhs(token.kind, metadata, type); } private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) { switch (kind) { case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordRestDescriptor(type, ellipsis, semicolonToken); case IDENTIFIER_TOKEN: STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type); } } /** * <p> * Parse field descriptor rhs. * </p> * * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) { STToken token = peek(); return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName); } /** * <p> * Parse field descriptor rhs. * </p> * * <code> * field-descriptor := [? | default-value] ; * <br/>default-value := = expression * </code> * * @param kind Kind of the next token * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) { switch (kind) { case SEMICOLON_TOKEN: STNode questionMarkToken = STNodeFactory.createEmptyNode(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordField(metadata, type, fieldName, questionMarkToken, semicolonToken); case QUESTION_MARK_TOKEN: questionMarkToken = parseQuestionMark(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordField(metadata, type, fieldName, questionMarkToken, semicolonToken); case EQUAL_TOKEN: STNode equalsToken = parseAssignOp(); STNode expression = parseExpression(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldWithDefaultValue(metadata, type, fieldName, equalsToken, expression, semicolonToken); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName); } } /** * Parse question mark. * * @return Parsed node */ private STNode parseQuestionMark() { STToken token = peek(); if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.QUESTION_MARK); return sol.recoveredNode; } } /* * Statements */ /** * Parse statements, until an end of a block is reached. * * @return Parsed node */ private STNode parseStatements() { STToken token = peek(); ArrayList<STNode> stmts = new ArrayList<>(); while (!isEndOfBlockNode(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } stmts.add(stmt); token = peek(); } return STNodeFactory.createNodeList(stmts); } /** * Parse a single statement. 
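 * <p>
 * Dispatch is on the first token: a type-descriptor start begins a variable declaration,
 * <code>if</code>, <code>while</code>, <code>panic</code> and the other statement keywords
 * begin their respective statements, a leading <code>@</code> first collects annotations, and
 * a bare identifier is routed to the ambiguous assignment-or-var-decl path.
 * </p>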
* * @return Parsed node */ protected STNode parseStatement() { STToken token = peek(); return parseStatement(token.kind); } private STNode parseStatement(SyntaxKind tokenKind) { STNode annots = null; switch (tokenKind) { case AT_TOKEN: annots = parseAnnotations(tokenKind); tokenKind = peek().kind; break; case FINAL_KEYWORD: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case IDENTIFIER_TOKEN: case IF_KEYWORD: case WHILE_KEYWORD: case PANIC_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case CONTINUE_KEYWORD: case BREAK_KEYWORD: case RETURN_KEYWORD: case TYPE_KEYWORD: case OPEN_PAREN_TOKEN: break; default: if (isEndOfBlockNode(tokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind); } return parseStatement(tokenKind, annots); } private STNode getAnnotations(STNode nullbaleAnnot) { if (nullbaleAnnot != null) { return nullbaleAnnot; } return STNodeFactory.createNodeList(new ArrayList<>()); } private STNode parseStatement(STNode annots) { return parseStatement(peek().kind, annots); } /** * Parse a single statement, given the next token kind. * * @param tokenKind Next tokenKind * @return Parsed node */ private STNode parseStatement(SyntaxKind tokenKind, STNode annots) { switch (tokenKind) { case FINAL_KEYWORD: STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case IDENTIFIER_TOKEN: return parseAssignmentOrVarDecl(); case TYPE_KEYWORD: return parseLocalTypeDefinitionStatement(getAnnotations(annots)); default: break; } if (annots != null) { this.errorHandler.reportInvalidNode(null, "invalid annotation"); } switch (tokenKind) { case IF_KEYWORD: return parseIfElseBlock(); case WHILE_KEYWORD: return parseWhileStatement(); case PANIC_KEYWORD: return parsePanicStatement(); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCallStatementWithCheck(); case CONTINUE_KEYWORD: return parseContinueStatement(); case BREAK_KEYWORD: return parseBreakStatement(); case RETURN_KEYWORD: return parseReturnStatement(); default: if (isEndOfBlockNode(tokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind, annots); } } /** * <p> * Parse variable declaration. Variable declaration can be a local or module level. 
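 * For example, <code>final int count = 0;</code> carries a <code>final</code> keyword and an
 * initializer, while <code>int count;</code> omits both; the missing initializer is legal for
 * a local declaration but reported as an error at module level.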
* </p> * * <code> * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt * <br/><br/> * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ; * <br/><br/> * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ; * </code> * * @param annots Annotations or metadata * @param finalKeyword Final keyword * @return Parsed node */ private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode type = parseTypeDescriptor(); STNode varName = parseVariableName(); STNode varDecl = parseVarDeclRhs(annots, finalKeyword, type, varName, isModuleVar); endContext(); return varDecl; } /** * Parse final keyword. * * @return Parsed node */ private STNode parseFinalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FINAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FINAL_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a variable declaration statement. * </p> * <code> * var-decl-rhs := ; | = action-or-expr ; * </code> * * @param metadata metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @return Parsed node */ private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STToken token = peek(); return parseVarDeclRhs(token.kind, metadata, finalKeyword, type, varName, isModuleVar); } /** * Parse the right hand side of a variable declaration statement, given the * next token kind. * * @param tokenKind Next token kind * @param metadata Metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @param isModuleVar flag indicating whether the var is module level * @return Parsed node */ private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STNode assign; STNode expr; STNode semicolon; switch (tokenKind) { case EQUAL_TOKEN: assign = parseAssignOp(); expr = parseExpression(); semicolon = parseSemicolon(); break; case SEMICOLON_TOKEN: if (isModuleVar) { this.errorHandler.reportMissingTokenError("assignment required"); } assign = STNodeFactory.createEmptyNode(); expr = STNodeFactory.createEmptyNode(); semicolon = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, type, varName, isModuleVar); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, type, varName, isModuleVar); } if (isModuleVar) { return STNodeFactory.createModuleVariableDeclaration(metadata, finalKeyword, type, varName, assign, expr, semicolon); } return STNodeFactory.createVariableDeclaration(metadata, finalKeyword, type, varName, assign, expr, semicolon); } /** * If the statement starts with an identifier, it could be either an assignment statement or * a var-decl-stmt with a user defined type. This method will parse such ambiguous scenarios. 
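* <p>
* <i>Editor's note: a sketch of how one extra token of lookahead resolves the
* ambiguity, mirroring parseAssignmentOrVarDeclRhs; the {@code Kind} enum and
* {@code classify} helper are hypothetical:</i>
* <pre>{@code
* // "Foo x;"  -> next is IDENTIFIER -> var-decl with user-defined type Foo
* // "x = 10;" -> next is EQUAL      -> assignment statement
* // "x.y();"  -> next is DOT        -> expression; then call-stmt or assignment
* enum Kind { IDENTIFIER, EQUAL, DOT, OTHER }
*
* static String classify(Kind afterLeadingIdentifier) {
*     switch (afterLeadingIdentifier) {
*         case IDENTIFIER: return "var-decl";
*         case EQUAL:      return "assignment";
*         case DOT:        return "expression-based statement";
*         default:         return "recover";
*     }
* }
* }</pre>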
* * @return Parsed node */ private STNode parseAssignmentOrVarDecl() { startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT); STNode identifier = parseStatementStartIdentifier(); STNode assignmentOrVarDecl = parseAssignmentOrVarDeclRhs(identifier); endContext(); return assignmentOrVarDecl; } /** * Parse the second portion of an assignment statement or a var-decl statement when ambiguous. * * @param typeOrVarName Type name or variable name * @return Parsed node */ private STNode parseAssignmentOrVarDeclRhs(STNode typeOrVarName) { STToken token = peek(); return parseAssignmentOrVarDeclRhs(token.kind, typeOrVarName); } /** * Parse the second portion of an assignment statement or a var-decl statement when ambiguous, * given the next token kind. * * @param nextTokenKind Next token kind * @param identifier Identifier at the start of the statement * @return Parsed node */ private STNode parseAssignmentOrVarDeclRhs(SyntaxKind nextTokenKind, STNode identifier) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: STNode annots = STNodeFactory.createNodeList(new ArrayList<>()); STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode varName = parseVariableName(); return parseVarDeclRhs(annots, finalKeyword, identifier, varName, false); case EQUAL_TOKEN: return parseAssignmentStmtRhs(identifier); case OPEN_PAREN_TOKEN: case DOT_TOKEN: case OPEN_BRACKET_TOKEN: STNode expr = parseExpressionRhs(identifier, true); switch (expr.kind) { case METHOD_CALL: case FUNCTION_CALL: return parseCallStatement(expr); case FIELD_ACCESS: case MEMBER_ACCESS: STToken nextToken = peek(); if (isCompoundBinaryOperator(nextToken.kind)) { return parseCompoundAssignmentStmtRhs(expr); } return parseAssignmentStmtRhs(expr); default: this.errorHandler.reportInvalidNode(null, "left hand side of an assignment must be a variable reference"); return parseCallStatement(expr); } default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(identifier); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT_RHS, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAssignmentOrVarDeclRhs(solution.tokenKind, identifier); } } /** * <p> * Parse assignment statement, which takes the following format. * </p> * <code>assignment-stmt := lvexpr = action-or-expr ;</code> * * @return Parsed node */ private STNode parseAssignmentStmt() { startContext(ParserRuleContext.ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode assignmentStmt = parseAssignmentStmtRhs(varName); endContext(); return assignmentStmt; } /** * <p> * Parse the RHS portion of the assignment. * </p> * <code>assignment-stmt-rhs := = action-or-expr ;</code> * * @param expression LHS expression * @return Parsed node */ private STNode parseAssignmentStmtRhs(STNode expression) { STNode assign = parseAssignOp(); STNode expr = parseExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createAssignmentStatement(expression, assign, expr, semicolon); } /* * Expressions */ /** * Parse expression. This will start parsing expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseExpression() { return parseExpression(OperatorPrecedence.LOGICAL_OR, false); } /** * Parse an expression that has an equal or higher precedence than a given level. 
* * @param precedenceLevel Precedence level of expression to be parsed * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isAssignmentLhs) { STNode expr = parseTerminalExpression(); return parseExpressionRhs(precedenceLevel, expr, isAssignmentLhs); } /** * Parse terminal expressions. A terminal expression has the highest precedence level * out of all expressions, and will be at the leaves of an expression tree. * * @return Parsed node */ private STNode parseTerminalExpression() { STToken token = peek(); return parseTerminalExpression(token.kind); } private STNode parseTerminalExpression(SyntaxKind kind) { switch (kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: return parseLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_NAME); case OPEN_PAREN_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } return parseBracedExpression(); case TRUE_KEYWORD: case FALSE_KEYWORD: return parseBooleanLiteral(); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCheckExpression(); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case TYPEOF_KEYWORD: return parseTypeofExpression(); case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return parseUnaryExpression(); case NULL_KEYWORD: return parseNilLiteral(); default: Solution solution = recover(peek(), ParserRuleContext.EXPRESSION); if (solution.recoveredNode.kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseQualifiedIdentifier(solution.recoveredNode); } return solution.recoveredNode; } } private STNode parseExpressionRhs(STNode lhsExpr) { return parseExpressionRhs(OperatorPrecedence.LOGICAL_OR, lhsExpr, false); } /** * Parse the right-hand-side of an expression. * * @return Parsed node */ private STNode parseExpressionRhs(STNode lhsExpr, boolean isAssignmentLhs) { return parseExpressionRhs(OperatorPrecedence.LOGICAL_OR, lhsExpr, isAssignmentLhs); } /** * <p> * Parse the right-hand-side of an expression. * </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isAssignmentLhs) { STToken token = peek(); return parseExpressionRhs(precedenceLevel, token.kind, lhsExpr, isAssignmentLhs); } /** * Parse the right hand side of an expression given the next token kind. 
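* <p>
* <i>Editor's note: parseExpression/parseExpressionRhs together implement precedence
* climbing. A self-contained arithmetic sketch of the same loop, under the assumption
* of just two left-associative operators; none of these names exist in the parser:</i>
* <pre>{@code
* static int prec(String op) { return op.equals("*") ? 2 : 1; }
*
* static int parseExpr(String[] toks, int[] pos, int minPrec) {
*     int lhs = Integer.parseInt(toks[pos[0]++]);        // terminal expression
*     while (pos[0] < toks.length && prec(toks[pos[0]]) >= minPrec) {
*         String op = toks[pos[0]++];
*         int rhs = parseExpr(toks, pos, prec(op) + 1);  // tighter ops bind first
*         lhs = op.equals("*") ? lhs * rhs : lhs + rhs;
*     }
*     return lhs;
* }
* // parseExpr(new String[] {"1", "+", "2", "*", "3"}, new int[] {0}, 1) == 7
* }</pre>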
* * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param tokenKind Next token kind * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence currentPrecedenceLevel, SyntaxKind tokenKind, STNode lhsExpr, boolean isAssignmentLhs) { if (isEndOfExpression(tokenKind, isAssignmentLhs)) { return lhsExpr; } if (!isValidExprRhsStart(tokenKind)) { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, lhsExpr); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) { SyntaxKind binaryOpKind = getOperatorKindToInsert(currentPrecedenceLevel); return parseExpressionRhs(currentPrecedenceLevel, binaryOpKind, lhsExpr, isAssignmentLhs); } else { return parseExpressionRhs(currentPrecedenceLevel, solution.tokenKind, lhsExpr, isAssignmentLhs); } } OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind); if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence)) { return lhsExpr; } STNode newLhsExpr; switch (tokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr); break; case IS_KEYWORD: newLhsExpr = parseIsExpression(lhsExpr); break; default: STNode operator = parseBinaryOperator(); STNode rhsExpr = parseExpression(nextOperatorPrecedence, isAssignmentLhs); newLhsExpr = STNodeFactory.createBinaryExpression(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator, rhsExpr); break; } return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isAssignmentLhs); } private boolean isValidExprRhsStart(SyntaxKind tokenKind) { switch (tokenKind) { case OPEN_PAREN_TOKEN: case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case IS_KEYWORD: return true; default: return isBinaryOperator(tokenKind); } } /** * Parse member access expression. * * @param lhsExpr Container expression * @return Member access expression */ private STNode parseMemberAccessExpr(STNode lhsExpr) { STNode openBracket = consume(); STNode keyExpr; if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { this.errorHandler.reportMissingTokenError("missing expression"); keyExpr = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { keyExpr = parseExpression(); } STNode closeBracket = parseCloseBracket(); return STNodeFactory.createMemberAccessExpression(lhsExpr, openBracket, keyExpr, closeBracket); } /** * Parse close bracket. * * @return Parsed node */ private STNode parseCloseBracket() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET); return sol.recoveredNode; } } /** * Parse field access expression and method call expression. * * @param lhsExpr Preceding expression of the field access or method call * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>. 
*/ private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) { STNode dotToken = parseDotToken(); STNode fieldOrMethodName = parseIdentifier(ParserRuleContext.FIELD_OR_FUNC_NAME); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createMethodCallExpression(lhsExpr, dotToken, fieldOrMethodName, openParen, args, closeParen); } return STNodeFactory.createFieldAccessExpression(lhsExpr, dotToken, fieldOrMethodName); } /** * <p> * Parse braced expression. * </p> * <code>braced-expr := ( expression )</code> * * @return Parsed node */ private STNode parseBracedExpression() { STNode openParen = parseOpenParenthesis(); STNode expr = parseExpression(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createBracedExpression(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } /** * Check whether the given token is an end of an expression. * * @param tokenKind Token to check * @return <code>true</code> if the token represents an end of an expression. <code>false</code> otherwise */ private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isAssignmentLhs) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case OPEN_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case SIMPLE_TYPE: case CONST_KEYWORD: case LISTENER_KEYWORD: case EQUAL_TOKEN: case AT_TOKEN: case HASH_TOKEN: return true; default: if (isAssignmentLhs) { return isBinaryOperator(tokenKind); } return false; } } /** * Parse a literal expression by consuming the current token. * * @return Parsed node */ private STNode parseLiteral() { return consume(); } /** * Parse function call expression. * <code>function-call-expr := function-reference ( arg-list ) * function-reference := variable-reference</code> * * @param identifier Function name * @return Function call expression */ private STNode parseFuncCall(STNode identifier) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createFunctionCallExpression(identifier, openParen, args, closeParen); } /** * Parse function call argument list. * * @return Parsed args list */ private STNode parseArgsList() { startContext(ParserRuleContext.ARG_LIST); ArrayList<STNode> argsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } SyntaxKind lastProcessedArgKind = parseFirstArg(argsList); parseFollowUpArg(argsList, lastProcessedArgKind); STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } /** * Parse the first argument of a function call. * * @param argsList Arguments list to which the parsed argument must be added * @return Kind of the first argument */ private SyntaxKind parseFirstArg(ArrayList<STNode> argsList) { startContext(ParserRuleContext.ARG); STNode leadingComma = STNodeFactory.createEmptyNode(); STNode arg = parseArg(leadingComma); endContext(); if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) { argsList.add(arg); return arg.kind; } else { reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind); return SyntaxKind.POSITIONAL_ARG; } } /** * Parse follow up arguments.
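* <p>
* <i>Editor's note: the ordering rule (positional args, then named args, then a single
* rest arg) is enforced by comparing enum ordinals of the arg kinds. A minimal sketch,
* assuming a hypothetical {@code ArgKind} declared in that order:</i>
* <pre>{@code
* enum ArgKind { POSITIONAL, NAMED, REST }
*
* static boolean isValidNextArg(ArgKind last, ArgKind next) {
*     if (last == ArgKind.REST && next == ArgKind.REST) {
*         return false;                        // at most one rest arg
*     }
*     return last.ordinal() <= next.ordinal(); // order may never go backwards
* }
* // isValidNextArg(ArgKind.NAMED, ArgKind.POSITIONAL) == false -> foo(x = 1, 2) is rejected
* }</pre>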
* * @param argsList Arguments list to which the parsed argument must be added * @param lastProcessedArgKind Kind of the argument processed prior to this */ private void parseFollowUpArg(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) { STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { startContext(ParserRuleContext.ARG); STNode leadingComma = parseComma(); nextToken = peek(); if (isEndOfParametersList(nextToken.kind)) { this.errorHandler.reportInvalidNode((STToken) leadingComma, "invalid token " + leadingComma); endContext(); break; } STNode arg = parseArg(nextToken.kind, leadingComma); if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) { if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) { this.errorHandler.reportInvalidNode(nextToken, "cannot have more than one rest arg"); } else { argsList.add(arg); lastProcessedArgKind = arg.kind; } } else { reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind); } nextToken = peek(); endContext(); } } /** * Report invalid order of args. * * @param token Starting token of the arg * @param lastArgKind Kind of the previously processed arg * @param argKind Kind of the current arg */ private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) { this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind); } /** * Parse function call argument. * * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseArg(STNode leadingComma) { STToken token = peek(); return parseArg(token.kind, leadingComma); } private STNode parseArg(SyntaxKind kind, STNode leadingComma) { STNode arg; switch (kind) { case ELLIPSIS_TOKEN: STToken ellipsis = consume(); STNode expr = parseExpression(); arg = STNodeFactory.createRestArgument(leadingComma, ellipsis, expr); break; case IDENTIFIER_TOKEN: arg = parseNamedOrPositionalArg(leadingComma); break; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: default: expr = parseExpression(); arg = STNodeFactory.createPositionalArgument(leadingComma, expr); break; } return arg; } /** * Parse positional or named arg. This method assumes peek()/peek(1) * is always an identifier. * * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseNamedOrPositionalArg(STNode leadingComma) { STToken secondToken = peek(2); switch (secondToken.kind) { case EQUAL_TOKEN: STNode argNameOrVarRef = consume(); STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createNamedArgument(leadingComma, argNameOrVarRef, equal, expr); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: argNameOrVarRef = consume(); return STNodeFactory.createPositionalArgument(leadingComma, argNameOrVarRef); case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: default: expr = parseExpression(); return STNodeFactory.createPositionalArgument(leadingComma, expr); } } /** * Parse object type descriptor.
* * @return Parsed node */ private STNode parseObjectTypeDescriptor() { startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR); STNode objectTypeQualifiers = parseObjectTypeQualifiers(); STNode objectKeyword = parseObjectKeyword(); STNode openBrace = parseOpenBrace(); STNode objectMembers = parseObjectMembers(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createObjectTypeDescriptor(objectTypeQualifiers, objectKeyword, openBrace, objectMembers, closeBrace); } /** * Parse object type qualifiers. * * @return Parsed node */ private STNode parseObjectTypeQualifiers() { STToken nextToken = peek(); return parseObjectTypeQualifiers(nextToken.kind); } private STNode parseObjectTypeQualifiers(SyntaxKind kind) { List<STNode> qualifiers = new ArrayList<>(); STNode firstQualifier; switch (kind) { case CLIENT_KEYWORD: STNode clientKeyword = parseClientKeyword(); firstQualifier = clientKeyword; break; case ABSTRACT_KEYWORD: STNode abstractKeyword = parseAbstractKeyword(); firstQualifier = abstractKeyword; break; case OBJECT_KEYWORD: return STNodeFactory.createNodeList(qualifiers); default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeQualifiers(solution.tokenKind); } STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier); qualifiers.add(firstQualifier); if (secondQualifier != null) { qualifiers.add(secondQualifier); } return STNodeFactory.createNodeList(qualifiers); } private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) { STToken nextToken = peek(); return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier); } private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) { if (firstQualifier.kind != kind) { switch (kind) { case CLIENT_KEYWORD: return parseClientKeyword(); case ABSTRACT_KEYWORD: return parseAbstractKeyword(); case OBJECT_KEYWORD: return null; default: break; } } Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier); } /** * Parse client keyword. * * @return Parsed node */ private STNode parseClientKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CLIENT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD); return sol.recoveredNode; } } /** * Parse abstract keyword. * * @return Parsed node */ private STNode parseAbstractKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD); return sol.recoveredNode; } } /** * Parse object keyword. * * @return Parsed node */ private STNode parseObjectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.OBJECT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD); return sol.recoveredNode; } } /** * Parse object members. 
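* <p>
* <i>Editor's note: the member loop peeks two tokens because the end of an object body
* cannot always be decided from one token alone. A schematic fragment of such a loop;
* {@code isEndOfBody}, {@code parseMember} and {@code Node} are hypothetical stand-ins:</i>
* <pre>{@code
* List<Node> members = new ArrayList<>();
* while (!isEndOfBody(peek(1).kind, peek(2).kind)) {
*     Node member = parseMember();
*     if (member == null) {
*         break;            // recovery decided the body had already ended
*     }
*     members.add(member);
* }
* }</pre>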
* * @return Parsed node */ private STNode parseObjectMembers() { ArrayList<STNode> objectMembers = new ArrayList<>(); STToken nextToken = peek(1); STToken nextNextToken = peek(2); while (!isEndOfObjectTypeNode(nextToken.kind, nextNextToken.kind)) { startContext(ParserRuleContext.OBJECT_MEMBER); STNode field = parseObjectMember(nextToken.kind); endContext(); if (field == null) { break; } objectMembers.add(field); nextToken = peek(1); nextNextToken = peek(2); } return STNodeFactory.createNodeList(objectMembers); } private STNode parseObjectMember() { STToken nextToken = peek(); return parseObjectMember(nextToken.kind); } private STNode parseObjectMember(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: case IDENTIFIER_TOKEN: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: metadata = createEmptyMetadata(); break; case HASH_TOKEN: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (isEndOfObjectTypeNode(solution.tokenKind, nextTokenKind)) { return null; } return parseObjectMember(solution.tokenKind); } return parseObjectMember(nextTokenKind, metadata); } private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) { STNode member; switch (nextTokenKind) { case ASTERISK_TOKEN: STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); member = STNodeFactory.createTypeReference(asterisk, type, semicolonToken); break; case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: STNode visibilityQualifier = parseObjectMemberVisibility(); member = parseObjectMethodOrField(metadata, visibilityQualifier); break; case REMOTE_KEYWORD: member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode()); break; case FUNCTION_KEYWORD: member = parseObjectMethod(metadata, STNodeFactory.createEmptyNode()); break; case IDENTIFIER_TOKEN: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: member = parseObjectField(metadata, STNodeFactory.createEmptyNode()); break; default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (isEndOfObjectTypeNode(solution.tokenKind, nextTokenKind)) { return null; } return parseObjectMember(solution.tokenKind); } return member; } private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) { STToken nextToken = peek(1); STToken nextNextToken = peek(2); return parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers); } /** * Parse an object member, given the visibility modifier. Object member can have * only one visibility qualifier. This mean the methodQualifiers list can have * one qualifier at-most. * * @param visibilityQualifiers Visibility qualifiers. A modifier can be * a syntax node with either 'PUBLIC' or 'PRIVATE'. 
* @param nextTokenKind Next token kind * @param nextNextTokenKind Kind of the token after the * @param metadata Metadata * @param visibilityQualifiers Visibility qualifiers * @return Parse object member node */ private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata, STNode visibilityQualifiers) { switch (nextTokenKind) { case REMOTE_KEYWORD: STNode remoteKeyword = parseRemoteKeyword(); ArrayList<STNode> methodQualifiers = new ArrayList<>(); if (visibilityQualifiers.kind != SyntaxKind.NONE) { methodQualifiers.add(visibilityQualifiers); } methodQualifiers.add(remoteKeyword); return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers)); case FUNCTION_KEYWORD: return parseObjectMethod(metadata, visibilityQualifiers); case IDENTIFIER_TOKEN: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) { return parseObjectField(metadata, visibilityQualifiers); } default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifiers); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers); } } /** * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>. * * @return Parsed node */ private STNode parseObjectMemberVisibility() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } private STNode parseRemoteKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD); return sol.recoveredNode; } } private STNode parseObjectField(STNode metadata, STNode methodQualifiers) { STNode type = parseTypeDescriptor(); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STToken nextToken = peek(); return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. 
* * @param nextTokenKind Kind of the next token * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STNode equalsToken; STNode expression; STNode semicolonToken; switch (nextTokenKind) { case SEMICOLON_TOKEN: equalsToken = STNodeFactory.createEmptyNode(); expression = STNodeFactory.createEmptyNode(); semicolonToken = parseSemicolon(); break; case EQUAL_TOKEN: equalsToken = parseAssignOp(); expression = parseExpression(); semicolonToken = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName); } return STNodeFactory.createObjectField(metadata, visibilityQualifier, type, fieldName, equalsToken, expression, semicolonToken); } private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) { return parseFunctionDefinition(metadata, methodQualifiers); } /** * Parse if-else statement. * <code> * if-else-stmt := if expression block-stmt [else-block] * </code> * * @return If-else block */ private STNode parseIfElseBlock() { startContext(ParserRuleContext.IF_BLOCK); STNode ifKeyword = parseIfKeyword(); STNode condition = parseExpression(); STNode ifBody = parseBlockNode(); endContext(); STNode elseBody = parseElseBlock(); return STNodeFactory.createIfElseStatement(ifKeyword, condition, ifBody, elseBody); } /** * Parse if-keyword. * * @return Parsed if-keyword node */ private STNode parseIfKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IF_KEYWORD); return sol.recoveredNode; } } /** * Parse else-keyword. * * @return Parsed else keyword node */ private STNode parseElseKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ELSE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD); return sol.recoveredNode; } } /** * Parse block node. * <code> * block-stmt := { sequence-stmt } * sequence-stmt := statement* * </code> * * @return Parse block node */ private STNode parseBlockNode() { startContext(ParserRuleContext.BLOCK_STMT); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatement(openBrace, stmts, closeBrace); } /** * Parse else block. * <code>else-block := else (if-else-stmt | block-stmt)</code> * * @return Else block */ private STNode parseElseBlock() { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode elseKeyword = parseElseKeyword(); STNode elseBody = parseElseBody(); return STNodeFactory.createElseBlock(elseKeyword, elseBody); } /** * Parse else node body. 
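* <p>
* <i>Editor's note: {@code else if} needs no dedicated rule in the grammar below; when
* the token after {@code else} is {@code if}, the parser simply recurses into the
* if-else rule, so a chain of else-ifs becomes nested if-else nodes. A schematic
* fragment with hypothetical {@code Kind} and {@code Node} types:</i>
* <pre>{@code
* static Node parseElseBody(Kind next) {
*     return next == Kind.IF ? parseIfElseBlock()  // "else if ..." recurses
*                            : parseBlockNode();   // plain "else { ... }"
* }
* }</pre>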
* <code>else-body := if-else-stmt | block-stmt</code> * * @return Else node body */ private STNode parseElseBody() { STToken nextToken = peek(); return parseElseBody(nextToken.kind); } private STNode parseElseBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IF_KEYWORD: return parseIfElseBlock(); case OPEN_BRACE_TOKEN: return parseBlockNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ELSE_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseElseBody(solution.tokenKind); } } /** * Parse while statement. * <code>while-stmt := while expression block-stmt</code> * * @return While statement */ private STNode parseWhileStatement() { startContext(ParserRuleContext.WHILE_BLOCK); STNode whileKeyword = parseWhileKeyword(); STNode condition = parseExpression(); STNode whileBody = parseBlockNode(); endContext(); return STNodeFactory.createWhileStatement(whileKeyword, condition, whileBody); } /** * Parse while-keyword. * * @return While-keyword node */ private STNode parseWhileKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHILE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD); return sol.recoveredNode; } } /** * Parse panic statement. * <code>panic-stmt := panic expression ;</code> * * @return Panic statement */ private STNode parsePanicStatement() { startContext(ParserRuleContext.PANIC_STMT); STNode panicKeyword = parsePanicKeyword(); STNode expression = parseExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createPanicStatement(panicKeyword, expression, semicolon); } /** * Parse panic-keyword. * * @return Panic-keyword node */ private STNode parsePanicKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.PANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD); return sol.recoveredNode; } } /** * Parse boolean literal. * * @return Parsed node */ private STNode parseBooleanLiteral() { STToken token = peek(); switch (token.kind) { case TRUE_KEYWORD: case FALSE_KEYWORD: return consume(); default: Solution sol = recover(token, ParserRuleContext.BOOLEAN_LITERAL); return sol.recoveredNode; } } /** * <p> * Parse call statement, given the call expression. * <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { STNode semicolon = parseSemicolon(); return STNodeFactory.createCallStatement(expression, semicolon); } private STNode parseCallStatementWithCheck() { startContext(ParserRuleContext.CALL_STMT); STNode checkingKeyword = parseCheckingKeyword(); STNode expr = parseExpression(); validateExprInCallStatement(checkingKeyword, expr); STNode checkExpr = STNodeFactory.createCheckExpression(checkingKeyword, expr); STNode checkStmt = parseCallStatement(checkExpr); endContext(); return checkStmt; } /** * Validate the call-expression in the call statement. Call expression takes the following structure. * <p> * <code>call-expr := function-call-expr | method-call-expr | checking-keyword call-expr</code> * * @param checkingKeyword Checking keyword observed before the expression. 
* @param expr Expression followed by the checking keyword */ private void validateExprInCallStatement(STNode checkingKeyword, STNode expr) { switch (expr.kind) { case FUNCTION_CALL: case METHOD_CALL: break; case CHECK_EXPRESSION: STCheckExpression checkExpr = (STCheckExpression) expr; validateExprInCallStatement(checkExpr.checkKeyword, checkExpr.expression); break; default: if (isMissingNode(expr)) { break; } this.errorHandler.reportInvalidNode(null, "expression followed by the '" + checkingKeyword.toString().trim() + "' keyword must be a func-call, a method-call or a check-expr"); break; } } /** * Check whether a node is a missing node. * * @param node Node to check * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise */ private boolean isMissingNode(STNode node) { return node instanceof STMissingToken; } /** * Parse check expression. * <p> * <code> * checking-expr := checking-keyword expression * </code> * * @return Check expression node */ private STNode parseCheckExpression() { STNode checkingKeyword = parseCheckingKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, false); return STNodeFactory.createCheckExpression(checkingKeyword, expr); } /** * Parse checking keyword. * <p> * <code> * checking-keyword := check | checkpanic * </code> * * @return Parsed node */ private STNode parseCheckingKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD); return sol.recoveredNode; } } /** * * Parse continue statement. * <code>continue-stmt := continue ; </code> * * @return continue statement */ private STNode parseContinueStatement() { startContext(ParserRuleContext.CONTINUE_STATEMENT); STNode continueKeyword = parseContinueKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createContinueStatement(continueKeyword, semicolon); } /** * Parse continue-keyword. * * @return continue-keyword node */ private STNode parseContinueKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONTINUE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD); return sol.recoveredNode; } } /** * Parse return statement. * <code>return-stmt := return [ action-or-expr ] ;</code> * * @return Return statement */ private STNode parseReturnStatement() { startContext(ParserRuleContext.RETURN_STMT); STNode returnKeyword = parseReturnKeyword(); STNode returnRhs = parseReturnStatementRhs(returnKeyword); endContext(); return returnRhs; } /** * Parse return-keyword. * * @return Return-keyword node */ private STNode parseReturnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD); return sol.recoveredNode; } } /** * Parse break statement. * <code>break-stmt := break ; </code> * * @return break statement */ private STNode parseBreakStatement() { startContext(ParserRuleContext.BREAK_STATEMENT); STNode breakKeyword = parseBreakKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createBreakStatement(breakKeyword, semicolon); } /** * Parse break-keyword. 
* * @return break-keyword node */ private STNode parseBreakKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BREAK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a return statement. * </p> * <code> * return-stmt-rhs := ; | action-or-expr ; * </code> * * @return Parsed node */ private STNode parseReturnStatementRhs(STNode returnKeyword) { STToken token = peek(); return parseReturnStatementRhs(token.kind, returnKeyword); } /** * Parse the right hand side of a return statement, given the * next token kind. * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseReturnStatementRhs(SyntaxKind tokenKind, STNode returnKeyword) { STNode expr; STNode semicolon; switch (tokenKind) { case SEMICOLON_TOKEN: expr = STNodeFactory.createEmptyNode(); break; default: expr = parseExpression(); break; } semicolon = parseSemicolon(); return STNodeFactory.createReturnStatement(returnKeyword, expr, semicolon); } /** * Parse mapping constructor expression. * <p> * <code>mapping-constructor-expr := { [field (, field)*] }</code> * * @return Parsed node */ private STNode parseMappingConstructorExpr() { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); STNode fields = parseMappingConstructorFields(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createMappingConstructorExpression(openBrace, fields, closeBrace); } /** * Parse mapping constructor fields. * * @return Parsed node */ private STNode parseMappingConstructorFields() { List<STNode> fields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfMappingConstructor(nextToken.kind)) { return STNodeFactory.createNodeList(fields); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode field = parseMappingField(leadingComma); fields.add(field); nextToken = peek(); while (!isEndOfMappingConstructor(nextToken.kind)) { leadingComma = parseComma(); field = parseMappingField(leadingComma); fields.add(field); nextToken = peek(); } return STNodeFactory.createNodeList(fields); } private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case AT_TOKEN: case HASH_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case RESOURCE_KEYWORD: case SIMPLE_TYPE: return true; default: return false; } } /** * Parse mapping constructor field. 
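* <p>
* <i>Editor's note: illustrative inputs for the field alternatives in the grammar
* below; the {@code Kind} enum and {@code classifyField} helper are hypothetical:</i>
* <pre>{@code
* // { name: "x" }       -> IDENTIFIER     -> specific-field (value optional)
* // { "a-b": 1 }        -> STRING_LITERAL -> specific-field with a string key
* // { [keyExpr()]: 1 }  -> OPEN_BRACKET   -> computed-name-field
* // { ...otherMapping } -> ELLIPSIS       -> spread-field
* enum Kind { IDENTIFIER, STRING_LITERAL, OPEN_BRACKET, ELLIPSIS, OTHER }
*
* static String classifyField(Kind first) {
*     switch (first) {
*         case IDENTIFIER:     return "specific-field";
*         case STRING_LITERAL: return "specific-field (string key)";
*         case OPEN_BRACKET:   return "computed-name-field";
*         case ELLIPSIS:       return "spread-field";
*         default:             return "recover";
*     }
* }
* }</pre>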
* <p> * <code>field := specific-field | computed-name-field | spread-field</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseMappingField(STNode leadingComma) { STToken nextToken = peek(); return parseMappingField(nextToken.kind, leadingComma); } private STNode parseMappingField(SyntaxKind tokenKind, STNode leadingComma) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseSpecificFieldWithOptionValue(leadingComma); case STRING_LITERAL: STNode key = parseStringLiteral(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificField(leadingComma, key, colon, valueExpr); case OPEN_BRACKET_TOKEN: return parseComputedField(leadingComma); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode expr = parseExpression(); return STNodeFactory.createSpreadField(leadingComma, ellipsis, expr); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD, leadingComma); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseMappingField(solution.tokenKind, leadingComma); } } /** * Parse mapping constructor specific-field with an optional value. * * @param leadingComma * @return Parsed node */ private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) { STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseSpecificFieldRhs(leadingComma, key); } private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) { STToken nextToken = peek(); return parseSpecificFieldRhs(nextToken.kind, leadingComma, key); } private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); valueExpr = parseExpression(); break; case COMMA_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; default: if (isEndOfMappingConstructor(tokenKind)) { colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key); } return STNodeFactory.createSpecificField(leadingComma, key, colon, valueExpr); } /** * Parse string literal. * * @return Parsed node */ private STNode parseStringLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_LITERAL); return sol.recoveredNode; } } /** * Parse colon token. * * @return Parsed node */ private STNode parseColon() { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COLON); return sol.recoveredNode; } } /** * Parse computed-name-field of a mapping constructor expression. 
* <p> * <code>computed-name-field := [ field-name-expr ] : value-expr</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseComputedField(STNode leadingComma) { startContext(ParserRuleContext.COMPUTED_FIELD_NAME); STNode openBracket = parseOpenBracket(); STNode fieldNameExpr = parseExpression(); STNode closeBracket = parseCloseBracket(); endContext(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameField(leadingComma, openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } /** * Parse open bracket. * * @return Parsed node */ private STNode parseOpenBracket() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET); return sol.recoveredNode; } } /** * <p> * Parse compound assignment statement, which takes the following format. * </p> * <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code> * * @return Parsed node */ private STNode parseCompoundAssignmentStmt() { startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName); endContext(); return compoundAssignmentStmt; } /** * <p> * Parse the RHS portion of the compound assignment. * </p> * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code> * * @param expression LHS expression * @return Parsed node */ private STNode parseCompoundAssignmentStmtRhs(STNode expression) { STNode binaryOperator = parseCompoundBinaryOperator(); STNode equalsToken = parseAssignOp(); STNode expr = parseExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createCompoundAssignmentStatement(expression, binaryOperator, equalsToken, expr, semicolon); } /** * Parse compound binary operator. * <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @return Parsed node */ private STNode parseCompoundBinaryOperator() { STToken token = peek(); if (isCompoundBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR); return sol.recoveredNode; } } /** * Parse service declaration. * <p> * <code> * service-decl := metadata service [variable-name] on expression-list service-body-block * <br/> * expression-list := expression (, expression)* * </code> * * @param metadata Metadata * @return Parsed node */ private STNode parseServiceDecl(STNode metadata) { startContext(ParserRuleContext.SERVICE_DECL); STNode serviceKeyword = parseServiceKeyword(); STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword); endContext(); return serviceDecl; } /** * Parse rhs of the service declaration. 
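* <p>
* <i>Editor's note: the service name is optional, so the parser decides between a name
* and the {@code on} keyword from the next token (the full grammar follows below). A
* minimal sketch with a hypothetical {@code Kind}:</i>
* <pre>{@code
* // "service hello on ep { }" -> IDENTIFIER -> named service
* // "service on ep { }"       -> ON keyword -> anonymous service (empty name node)
* enum Kind { IDENTIFIER, ON, OTHER }
*
* static boolean hasServiceName(Kind next) {
*     return next == Kind.IDENTIFIER;   // anything but IDENTIFIER/ON triggers recovery
* }
* }</pre>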
* <p> * <code> * service-rhs := [variable-name] on expression-list service-body-block * </code> * * @param metadata Metadata * @param serviceKeyword Service keyword * @return Parsed node */ private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) { STNode serviceName = parseServiceName(); STNode onKeyword = parseOnKeyword(); STNode expressionList = parseListeners(); STNode serviceBody = parseServiceBody(); STNode service = STNodeFactory.createServiceDeclaration(metadata, serviceKeyword, serviceName, onKeyword, expressionList, serviceBody); return service; } private STNode parseServiceName() { STToken nextToken = peek(); return parseServiceName(nextToken.kind); } private STNode parseServiceName(SyntaxKind kind) { switch (kind) { case IDENTIFIER_TOKEN: return parseIdentifier(ParserRuleContext.SERVICE_NAME); case ON_KEYWORD: return STNodeFactory.createEmptyNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseServiceName(solution.tokenKind); } } /** * Parse service keyword. * * @return Parsed node */ private STNode parseServiceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SERVICE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD); return sol.recoveredNode; } } /** * Check whether the given token kind is a compound binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isCompoundBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: return true; default: return false; } } /** * Parse on keyword. * * @return Parsed node */ private STNode parseOnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ON_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ON_KEYWORD); return sol.recoveredNode; } } /** * Parse listener references. * <p> * <code>expression-list := expression (, expression)*</code> * * @return Parsed node */ private STNode parseListeners() { startContext(ParserRuleContext.LISTENERS_LIST); List<STNode> listeners = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfListenersList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing expression"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); while (!isEndOfListenersList(nextToken.kind)) { leadingComma = parseComma(); exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(listeners); } private boolean isEndOfListenersList(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case AT_TOKEN: case HASH_TOKEN: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case SIMPLE_TYPE: return true; default: return false; } } /** * Parse expression list item. 
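* <p>
* <i>Editor's note: like the arg list, this list stores each item's leading comma on
* the item itself (the first item gets an empty node) instead of keeping separators in
* a parallel list. A schematic fragment of that loop with hypothetical helpers
* {@code emptyNode}, {@code listItem} and {@code isEndOfList}:</i>
* <pre>{@code
* List<Node> items = new ArrayList<>();
* Node leadingComma = emptyNode();          // first item has no separator
* items.add(listItem(leadingComma, parseExpression()));
* while (!isEndOfList(peek().kind)) {
*     leadingComma = parseComma();          // every later item owns its comma
*     items.add(listItem(leadingComma, parseExpression()));
* }
* }</pre>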
* * @param leadingComma Leading comma * @return Parsed node */ private STNode parseExpressionListItem(STNode leadingComma) { STNode expr = parseExpression(); return STNodeFactory.createExpressionListItem(leadingComma, expr); } /** * Parse service body. * <p> * <code> * service-body-block := { service-method-defn* } * </code> * * @return Parsed node */ private STNode parseServiceBody() { STNode openBrace = parseOpenBrace(); STNode resources = parseResources(); STNode closeBrace = parseCloseBrace(); return STNodeFactory.createServiceBody(openBrace, resources, closeBrace); } /** * Parse service resource definitions. * * @return Parsed node */ private STNode parseResources() { List<STNode> resources = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfServiceDecl(nextToken.kind)) { STNode serviceMethod = parseResource(); if (serviceMethod == null) { break; } resources.add(serviceMethod); nextToken = peek(); } return STNodeFactory.createNodeList(resources); } private boolean isEndOfServiceDecl(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } /** * Parse resource definition (i.e. service-method-defn). * <p> * <code> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @return Parsed node */ private STNode parseResource() { STToken nextToken = peek(); return parseResource(nextToken.kind); } private STNode parseResource(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case HASH_TOKEN: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isEndOfServiceDecl(nextTokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind); } return parseResource(nextTokenKind, metadata); } private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) { switch (nextTokenKind) { case RESOURCE_KEYWORD: STNode resourceKeyword = parseResourceKeyword(); return parseFunctionDefinition(metadata, resourceKeyword); case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, STNodeFactory.createEmptyNode()); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind, metadata); } } /** * Parse resource keyword. * * @return Parsed node */ private STNode parseResourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RESOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD); return sol.recoveredNode; } } /** * Check whether next construct is a service declaration or not. This method is * used to determine whether an end-of-block is reached, if the next token is * a service-keyword. Because service-keyword can be used in statements as well * as in top-level node (service-decl). We have reached a service-decl, then * it could be due to missing close-brace at the end of the current block. * * @return <code>true</code> if the next construct is a service declaration. 
* <code>false</code> otherwise */ private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return false; case ON_KEYWORD: return true; default: ParserRuleContext sol = this.errorHandler.findBestPath(currentContext); return sol == ParserRuleContext.SERVICE_DECL || sol == ParserRuleContext.CLOSE_BRACE; } case ON_KEYWORD: return true; default: Solution sol = recover(peek(), ParserRuleContext.STATEMENT); return sol.tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN; } } /** * Parse listener declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclaration(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD); return sol.recoveredNode; } } /** * Parse constant declaration, given the qualifier. * <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword); endContext(); return constDecl; } /** * Parse the components that follows after the const keyword of a constant declaration. 
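* <p>
* <i>Editor's note: because the type descriptor is optional, a leading identifier is
* ambiguous until the following token is seen; this mirrors the assignment/var-decl
* disambiguation earlier. Illustrative inputs, with a hypothetical {@code Kind}:</i>
* <pre>{@code
* // "const int MAX = 10;" -> built-in type               -> type parsed eagerly
* // "const Foo MAX = 10;" -> IDENTIFIER, then IDENTIFIER -> Foo is the type
* // "const MAX = 10;"     -> IDENTIFIER, then EQUAL      -> MAX is the variable, no type
* enum Kind { IDENTIFIER, EQUAL, OTHER }
*
* static boolean identifierIsType(Kind afterIdentifier) {
*     return afterIdentifier == Kind.IDENTIFIER;
* }
* }</pre>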
* * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword); } private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword) { switch (nextTokenKind) { case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclaration(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); case IDENTIFIER_TOKEN: return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword); } } private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) { STNode varNameOrTypeName = parseStatementStartIdentifier(); STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName); return constDecl; } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name, when the type-name * is not present. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param typeOrVarName Identifier that follows the const-keyword * @return Parsed node */ private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STToken token = peek(); return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName); } private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STNode type; STNode variableName; switch (nextTokenKind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = typeOrVarName; type = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword, typeOrVarName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName); } STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclaration(metadata, qualifier, constKeyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword.
* * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD); return sol.recoveredNode; } } /** * Parse nil type descriptor. * * @return Parsed node */ private STNode parseNilTypeDescriptor() { startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilTypeDescriptor(openParenthesisToken, closeParenthesisToken); } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @return Typeof expression node */ private STNode parseTypeofExpression() { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, false); return STNodeFactory.createTypeofExpression(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD); return sol.recoveredNode; } } /** * Parse optional type descriptor. * * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); return STNodeFactory.createOptionalTypeDescriptor(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! expression * </code> * * @return Unary expression node */ private STNode parseUnaryExpression() { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, false); return STNodeFactory.createUnaryExpression(unaryOperator, expr); } /** * Parse unary operator. * <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.UNARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. 
However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseAnnotations() { STToken nextToken = peek(); return parseAnnotations(nextToken.kind); } private STNode parseAnnotations(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); while (nextTokenKind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextTokenKind = peek().kind; } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. * <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); STNode annotValue = parseMappingConstructorExpr(); return STNodeFactory.createAnnotation(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.AT); return sol.recoveredNode; } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData() { STToken nextToken = peek(); return parseMetaData(nextToken.kind); } private STNode parseMetaData(SyntaxKind nextTokenKind) { STNode docString; STNode annotations; switch (nextTokenKind) { case HASH_TOKEN: consume(); docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(nextTokenKind); break; default: return createEmptyMetadata(); } return STNodeFactory.createMetadata(docString, annotations); } /** * Create empty metadata node. * * @return A metadata node with no doc string and no annotations */ private STNode createEmptyMetadata() { return STNodeFactory.createMetadata(STNodeFactory.createEmptyNode(), STNodeFactory.createNodeList(new ArrayList<>())); } /** * Get the number of tokens to skip to reach the end of annotations. * * @return Number of tokens to skip to reach the end of annotations */ private int getNumberOfTokensToAnnotsEnd() { STToken nextToken; int lookahead = 0; while (true) { nextToken = peek(lookahead); switch (nextToken.kind) { case EOF_TOKEN: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case IMPORT_KEYWORD: case SERVICE_KEYWORD: return lookahead; case IDENTIFIER_TOKEN: if (isVarDeclStart(lookahead)) { return lookahead; } default: lookahead++; break; } } } /** * Parse is expression. * <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseIsExpression(STNode lhsExpr) { startContext(ParserRuleContext.IS_EXPRESSION); STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptor(); endContext(); return STNodeFactory.createIsExpression(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. 
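* <p>For example (illustrative): the <code>is</code> in <code>x is string</code>.</p>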
* * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IS_KEYWORD); return sol.recoveredNode; } } /** * Parse local type definition statement statement. * <code>ocal-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return local type definition statement statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatement( annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse nil literal. * * @return Parsed node */ }
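/*
 * Illustrative sketch (not part of the original source): the parse* methods above all
 * follow one shape -- peek() at the next token, consume() it when it matches the
 * expected kind, and otherwise hand control to the error handler, which either inserts
 * the missing token or removes the unexpected one and retries. The self-contained toy
 * parser below demonstrates that insert/remove recovery pattern for the hypothetical
 * grammar `decl := "const" IDENT "=" NUM ";"`. All names here are invented and greatly
 * simplified compared to BallerinaParserErrorHandler, which this excerpt does not show.
 */
class RecoveryPatternSketch {
    private final java.util.List<String> tokens;
    private int pos = 0;

    RecoveryPatternSketch(java.util.List<String> tokens) {
        this.tokens = tokens;
    }

    private String peek() {
        return pos < tokens.size() ? tokens.get(pos) : "<eof>";
    }

    private String consume() {
        return tokens.get(pos++);
    }

    /** Expect a concrete token; on mismatch, recover by removal or insertion. */
    private String expect(String expected) {
        if (peek().equals(expected)) {
            return consume();
        }
        // Recovery heuristic: if the token after the unexpected one matches, drop the
        // unexpected token (REMOVE); otherwise synthesize the expected token (INSERT)
        // and continue, so one bad token does not derail the rest of the parse.
        if (pos + 1 < tokens.size() && tokens.get(pos + 1).equals(expected)) {
            System.out.println("removing unexpected token: " + consume());
            return consume();
        }
        System.out.println("inserting missing token: " + expected);
        return expected; // synthesized "missing" token
    }

    /** Parses the toy rule; assumes the input has enough tokens for the demo. */
    void parseConstDecl() {
        expect("const");
        String name = consume();  // identifier (not validated in this sketch)
        expect("=");
        String value = consume(); // numeric literal (not validated)
        expect(";");
        System.out.println("parsed: const " + name + " = " + value + ";");
    }

    public static void main(String[] args) {
        // Input "const x 5 ;" -- the missing "=" is synthesized during recovery.
        new RecoveryPatternSketch(java.util.Arrays.asList("const", "x", "5", ";"))
                .parseConstDecl();
    }
}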
class BallerinaParser { private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION; private final BallerinaParserErrorHandler errorHandler; private final AbstractTokenReader tokenReader; private ParserRuleContext currentParamKind = ParserRuleContext.REQUIRED_PARAM; protected BallerinaParser(AbstractTokenReader tokenReader) { this.tokenReader = tokenReader; this.errorHandler = new BallerinaParserErrorHandler(tokenReader, this); } /** * Start parsing the given input. * * @return Parsed node */ public STNode parse() { return parseCompUnit(); } /** * Start parsing the input from a given context. Supported starting points are: * <ul> * <li>Module part (a file)</li> * <li>Top level node</li> * <li>Statement</li> * <li>Expression</li> * </ul> * * @param context Context to start parsing * @return Parsed node */ public STNode parse(ParserRuleContext context) { switch (context) { case COMP_UNIT: return parseCompUnit(); case TOP_LEVEL_NODE: startContext(ParserRuleContext.COMP_UNIT); return parseTopLevelNode(); case STATEMENT: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); return parseStatement(); case EXPRESSION: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); startContext(ParserRuleContext.STATEMENT); return parseExpression(); default: throw new UnsupportedOperationException("Cannot start parsing from: " + context); } } /** * Resume the parsing from the given context. * * @param context Context to resume parsing * @param args Arguments that requires to continue parsing from the given parser context * @return Parsed node */ public STNode resumeParsing(ParserRuleContext context, Object... 
args) { switch (context) { case COMP_UNIT: return parseCompUnit(); case EXTERNAL_FUNC_BODY: return parseExternalFunctionBody(); case FUNC_BODY: return parseFunctionBody(); case OPEN_BRACE: return parseOpenBrace(); case CLOSE_BRACE: return parseCloseBrace(); case FUNC_NAME: return parseFunctionName(); case OPEN_PARENTHESIS: return parseOpenParenthesis(); case PARAM_LIST: return parseParamList(); case RETURN_TYPE_DESCRIPTOR: return parseReturnTypeDescriptor(); case SIMPLE_TYPE_DESCRIPTOR: return parseTypeDescriptor(); case ASSIGN_OP: return parseAssignOp(); case EXTERNAL_KEYWORD: return parseExternalKeyword(); case FUNC_BODY_BLOCK: return parseFunctionBodyBlock(); case SEMICOLON: return parseSemicolon(); case CLOSE_PARENTHESIS: return parseCloseParenthesis(); case VARIABLE_NAME: return parseVariableName(); case TERMINAL_EXPRESSION: return parseTerminalExpression((boolean) args[0], (boolean) args[1]); case STATEMENT: return parseStatement(); case STATEMENT_WITHOUT_ANNOTS: return parseStatement((STNode) args[0]); case EXPRESSION_RHS: return parseExpressionRhs((OperatorPrecedence) args[1], (STNode) args[0], (boolean) args[2], (boolean) args[3]); case PARAMETER: return parseParameter((STNode) args[0], (int) args[1]); case PARAMETER_WITHOUT_ANNOTS: return parseParamGivenAnnots((STNode) args[0], (STNode) args[1], (int) args[2]); case AFTER_PARAMETER_TYPE: return parseAfterParamType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case PARAMETER_RHS: return parseParameterRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case TOP_LEVEL_NODE: return parseTopLevelNode(); case TOP_LEVEL_NODE_WITHOUT_METADATA: return parseTopLevelNode((STNode) args[0]); case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return parseTopLevelNode((STNode) args[0], (STNode) args[1]); case STATEMENT_START_IDENTIFIER: return parseStatementStartIdentifier(); case VAR_DECL_STMT_RHS: return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (boolean) args[4]); case TYPE_REFERENCE: return parseTypeReference(); case FIELD_DESCRIPTOR_RHS: return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]); case NAMED_OR_POSITIONAL_ARG_RHS: return parseNamedOrPositionalArg((STNode) args[0]); case RECORD_BODY_END: return parseRecordBodyCloseDelimiter(); case RECORD_BODY_START: return parseRecordBodyStartDelimiter(); case TYPE_DESCRIPTOR: return parseTypeDescriptor(); case OBJECT_MEMBER: return parseObjectMember(); case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]); case OBJECT_FIELD_RHS: return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case OBJECT_TYPE_FIRST_QUALIFIER: return parseObjectTypeQualifiers(); case OBJECT_TYPE_SECOND_QUALIFIER: return parseObjectTypeSecondQualifier((STNode) args[0]); case OBJECT_KEYWORD: return parseObjectKeyword(); case TYPE_NAME: return parseTypeName(); case IF_KEYWORD: return parseIfKeyword(); case ELSE_KEYWORD: return parseElseKeyword(); case ELSE_BODY: return parseElseBody(); case WHILE_KEYWORD: return parseWhileKeyword(); case BOOLEAN_LITERAL: return parseBooleanLiteral(); case PANIC_KEYWORD: return parsePanicKeyword(); case MAJOR_VERSION: return parseMajorVersion(); case IMPORT_DECL_RHS: return parseImportDecl((STNode) args[0], (STNode) args[1]); case IMPORT_PREFIX: return parseImportPrefix(); case IMPORT_MODULE_NAME: case IMPORT_ORG_OR_MODULE_NAME: case VARIABLE_REF: case 
FIELD_OR_FUNC_NAME: case SERVICE_NAME: return parseIdentifier(context); case IMPORT_KEYWORD: return parseImportKeyword(); case SLASH: return parseSlashToken(); case DOT: return parseDotToken(); case IMPORT_VERSION_DECL: return parseVersion(); case VERSION_KEYWORD: return parseVersionKeywrod(); case VERSION_NUMBER: return parseVersionNumber(); case DECIMAL_INTEGER_LITERAL: return parseDecimalIntLiteral(context); case IMPORT_SUB_VERSION: return parseSubVersion(context); case IMPORT_PREFIX_DECL: return parseImportPrefixDecl(); case AS_KEYWORD: return parseAsKeyword(); case CONTINUE_KEYWORD: return parseContinueKeyword(); case BREAK_KEYWORD: return parseBreakKeyword(); case RETURN_KEYWORD: return parseReturnKeyword(); case MAPPING_FIELD: return parseMappingField((STNode) args[0]); case SPECIFIC_FIELD_RHS: return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]); case STRING_LITERAL: return parseStringLiteral(); case COLON: return parseColon(); case OPEN_BRACKET: return parseOpenBracket(); case RESOURCE_DEF: return parseResource(); case OPTIONAL_SERVICE_NAME: return parseServiceName(); case SERVICE_KEYWORD: return parseServiceKeyword(); case ON_KEYWORD: return parseOnKeyword(); case RESOURCE_KEYWORD: return parseResourceKeyword(); case LISTENER_KEYWORD: return parseListenerKeyword(); case NIL_TYPE_DESCRIPTOR: return parseNilTypeDescriptor(); case COMPOUND_ASSIGNMENT_STMT: return parseCompoundAssignmentStmt(); case TYPEOF_KEYWORD: return parseTypeofKeyword(); case ARRAY_TYPE_DESCRIPTOR: return parseArrayTypeDescriptor((STNode) args[0]); case ARRAY_LENGTH: return parseArrayLength(); case FUNC_DEFINITION: case REQUIRED_PARAM: case ANNOT_REFERENCE: return parseIdentifier(context); case IS_KEYWORD: return parseIsKeyword(); case STMT_START_WITH_EXPR_RHS: return parseStamentStartWithExpr((STNode) args[0]); case COMMA: return parseComma(); case CONST_DECL_TYPE: return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]); case STMT_START_WITH_IDENTIFIER: return parseStatementStartsWithIdentifier((STNode) args[0], (STNode) args[1]); case NULL_KEYWORD: return parseNullKeyword(); case NIL_LITERAL: return parseNilLiteral(); default: throw new IllegalStateException("Cannot re-parse rule: " + context); } } /* * Private methods */ private STToken peek() { return this.tokenReader.peek(); } private STToken peek(int k) { return this.tokenReader.peek(k); } private STToken consume() { return this.tokenReader.read(); } private Solution recover(STToken token, ParserRuleContext currentCtx, Object... parsedNodes) { return this.errorHandler.recover(currentCtx, token, parsedNodes); } private void startContext(ParserRuleContext context) { this.errorHandler.startContext(context); } private void endContext() { this.errorHandler.endContext(); } /** * Switch the current context to the provided one. This will replace the * existing context. * * @param context Context to switch to. */ private void switchContext(ParserRuleContext context) { this.errorHandler.switchContext(context); } /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. 
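* <p>Illustrative example: <code>import foo/bar; function main() { }</code> is accepted, whereas placing the import after the function is reported as invalid, since imports must precede all other declarations.</p>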
* * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); STToken token = peek(); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(token.kind); if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { otherDecls.add(decl); this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations"); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. * * @return Parsed node */ private STNode parseTopLevelNode() { STToken token = peek(); return parseTopLevelNode(token.kind); } protected STNode parseTopLevelNode(SyntaxKind tokenKind) { STNode metadata; switch (tokenKind) { case EOF_TOKEN: return consume(); case HASH_TOKEN: case AT_TOKEN: metadata = parseMetaData(tokenKind); return parseTopLevelNode(metadata); case IMPORT_KEYWORD: case SERVICE_KEYWORD: case FINAL_KEYWORD: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case SIMPLE_TYPE: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: metadata = createEmptyMetadata(); break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(createEmptyMetadata(), null); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind); } return parseTopLevelNode(tokenKind, metadata); } /** * Parse top level node having an optional modifier preceding it, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata) { STToken nextToken = peek(); return parseTopLevelNode(nextToken.kind, metadata); } private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) { STNode qualifier = null; switch (tokenKind) { case EOF_TOKEN: if (metadata != null) { this.errorHandler.reportInvalidNode(null, "invalid metadata"); } return consume(); case PUBLIC_KEYWORD: qualifier = parseQualifier(); tokenKind = peek().kind; break; case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case IMPORT_KEYWORD: case SIMPLE_TYPE: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case SERVICE_KEYWORD: case OPEN_PAREN_TOKEN: break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, null); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind, metadata); } return parseTopLevelNode(tokenKind, metadata, qualifier); } /** * Check whether the cursor is at the start of a module level var-decl. * * @param lookahead Offset of the token to to check * @return <code>true</code> if the cursor is at the start of a module level var-decl. 
* <code>false</code> otherwise. */ private boolean isModuleVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case OPEN_BRACKET_TOKEN: case QUESTION_MARK_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } case COLON_TOKEN: if (lookahead > 1) { return false; } if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) { return false; } return isModuleVarDeclStart(lookahead + 2); default: return false; } } /** * Parse import declaration. * <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.switchMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STToken token = peek(); STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier); this.tokenReader.resetMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD); return sol.recoveredNode; } } /** * Parse identifier. * * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl. 
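* <p>For example (illustrative): given <code>import foo/bar.baz version 1.0.2 as b;</code>, this parses everything after the initial <code>foo</code>.</p>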
* * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); return parseImportDecl(nextToken.kind, importKeyword, identifier); } private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) { STNode orgName; STNode moduleName; STNode version; STNode alias; switch (tokenKind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgNameNode(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportDecl(solution.tokenKind, importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon); } /** * parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SLASH); return sol.recoveredNode; } } /** * Parse dot token. * * @return Parsed node */ private STNode parseDotToken() { STToken nextToken = peek(); return parseDotToken(nextToken.kind); } private STNode parseDotToken(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.DOT_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.DOT); return sol.recoveredNode; } } /** * Parse module name of a import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(peek().kind, moduleNameStart); } /** * Parse import module name of a import declaration, given the module name start identifier. 
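* <p>Handles dotted module names such as <code>foo.bar.baz</code> (illustrative), consuming <code>. identifier</code> pairs until the name ends.</p>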
* * @param moduleNameStart Starting identifier of the module name * @return Parsed node */ private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); STNode identifier; STNode dotToken; while (!isEndOfImportModuleName(nextTokenKind)) { dotToken = parseDotToken(); identifier = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); STNode moduleNamePart = STNodeFactory.createSubModuleNameNode(dotToken, identifier); moduleNameParts.add(moduleNamePart); nextTokenKind = peek().kind; } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) { return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: return true; default: return false; } } /** * Parse version component of a import declaration. * <p> * <code>version-decl := version sem-ver</code> * * @return Parsed node */ private STNode parseVersion() { STToken nextToken = peek(); return parseVersion(nextToken.kind); } private STNode parseVersion(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case VERSION_KEYWORD: STNode versionKeyword = parseVersionKeywrod(); STNode versionNumber = parseVersionNumber(); return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber); case AS_KEYWORD: case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersion(solution.tokenKind); } } /** * Parse version keywrod. * * @return Parsed node */ private STNode parseVersionKeywrod() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD); return sol.recoveredNode; } } /** * Parse version number. * <p> * <code>sem-ver := major-num [. minor-num [. 
patch-num]] * <br/> * major-num := DecimalNumber * <br/> * minor-num := DecimalNumber * <br/> * patch-num := DecimalNumber * </code> * * @return Parsed node */ private STNode parseVersionNumber() { STToken nextToken = peek(); return parseVersionNumber(nextToken.kind); } private STNode parseVersionNumber(SyntaxKind nextTokenKind) { STNode majorVersion; switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL: majorVersion = parseMajorVersion(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersionNumber(solution.tokenKind); } List<STNode> versionParts = new ArrayList<>(); versionParts.add(majorVersion); STNode minorVersion = parseMinorVersion(); if (minorVersion != null) { versionParts.add(minorVersion); STNode patchVersion = parsePatchVersion(); if (patchVersion != null) { versionParts.add(patchVersion); } } return STNodeFactory.createNodeList(versionParts); } private STNode parseMajorVersion() { return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION); } private STNode parseMinorVersion() { return parseSubVersion(ParserRuleContext.MINOR_VERSION); } private STNode parsePatchVersion() { return parseSubVersion(ParserRuleContext.PATCH_VERSION); } /** * Parse decimal literal. * * @param context Context in which the decimal literal is used. * @return Parsed node */ private STNode parseDecimalIntLiteral(ParserRuleContext context) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) { return consume(); } else { Solution sol = recover(peek(), context); return sol.recoveredNode; } } /** * Parse sub version. i.e: minor-version/patch-version. * * @param context Context indicating what kind of sub-version is being parsed. * @return Parsed node */ private STNode parseSubVersion(ParserRuleContext context) { STToken nextToken = peek(); return parseSubVersion(nextToken.kind, context); } private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) { switch (nextTokenKind) { case AS_KEYWORD: case SEMICOLON_TOKEN: return null; case DOT_TOKEN: STNode leadingDot = parseDotToken(); STNode versionNumber = parseDecimalIntLiteral(context); return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSubVersion(solution.tokenKind, context); } } /** * Parse import prefix declaration. * <p> * <code>import-prefix-decl := as import-prefix * <br/> * import-prefix := a identifier | _ * </code> * * @return Parsed node */ private STNode parseImportPrefixDecl() { STToken token = peek(); return parseImportPrefixDecl(token.kind); } private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case AS_KEYWORD: STNode asKeyword = parseAsKeyword(); STNode prefix = parseImportPrefix(); return STNodeFactory.createImportPrefixNode(asKeyword, prefix); case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportPrefixDecl(solution.tokenKind); } } /** * Parse <code>as</code> keyword. 
* * @return Parsed node */ private STNode parseAsKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AS_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD); return sol.recoveredNode; } } /** * Parse import prefix. * * @return Parsed node */ private STNode parseImportPrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX); return sol.recoveredNode; } } /** * Parse top level node, given the modifier that precedes it. * * @param qualifier Qualifier that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata, STNode qualifier) { STToken token = peek(); return parseTopLevelNode(token.kind, metadata, qualifier); } /** * Parse top level node given the next token kind and the modifier that precedes it. * * @param tokenKind Next token kind * @param qualifier Qualifier that precedes the top level node * @return Parsed top-level node */ private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) { switch (tokenKind) { case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, getQualifier(qualifier)); case TYPE_KEYWORD: return parseModuleTypeDefinition(metadata, getQualifier(qualifier)); case LISTENER_KEYWORD: return parseListenerDeclaration(metadata, getQualifier(qualifier)); case CONST_KEYWORD: return parseConstantDeclaration(metadata, getQualifier(qualifier)); case IMPORT_KEYWORD: reportInvalidQualifier(qualifier); return parseImportDecl(); case FINAL_KEYWORD: reportInvalidQualifier(qualifier); STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(metadata, finalKeyword, true); case SERVICE_KEYWORD: if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) { reportInvalidQualifier(qualifier); return parseServiceDecl(metadata); } return parseModuleVarDecl(metadata, qualifier); case SIMPLE_TYPE: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: return parseModuleVarDecl(metadata, qualifier); case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, qualifier); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind, metadata, qualifier); } } private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) { reportInvalidQualifier(qualifier); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(metadata, finalKeyword, true); } private STNode getQualifier(STNode qualifier) { return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier; } private void reportInvalidQualifier(STNode qualifier) { if (qualifier != null && qualifier.kind != SyntaxKind.NONE) { this.errorHandler.reportInvalidNode((STToken) qualifier, "invalid qualifier '" + qualifier.toString().trim() + "'"); } } /** * Parse access modifiers. * * @return Parsed node */ private STNode parseQualifier() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse function definition. A function definition has the following structure. 
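* For example (illustrative): <code>public function add(int a, int b) returns int { return a + b; }</code>.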
* </p> * <code> * function-defn := FUNCTION identifier function-signature function-body * </code> * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @return Parsed node */ private STNode parseFunctionDefinition(STNode metadata, STNode visibilityQualifier) { startContext(ParserRuleContext.FUNC_DEFINITION); STNode functionKeyword = parseFunctionKeyword(); STNode name = parseFunctionName(); STNode openParenthesis = parseOpenParenthesis(); STNode parameters = parseParamList(); STNode closeParenthesis = parseCloseParenthesis(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode body = parseFunctionBody(); endContext(); return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name, openParenthesis, parameters, closeParenthesis, returnTypeDesc, body); } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD); return sol.recoveredNode; } } /** * Parse function name. * * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNC_NAME); return sol.recoveredNode; } } /** * Parse open parenthesis. * * @return Parsed node */ private STNode parseOpenParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_PARENTHESIS); return sol.recoveredNode; } } /** * Parse close parenthesis. * * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return sol.recoveredNode; } } /** * <p> * Parse parameter list. * </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... [param-name] * <br/><br/> * param-name := identifier * </code> * * @return Parsed node */ private STNode parseParamList() { startContext(ParserRuleContext.PARAM_LIST); ArrayList<STNode> paramsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } STNode startingComma = STNodeFactory.createEmptyNode(); this.currentParamKind = ParserRuleContext.REQUIRED_PARAM; paramsList.add(parseParameter(startingComma)); token = peek(); while (!isEndOfParametersList(token.kind)) { STNode leadingComma = parseComma(); STNode param = parseParameter(leadingComma); paramsList.add(param); token = peek(); } STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } /** * Parse a single parameter. 
Parameter can be a required parameter, a defaultable * parameter, or a rest parameter. * * @param leadingComma Comma that occurs before the param * @return Parsed node */ private STNode parseParameter(STNode leadingComma) { STToken token = peek(); if (this.currentParamKind == ParserRuleContext.REST_PARAM) { this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter"); startContext(ParserRuleContext.REQUIRED_PARAM); } else { startContext(this.currentParamKind); } return parseParameter(token.kind, leadingComma, 1); } private STNode parseParameter(STNode leadingComma, int nextTokenOffset) { return parseParameter(peek().kind, leadingComma, nextTokenOffset); } private STNode parseParameter(SyntaxKind nextTokenKind, STNode leadingComma, int nextTokenOffset) { STNode annots; switch (nextTokenKind) { case AT_TOKEN: annots = parseAnnotations(nextTokenKind); nextTokenKind = peek().kind; break; case PUBLIC_KEYWORD: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: annots = STNodeFactory.createNodeList(new ArrayList<>()); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); STNode qualifier = STNodeFactory.createEmptyNode(); return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER, leadingComma, nextTokenOffset); if (solution.action == Action.KEEP) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameter(solution.tokenKind, leadingComma, 0); } return parseParamGivenAnnots(nextTokenKind, leadingComma, annots, 1); } private STNode parseParamGivenAnnots(STNode leadingComma, STNode annots, int nextNextTokenOffset) { return parseParamGivenAnnots(peek().kind, leadingComma, annots, nextNextTokenOffset); } private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, STNode leadingComma, STNode annots, int nextTokenOffset) { STNode qualifier; switch (nextTokenKind) { case PUBLIC_KEYWORD: qualifier = parseQualifier(); break; case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: qualifier = STNodeFactory.createEmptyNode(); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { qualifier = STNodeFactory.createEmptyNode(); break; } case AT_TOKEN: default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, leadingComma, annots, nextTokenOffset); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParamGivenAnnots(solution.tokenKind, leadingComma, annots, 0); } return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } private STNode parseParamGivenAnnotsAndQualifier(STNode leadingComma, STNode annots, STNode qualifier) { STNode type = parseTypeDescriptor(); STNode param = parseAfterParamType(leadingComma, annots, qualifier, type); endContext(); return param; } /** * Check whether the cursor is at the start of a parameter that doesn't have annotations. * * @param tokenOffset Offset of the token to check * @return <code>true</code> if the cursor is at the start of a parameter. <code>false</code> otherwise. 
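* <p>For example (illustrative): returns <code>true</code> with the cursor at <code>Foo</code> in <code>Foo name</code> or <code>Foo... rest</code>.</p>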
*/ private boolean isParamWithoutAnnotStart(int tokenOffset) { STToken nextToken = peek(tokenOffset + 1); switch (nextToken.kind) { case PUBLIC_KEYWORD: return isParamWithoutAnnotStart(tokenOffset + 1); case ELLIPSIS_TOKEN: return true; case IDENTIFIER_TOKEN: return true; default: return false; } } private STNode parseAfterParamType(STNode leadingComma, STNode annots, STNode qualifier, STNode type) { STToken token = peek(); return parseAfterParamType(token.kind, leadingComma, annots, qualifier, type); } private STNode parseAfterParamType(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type) { switch (tokenKind) { case ELLIPSIS_TOKEN: this.currentParamKind = ParserRuleContext.REST_PARAM; switchContext(ParserRuleContext.REST_PARAM); reportInvalidQualifier(qualifier); STNode ellipsis = parseEllipsis(); STNode paramName = parseVariableName(); return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(leadingComma, annots, qualifier, type, paramName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, leadingComma, annots, qualifier, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAfterParamType(solution.tokenKind, leadingComma, annots, qualifier, type); } } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELLIPSIS); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a required/defaultable parameter. * </p> * <code>parameter-rhs := [= expression]</code> * * @param leadingComma Comma that precedes this parameter * @param annots Annotations attached to the parameter * @param qualifier Visibility qualifier * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { STToken token = peek(); return parseParameterRhs(token.kind, leadingComma, annots, qualifier, type, paramName); } private STNode parseParameterRhs(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { if (isEndOfParameter(tokenKind)) { if (this.currentParamKind == ParserRuleContext.DEFAULTABLE_PARAM) { this.errorHandler.reportInvalidNode(peek(), "cannot have a required parameter after a defaultable parameter"); } return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName); } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) { if (this.currentParamKind == ParserRuleContext.REQUIRED_PARAM) { this.currentParamKind = ParserRuleContext.DEFAULTABLE_PARAM; switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal, expr); } else { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_RHS, leadingComma, annots, qualifier, type, paramName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameterRhs(solution.tokenKind, leadingComma, annots, qualifier, type, paramName); } } /** * Parse comma. 
* * @return Parsed node */ private STNode parseComma() { STToken token = peek(); if (token.kind == SyntaxKind.COMMA_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMMA); return sol.recoveredNode; } } /** * Check whether the given token is an end of a parameter. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise */ private boolean isEndOfParameter(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case AT_TOKEN: return true; default: return false; } } /** * Check whether the given token is an end of a parameter-list. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise */ private boolean isEndOfParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case OPEN_BRACE_TOKEN: return true; default: return false; } } /** * Parse return type descriptor of a function. A return type descriptor has the following structure. * * <code>return-type-descriptor := [ returns annots type-descriptor ]</code> * * @return Parsed node */ private STNode parseReturnTypeDescriptor() { startContext(ParserRuleContext.RETURN_TYPE_DESCRIPTOR); STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { endContext(); return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseAnnotations(); STNode type = parseTypeDescriptor(); endContext(); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * <p> * Parse a type descriptor. A type descriptor has the following structure. * </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor() { STToken token = peek(); STNode typeDesc = parseTypeDescriptor(token.kind); return parseComplexTypeDescriptor(typeDesc); } /** * This will handle the parsing of optional,array,union type desc to infinite length. * * @param typeDesc * * @return Parsed type descriptor node */ private STNode parseComplexTypeDescriptor(STNode typeDesc) { STToken nextToken = peek(); switch (nextToken.kind) { case QUESTION_MARK_TOKEN: return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc)); case OPEN_BRACKET_TOKEN: return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc)); default: return typeDesc; } } /** * <p> * Parse a type descriptor, given the next token kind. 
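* For example (illustrative): <code>int</code>, <code>foo:Bar</code>, <code>record { }</code>, or <code>()</code>; any trailing <code>?</code> or <code>[]</code> is then folded in by <code>parseComplexTypeDescriptor</code>, yielding types such as <code>int[]?</code>.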
* </p> * If the preceding token is <code>?</code> then it is an optional type descriptor * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseTypeDescriptor(SyntaxKind tokenKind) { switch (tokenKind) { case SIMPLE_TYPE: case SERVICE_KEYWORD: return parseSimpleTypeDescriptor(); case IDENTIFIER_TOKEN: return parseTypeReference(); case RECORD_KEYWORD: return parseRecordTypeDescriptor(); case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: return parseObjectTypeDescriptor(); case OPEN_PAREN_TOKEN: return parseNilTypeDescriptor(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTypeDescriptor(solution.tokenKind); } } /** * Parse simple type descriptor. * * @return Parsed node */ private STNode parseSimpleTypeDescriptor() { STToken node = peek(); switch (node.kind) { case SIMPLE_TYPE: case SERVICE_KEYWORD: return consume(); default: Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR); return sol.recoveredNode; } } /** * <p> * Parse function body. A function body has the following structure. * </p> * <code> * function-body := function-body-block | external-function-body * external-function-body := = annots external ; * function-body-block := { [default-worker-init, named-worker-decl+] default-worker } * </code> * * @return Parsed node */ private STNode parseFunctionBody() { STToken token = peek(); return parseFunctionBody(token.kind); } /** * Parse function body, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ protected STNode parseFunctionBody(SyntaxKind tokenKind) { switch (tokenKind) { case EQUAL_TOKEN: return parseExternalFunctionBody(); case OPEN_BRACE_TOKEN: return parseFunctionBodyBlock(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNC_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.tokenKind == SyntaxKind.NONE) { return STNodeFactory.createMissingToken(solution.tokenKind); } return parseFunctionBody(solution.tokenKind); } } /** * <p> * Parse function body block. A function body block has the following structure. * </p> * * <code> * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/> * default-worker-init := sequence-stmt<br/> * default-worker := sequence-stmt<br/> * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/> * worker-name := identifier<br/> * </code> * * @return Parsed node */ private STNode parseFunctionBodyBlock() { startContext(ParserRuleContext.FUNC_BODY_BLOCK); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace); } /** * Check whether the given token is an end of a block. * * @param tokenKind STToken to check * @return <code>true</code> if the token represents an end of a block. 
<code>false</code> otherwise */ private boolean isEndOfBlockNode(SyntaxKind tokenKind) { return isEndOfBlockNode(tokenKind, 1); } private boolean isEndOfBlockNode(SyntaxKind tokenKind, int lookahead) { switch (tokenKind) { case EOF_TOKEN: case HASH_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case FUNCTION_KEYWORD: case IMPORT_KEYWORD: case ELSE_KEYWORD: case RESOURCE_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.STATEMENT, lookahead); case AT_TOKEN: lookahead = getNumberOfTokensToAnnotsEnd(); return isEndOfBlockNode(peek(lookahead).kind, lookahead); default: return false; } } private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) { STToken nexNextToken = peek(2); switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 1); default: switch (nexNextToken.kind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 2); default: return false; } } } private boolean isEndOfObjectTypeNode(SyntaxKind tokenKind, SyntaxKind nextNextTokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1); default: switch (nextNextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 2); default: return false; } } } /** * Parse type reference or variable reference. * * @return Parsed node */ private STNode parseStatementStartIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName() { STToken token = peek(); return parseVariableName(token.kind); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME); return sol.recoveredNode; } } /** * Parse open brace. * * @return Parsed node */ private STNode parseOpenBrace() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACE); return sol.recoveredNode; } } /** * Parse close brace. * * @return Parsed node */ private STNode parseCloseBrace() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE); return sol.recoveredNode; } } /** * <p> * Parse external function body. An external function body has the following structure. 
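* For example (illustrative): the <code>= external;</code> body in <code>function f() = external;</code>.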
* </p> * <code> * external-function-body := = annots external ; * </code> * * @return Parsed node */ private STNode parseExternalFunctionBody() { startContext(ParserRuleContext.EXTERNAL_FUNC_BODY); STNode assign = parseAssignOp(); STNode annotation = parseAnnotations(); STNode externalKeyword = parseExternalKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon); } /** * Parse semicolon. * * @return Parsed node */ private STNode parseSemicolon() { STToken token = peek(); if (token.kind == SyntaxKind.SEMICOLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SEMICOLON); return sol.recoveredNode; } } /** * Parse <code>external</code> keyword. * * @return Parsed node */ private STNode parseExternalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD); return sol.recoveredNode; } } /* * Operators */ /** * Parse assign operator. * * @return Parsed node */ private STNode parseAssignOp() { STToken token = peek(); if (token.kind == SyntaxKind.EQUAL_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ASSIGN_OP); return sol.recoveredNode; } } /** * Parse binary operator. * * @return Parsed node */ private STNode parseBinaryOperator() { STToken token = peek(); if (isBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case EQUAL_GT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: return true; default: return false; } } /** * Get the precedence of a given operator. 
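* <p>For example (illustrative): in <code>a + b * c</code>, <code>*</code> (MULTIPLICATIVE) binds tighter than <code>+</code> (ADDITIVE), so <code>b * c</code> is grouped first.</p>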
* * @param binaryOpKind Operator kind * @return Precedence of the given operator */ private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) { switch (binaryOpKind) { case ASTERISK_TOKEN: case SLASH_TOKEN: return OperatorPrecedence.MULTIPLICATIVE; case PLUS_TOKEN: case MINUS_TOKEN: return OperatorPrecedence.ADDITIVE; case GT_TOKEN: case LT_TOKEN: case GT_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case IS_KEYWORD: return OperatorPrecedence.BINARY_COMPARE; case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case OPEN_PAREN_TOKEN: return OperatorPrecedence.MEMBER_ACCESS; case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: return OperatorPrecedence.EQUALITY; case BITWISE_AND_TOKEN: return OperatorPrecedence.BITWISE_AND; case BITWISE_XOR_TOKEN: return OperatorPrecedence.BITWISE_XOR; case PIPE_TOKEN: return OperatorPrecedence.BITWISE_OR; case LOGICAL_AND_TOKEN: return OperatorPrecedence.LOGICAL_AND; case LOGICAL_OR_TOKEN: return OperatorPrecedence.LOGICAL_OR; case RIGHT_ARROW_TOKEN: return OperatorPrecedence.ACTION; default: throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'"); } } /** * <p> * Get the operator kind to insert during recovery, given the precedence level. * </p> * * @param opPrecedenceLevel Precedence of the given operator * @return Kind of the operator to insert */ private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) { switch (opPrecedenceLevel) { case UNARY: case ACTION: case MULTIPLICATIVE: return SyntaxKind.ASTERISK_TOKEN; case ADDITIVE: return SyntaxKind.PLUS_TOKEN; case BINARY_COMPARE: return SyntaxKind.LT_TOKEN; case EQUALITY: return SyntaxKind.DOUBLE_EQUAL_TOKEN; case BITWISE_AND: return SyntaxKind.BITWISE_AND_TOKEN; case BITWISE_XOR: return SyntaxKind.BITWISE_XOR_TOKEN; case BITWISE_OR: return SyntaxKind.PIPE_TOKEN; case LOGICAL_AND: return SyntaxKind.LOGICAL_AND_TOKEN; case LOGICAL_OR: return SyntaxKind.LOGICAL_OR_TOKEN; default: throw new UnsupportedOperationException( "Unsupported operator precedence level'" + opPrecedenceLevel + "'"); } } /** * <p> * Parse a module type definition. * </p> * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code> * * @param metadata Metadata * @param qualifier Visibility qualifier * @return Parsed node */ private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_TYPE_DEFINITION); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse type keyword. * * @return Parsed node */ private STNode parseTypeKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD); return sol.recoveredNode; } } /** * Parse type name. * * @return Parsed node */ private STNode parseTypeName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_NAME); return sol.recoveredNode; } } /** * <p> * Parse record type descriptor. A record type descriptor body has the following structure. 
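* For example (illustrative): <code>record { int x; }</code> is inclusive, while <code>record {| int x; |}</code> is exclusive.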
* </p> * * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* } * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |} * </code> * * @return Parsed node */ private STNode parseRecordTypeDescriptor() { startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR); STNode recordKeyword = parseRecordKeyword(); STNode bodyStartDelimiter = parseRecordBodyStartDelimiter(); boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN; STNode fields = parseFieldDescriptors(isInclusive); STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(); endContext(); return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields, bodyEndDelimiter); } /** * Parse record body start delimiter. * * @return Parsed node */ private STNode parseRecordBodyStartDelimiter() { STToken token = peek(); return parseRecordBodyStartDelimiter(token.kind); } private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) { switch (kind) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyStart(); case OPEN_BRACE_TOKEN: return parseOpenBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyStartDelimiter(solution.tokenKind); } } /** * Parse closed-record body start delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyStart() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START); return sol.recoveredNode; } } /** * Parse record body close delimiter. * * @return Parsed node */ private STNode parseRecordBodyCloseDelimiter() { STToken token = peek(); return parseRecordBodyCloseDelimiter(token.kind); } private STNode parseRecordBodyCloseDelimiter(SyntaxKind kind) { switch (kind) { case CLOSE_BRACE_PIPE_TOKEN: return parseClosedRecordBodyEnd(); case CLOSE_BRACE_TOKEN: return parseCloseBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyCloseDelimiter(solution.tokenKind); } } /** * Parse closed-record body end delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyEnd() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END); return sol.recoveredNode; } } /** * Parse record keyword. * * @return Parsed node */ private STNode parseRecordKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RECORD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse field descriptors. 
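* For example (an illustrative input): in <code>record {| string name; int age = 0; int...; |}</code>, two field descriptors are followed by a record-rest-descriptor, after which no further fields may appear.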
* </p> * * @return Parsed node */ private STNode parseFieldDescriptors(boolean isInclusive) { ArrayList<STNode> recordFields = new ArrayList<>(); STToken token = peek(); while (!isEndOfRecordTypeNode(token.kind)) { STNode field = parseFieldOrRestDescriptor(isInclusive); recordFields.add(field); token = peek(); if (field.kind == SyntaxKind.RECORD_REST_TYPE) { break; } } while (!isEndOfRecordTypeNode(token.kind)) { parseFieldOrRestDescriptor(isInclusive); this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor"); token = peek(); } return STNodeFactory.createNodeList(recordFields); } /** * <p> * Parse field descriptor or rest descriptor. * </p> * * <code> * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ; * <br/><br/>field-name := identifier * <br/><br/>default-value := = expression * <br/><br/>record-type-reference := * type-reference ; * <br/><br/>record-rest-descriptor := type-descriptor ... ; * </code> * * @return Parsed node */ private STNode parseFieldOrRestDescriptor(boolean isInclusive) { startContext(ParserRuleContext.RECORD_FIELD); STToken token = peek(); if (token.kind == SyntaxKind.ASTERISK_TOKEN) { STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); } STNode metadata = parseMetaData(); STNode type = parseTypeDescriptor(); STNode fieldOrRestDesc; if (isInclusive) { STNode fieldName = parseVariableName(); fieldOrRestDesc = parseFieldDescriptorRhs(metadata, type, fieldName); } else { fieldOrRestDesc = parseFieldOrRestDescriptorRhs(metadata, type); } endContext(); return fieldOrRestDesc; } /** * Parse type reference. * <code>type-reference := identifier | qualified-identifier</code> * * @return Type reference node */ private STNode parseTypeReference() { return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE); } /** * Parse identifier or qualified identifier. * * @return Identifier node */ private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode typeRefOrPkgRef = consume(); return parseQualifiedIdentifier(typeRefOrPkgRef); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse identifier or qualified identifier, given the starting identifier. * * @param identifier Starting identifier * @return Parse node */ private STNode parseQualifiedIdentifier(STNode identifier) { STToken nextToken = peek(1); if (nextToken.kind != SyntaxKind.COLON_TOKEN) { return identifier; } STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STToken colon = consume(); STToken varOrFuncName = consume(); return STNodeFactory.createQualifiedIdentifierNode(identifier, colon, varOrFuncName); } else { this.errorHandler.removeInvalidToken(); return parseQualifiedIdentifier(identifier); } } /** * Parse RHS of a field or rest type descriptor. 
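* For example (illustrative): once the type descriptor <code>int</code> has been parsed, an ellipsis (<code>int...;</code>) makes this a record-rest-descriptor, while an identifier (<code>int age;</code>) makes it an individual field descriptor.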
* * @param metadata Metadata * @param type Type descriptor * @return Parsed node */ private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) { STToken token = peek(); return parseFieldOrRestDescriptorRhs(token.kind, metadata, type); } private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) { switch (kind) { case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken); case IDENTIFIER_TOKEN: STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type); } } /** * <p> * Parse field descriptor rhs. * </p> * * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) { STToken token = peek(); return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName); } /** * <p> * Parse field descriptor rhs. * </p> * * <code> * field-descriptor := [? | default-value] ; * <br/>default-value := = expression * </code> * * @param kind Kind of the next token * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) { switch (kind) { case SEMICOLON_TOKEN: STNode questionMarkToken = STNodeFactory.createEmptyNode(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case QUESTION_MARK_TOKEN: questionMarkToken = parseQuestionMark(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case EQUAL_TOKEN: STNode equalsToken = parseAssignOp(); STNode expression = parseExpression(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken, expression, semicolonToken); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName); } } /** * Parse question mark. * * @return Parsed node */ private STNode parseQuestionMark() { STToken token = peek(); if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.QUESTION_MARK); return sol.recoveredNode; } } /* * Statements */ /** * Parse statements, until an end of a block is reached. * * @return Parsed node */ private STNode parseStatements() { STToken token = peek(); ArrayList<STNode> stmts = new ArrayList<>(); while (!isEndOfBlockNode(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } stmts.add(stmt); token = peek(); } return STNodeFactory.createNodeList(stmts); } /** * Parse a single statement. 
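* <p> * Dispatch is driven by the kind of the next token; e.g. (illustrative) <code>if</code> starts an if-else block, <code>while</code> a while statement, <code>return x;</code> a return statement, and a type token such as <code>int</code> a local variable declaration. * </p>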
* * @return Parsed node */ protected STNode parseStatement() { STToken token = peek(); return parseStatement(token.kind); } private STNode parseStatement(SyntaxKind tokenKind) { STNode annots = null; switch (tokenKind) { case SEMICOLON_TOKEN: this.errorHandler.removeInvalidToken(); return parseStatement(); case AT_TOKEN: annots = parseAnnotations(tokenKind); tokenKind = peek().kind; break; case FINAL_KEYWORD: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case IDENTIFIER_TOKEN: case IF_KEYWORD: case WHILE_KEYWORD: case PANIC_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case CONTINUE_KEYWORD: case BREAK_KEYWORD: case RETURN_KEYWORD: case TYPE_KEYWORD: case OPEN_PAREN_TOKEN: break; default: if (isValidLHSExpression(tokenKind)) { break; } if (isEndOfBlockNode(tokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind); } return parseStatement(tokenKind, annots); } private STNode getAnnotations(STNode nullableAnnot) { if (nullableAnnot != null) { return nullableAnnot; } return STNodeFactory.createNodeList(new ArrayList<>()); } private STNode parseStatement(STNode annots) { return parseStatement(peek().kind, annots); } /** * Parse a single statement, given the next token kind. * * @param tokenKind Next token kind * @param annots Annotations that precede the statement * @return Parsed node */ private STNode parseStatement(SyntaxKind tokenKind, STNode annots) { switch (tokenKind) { case SEMICOLON_TOKEN: this.errorHandler.removeInvalidToken(); return parseStatement(annots); case FINAL_KEYWORD: STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case IF_KEYWORD: return parseIfElseBlock(); case WHILE_KEYWORD: return parseWhileStatement(); case PANIC_KEYWORD: return parsePanicStatement(); case CONTINUE_KEYWORD: return parseContinueStatement(); case BREAK_KEYWORD: return parseBreakStatement(); case RETURN_KEYWORD: return parseReturnStatement(); case TYPE_KEYWORD: return parseLocalTypeDefinitionStatement(getAnnotations(annots)); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseStamentStartsWithExpr(tokenKind); case IDENTIFIER_TOKEN: return parseStatementStartsWithIdentifier(getAnnotations(annots)); default: if (isEndOfBlockNode(tokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind, annots); } } private STNode getNextNextToken(SyntaxKind tokenKind) { return peek(1).kind == tokenKind ? peek(2) : peek(1); } /** * <p> * Parse variable declaration. Variable declaration can be a local or module level. 
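* For example (illustrative): <code>final int count = 0;</code> is an init var-decl, while <code>string name;</code> is a no-init var-decl.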
* </p> * * <code> * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt * <br/><br/> * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ; * <br/><br/> * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ; * </code> * * @param annots Annotations or metadata * @param finalKeyword Final keyword * @return Parsed node */ private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode type = parseTypeDescriptor(); STNode varName = parseVariableName(); STNode varDecl = parseVarDeclRhs(annots, finalKeyword, type, varName, isModuleVar); endContext(); return varDecl; } /** * Parse final keyword. * * @return Parsed node */ private STNode parseFinalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FINAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FINAL_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a variable declaration statement. * </p> * <code> * var-decl-rhs := ; | = action-or-expr ; * </code> * * @param metadata metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @return Parsed node */ private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STToken token = peek(); return parseVarDeclRhs(token.kind, metadata, finalKeyword, type, varName, isModuleVar); } /** * Parse the right hand side of a variable declaration statement, given the * next token kind. * * @param tokenKind Next token kind * @param metadata Metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @param isModuleVar flag indicating whether the var is module level * @return Parsed node */ private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STNode assign; STNode expr; STNode semicolon; switch (tokenKind) { case EQUAL_TOKEN: assign = parseAssignOp(); if (isModuleVar) { expr = parseExpression(); } else { expr = parseActionOrExpression(); } semicolon = parseSemicolon(); break; case SEMICOLON_TOKEN: if (isModuleVar) { this.errorHandler.reportMissingTokenError("assignment required"); } assign = STNodeFactory.createEmptyNode(); expr = STNodeFactory.createEmptyNode(); semicolon = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, type, varName, isModuleVar); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, type, varName, isModuleVar); } if (isModuleVar) { return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } /** * <p> * Parse the RHS portion of the assignment. 
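* For example (illustrative): with <code>x</code> already parsed as the LHS, this parses the remaining <code>= getValue();</code>.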
* </p> * <code>assignment-stmt-rhs := = action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode assign = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon); } /* * Expressions */ /** * Parse expression. This will start parsing expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, false); } /** * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseActionOrExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(SyntaxKind tokenKind) { return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true); } /** * Parse expression. * * @param isRhsExpr Flag indicating whether this is a rhs expression * @return Parsed node */ private STNode parseExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false); } private void validateLVExpr(STNode expression) { if (isValidLVExpr(expression)) { return; } this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs"); } private boolean isValidLVExpr(STNode expression) { switch (expression.kind) { case IDENTIFIER_TOKEN: case QUALIFIED_IDENTIFIER: return true; case FIELD_ACCESS: return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression); case MEMBER_ACCESS: return isValidLVExpr(((STMemberAccessExpressionNode) expression).containerExpression); default: return false; } } /** * Parse an expression that has an equal or higher precedence than a given level. * * @param precedenceLevel Precedence level of expression to be parsed * @param isRhsExpr Flag indicating whether this is a rhs expression * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STToken token = peek(); return parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions); } private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions); } /** * Parse terminal expressions. A terminal expression has the highest precedence level * out of all expressions, and will be at the leaves of an expression tree. 
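* For example (illustrative): in <code>a + foo(1)</code>, the terminal expressions are the reference <code>a</code>, the function name <code>foo</code>, and the literal <code>1</code>; the call and the binary expression are then built around them while parsing the expression RHS.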
* * @param isRhsExpr Is a rhs expression * @param allowActions Allow actions * @return Parsed node */ private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions) { return parseTerminalExpression(peek().kind, isRhsExpr, allowActions); } private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) { switch (kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: return parseLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_NAME); case OPEN_PAREN_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } return parseBracedExpression(isRhsExpr, allowActions); case TRUE_KEYWORD: case FALSE_KEYWORD: return parseBooleanLiteral(); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCheckExpression(isRhsExpr, allowActions); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case TYPEOF_KEYWORD: return parseTypeofExpression(isRhsExpr); case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return parseUnaryExpression(isRhsExpr); case NULL_KEYWORD: return parseNullKeyword(); default: Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, isRhsExpr, allowActions); if (solution.recoveredNode.kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseQualifiedIdentifier(solution.recoveredNode); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_PAREN_TOKEN && peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } return solution.recoveredNode; } } private STNode parseActionOrExpressionInLhs(STNode lhsExpr) { return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, lhsExpr, false, true); } /** * <p> * Parse the right-hand-side of an expression. * </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isLVExpr Flag indicating whether this is on a lhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isLVExpr, boolean allowActions) { STToken token = peek(); return parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isLVExpr, allowActions); } /** * Parse the right hand side of an expression given the next token kind. 
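* <p> * This is the precedence-climbing step; as an illustrative walk-through, when parsing <code>a + b * c</code> at the ADDITIVE level, the higher-precedence <code>*</code> is folded into the right operand to give <code>a + (b * c)</code>, whereas an operator with lower precedence than the current level ends the loop and the current LHS is returned. * </p>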
* * @param tokenKind Next token kind * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression * @param isRhsExpr Flag indicating whether this is a rhs expr or not * @param allowActions Flag indicating whether to allow actions or not * @return Parsed node */ private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { if (isEndOfExpression(tokenKind, isRhsExpr)) { return lhsExpr; } if (!isValidExprRhsStart(tokenKind)) { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) { SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel); return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } else { return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } } OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind); if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence)) { return lhsExpr; } STNode newLhsExpr; switch (tokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr); break; case IS_KEYWORD: newLhsExpr = parseTypeTestExpression(lhsExpr); break; case RIGHT_ARROW_TOKEN: newLhsExpr = parseAction(tokenKind, lhsExpr); if (!allowActions) { this.errorHandler.reportInvalidNode(null, "actions are not allowed here"); } break; default: STNode operator = parseBinaryOperator(); STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false); newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator, rhsExpr); break; } return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions); } private boolean isValidExprRhsStart(SyntaxKind tokenKind) { switch (tokenKind) { case OPEN_PAREN_TOKEN: case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case IS_KEYWORD: case RIGHT_ARROW_TOKEN: return true; default: return isBinaryOperator(tokenKind); } } /** * Parse member access expression. * * @param lhsExpr Container expression * @return Member access expression */ private STNode parseMemberAccessExpr(STNode lhsExpr) { STNode openBracket = consume(); STNode keyExpr; if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { this.errorHandler.reportMissingTokenError("missing expression"); keyExpr = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { keyExpr = parseExpression(); } STNode closeBracket = parseCloseBracket(); return STNodeFactory.createMemberAccessExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket); } /** * Parse close bracket. * * @return Parsed node */ private STNode parseCloseBracket() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET); return sol.recoveredNode; } } /** * Parse field access expression and method call expression. * * @param lhsExpr Preceding expression of the field access or method call * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>. 
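* For example (illustrative): <code>p.name</code> yields a field access while <code>p.getName()</code> yields a method call; the two are distinguished by the open parenthesis following the name.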
*/ private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) { STNode dotToken = parseDotToken(); STNode fieldOrMethodName = parseIdentifier(ParserRuleContext.FIELD_OR_FUNC_NAME); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args, closeParen); } return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName); } /** * <p> * Parse braced expression. * </p> * <code>braced-expr := ( expression )</code> * * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Allow actions * @return Parsed node */ private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) { STNode openParen = parseOpenParenthesis(); STNode expr; if (allowActions) { expr = parseActionOrExpression(isRhsExpr); } else { expr = parseExpression(isRhsExpr); } STNode closeParen = parseCloseParenthesis(); if (isAction(expr)) { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } else { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } } /** * Check whether a given node is an action node. * * @param node Node to check * @return <code>true</code> if the node is an action node. <code>false</code> otherwise */ private boolean isAction(STNode node) { switch (node.kind) { case REMOTE_METHOD_CALL_ACTION: case BRACED_ACTION: case CHECK_ACTION: return true; default: return false; } } /** * Check whether the given token is an end of an expression. * * @param tokenKind Token to check * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @return <code>true</code> if the token represents an end of an expression. <code>false</code> otherwise */ private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) { if (!isRhsExpr) { if (isCompoundBinaryOperator(tokenKind)) { return true; } return !isValidExprRhsStart(tokenKind); } switch (tokenKind) { case CLOSE_BRACE_TOKEN: case OPEN_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case SIMPLE_TYPE: case CONST_KEYWORD: case LISTENER_KEYWORD: case EQUAL_TOKEN: case AT_TOKEN: case HASH_TOKEN: return true; default: return false; } } /** * Parse a literal expression. * * @return Parsed node */ private STNode parseLiteral() { return consume(); } /** * Parse function call expression. * <code>function-call-expr := function-reference ( arg-list ) * function-reference := variable-reference</code> * * @param identifier Function name * @return Function call expression */ private STNode parseFuncCall(STNode identifier) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen); } /** * Parse function call argument list. 
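* <p> * For example (an illustrative call): <code>foo(10, name = "x", ...rest)</code> has a positional arg, a named arg, and a rest arg; an argument may not be of an earlier kind than the one before it. * </p>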
* * @return Parsed args list */ private STNode parseArgsList() { startContext(ParserRuleContext.ARG_LIST); ArrayList<STNode> argsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } SyntaxKind lastProcessedArgKind = parseFirstArg(argsList); parseFollowUpArg(argsList, lastProcessedArgKind); STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } /** * Parse the first argument of a function call. * * @param argsList Arguments list to which the parsed argument must be added * @return Kind of the first argument. */ private SyntaxKind parseFirstArg(ArrayList<STNode> argsList) { startContext(ParserRuleContext.ARG); STNode leadingComma = STNodeFactory.createEmptyNode(); STNode arg = parseArg(leadingComma); endContext(); if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) { argsList.add(arg); return arg.kind; } else { reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind); return SyntaxKind.POSITIONAL_ARG; } } /** * Parse follow up arguments. * * @param argsList Arguments list to which the parsed argument must be added * @param lastProcessedArgKind Kind of the argument processed prior to this */ private void parseFollowUpArg(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) { STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { startContext(ParserRuleContext.ARG); STNode leadingComma = parseComma(); nextToken = peek(); if (isEndOfParametersList(nextToken.kind)) { this.errorHandler.reportInvalidNode((STToken) leadingComma, "invalid token " + leadingComma); endContext(); break; } STNode arg = parseArg(nextToken.kind, leadingComma); if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) { if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) { this.errorHandler.reportInvalidNode(nextToken, "cannot have more than one rest arg"); } else { argsList.add(arg); lastProcessedArgKind = arg.kind; } } else { reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind); } nextToken = peek(); endContext(); } } /** * Report invalid order of args. * * @param token Starting token of the arg. * @param lastArgKind Kind of the previously processed arg * @param argKind Kind of the current arg */ private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) { this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind); } /** * Parse function call argument. * * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseArg(STNode leadingComma) { STToken token = peek(); return parseArg(token.kind, leadingComma); } private STNode parseArg(SyntaxKind kind, STNode leadingComma) { STNode arg; switch (kind) { case ELLIPSIS_TOKEN: STToken ellipsis = consume(); STNode expr = parseExpression(); arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr); break; case IDENTIFIER_TOKEN: arg = parseNamedOrPositionalArg(leadingComma); break; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: default: expr = parseExpression(); arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr); break; } return arg; } /** * Parse positional or named arg. This method assumes that peek()/peek(1) * is always an identifier. 
* * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseNamedOrPositionalArg(STNode leadingComma) { STToken secondToken = peek(2); switch (secondToken.kind) { case EQUAL_TOKEN: STNode argNameOrVarRef = consume(); STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrVarRef, equal, expr); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: argNameOrVarRef = consume(); return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrVarRef); case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: default: expr = parseExpression(); return STNodeFactory.createPositionalArgumentNode(leadingComma, expr); } } /** * Parse object type descriptor. * * @return Parsed node */ private STNode parseObjectTypeDescriptor() { startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR); STNode objectTypeQualifiers = parseObjectTypeQualifiers(); STNode objectKeyword = parseObjectKeyword(); STNode openBrace = parseOpenBrace(); STNode objectMembers = parseObjectMembers(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace, objectMembers, closeBrace); } /** * Parse object type qualifiers. * * @return Parsed node */ private STNode parseObjectTypeQualifiers() { STToken nextToken = peek(); return parseObjectTypeQualifiers(nextToken.kind); } private STNode parseObjectTypeQualifiers(SyntaxKind kind) { List<STNode> qualifiers = new ArrayList<>(); STNode firstQualifier; switch (kind) { case CLIENT_KEYWORD: STNode clientKeyword = parseClientKeyword(); firstQualifier = clientKeyword; break; case ABSTRACT_KEYWORD: STNode abstractKeyword = parseAbstractKeyword(); firstQualifier = abstractKeyword; break; case OBJECT_KEYWORD: return STNodeFactory.createNodeList(qualifiers); default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeQualifiers(solution.tokenKind); } STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier); qualifiers.add(firstQualifier); if (secondQualifier != null) { qualifiers.add(secondQualifier); } return STNodeFactory.createNodeList(qualifiers); } private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) { STToken nextToken = peek(); return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier); } private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) { if (firstQualifier.kind != kind) { switch (kind) { case CLIENT_KEYWORD: return parseClientKeyword(); case ABSTRACT_KEYWORD: return parseAbstractKeyword(); case OBJECT_KEYWORD: return null; default: break; } } Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier); } /** * Parse client keyword. * * @return Parsed node */ private STNode parseClientKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CLIENT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD); return sol.recoveredNode; } } /** * Parse abstract keyword. 
* * @return Parsed node */ private STNode parseAbstractKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD); return sol.recoveredNode; } } /** * Parse object keyword. * * @return Parsed node */ private STNode parseObjectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.OBJECT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD); return sol.recoveredNode; } } /** * Parse object members. * * @return Parsed node */ private STNode parseObjectMembers() { ArrayList<STNode> objectMembers = new ArrayList<>(); STToken nextToken = peek(1); STToken nextNextToken = peek(2); while (!isEndOfObjectTypeNode(nextToken.kind, nextNextToken.kind)) { startContext(ParserRuleContext.OBJECT_MEMBER); STNode field = parseObjectMember(nextToken.kind); endContext(); if (field == null) { break; } objectMembers.add(field); nextToken = peek(1); nextNextToken = peek(2); } return STNodeFactory.createNodeList(objectMembers); } private STNode parseObjectMember() { STToken nextToken = peek(); return parseObjectMember(nextToken.kind); } private STNode parseObjectMember(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: case IDENTIFIER_TOKEN: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: metadata = createEmptyMetadata(); break; case HASH_TOKEN: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (isEndOfObjectTypeNode(solution.tokenKind, nextTokenKind)) { return null; } return parseObjectMember(solution.tokenKind); } return parseObjectMember(nextTokenKind, metadata); } private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) { STNode member; switch (nextTokenKind) { case ASTERISK_TOKEN: STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); break; case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: STNode visibilityQualifier = parseObjectMemberVisibility(); member = parseObjectMethodOrField(metadata, visibilityQualifier); break; case REMOTE_KEYWORD: member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode()); break; case FUNCTION_KEYWORD: member = parseObjectMethod(metadata, STNodeFactory.createEmptyNode()); break; case IDENTIFIER_TOKEN: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: member = parseObjectField(metadata, STNodeFactory.createEmptyNode()); break; default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (isEndOfObjectTypeNode(solution.tokenKind, nextTokenKind)) { return null; } return parseObjectMember(solution.tokenKind); } return member; } private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) { STToken nextToken = peek(1); STToken nextNextToken = peek(2); return parseObjectMethodOrField(nextToken.kind, 
nextNextToken.kind, metadata, methodQualifiers); } /** * Parse an object member, given the visibility modifier. An object member can have * at most one visibility qualifier, so the visibilityQualifiers node holds * a single qualifier at most. * * @param nextTokenKind Next token kind * @param nextNextTokenKind Kind of the token after the next token * @param metadata Metadata * @param visibilityQualifiers Visibility qualifiers. A qualifier is a syntax node with either 'PUBLIC' or 'PRIVATE'. * @return Parsed object member node */ private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata, STNode visibilityQualifiers) { switch (nextTokenKind) { case REMOTE_KEYWORD: STNode remoteKeyword = parseRemoteKeyword(); ArrayList<STNode> methodQualifiers = new ArrayList<>(); if (visibilityQualifiers.kind != SyntaxKind.NONE) { methodQualifiers.add(visibilityQualifiers); } methodQualifiers.add(remoteKeyword); return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers)); case FUNCTION_KEYWORD: return parseObjectMethod(metadata, visibilityQualifiers); case IDENTIFIER_TOKEN: case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) { return parseObjectField(metadata, visibilityQualifiers); } /* falls through to recovery */ default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifiers); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers); } } /** * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>. * * @return Parsed node */ private STNode parseObjectMemberVisibility() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } private STNode parseRemoteKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD); return sol.recoveredNode; } } private STNode parseObjectField(STNode metadata, STNode methodQualifiers) { STNode type = parseTypeDescriptor(); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STToken nextToken = peek(); return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. 
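* For example (illustrative): <code>int x;</code> ends with just a semicolon, while <code>int x = 0;</code> carries a default value expression.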
* * @param nextTokenKind Kind of the next token * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STNode equalsToken; STNode expression; STNode semicolonToken; switch (nextTokenKind) { case SEMICOLON_TOKEN: equalsToken = STNodeFactory.createEmptyNode(); expression = STNodeFactory.createEmptyNode(); semicolonToken = parseSemicolon(); break; case EQUAL_TOKEN: equalsToken = parseAssignOp(); expression = parseExpression(); semicolonToken = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName); } return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken, expression, semicolonToken); } private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) { return parseFunctionDefinition(metadata, methodQualifiers); } /** * Parse if-else statement. * <code> * if-else-stmt := if expression block-stmt [else-block] * </code> * * @return If-else block */ private STNode parseIfElseBlock() { startContext(ParserRuleContext.IF_BLOCK); STNode ifKeyword = parseIfKeyword(); STNode condition = parseExpression(); STNode ifBody = parseBlockNode(); endContext(); STNode elseBody = parseElseBlock(); return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody); } /** * Parse if-keyword. * * @return Parsed if-keyword node */ private STNode parseIfKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IF_KEYWORD); return sol.recoveredNode; } } /** * Parse else-keyword. * * @return Parsed else keyword node */ private STNode parseElseKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ELSE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD); return sol.recoveredNode; } } /** * Parse block node. * <code> * block-stmt := { sequence-stmt } * sequence-stmt := statement* * </code> * * @return Parse block node */ private STNode parseBlockNode() { startContext(ParserRuleContext.BLOCK_STMT); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace); } /** * Parse else block. * <code>else-block := else (if-else-stmt | block-stmt)</code> * * @return Else block */ private STNode parseElseBlock() { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode elseKeyword = parseElseKeyword(); STNode elseBody = parseElseBody(); return STNodeFactory.createElseBlockNode(elseKeyword, elseBody); } /** * Parse else node body. 
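* <p> * An <code>else</code> followed by <code>if</code> recursively parses a nested if-else statement, which is how <code>else if</code> chains are formed (illustrative note); an open brace instead begins a plain block. * </p>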
* <code>else-body := if-else-stmt | block-stmt</code> * * @return Else node body */ private STNode parseElseBody() { STToken nextToken = peek(); return parseElseBody(nextToken.kind); } private STNode parseElseBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IF_KEYWORD: return parseIfElseBlock(); case OPEN_BRACE_TOKEN: return parseBlockNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ELSE_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseElseBody(solution.tokenKind); } } /** * Parse while statement. * <code>while-stmt := while expression block-stmt</code> * * @return While statement */ private STNode parseWhileStatement() { startContext(ParserRuleContext.WHILE_BLOCK); STNode whileKeyword = parseWhileKeyword(); STNode condition = parseExpression(); STNode whileBody = parseBlockNode(); endContext(); return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody); } /** * Parse while-keyword. * * @return While-keyword node */ private STNode parseWhileKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHILE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD); return sol.recoveredNode; } } /** * Parse panic statement. * <code>panic-stmt := panic expression ;</code> * * @return Panic statement */ private STNode parsePanicStatement() { startContext(ParserRuleContext.PANIC_STMT); STNode panicKeyword = parsePanicKeyword(); STNode expression = parseExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon); } /** * Parse panic-keyword. * * @return Panic-keyword node */ private STNode parsePanicKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.PANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD); return sol.recoveredNode; } } /** * Parse boolean literal. * * @return Parsed node */ private STNode parseBooleanLiteral() { STToken token = peek(); switch (token.kind) { case TRUE_KEYWORD: case FALSE_KEYWORD: return consume(); default: Solution sol = recover(token, ParserRuleContext.BOOLEAN_LITERAL); return sol.recoveredNode; } } /** * Parse check expression. This method is used to parse both check expression * as well as check action. * * <p> * <code> * checking-expr := checking-keyword expression * checking-action := checking-keyword action * </code> * * @param allowActions Allow actions * @param isRhsExpr Is rhs expression * @return Check expression node */ private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) { STNode checkingKeyword = parseCheckingKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, allowActions); if (isAction(expr)) { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr); } else { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr); } } /** * Parse checking keyword. * <p> * <code> * checking-keyword := check | checkpanic * </code> * * @return Parsed node */ private STNode parseCheckingKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD); return sol.recoveredNode; } } /** * * Parse continue statement. 
* <code>continue-stmt := continue ; </code> * * @return continue statement */ private STNode parseContinueStatement() { startContext(ParserRuleContext.CONTINUE_STATEMENT); STNode continueKeyword = parseContinueKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon); } /** * Parse continue-keyword. * * @return continue-keyword node */ private STNode parseContinueKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONTINUE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD); return sol.recoveredNode; } } /** * Parse return statement. * <code>return-stmt := return [ action-or-expr ] ;</code> * * @return Return statement */ private STNode parseReturnStatement() { startContext(ParserRuleContext.RETURN_STMT); STNode returnKeyword = parseReturnKeyword(); STNode returnRhs = parseReturnStatementRhs(returnKeyword); endContext(); return returnRhs; } /** * Parse return-keyword. * * @return Return-keyword node */ private STNode parseReturnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD); return sol.recoveredNode; } } /** * Parse break statement. * <code>break-stmt := break ; </code> * * @return break statement */ private STNode parseBreakStatement() { startContext(ParserRuleContext.BREAK_STATEMENT); STNode breakKeyword = parseBreakKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon); } /** * Parse break-keyword. * * @return break-keyword node */ private STNode parseBreakKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BREAK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a return statement. * </p> * <code> * return-stmt-rhs := ; | action-or-expr ; * </code> * * @return Parsed node */ private STNode parseReturnStatementRhs(STNode returnKeyword) { STNode expr; STNode semicolon; STToken token = peek(); switch (token.kind) { case SEMICOLON_TOKEN: expr = STNodeFactory.createEmptyNode(); break; default: expr = parseActionOrExpression(); break; } semicolon = parseSemicolon(); return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon); } /** * Parse mapping constructor expression. * <p> * <code>mapping-constructor-expr := { [field (, field)*] }</code> * * @return Parsed node */ private STNode parseMappingConstructorExpr() { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); STNode fields = parseMappingConstructorFields(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); } /** * Parse mapping constructor fields. 
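* <p> * For example (an illustrative constructor): <code>{ name: "John", [key]: v, ...defaults }</code> contains a specific field, a computed-name field, and a spread field, separated by commas. * </p>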
* * @return Parsed node */ private STNode parseMappingConstructorFields() { List<STNode> fields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfMappingConstructor(nextToken.kind)) { return STNodeFactory.createNodeList(fields); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode field = parseMappingField(leadingComma); fields.add(field); nextToken = peek(); while (!isEndOfMappingConstructor(nextToken.kind)) { leadingComma = parseComma(); field = parseMappingField(leadingComma); fields.add(field); nextToken = peek(); } return STNodeFactory.createNodeList(fields); } private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case AT_TOKEN: case HASH_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case RESOURCE_KEYWORD: case SIMPLE_TYPE: return true; default: return false; } } /** * Parse mapping constructor field. * <p> * <code>field := specific-field | computed-name-field | spread-field</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseMappingField(STNode leadingComma) { STToken nextToken = peek(); return parseMappingField(nextToken.kind, leadingComma); } private STNode parseMappingField(SyntaxKind tokenKind, STNode leadingComma) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseSpecificFieldWithOptionValue(leadingComma); case STRING_LITERAL: STNode key = parseStringLiteral(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); case OPEN_BRACKET_TOKEN: return parseComputedField(leadingComma); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode expr = parseExpression(); return STNodeFactory.createSpreadFieldNode(leadingComma, ellipsis, expr); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD, leadingComma); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseMappingField(solution.tokenKind, leadingComma); } } /** * Parse mapping constructor specific-field with an optional value. 
* * @param leadingComma * @return Parsed node */ private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) { STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseSpecificFieldRhs(leadingComma, key); } private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) { STToken nextToken = peek(); return parseSpecificFieldRhs(nextToken.kind, leadingComma, key); } private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); valueExpr = parseExpression(); break; case COMMA_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; default: if (isEndOfMappingConstructor(tokenKind)) { colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key); } return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); } /** * Parse string literal. * * @return Parsed node */ private STNode parseStringLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_LITERAL); return sol.recoveredNode; } } /** * Parse colon token. * * @return Parsed node */ private STNode parseColon() { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COLON); return sol.recoveredNode; } } /** * Parse computed-name-field of a mapping constructor expression. * <p> * <code>computed-name-field := [ field-name-expr ] : value-expr</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseComputedField(STNode leadingComma) { startContext(ParserRuleContext.COMPUTED_FIELD_NAME); STNode openBracket = parseOpenBracket(); STNode fieldNameExpr = parseExpression(); STNode closeBracket = parseCloseBracket(); endContext(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(leadingComma, openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } /** * Parse open bracket. * * @return Parsed node */ private STNode parseOpenBracket() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET); return sol.recoveredNode; } } /** * <p> * Parse compound assignment statement, which takes the following format. * </p> * <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code> * * @return Parsed node */ private STNode parseCompoundAssignmentStmt() { startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName); endContext(); return compoundAssignmentStmt; } /** * <p> * Parse the RHS portion of the compound assignment. 
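* For example (illustrative): given the LHS <code>x</code> in <code>x += foo();</code>, this parses the compound operator <code>+</code>, the <code>=</code> token, the expression, and the closing semicolon.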
* </p> * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode binaryOperator = parseCompoundBinaryOperator(); STNode equalsToken = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr, semicolon); } /** * Parse compound binary operator. * <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @return Parsed node */ private STNode parseCompoundBinaryOperator() { STToken token = peek(); if (isCompoundBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR); return sol.recoveredNode; } } /** * Parse service declaration. * <p> * <code> * service-decl := metadata service [variable-name] on expression-list service-body-block * <br/> * expression-list := expression (, expression)* * </code> * * @param metadata Metadata * @return Parsed node */ private STNode parseServiceDecl(STNode metadata) { startContext(ParserRuleContext.SERVICE_DECL); STNode serviceKeyword = parseServiceKeyword(); STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword); endContext(); return serviceDecl; } /** * Parse rhs of the service declaration. * <p> * <code> * service-rhs := [variable-name] on expression-list service-body-block * </code> * * @param metadata Metadata * @param serviceKeyword Service keyword * @return Parsed node */ private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) { STNode serviceName = parseServiceName(); STNode onKeyword = parseOnKeyword(); STNode expressionList = parseListeners(); STNode serviceBody = parseServiceBody(); STNode service = STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword, expressionList, serviceBody); return service; } private STNode parseServiceName() { STToken nextToken = peek(); return parseServiceName(nextToken.kind); } private STNode parseServiceName(SyntaxKind kind) { switch (kind) { case IDENTIFIER_TOKEN: return parseIdentifier(ParserRuleContext.SERVICE_NAME); case ON_KEYWORD: return STNodeFactory.createEmptyNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseServiceName(solution.tokenKind); } } /** * Parse service keyword. * * @return Parsed node */ private STNode parseServiceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SERVICE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD); return sol.recoveredNode; } } /** * Check whether the given token kind is a compound binary operator. * <p> * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @param tokenKind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) { switch (tokenKind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN; default: return false; } } /** * Parse on keyword. 
* * @return Parsed node */ private STNode parseOnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ON_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ON_KEYWORD); return sol.recoveredNode; } } /** * Parse listener references. * <p> * <code>expression-list := expression (, expression)*</code> * * @return Parsed node */ private STNode parseListeners() { startContext(ParserRuleContext.LISTENERS_LIST); List<STNode> listeners = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfListenersList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing expression"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); while (!isEndOfListenersList(nextToken.kind)) { leadingComma = parseComma(); exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(listeners); } private boolean isEndOfListenersList(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case AT_TOKEN: case HASH_TOKEN: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case SIMPLE_TYPE: return true; default: return false; } } /** * Parse expression list item. * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseExpressionListItem(STNode leadingComma) { STNode expr = parseExpression(); return STNodeFactory.createExpressionListItemNode(leadingComma, expr); } /** * Parse service body. * <p> * <code> * service-body-block := { service-method-defn* } * </code> * * @return Parsed node */ private STNode parseServiceBody() { STNode openBrace = parseOpenBrace(); STNode resources = parseResources(); STNode closeBrace = parseCloseBrace(); return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace); } /** * Parse service resource definitions. * * @return Parsed node */ private STNode parseResources() { List<STNode> resources = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfServiceDecl(nextToken.kind)) { STNode serviceMethod = parseResource(); if (serviceMethod == null) { break; } resources.add(serviceMethod); nextToken = peek(); } return STNodeFactory.createNodeList(resources); } private boolean isEndOfServiceDecl(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } /** * Parse resource definition (i.e. service-method-defn). 
* <p> * <code> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @return Parsed node */ private STNode parseResource() { STToken nextToken = peek(); return parseResource(nextToken.kind); } private STNode parseResource(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case HASH_TOKEN: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isEndOfServiceDecl(nextTokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind); } return parseResource(nextTokenKind, metadata); } private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) { switch (nextTokenKind) { case RESOURCE_KEYWORD: STNode resourceKeyword = parseResourceKeyword(); return parseFunctionDefinition(metadata, resourceKeyword); case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, STNodeFactory.createEmptyNode()); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind, metadata); } } /** * Parse resource keyword. * * @return Parsed node */ private STNode parseResourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RESOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD); return sol.recoveredNode; } } /** * Check whether the next construct is a service declaration or not. This method is * used to determine whether an end-of-block is reached, when the next token is * a service-keyword. Because the service-keyword can be used in statements as well * as in a top-level node (service-decl), reaching a service-decl here could be due to * a missing close-brace at the end of the current block. * * @return <code>true</code> if the next construct is a service declaration. * <code>false</code> otherwise */ private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return false; case ON_KEYWORD: return true; default: ParserRuleContext sol = this.errorHandler.findBestPath(currentContext); return sol == ParserRuleContext.SERVICE_DECL || sol == ParserRuleContext.CLOSE_BRACE; } case ON_KEYWORD: return true; default: Solution sol = recover(peek(), ParserRuleContext.STATEMENT); return sol.tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN; } } /** * Parse listener declaration, given the qualifier. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD); return sol.recoveredNode; } } /** * Parse constant declaration, given the qualifier. * <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword); endContext(); return constDecl; } /** * Parse the components that follow the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword); } private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword) { switch (nextTokenKind) { case SIMPLE_TYPE: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); case IDENTIFIER_TOKEN: return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword); } } private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) { STNode varNameOrTypeName = parseStatementStartIdentifier(); STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName); return constDecl; } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user-defined type) or the var-name where the type-name * is not present. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param typeOrVarName Identifier that follows the const-keyword * @return Parsed node */ private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STToken token = peek(); return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName); } private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STNode type; STNode variableName; switch (nextTokenKind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = typeOrVarName; type = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword, typeOrVarName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName); } STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. * * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD); return sol.recoveredNode; } } /** * Parse nil type descriptor. * <p> * <code>nil-type-descriptor := ( ) </code> * </p> * * @return Parsed node */ private STNode parseNilTypeDescriptor() { startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken); } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD); return sol.recoveredNode; } } /** * Parse optional type descriptor. * <p> * <code>optional-type-descriptor := type-descriptor ? </code> * </p> * * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! 
expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. * <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.UNARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param typeDescriptorNode * * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return STNodeFactory.createArrayTypeDescriptorNode(typeDescriptorNode, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. * <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); STToken nextToken; switch (token.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return consume(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: nextToken = peek(2); if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); } return STNodeFactory.createEmptyNode(); default: Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH); return sol.recoveredNode; } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseAnnotations() { STToken nextToken = peek(); return parseAnnotations(nextToken.kind); } private STNode parseAnnotations(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); while (nextTokenKind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextTokenKind = peek().kind; } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); STNode annotValue = parseMappingConstructorExpr(); return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.AT); return sol.recoveredNode; } } /** * Parse metadata. Metadata consists of an optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parsed node */ private STNode parseMetaData() { STToken nextToken = peek(); return parseMetaData(nextToken.kind); } private STNode parseMetaData(SyntaxKind nextTokenKind) { STNode docString; STNode annotations; switch (nextTokenKind) { case HASH_TOKEN: consume(); docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(nextTokenKind); break; default: return createEmptyMetadata(); } return STNodeFactory.createMetadataNode(docString, annotations); } /** * Create empty metadata node. * * @return A metadata node with no doc string and no annotations */ private STNode createEmptyMetadata() { return STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createNodeList(new ArrayList<>())); } /** * Get the number of tokens to skip to reach the end of annotations. * * @return Number of tokens to skip to reach the end of annotations */ private int getNumberOfTokensToAnnotsEnd() { STToken nextToken; int lookahead = 0; while (true) { nextToken = peek(lookahead); switch (nextToken.kind) { case EOF_TOKEN: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case IMPORT_KEYWORD: case SERVICE_KEYWORD: return lookahead; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(lookahead)) { return lookahead; } default: lookahead++; break; } } } /** * Parse is expression. * <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr) { startContext(ParserRuleContext.TYPE_TEST_EXPRESSION); STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptor(); endContext(); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IS_KEYWORD); return sol.recoveredNode; } } /** * Parse local type definition statement. 
* <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return Local type definition statement node */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse statements that start with an identifier. * * @param annots Annotations that precede the statement * @return Parsed node */ private STNode parseStatementStartsWithIdentifier(STNode annots) { startContext(ParserRuleContext.STMT_START_WITH_IDENTIFIER); STNode identifier = parseStatementStartIdentifier(); STToken nextToken = peek(); STNode stmt = parseStatementStartsWithIdentifier(nextToken.kind, annots, identifier); endContext(); return stmt; } private STNode parseStatementStartsWithIdentifier(STNode annots, STNode identifier) { return parseStatementStartsWithIdentifier(peek().kind, annots, identifier); } private STNode parseStatementStartsWithIdentifier(SyntaxKind nextTokenKind, STNode annots, STNode identifier) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: switchContext(ParserRuleContext.VAR_DECL_STMT); STNode varName = parseVariableName(); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, identifier, varName, false); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStamentStartWithExpr(nextTokenKind, identifier); default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(identifier); } if (isValidExprRhsStart(nextTokenKind)) { STNode expression = parseActionOrExpressionInLhs(identifier); return parseStamentStartWithExpr(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_IDENTIFIER, annots, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatementStartsWithIdentifier(solution.tokenKind, annots, identifier); } } /** * Parse a statement that consists only of an action or expression. * * @param nextTokenKind Next token kind * @return Parsed node */ private STNode parseStamentStartsWithExpr(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpression(nextTokenKind); STNode stmt = parseStamentStartWithExpr(expression); endContext(); return stmt; } /** * Parse statements that start with an expression. * * @return Parsed node */ private STNode parseStamentStartWithExpr(STNode expression) { STToken nextToken = peek(); return parseStamentStartWithExpr(nextToken.kind, expression); } /** * Parse the component that follows the expression, at the beginning of a statement. 
* * @param nextTokenKind Kind of the next token * @return Parsed node */ private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode expression) { switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case SEMICOLON_TOKEN: return getExpressionAsStatement(expression); default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStamentStartWithExpr(solution.tokenKind, expression); } } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: return parseActionStatement(expression); default: this.errorHandler.reportInvalidNode(null, "left hand side of an assignment must be a variable reference"); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon); } } /** * <p> * Parse call statement, given the call expression. * <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { validateExprInCallStmt(expression); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private void validateExprInCallStmt(STNode expression) { switch (expression.kind) { case FUNCTION_CALL: case METHOD_CALL: break; case CHECK_EXPRESSION: validateExprInCallStmt(((STCheckExpressionNode) expression).expression); break; case REMOTE_METHOD_CALL_ACTION: break; case BRACED_EXPRESSION: validateExprInCallStmt(((STBracedExpressionNode) expression).expression); break; default: if (isMissingNode(expression)) { break; } this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " + "func-call, a method-call or a check-expr"); break; } } /** * Check whether a node is a missing node. * * @param node Node to check * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise */ private boolean isMissingNode(STNode node) { return node instanceof STMissingToken; } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } private STNode parseAction(SyntaxKind tokenKind, STNode lhsExpr) { switch (tokenKind) { case RIGHT_ARROW_TOKEN: return parseRemoteMethodCallAction(lhsExpr); default: return null; } } private STNode parseRemoteMethodCallAction(STNode expression) { STNode rightArrow = parseRightArrow(); STNode methodName = parseFunctionName(); STNode openParenToken = parseOpenParenthesis(); STNode arguments = parseArgsList(); STNode closeParenToken = parseCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, methodName, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. 
* * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW); return sol.recoveredNode; } } /** * Check whether this is a valid lhs expression. * * @param tokenKind Kind of the next token * @return <code>true</code> if this is the start of an expression. <code>false</code> otherwise */ private boolean isValidLHSExpression(SyntaxKind tokenKind) { switch (tokenKind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case OPEN_BRACE_TOKEN: case TYPEOF_KEYWORD: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; case PLUS_TOKEN: case MINUS_TOKEN: return !isCompoundBinaryOperator(tokenKind); case OPEN_PAREN_TOKEN: default: return false; } } /** * Parse null-keyword. * * @return null-keyword node */ private STNode parseNullKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.NULL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.NULL_KEYWORD); return sol.recoveredNode; } } /** * Parse nil literal. Here the nil literal refers only to ( ). * * @return Parsed node */ }
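The parser methods above all share one recovery idiom: peek at the next token, consume it if it has the expected kind, and otherwise hand control to an error handler that either removes the offending token or fabricates a missing one. Below is a minimal, standalone sketch of that idiom; `TokenKind`, `Token`, and `TinyParser` are illustrative stand-ins, not the actual STToken/Solution API, and this sketch always recovers by inserting a missing token.

```java
import java.util.ArrayDeque;
import java.util.Deque;

// Simplified stand-ins for the parser's STToken/SyntaxKind machinery.
enum TokenKind { LISTENER_KEYWORD, IDENTIFIER, SEMICOLON, EOF, MISSING }

final class Token {
    final TokenKind kind;
    final String text;

    Token(TokenKind kind, String text) {
        this.kind = kind;
        this.text = text;
    }
}

final class TinyParser {
    private final Deque<Token> tokens;

    TinyParser(Deque<Token> tokens) {
        this.tokens = new ArrayDeque<>(tokens);
    }

    // Look at the next token without consuming it.
    private Token peek() {
        Token next = tokens.peek();
        return next != null ? next : new Token(TokenKind.EOF, "");
    }

    // Take the next token off the stream.
    private Token consume() {
        Token next = tokens.poll();
        return next != null ? next : new Token(TokenKind.EOF, "");
    }

    // The idiom: consume on a match, otherwise recover. The real parser asks
    // its error handler whether to insert or remove a token; this sketch
    // always inserts a missing token so parsing can continue.
    Token parseToken(TokenKind expected) {
        if (peek().kind == expected) {
            return consume();
        }
        return new Token(TokenKind.MISSING, "<missing " + expected + ">");
    }
}
```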
There is really no problem with this fix, but I think a better way would be to: 1. Modify MVColumnOneChildPattern 2. ``` if (isReplay) { ignore pattern } else { match pattern } ```
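A minimal, standalone sketch of the alternative described in the comment above: push the isReplay decision into the pattern itself so every call site stays clean. The types below are simplified stand-ins (marked with a `Sketch` suffix); the real MVColumnPattern matches a FunctionCallExpr, not a child count.

```java
// Simplified stand-in for Doris's MVColumnPattern interface.
interface MVColumnPatternSketch {
    boolean match(int childCount, boolean isReplay);
}

class MVColumnOneChildPatternSketch implements MVColumnPatternSketch {
    private final String functionName;

    MVColumnOneChildPatternSketch(String functionName) {
        this.functionName = functionName;
    }

    @Override
    public boolean match(int childCount, boolean isReplay) {
        if (isReplay) {
            // Replay path: trust the definition already persisted in the
            // edit log and skip the pattern check entirely.
            return true;
        }
        // Normal path: enforce the single-child shape.
        return childCount == 1;
    }

    @Override
    public String toString() {
        return functionName + "(column)";
    }
}
```

With this shape, analyzeSelectClause would call match(..., isReplay) unconditionally and could drop its own isReplay branch.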
public void analyzeSelectClause() throws AnalysisException { SelectList selectList = selectStmt.getSelectList(); if (selectList.getItems().isEmpty()) { throw new AnalysisException("The materialized view must contain at least one column"); } boolean meetAggregate = false; Set<String> mvColumnNameSet = Sets.newHashSet(); /** * 1. The columns of the mv must be a single column or an aggregate column without any calculation. * Also the children of an aggregate column must be a single column without any calculation. * For example: * a, sum(b) is legal. * a+b, sum(a+b) is illegal. * 2. The SUM, MIN, MAX functions are supported. Other functions will be supported in the future. * 3. The aggregate columns must be declared after the single columns. */ for (int i = 0; i < selectList.getItems().size(); i++) { SelectListItem selectListItem = selectList.getItems().get(i); Expr selectListItemExpr = selectListItem.getExpr(); if (!(selectListItemExpr instanceof SlotRef) && !(selectListItemExpr instanceof FunctionCallExpr)) { throw new AnalysisException("The materialized view only support the single column or function expr. " + "Error column: " + selectListItemExpr.toSql()); } if (selectListItemExpr instanceof SlotRef) { if (meetAggregate) { throw new AnalysisException("The aggregate column should be after the single column"); } SlotRef slotRef = (SlotRef) selectListItemExpr; String columnName = slotRef.getColumnName().toLowerCase(); if (!mvColumnNameSet.add(columnName)) { ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, columnName); } MVColumnItem mvColumnItem = new MVColumnItem(columnName, slotRef.getType()); mvColumnItemList.add(mvColumnItem); } else if (selectListItemExpr instanceof FunctionCallExpr) { FunctionCallExpr functionCallExpr = (FunctionCallExpr) selectListItemExpr; String functionName = functionCallExpr.getFnName().getFunction(); if (!isReplay && functionName.toLowerCase().equals("count") && functionCallExpr.isDistinct()) { throw new AnalysisException( "Materialized view does not support distinct function " + functionCallExpr.toSqlImpl()); } MVColumnPattern mvColumnPattern = FN_NAME_TO_PATTERN.get(functionName.toLowerCase()); if (mvColumnPattern == null) { throw new AnalysisException( "Materialized view does not support this function:" + functionCallExpr.toSqlImpl()); } if (!mvColumnPattern.match(functionCallExpr)) { throw new AnalysisException( "The function " + functionName + " must match pattern:" + mvColumnPattern.toString()); } List<SlotRef> slots = new ArrayList<>(); functionCallExpr.collect(SlotRef.class, slots); Preconditions.checkArgument(slots.size() == 1); String columnName = slots.get(0).getColumnName().toLowerCase(); if (!mvColumnNameSet.add(columnName)) { ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, columnName); } if (beginIndexOfAggregation == -1) { beginIndexOfAggregation = i; } meetAggregate = true; mvColumnItemList.add(buildMVColumnItem(functionCallExpr)); } } if (beginIndexOfAggregation == 0) { throw new AnalysisException("The materialized view must contain at least one key column"); } }
if (!isReplay && functionName.toLowerCase().equals("count") && functionCallExpr.isDistinct()) {
public void analyzeSelectClause() throws AnalysisException { SelectList selectList = selectStmt.getSelectList(); if (selectList.getItems().isEmpty()) { throw new AnalysisException("The materialized view must contain at least one column"); } boolean meetAggregate = false; Set<String> mvColumnNameSet = Sets.newHashSet(); /** * 1. The columns of the mv must be a single column or an aggregate column without any calculation. * Also the children of an aggregate column must be a single column without any calculation. * For example: * a, sum(b) is legal. * a+b, sum(a+b) is illegal. * 2. The SUM, MIN, MAX functions are supported. Other functions will be supported in the future. * 3. The aggregate columns must be declared after the single columns. */ for (int i = 0; i < selectList.getItems().size(); i++) { SelectListItem selectListItem = selectList.getItems().get(i); Expr selectListItemExpr = selectListItem.getExpr(); if (!(selectListItemExpr instanceof SlotRef) && !(selectListItemExpr instanceof FunctionCallExpr)) { throw new AnalysisException("The materialized view only support the single column or function expr. " + "Error column: " + selectListItemExpr.toSql()); } if (selectListItemExpr instanceof SlotRef) { if (meetAggregate) { throw new AnalysisException("The aggregate column should be after the single column"); } SlotRef slotRef = (SlotRef) selectListItemExpr; String columnName = slotRef.getColumnName().toLowerCase(); if (!mvColumnNameSet.add(columnName)) { ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, columnName); } MVColumnItem mvColumnItem = new MVColumnItem(columnName, slotRef.getType()); mvColumnItemList.add(mvColumnItem); } else if (selectListItemExpr instanceof FunctionCallExpr) { FunctionCallExpr functionCallExpr = (FunctionCallExpr) selectListItemExpr; String functionName = functionCallExpr.getFnName().getFunction(); if (!isReplay) { MVColumnPattern mvColumnPattern = FN_NAME_TO_PATTERN.get(functionName.toLowerCase()); if (mvColumnPattern == null) { throw new AnalysisException( "Materialized view does not support this function:" + functionCallExpr.toSqlImpl()); } if (!mvColumnPattern.match(functionCallExpr)) { throw new AnalysisException( "The function " + functionName + " must match pattern:" + mvColumnPattern.toString()); } } List<SlotRef> slots = new ArrayList<>(); functionCallExpr.collect(SlotRef.class, slots); Preconditions.checkArgument(slots.size() == 1); String columnName = slots.get(0).getColumnName().toLowerCase(); if (!mvColumnNameSet.add(columnName)) { ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, columnName); } if (beginIndexOfAggregation == -1) { beginIndexOfAggregation = i; } meetAggregate = true; mvColumnItemList.add(buildMVColumnItem(functionCallExpr)); } } if (beginIndexOfAggregation == 0) { throw new AnalysisException("The materialized view must contain at least one key column"); } }
class CreateMaterializedViewStmt extends DdlStmt { public static final String MATERIALIZED_VIEW_NAME_PREFIX = "mv_"; public static final Map<String, MVColumnPattern> FN_NAME_TO_PATTERN; static { FN_NAME_TO_PATTERN = Maps.newHashMap(); FN_NAME_TO_PATTERN.put(AggregateType.SUM.name().toLowerCase(), new MVColumnOneChildPattern(AggregateType.SUM.name().toLowerCase())); FN_NAME_TO_PATTERN.put(AggregateType.MIN.name().toLowerCase(), new MVColumnOneChildPattern(AggregateType.MIN.name().toLowerCase())); FN_NAME_TO_PATTERN.put(AggregateType.MAX.name().toLowerCase(), new MVColumnOneChildPattern(AggregateType.MAX.name().toLowerCase())); FN_NAME_TO_PATTERN.put(FunctionSet.COUNT, new MVColumnOneChildPattern(FunctionSet.COUNT)); FN_NAME_TO_PATTERN.put(FunctionSet.BITMAP_UNION, new MVColumnBitmapUnionPattern()); FN_NAME_TO_PATTERN.put(FunctionSet.HLL_UNION, new MVColumnHLLUnionPattern()); } private String mvName; private SelectStmt selectStmt; private Map<String, String> properties; private int beginIndexOfAggregation = -1; /** * origin stmt: select k1, k2, v1, sum(v2) from base_table group by k1, k2, v1 * mvColumnItemList: [k1: {name: k1, isKey: true, aggType: null, isAggregationTypeImplicit: false}, * k2: {name: k2, isKey: true, aggType: null, isAggregationTypeImplicit: false}, * v1: {name: v1, isKey: true, aggType: null, isAggregationTypeImplicit: false}, * v2: {name: v2, isKey: false, aggType: sum, isAggregationTypeImplicit: false}] * This order of mvColumnItemList is meaningful. */ private List<MVColumnItem> mvColumnItemList = Lists.newArrayList(); private String baseIndexName; private String dbName; private KeysType mvKeysType = KeysType.DUP_KEYS; private boolean isReplay = false; public CreateMaterializedViewStmt(String mvName, SelectStmt selectStmt, Map<String, String> properties) { this.mvName = mvName; this.selectStmt = selectStmt; this.properties = properties; } public void setIsReplay(boolean isReplay) { this.isReplay = isReplay; } public String getMVName() { return mvName; } public SelectStmt getSelectStmt() { return selectStmt; } public List<MVColumnItem> getMVColumnItemList() { return mvColumnItemList; } public String getBaseIndexName() { return baseIndexName; } public Map<String, String> getProperties() { return properties; } public String getDBName() { return dbName; } public KeysType getMVKeysType() { return mvKeysType; } @Override public void analyze(Analyzer analyzer) throws UserException { if (!Config.enable_materialized_view) { throw new AnalysisException("The materialized view is disabled"); } super.analyze(analyzer); FeNameFormat.checkTableName(mvName); selectStmt.forbiddenMVRewrite(); selectStmt.analyze(analyzer); if (selectStmt.getAggInfo() != null) { mvKeysType = KeysType.AGG_KEYS; } analyzeSelectClause(); analyzeFromClause(); if (selectStmt.getWhereClause() != null) { throw new AnalysisException("The where clause is not supported in add materialized view clause, expr:" + selectStmt.getWhereClause().toSql()); } if (selectStmt.getHavingPred() != null) { throw new AnalysisException("The having clause is not supported in add materialized view clause, expr:" + selectStmt.getHavingPred().toSql()); } analyzeOrderByClause(); if (selectStmt.getLimit() != -1) { throw new AnalysisException("The limit clause is not supported in add materialized view clause, expr:" + " limit " + selectStmt.getLimit()); } } private void analyzeFromClause() throws AnalysisException { List<TableRef> tableRefList = selectStmt.getTableRefs(); if (tableRefList.size() != 1) { throw new AnalysisException("The 
materialized view only support one table in from clause."); } TableName tableName = tableRefList.get(0).getName(); baseIndexName = tableName.getTbl(); dbName = tableName.getDb(); } private void analyzeOrderByClause() throws AnalysisException { if (selectStmt.getOrderByElements() == null) { supplyOrderColumn(); return; } List<OrderByElement> orderByElements = selectStmt.getOrderByElements(); if (orderByElements.size() > mvColumnItemList.size()) { throw new AnalysisException("The number of columns in order clause must be less than " + "the number of " + "columns in select clause"); } if (beginIndexOfAggregation != -1 && (orderByElements.size() != (beginIndexOfAggregation))) { throw new AnalysisException("The key of columns in mv must be all of group by columns"); } for (int i = 0; i < orderByElements.size(); i++) { Expr orderByElement = orderByElements.get(i).getExpr(); if (!(orderByElement instanceof SlotRef)) { throw new AnalysisException("The column in order clause must be original column without calculation. " + "Error column: " + orderByElement.toSql()); } MVColumnItem mvColumnItem = mvColumnItemList.get(i); SlotRef slotRef = (SlotRef) orderByElement; if (!mvColumnItem.getName().equalsIgnoreCase(slotRef.getColumnName())) { throw new AnalysisException("The order of columns in order by clause must be same as " + "the order of columns in select list"); } Preconditions.checkState(mvColumnItem.getAggregationType() == null); mvColumnItem.setIsKey(true); } for (MVColumnItem mvColumnItem : mvColumnItemList) { if (mvColumnItem.isKey()) { continue; } if (mvColumnItem.getAggregationType() != null) { break; } mvColumnItem.setAggregationType(AggregateType.NONE, true); } } /* This function is used to supply order by columns and calculate short key count */ private void supplyOrderColumn() throws AnalysisException { /** * The keys type of Materialized view is aggregation. * All of group by columns are keys of materialized view. */ if (mvKeysType == KeysType.AGG_KEYS) { for (MVColumnItem mvColumnItem : mvColumnItemList) { if (mvColumnItem.getAggregationType() != null) { break; } mvColumnItem.setIsKey(true); } } else if (mvKeysType == KeysType.DUP_KEYS) { /** * There is no aggregation function in materialized view. * Supplement key of MV columns * The key is same as the short key in duplicate table * For example: select k1, k2 ... kn from t1 * The default key columns are first 36 bytes of the columns in define order. * If the number of columns in the first 36 is more than 3, the first 3 columns will be used. * column: k1, k2, k3. The key is true. * Supplement non-key of MV columns * column: k4... kn. The key is false, aggregation type is none, isAggregationTypeImplicit is true. 
*/ int theBeginIndexOfValue = 0; int keySizeByte = 0; for (; theBeginIndexOfValue < mvColumnItemList.size(); theBeginIndexOfValue++) { MVColumnItem column = mvColumnItemList.get(theBeginIndexOfValue); keySizeByte += column.getType().getIndexSize(); if (theBeginIndexOfValue + 1 > FeConstants.shortkey_max_column_count || keySizeByte > FeConstants.shortkey_maxsize_bytes) { if (theBeginIndexOfValue == 0 && column.getType().getPrimitiveType().isCharFamily()) { column.setIsKey(true); theBeginIndexOfValue++; } break; } if (column.getType().isFloatingPointType()) { break; } if (column.getType().getPrimitiveType() == PrimitiveType.VARCHAR) { column.setIsKey(true); theBeginIndexOfValue++; break; } column.setIsKey(true); } if (theBeginIndexOfValue == 0) { throw new AnalysisException("The first column could not be float or double type, use decimal instead"); } for (; theBeginIndexOfValue < mvColumnItemList.size(); theBeginIndexOfValue++) { MVColumnItem mvColumnItem = mvColumnItemList.get(theBeginIndexOfValue); mvColumnItem.setAggregationType(AggregateType.NONE, true); } } } private MVColumnItem buildMVColumnItem(FunctionCallExpr functionCallExpr) throws AnalysisException { String functionName = functionCallExpr.getFnName().getFunction(); List<SlotRef> slots = new ArrayList<>(); functionCallExpr.collect(SlotRef.class, slots); Preconditions.checkArgument(slots.size() == 1); SlotRef baseColumnRef = slots.get(0); String baseColumnName = baseColumnRef.getColumnName().toLowerCase(); Column baseColumn = baseColumnRef.getColumn(); Preconditions.checkNotNull(baseColumn); Type baseType = baseColumn.getOriginType(); Expr functionChild0 = functionCallExpr.getChild(0); String mvColumnName; AggregateType mvAggregateType; Expr defineExpr = null; Type type; switch (functionName.toLowerCase()) { case "sum": mvColumnName = baseColumnName; mvAggregateType = AggregateType.valueOf(functionName.toUpperCase()); PrimitiveType baseColumnType = baseColumnRef.getType().getPrimitiveType(); if (baseColumnType == PrimitiveType.TINYINT || baseColumnType == PrimitiveType.SMALLINT || baseColumnType == PrimitiveType.INT) { type = Type.BIGINT; } else if (baseColumnType == PrimitiveType.FLOAT) { type = Type.DOUBLE; } else { type = baseType; } break; case "min": case "max": mvColumnName = baseColumnName; mvAggregateType = AggregateType.valueOf(functionName.toUpperCase()); type = baseType; break; case FunctionSet.BITMAP_UNION: if (baseColumnRef.getType().getPrimitiveType() == PrimitiveType.BITMAP) { mvColumnName = baseColumnName; } else { mvColumnName = mvColumnBuilder(functionName, baseColumnName); defineExpr = functionChild0; } mvAggregateType = AggregateType.valueOf(functionName.toUpperCase()); type = Type.BITMAP; break; case FunctionSet.HLL_UNION: if (baseColumnRef.getType().getPrimitiveType() == PrimitiveType.HLL) { mvColumnName = baseColumnName; } else { mvColumnName = mvColumnBuilder(functionName, baseColumnName); defineExpr = functionChild0; } mvAggregateType = AggregateType.valueOf(functionName.toUpperCase()); type = Type.HLL; break; case FunctionSet.COUNT: mvColumnName = mvColumnBuilder(functionName, baseColumnName); mvAggregateType = AggregateType.SUM; defineExpr = new CaseExpr(null, Lists.newArrayList(new CaseWhenClause( new IsNullPredicate(baseColumnRef, false), new IntLiteral(0, Type.BIGINT))), new IntLiteral(1, Type.BIGINT)); type = Type.BIGINT; break; default: throw new AnalysisException("Unsupported function:" + functionName); } MVColumnItem mvColumnItem = new MVColumnItem(mvColumnName, type, mvAggregateType, false, 
defineExpr, baseColumnName); return mvColumnItem; } public Map<String, Expr> parseDefineExprWithoutAnalyze() throws AnalysisException { Map<String, Expr> result = Maps.newHashMap(); SelectList selectList = selectStmt.getSelectList(); for (SelectListItem selectListItem : selectList.getItems()) { Expr selectListItemExpr = selectListItem.getExpr(); if (selectListItemExpr instanceof SlotRef) { SlotRef slotRef = (SlotRef) selectListItemExpr; result.put(slotRef.getColumnName(), null); } else if (selectListItemExpr instanceof FunctionCallExpr) { FunctionCallExpr functionCallExpr = (FunctionCallExpr) selectListItemExpr; List<SlotRef> slots = new ArrayList<>(); functionCallExpr.collect(SlotRef.class, slots); Preconditions.checkArgument(slots.size() == 1); String baseColumnName = slots.get(0).getColumnName(); String functionName = functionCallExpr.getFnName().getFunction(); SlotRef baseSlotRef = slots.get(0); switch (functionName.toLowerCase()) { case "sum": case "min": case "max": result.put(baseColumnName, null); break; case FunctionSet.BITMAP_UNION: if (functionCallExpr.getChild(0) instanceof FunctionCallExpr) { CastExpr castExpr = new CastExpr(new TypeDef(Type.VARCHAR), baseSlotRef); List<Expr> params = Lists.newArrayList(); params.add(castExpr); FunctionCallExpr defineExpr = new FunctionCallExpr(FunctionSet.TO_BITMAP, params); result.put(mvColumnBuilder(functionName, baseColumnName), defineExpr); } else { result.put(baseColumnName, null); } break; case FunctionSet.HLL_UNION: if (functionCallExpr.getChild(0) instanceof FunctionCallExpr) { CastExpr castExpr = new CastExpr(new TypeDef(Type.VARCHAR), baseSlotRef); List<Expr> params = Lists.newArrayList(); params.add(castExpr); FunctionCallExpr defineExpr = new FunctionCallExpr(FunctionSet.HLL_HASH, params); result.put(mvColumnBuilder(functionName, baseColumnName), defineExpr); } else { result.put(baseColumnName, null); } break; case FunctionSet.COUNT: Expr defineExpr = new CaseExpr(null, Lists.newArrayList( new CaseWhenClause(new IsNullPredicate(slots.get(0), false), new IntLiteral(0, Type.BIGINT))), new IntLiteral(1, Type.BIGINT)); result.put(mvColumnBuilder(functionName, baseColumnName), defineExpr); break; default: throw new AnalysisException("Unsupported function:" + functionName); } } else { throw new AnalysisException("Unsupported select item:" + selectListItem.toSql()); } } return result; } public static String mvColumnBuilder(String functionName, String sourceColumnName) { return new StringBuilder().append(MATERIALIZED_VIEW_NAME_PREFIX).append(functionName).append("_") .append(sourceColumnName).toString(); } @Override public String toSql() { return null; } }
class CreateMaterializedViewStmt extends DdlStmt { public static final String MATERIALIZED_VIEW_NAME_PREFIX = "mv_"; public static final Map<String, MVColumnPattern> FN_NAME_TO_PATTERN; static { FN_NAME_TO_PATTERN = Maps.newHashMap(); FN_NAME_TO_PATTERN.put(AggregateType.SUM.name().toLowerCase(), new MVColumnOneChildPattern(AggregateType.SUM.name().toLowerCase())); FN_NAME_TO_PATTERN.put(AggregateType.MIN.name().toLowerCase(), new MVColumnOneChildPattern(AggregateType.MIN.name().toLowerCase())); FN_NAME_TO_PATTERN.put(AggregateType.MAX.name().toLowerCase(), new MVColumnOneChildPattern(AggregateType.MAX.name().toLowerCase())); FN_NAME_TO_PATTERN.put(FunctionSet.COUNT, new MVColumnOneChildPattern(FunctionSet.COUNT)); FN_NAME_TO_PATTERN.put(FunctionSet.BITMAP_UNION, new MVColumnBitmapUnionPattern()); FN_NAME_TO_PATTERN.put(FunctionSet.HLL_UNION, new MVColumnHLLUnionPattern()); } private String mvName; private SelectStmt selectStmt; private Map<String, String> properties; private int beginIndexOfAggregation = -1; /** * origin stmt: select k1, k2, v1, sum(v2) from base_table group by k1, k2, v1 * mvColumnItemList: [k1: {name: k1, isKey: true, aggType: null, isAggregationTypeImplicit: false}, * k2: {name: k2, isKey: true, aggType: null, isAggregationTypeImplicit: false}, * v1: {name: v1, isKey: true, aggType: null, isAggregationTypeImplicit: false}, * v2: {name: v2, isKey: false, aggType: sum, isAggregationTypeImplicit: false}] * This order of mvColumnItemList is meaningful. */ private List<MVColumnItem> mvColumnItemList = Lists.newArrayList(); private String baseIndexName; private String dbName; private KeysType mvKeysType = KeysType.DUP_KEYS; private boolean isReplay = false; public CreateMaterializedViewStmt(String mvName, SelectStmt selectStmt, Map<String, String> properties) { this.mvName = mvName; this.selectStmt = selectStmt; this.properties = properties; } public void setIsReplay(boolean isReplay) { this.isReplay = isReplay; } public String getMVName() { return mvName; } public SelectStmt getSelectStmt() { return selectStmt; } public List<MVColumnItem> getMVColumnItemList() { return mvColumnItemList; } public String getBaseIndexName() { return baseIndexName; } public Map<String, String> getProperties() { return properties; } public String getDBName() { return dbName; } public KeysType getMVKeysType() { return mvKeysType; } @Override public void analyze(Analyzer analyzer) throws UserException { if (!Config.enable_materialized_view) { throw new AnalysisException("The materialized view is disabled"); } super.analyze(analyzer); FeNameFormat.checkTableName(mvName); selectStmt.forbiddenMVRewrite(); selectStmt.analyze(analyzer); if (selectStmt.getAggInfo() != null) { mvKeysType = KeysType.AGG_KEYS; } analyzeSelectClause(); analyzeFromClause(); if (selectStmt.getWhereClause() != null) { throw new AnalysisException("The where clause is not supported in add materialized view clause, expr:" + selectStmt.getWhereClause().toSql()); } if (selectStmt.getHavingPred() != null) { throw new AnalysisException("The having clause is not supported in add materialized view clause, expr:" + selectStmt.getHavingPred().toSql()); } analyzeOrderByClause(); if (selectStmt.getLimit() != -1) { throw new AnalysisException("The limit clause is not supported in add materialized view clause, expr:" + " limit " + selectStmt.getLimit()); } } private void analyzeFromClause() throws AnalysisException { List<TableRef> tableRefList = selectStmt.getTableRefs(); if (tableRefList.size() != 1) { throw new AnalysisException("The 
materialized view only support one table in from clause."); } TableName tableName = tableRefList.get(0).getName(); baseIndexName = tableName.getTbl(); dbName = tableName.getDb(); } private void analyzeOrderByClause() throws AnalysisException { if (selectStmt.getOrderByElements() == null) { supplyOrderColumn(); return; } List<OrderByElement> orderByElements = selectStmt.getOrderByElements(); if (orderByElements.size() > mvColumnItemList.size()) { throw new AnalysisException("The number of columns in order clause must be less than " + "the number of " + "columns in select clause"); } if (beginIndexOfAggregation != -1 && (orderByElements.size() != (beginIndexOfAggregation))) { throw new AnalysisException("The key of columns in mv must be all of group by columns"); } for (int i = 0; i < orderByElements.size(); i++) { Expr orderByElement = orderByElements.get(i).getExpr(); if (!(orderByElement instanceof SlotRef)) { throw new AnalysisException("The column in order clause must be original column without calculation. " + "Error column: " + orderByElement.toSql()); } MVColumnItem mvColumnItem = mvColumnItemList.get(i); SlotRef slotRef = (SlotRef) orderByElement; if (!mvColumnItem.getName().equalsIgnoreCase(slotRef.getColumnName())) { throw new AnalysisException("The order of columns in order by clause must be same as " + "the order of columns in select list"); } Preconditions.checkState(mvColumnItem.getAggregationType() == null); mvColumnItem.setIsKey(true); } for (MVColumnItem mvColumnItem : mvColumnItemList) { if (mvColumnItem.isKey()) { continue; } if (mvColumnItem.getAggregationType() != null) { break; } mvColumnItem.setAggregationType(AggregateType.NONE, true); } } /* This function is used to supply order by columns and calculate short key count */ private void supplyOrderColumn() throws AnalysisException { /** * The keys type of Materialized view is aggregation. * All of group by columns are keys of materialized view. */ if (mvKeysType == KeysType.AGG_KEYS) { for (MVColumnItem mvColumnItem : mvColumnItemList) { if (mvColumnItem.getAggregationType() != null) { break; } mvColumnItem.setIsKey(true); } } else if (mvKeysType == KeysType.DUP_KEYS) { /** * There is no aggregation function in materialized view. * Supplement key of MV columns * The key is same as the short key in duplicate table * For example: select k1, k2 ... kn from t1 * The default key columns are first 36 bytes of the columns in define order. * If the number of columns in the first 36 is more than 3, the first 3 columns will be used. * column: k1, k2, k3. The key is true. * Supplement non-key of MV columns * column: k4... kn. The key is false, aggregation type is none, isAggregationTypeImplicit is true. 
*/ int theBeginIndexOfValue = 0; int keySizeByte = 0; for (; theBeginIndexOfValue < mvColumnItemList.size(); theBeginIndexOfValue++) { MVColumnItem column = mvColumnItemList.get(theBeginIndexOfValue); keySizeByte += column.getType().getIndexSize(); if (theBeginIndexOfValue + 1 > FeConstants.shortkey_max_column_count || keySizeByte > FeConstants.shortkey_maxsize_bytes) { if (theBeginIndexOfValue == 0 && column.getType().getPrimitiveType().isCharFamily()) { column.setIsKey(true); theBeginIndexOfValue++; } break; } if (column.getType().isFloatingPointType()) { break; } if (column.getType().getPrimitiveType() == PrimitiveType.VARCHAR) { column.setIsKey(true); theBeginIndexOfValue++; break; } column.setIsKey(true); } if (theBeginIndexOfValue == 0) { throw new AnalysisException("The first column could not be float or double type, use decimal instead"); } for (; theBeginIndexOfValue < mvColumnItemList.size(); theBeginIndexOfValue++) { MVColumnItem mvColumnItem = mvColumnItemList.get(theBeginIndexOfValue); mvColumnItem.setAggregationType(AggregateType.NONE, true); } } } private MVColumnItem buildMVColumnItem(FunctionCallExpr functionCallExpr) throws AnalysisException { String functionName = functionCallExpr.getFnName().getFunction(); List<SlotRef> slots = new ArrayList<>(); functionCallExpr.collect(SlotRef.class, slots); Preconditions.checkArgument(slots.size() == 1); SlotRef baseColumnRef = slots.get(0); String baseColumnName = baseColumnRef.getColumnName().toLowerCase(); Column baseColumn = baseColumnRef.getColumn(); Preconditions.checkNotNull(baseColumn); Type baseType = baseColumn.getOriginType(); Expr functionChild0 = functionCallExpr.getChild(0); String mvColumnName; AggregateType mvAggregateType; Expr defineExpr = null; Type type; switch (functionName.toLowerCase()) { case "sum": mvColumnName = baseColumnName; mvAggregateType = AggregateType.valueOf(functionName.toUpperCase()); PrimitiveType baseColumnType = baseColumnRef.getType().getPrimitiveType(); if (baseColumnType == PrimitiveType.TINYINT || baseColumnType == PrimitiveType.SMALLINT || baseColumnType == PrimitiveType.INT) { type = Type.BIGINT; } else if (baseColumnType == PrimitiveType.FLOAT) { type = Type.DOUBLE; } else { type = baseType; } break; case "min": case "max": mvColumnName = baseColumnName; mvAggregateType = AggregateType.valueOf(functionName.toUpperCase()); type = baseType; break; case FunctionSet.BITMAP_UNION: if (baseColumnRef.getType().getPrimitiveType() == PrimitiveType.BITMAP) { mvColumnName = baseColumnName; } else { mvColumnName = mvColumnBuilder(functionName, baseColumnName); defineExpr = functionChild0; } mvAggregateType = AggregateType.valueOf(functionName.toUpperCase()); type = Type.BITMAP; break; case FunctionSet.HLL_UNION: if (baseColumnRef.getType().getPrimitiveType() == PrimitiveType.HLL) { mvColumnName = baseColumnName; } else { mvColumnName = mvColumnBuilder(functionName, baseColumnName); defineExpr = functionChild0; } mvAggregateType = AggregateType.valueOf(functionName.toUpperCase()); type = Type.HLL; break; case FunctionSet.COUNT: mvColumnName = mvColumnBuilder(functionName, baseColumnName); mvAggregateType = AggregateType.SUM; defineExpr = new CaseExpr(null, Lists.newArrayList(new CaseWhenClause( new IsNullPredicate(baseColumnRef, false), new IntLiteral(0, Type.BIGINT))), new IntLiteral(1, Type.BIGINT)); type = Type.BIGINT; break; default: throw new AnalysisException("Unsupported function:" + functionName); } MVColumnItem mvColumnItem = new MVColumnItem(mvColumnName, type, mvAggregateType, false, 
defineExpr, baseColumnName); return mvColumnItem; } public Map<String, Expr> parseDefineExprWithoutAnalyze() throws AnalysisException { Map<String, Expr> result = Maps.newHashMap(); SelectList selectList = selectStmt.getSelectList(); for (SelectListItem selectListItem : selectList.getItems()) { Expr selectListItemExpr = selectListItem.getExpr(); if (selectListItemExpr instanceof SlotRef) { SlotRef slotRef = (SlotRef) selectListItemExpr; result.put(slotRef.getColumnName(), null); } else if (selectListItemExpr instanceof FunctionCallExpr) { FunctionCallExpr functionCallExpr = (FunctionCallExpr) selectListItemExpr; List<SlotRef> slots = new ArrayList<>(); functionCallExpr.collect(SlotRef.class, slots); Preconditions.checkArgument(slots.size() == 1); String baseColumnName = slots.get(0).getColumnName(); String functionName = functionCallExpr.getFnName().getFunction(); SlotRef baseSlotRef = slots.get(0); switch (functionName.toLowerCase()) { case "sum": case "min": case "max": result.put(baseColumnName, null); break; case FunctionSet.BITMAP_UNION: if (functionCallExpr.getChild(0) instanceof FunctionCallExpr) { CastExpr castExpr = new CastExpr(new TypeDef(Type.VARCHAR), baseSlotRef); List<Expr> params = Lists.newArrayList(); params.add(castExpr); FunctionCallExpr defineExpr = new FunctionCallExpr(FunctionSet.TO_BITMAP, params); result.put(mvColumnBuilder(functionName, baseColumnName), defineExpr); } else { result.put(baseColumnName, null); } break; case FunctionSet.HLL_UNION: if (functionCallExpr.getChild(0) instanceof FunctionCallExpr) { CastExpr castExpr = new CastExpr(new TypeDef(Type.VARCHAR), baseSlotRef); List<Expr> params = Lists.newArrayList(); params.add(castExpr); FunctionCallExpr defineExpr = new FunctionCallExpr(FunctionSet.HLL_HASH, params); result.put(mvColumnBuilder(functionName, baseColumnName), defineExpr); } else { result.put(baseColumnName, null); } break; case FunctionSet.COUNT: Expr defineExpr = new CaseExpr(null, Lists.newArrayList( new CaseWhenClause(new IsNullPredicate(slots.get(0), false), new IntLiteral(0, Type.BIGINT))), new IntLiteral(1, Type.BIGINT)); result.put(mvColumnBuilder(functionName, baseColumnName), defineExpr); break; default: throw new AnalysisException("Unsupported function:" + functionName); } } else { throw new AnalysisException("Unsupported select item:" + selectListItem.toSql()); } } return result; } public static String mvColumnBuilder(String functionName, String sourceColumnName) { return new StringBuilder().append(MATERIALIZED_VIEW_NAME_PREFIX).append(functionName).append("_") .append(sourceColumnName).toString(); } @Override public String toSql() { return null; } }
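The short-key supplement rule spelled out in the comment above (take leading columns until the 3-column or 36-byte budget is exceeded, let a VARCHAR close the prefix, and never admit floating-point columns) can be illustrated in isolation. Below is a minimal sketch, not the project's code: ColumnMeta and shortKeyColumnCount are hypothetical names, the 3/36 limits mirror FeConstants.shortkey_max_column_count and FeConstants.shortkey_maxsize_bytes as described in the comment, and the first-column char-family special case is omitted.

import java.util.List;

public class ShortKeySketch {

    // Hypothetical stand-in for MVColumnItem: just the bits the rule needs.
    record ColumnMeta(String name, int indexSizeBytes, boolean isVarchar, boolean isFloatingPoint) {}

    // Returns how many leading columns become short-key columns.
    static int shortKeyColumnCount(List<ColumnMeta> columns) {
        int count = 0;
        int sizeBytes = 0;
        for (ColumnMeta c : columns) {
            if (c.isFloatingPoint()) {
                break; // float/double can never be part of the key prefix
            }
            sizeBytes += c.indexSizeBytes();
            if (count + 1 > 3 || sizeBytes > 36) {
                break; // over the 3-column or 36-byte budget
            }
            count++;
            if (c.isVarchar()) {
                break; // a VARCHAR column closes the prefix
            }
        }
        return count;
    }

    public static void main(String[] args) {
        // k1 INT (4 bytes), k2 BIGINT (8 bytes), k3 VARCHAR(20): the VARCHAR is
        // still within budget, so k1..k3 become keys and k4 becomes a
        // NONE-aggregated value column.
        List<ColumnMeta> cols = List.of(
                new ColumnMeta("k1", 4, false, false),
                new ColumnMeta("k2", 8, false, false),
                new ColumnMeta("k3", 20, true, false),
                new ColumnMeta("k4", 8, false, false));
        System.out.println(shortKeyColumnCount(cols)); // prints 3
    }
}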
Technically, there is no problem with passing beans there.
public boolean implies(Permission permission) { if (permission instanceof WorkdayPermission) { WorkdayPermission that = (WorkdayPermission) permission; if (that.getName().equals("worker") && that.getActions().contains("adult")) { final WorkdayEvaluator workdayEvaluator = Arc.container().instance(WorkdayEvaluator.class).get(); return workdayEvaluator.isWorkday(that.day); } } return false; }
if (that.getName().equals("worker") && that.getActions().contains("adult")) {
public boolean implies(Permission permission) { if (permission instanceof WorkdayPermission) { WorkdayPermission that = (WorkdayPermission) permission; if (that.getName().equals("worker") && that.getActions().contains("adult")) { final WorkdayEvaluator workdayEvaluator = Arc.container().instance(WorkdayEvaluator.class).get(); return workdayEvaluator.isWorkday(that.day); } } return false; }
class must have a formal parameter {@link String}
class must have a formal parameter {@link String}
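The comment two records above refers to pulling a bean out of the container from inside a class that CDI does not manage (a java.security.Permission is instantiated by the security layer, not by Arc). A minimal sketch of that lookup pattern, assuming a recent Quarkus with the jakarta namespace; Evaluator and LookupExample are hypothetical stand-ins for WorkdayEvaluator and the permission class:

import io.quarkus.arc.Arc;
import jakarta.enterprise.context.ApplicationScoped;

// Hypothetical bean standing in for WorkdayEvaluator.
@ApplicationScoped
class Evaluator {
    boolean isWorkday(String day) {
        return !"Saturday".equals(day) && !"Sunday".equals(day);
    }
}

class LookupExample {
    static boolean check(String day) {
        // Programmatic lookup: the caller is not a CDI bean, so field injection
        // is unavailable and the bean is fetched from the running container,
        // exactly as implies(...) does above.
        Evaluator evaluator = Arc.container().instance(Evaluator.class).get();
        return evaluator.isWorkday(day);
    }
}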
@cheese8 What is the difference between this change and the original logic?
public Collection<SubstitutableColumnNameToken> generateSQLTokens(final SQLStatementContext sqlStatementContext) { Preconditions.checkState(((WhereAvailable) sqlStatementContext).getWhere().isPresent()); Collection<SubstitutableColumnNameToken> result = new LinkedHashSet<>(); ExpressionSegment expression = ((WhereAvailable) sqlStatementContext).getWhere().get().getExpr(); Collection<AndPredicate> andPredicates = ExpressionExtractUtil.getAndPredicates(expression); Map<String, String> columnTableNames = getColumnTableNames(sqlStatementContext, andPredicates); andPredicates.forEach(each -> result.addAll(generateSQLTokens(each.getPredicates(), columnTableNames))); return result; }
andPredicates.forEach(each -> result.addAll(generateSQLTokens(each.getPredicates(), columnTableNames)));
public Collection<SubstitutableColumnNameToken> generateSQLTokens(final SQLStatementContext sqlStatementContext) { Collection<SubstitutableColumnNameToken> result = new LinkedHashSet<>(); Collection<AndPredicate> andPredicates = new LinkedHashSet<>(); if (isGenerateSQLTokenForEncryptOnWhereAvailable(sqlStatementContext)) { ExpressionSegment expression = ((WhereAvailable) sqlStatementContext).getWhere().get().getExpr(); andPredicates.addAll(ExpressionExtractUtil.getAndPredicates(expression)); } Collection<WhereSegment> whereSegments = Collections.emptyList(); if (sqlStatementContext instanceof SelectStatementContext) { whereSegments = WhereExtractUtil.getJoinWhereSegments((SelectStatement) sqlStatementContext.getSqlStatement()); andPredicates.addAll(whereSegments.stream().map(each -> ExpressionExtractUtil.getAndPredicates(each.getExpr())).flatMap(Collection::stream).collect(Collectors.toList())); } Map<String, String> columnTableNames = getColumnTableNames(sqlStatementContext, andPredicates, whereSegments); result.addAll(andPredicates.stream().map(each -> generateSQLTokens(each.getPredicates(), columnTableNames)).flatMap(Collection::stream).collect(Collectors.toList())); return result; }
class EncryptPredicateColumnTokenGenerator extends BaseEncryptSQLTokenGenerator implements CollectionSQLTokenGenerator, SchemaMetaDataAware, QueryWithCipherColumnAware { private ShardingSphereSchema schema; private boolean queryWithCipherColumn; @Override protected boolean isGenerateSQLTokenForEncrypt(final SQLStatementContext sqlStatementContext) { return sqlStatementContext instanceof WhereAvailable && ((WhereAvailable) sqlStatementContext).getWhere().isPresent(); } @Override private Collection<SubstitutableColumnNameToken> generateSQLTokens(final Collection<ExpressionSegment> expressionSegments, final Map<String, String> columnTableNames) { Collection<SubstitutableColumnNameToken> result = new LinkedList<>(); expressionSegments.forEach(each -> result.addAll(generateSQLTokensOnColumnSegments(ColumnExtractor.extractAll(each), columnTableNames))); return result; } private Collection<SubstitutableColumnNameToken> generateSQLTokensOnColumnSegments(final Collection<ColumnSegment> columnSegments, final Map<String, String> columnTableNames) { Collection<SubstitutableColumnNameToken> result = new LinkedList<>(); for (ColumnSegment each : columnSegments) { Optional<EncryptTable> encryptTable = findEncryptTable(columnTableNames, each); if (!encryptTable.isPresent() || !encryptTable.get().findEncryptorName(each.getIdentifier().getValue()).isPresent()) { continue; } int startIndex = each.getOwner().isPresent() ? each.getOwner().get().getStopIndex() + 2 : each.getStartIndex(); int stopIndex = each.getStopIndex(); if (!queryWithCipherColumn) { Optional<String> plainColumn = encryptTable.get().findPlainColumn(each.getIdentifier().getValue()); if (plainColumn.isPresent()) { result.add(new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(plainColumn.get()))); continue; } } Optional<String> assistedQueryColumn = encryptTable.get().findAssistedQueryColumn(each.getIdentifier().getValue()); SubstitutableColumnNameToken encryptColumnNameToken = assistedQueryColumn.map(columnName -> new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(columnName))).orElseGet(() -> new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(encryptTable.get().getCipherColumn(each.getIdentifier().getValue())))); result.add(encryptColumnNameToken); } return result; } private Map<String, String> getColumnTableNames(final SQLStatementContext sqlStatementContext, final Collection<AndPredicate> andPredicates) { Collection<ColumnSegment> columns = new ArrayList<ColumnSegment>(); andPredicates.forEach(each -> columns.addAll(generateColumnSegments(each.getPredicates()))); return sqlStatementContext.getTablesContext().findTableName(columns, schema); } private Collection<ColumnSegment> generateColumnSegments(final Collection<ExpressionSegment> expressionSegments) { Collection<ColumnSegment> result = new ArrayList<ColumnSegment>(); expressionSegments.forEach(each -> result.addAll(ColumnExtractor.extractAll(each))); return result; } private Optional<EncryptTable> findEncryptTable(final Map<String, String> columnTableNames, final ColumnSegment column) { return Optional.ofNullable(columnTableNames.get(column.getQualifiedName())).flatMap(tableName -> getEncryptRule().findEncryptTable(tableName)); } private Collection<ColumnProjection> getColumnProjections(final String columnName) { return Collections.singletonList(new ColumnProjection(null, columnName, null)); } }
class EncryptPredicateColumnTokenGenerator extends BaseEncryptSQLTokenGenerator implements CollectionSQLTokenGenerator, SchemaMetaDataAware, QueryWithCipherColumnAware { private ShardingSphereSchema schema; private boolean queryWithCipherColumn; @Override protected boolean isGenerateSQLTokenForEncrypt(final SQLStatementContext sqlStatementContext) { return isGenerateSQLTokenForEncryptOnWhereAvailable(sqlStatementContext) || isGenerateSQLTokenForEncryptOnJoinSegments(sqlStatementContext); } private boolean isGenerateSQLTokenForEncryptOnWhereAvailable(final SQLStatementContext sqlStatementContext) { return sqlStatementContext instanceof WhereAvailable && ((WhereAvailable) sqlStatementContext).getWhere().isPresent(); } private boolean isGenerateSQLTokenForEncryptOnJoinSegments(final SQLStatementContext sqlStatementContext) { return sqlStatementContext instanceof SelectStatementContext && ((SelectStatementContext) sqlStatementContext).isContainsJoinQuery(); } @Override private Collection<SubstitutableColumnNameToken> generateSQLTokens(final Collection<ExpressionSegment> predicates, final Map<String, String> columnTableNames) { Collection<SubstitutableColumnNameToken> result = new LinkedList<>(); for (ExpressionSegment each : predicates) { for (ColumnSegment column : ColumnExtractor.extract(each)) { Optional<EncryptTable> encryptTable = findEncryptTable(columnTableNames, column); if (!encryptTable.isPresent() || !encryptTable.get().findEncryptorName(column.getIdentifier().getValue()).isPresent()) { continue; } int startIndex = column.getOwner().isPresent() ? column.getOwner().get().getStopIndex() + 2 : column.getStartIndex(); int stopIndex = column.getStopIndex(); if (!queryWithCipherColumn) { Optional<String> plainColumn = encryptTable.get().findPlainColumn(column.getIdentifier().getValue()); if (plainColumn.isPresent()) { result.add(new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(plainColumn.get()))); continue; } } Optional<String> assistedQueryColumn = encryptTable.get().findAssistedQueryColumn(column.getIdentifier().getValue()); SubstitutableColumnNameToken encryptColumnNameToken = assistedQueryColumn.map(columnName -> new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(columnName))).orElseGet(() -> new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(encryptTable.get().getCipherColumn(column.getIdentifier().getValue())))); result.add(encryptColumnNameToken); } } return result; } private Map<String, String> getColumnTableNames(final SQLStatementContext<?> sqlStatementContext, final Collection<AndPredicate> andPredicates, final Collection<WhereSegment> whereSegments) { Collection<ColumnSegment> columns = andPredicates.stream().flatMap(each -> each.getPredicates().stream()) .flatMap(each -> ColumnExtractor.extract(each).stream()).filter(Objects::nonNull).collect(Collectors.toList()); columns.addAll(whereSegments.stream().map(each -> ColumnExtractor.extract(each.getExpr())).flatMap(Collection::stream).collect(Collectors.toList())); return sqlStatementContext.getTablesContext().findTableName(columns, schema); } private Optional<EncryptTable> findEncryptTable(final Map<String, String> columnTableNames, final ColumnSegment column) { return Optional.ofNullable(columnTableNames.get(column.getQualifiedName())).flatMap(tableName -> getEncryptRule().findEncryptTable(tableName)); } private Collection<ColumnProjection> getColumnProjections(final String columnName) { return Collections.singletonList(new ColumnProjection(null, 
columnName, null)); } }
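To answer the question above in one place: the original logic collected AndPredicates only from the statement's WHERE clause, while the replacement additionally walks the ON conditions of joined tables in a SELECT, so encrypt columns used in join predicates get rewritten too. A minimal sketch of that merge, reusing the types and utility methods visible in the diff (it compiles only inside the project, and the signatures are assumed from their usage above):

// Collect predicates from both the WHERE clause and join ON conditions.
Collection<AndPredicate> collectPredicates(final SQLStatementContext sqlStatementContext) {
    Collection<AndPredicate> result = new LinkedHashSet<>();
    // Original behavior: WHERE clause only.
    if (sqlStatementContext instanceof WhereAvailable && ((WhereAvailable) sqlStatementContext).getWhere().isPresent()) {
        result.addAll(ExpressionExtractUtil.getAndPredicates(((WhereAvailable) sqlStatementContext).getWhere().get().getExpr()));
    }
    // Added behavior: ON conditions of joins in a SELECT statement.
    if (sqlStatementContext instanceof SelectStatementContext) {
        for (WhereSegment each : WhereExtractUtil.getJoinWhereSegments((SelectStatement) sqlStatementContext.getSqlStatement())) {
            result.addAll(ExpressionExtractUtil.getAndPredicates(each.getExpr()));
        }
    }
    return result;
}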
Shouldn't we iterate over the whole set of messages? It might not be the last one.
public void testRegisterHealthOnBlockingThreadStep1() { try { Thread.sleep(5000); } catch (InterruptedException e) { throw new RuntimeException(e); } try { RestAssured.defaultParser = Parser.JSON; for (int i = 0; i < 3; i++) { RestAssured.when().get("/q/health").then() .body("status", is("UP"), "checks.status", contains("UP"), "checks.name", contains("blocking")); } } finally { RestAssured.reset(); } if (!inMemoryLogHandler.getRecords().isEmpty()) { LogRecord logRecord = inMemoryLogHandler.getRecords().get(0); assertEquals(Level.WARNING, logRecord.getLevel()); assertFalse(logRecord.getMessage().contains("has been blocked for"), "The blocking health check ran on eventloop thread"); } }
assertFalse(logRecord.getMessage().contains("has been blocked for"),
public void testRegisterHealthOnBlockingThreadStep1() { given() .when().get("/start-health") .then().statusCode(200); try { RestAssured.defaultParser = Parser.JSON; for (int i = 0; i < 3; i++) { RestAssured.when().get("/q/health").then() .body("status", is("UP"), "checks.status", contains("UP"), "checks.name", contains("blocking")); } } finally { RestAssured.reset(); } }
class BlockingNonBlockingTest { private static final java.util.logging.Logger rootLogger = java.util.logging.LogManager.getLogManager() .getLogger("io.vertx.core"); private static final InMemoryLogHandler inMemoryLogHandler = new InMemoryLogHandler( record -> record.getLevel().intValue() >= Level.WARNING.intValue()); @BeforeEach public void setLogHandler() { inMemoryLogHandler.getRecords().clear(); rootLogger.addHandler(inMemoryLogHandler); } @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(BlockingHealthCheck.class, StartupBean.class) .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml")); @Test @Liveness static final class BlockingHealthCheck implements HealthCheck { @Override public HealthCheckResponse call() { try { Thread.sleep(3000); } catch (InterruptedException e) { throw new RuntimeException(e); } return HealthCheckResponse.up("blocking"); } } @ApplicationScoped static final class StartupBean { @Inject SmallRyeHealthReporter smallRyeHealthReporter; @Blocking public void startup(@Observes StartupEvent event) { smallRyeHealthReporter.getHealth(); } } }
class BlockingNonBlockingTest { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(BlockingHealthCheck.class, Routes.class) .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml")); @Test @Liveness static final class BlockingHealthCheck implements HealthCheck { @Override public HealthCheckResponse call() { Uni.createFrom().item(42).onItem().delayIt().by(Duration.ofMillis(10)).await().indefinitely(); return HealthCheckResponse.up("blocking"); } } @ApplicationScoped static final class Routes { @Inject SmallRyeHealthReporter smallRyeHealthReporter; @Route(path = "/start-health", methods = Route.HttpMethod.GET) @Blocking public String health() { return smallRyeHealthReporter.getHealth().toString(); } } }
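The change the comment asks for — checking every captured record instead of only records.get(0) — could look like the following; a minimal sketch reusing inMemoryLogHandler and the JUnit assertion already present in the original test:

// Scan all captured records: the "has been blocked for" warning is not
// guaranteed to be the first (or last) message the log handler received.
for (LogRecord logRecord : inMemoryLogHandler.getRecords()) {
    assertFalse(logRecord.getMessage().contains("has been blocked for"),
            "The blocking health check ran on eventloop thread");
}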
If we are to introduce error reporting, it will need considerable changes. Error reporting requires the package to be loaded at the moment (i.e., for it to be in `BLangDiagnosticLog.packageCache`). Hence, going with throwing an exception.
private void defineImportPackage(DataInputStream dataInStream) throws IOException { String orgName = getStringCPEntryValue(dataInStream); String pkgName = getStringCPEntryValue(dataInStream); String moduleName = getStringCPEntryValue(dataInStream); String pkgVersion = getStringCPEntryValue(dataInStream); PackageID importPkgID = createPackageID(orgName, pkgName, moduleName, pkgVersion); BPackageSymbol importPackageSymbol = packageCache.getSymbol(importPkgID); if (importPackageSymbol == null) { throw new BLangCompilerException("cannot resolve module " + importPkgID); } this.env.pkgSymbol.scope.define(importPkgID.name, importPackageSymbol); this.env.pkgSymbol.imports.add(importPackageSymbol); }
throw new BLangCompilerException("cannot resolve module " + importPkgID);
private void defineImportPackage(DataInputStream dataInStream) throws IOException { String orgName = getStringCPEntryValue(dataInStream); String pkgName = getStringCPEntryValue(dataInStream); String moduleName = getStringCPEntryValue(dataInStream); String pkgVersion = getStringCPEntryValue(dataInStream); PackageID importPkgID = createPackageID(orgName, pkgName, moduleName, pkgVersion); BPackageSymbol importPackageSymbol = packageCache.getSymbol(importPkgID); if (importPackageSymbol == null) { throw new BLangCompilerException("cannot resolve module " + importPkgID); } this.env.pkgSymbol.scope.define(importPkgID.name, importPackageSymbol); this.env.pkgSymbol.imports.add(importPackageSymbol); }
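For contrast with the comment above: reporting this through the diagnostic log would be the usual route, but BLangDiagnosticLog needs the module to already be in its package cache, and an unresolved import is precisely the case where it is not. A sketch of the fail-fast guard that was chosen, extracted into a hypothetical helper (resolveImport is not a method in the class below):

// Fail fast on an unresolved import. dlog-based error reporting is not an
// option here because it requires the module to be in the package cache.
private BPackageSymbol resolveImport(PackageID importPkgID) {
    BPackageSymbol importPackageSymbol = packageCache.getSymbol(importPkgID);
    if (importPackageSymbol == null) {
        throw new BLangCompilerException("cannot resolve module " + importPkgID);
    }
    return importPackageSymbol;
}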
class BIRPackageSymbolEnter { private final PackageCache packageCache; private final SymbolResolver symbolResolver; private final SymbolTable symTable; private final Names names; private final TypeParamAnalyzer typeParamAnalyzer; private final Types types; private BIRTypeReader typeReader; private BIRPackageSymbolEnv env; private List<BStructureTypeSymbol> structureTypes; private BStructureTypeSymbol currentStructure = null; private LinkedList<Object> compositeStack = new LinkedList<>(); private static final int SERVICE_TYPE_TAG = 53; private static final CompilerContext.Key<BIRPackageSymbolEnter> COMPILED_PACKAGE_SYMBOL_ENTER_KEY = new CompilerContext.Key<>(); private Map<String, BVarSymbol> globalVarMap = new HashMap<>(); public static BIRPackageSymbolEnter getInstance(CompilerContext context) { BIRPackageSymbolEnter packageReader = context.get(COMPILED_PACKAGE_SYMBOL_ENTER_KEY); if (packageReader == null) { packageReader = new BIRPackageSymbolEnter(context); } return packageReader; } private BIRPackageSymbolEnter(CompilerContext context) { context.put(COMPILED_PACKAGE_SYMBOL_ENTER_KEY, this); this.packageCache = PackageCache.getInstance(context); this.symbolResolver = SymbolResolver.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.names = Names.getInstance(context); this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context); this.types = Types.getInstance(context); } public BPackageSymbol definePackage(PackageID packageId, byte[] packageBinaryContent) { BPackageSymbol pkgSymbol = definePackage(packageId, new ByteArrayInputStream(packageBinaryContent)); byte[] modifiedPkgBinaryContent = Arrays.copyOfRange( packageBinaryContent, 8, packageBinaryContent.length); pkgSymbol.birPackageFile = new CompiledBinaryFile.BIRPackageFile(modifiedPkgBinaryContent); SymbolEnv builtinEnv = this.symTable.pkgEnvMap.get(symTable.langAnnotationModuleSymbol); SymbolEnv pkgEnv = SymbolEnv.createPkgEnv(null, pkgSymbol.scope, builtinEnv); this.symTable.pkgEnvMap.put(pkgSymbol, pkgEnv); return pkgSymbol; } private BPackageSymbol definePackage(PackageID packageId, InputStream programFileInStream) { try (DataInputStream dataInStream = new DataInputStream(programFileInStream)) { BIRPackageSymbolEnv prevEnv = this.env; this.env = new BIRPackageSymbolEnv(); this.env.requestedPackageId = packageId; BPackageSymbol pkgSymbol = definePackage(dataInStream); this.env = prevEnv; return pkgSymbol; } catch (Throwable e) { throw new BLangCompilerException("failed to load the module '" + packageId.toString() + "' from its BIR" + (e.getMessage() != null ? 
(" due to: " + e.getMessage()) : ""), e); } } private BPackageSymbol definePackage(DataInputStream dataInStream) throws IOException { byte[] magic = new byte[4]; dataInStream.read(magic, 0, 4); if (!Arrays.equals(magic, BIRPackageFile.BIR_MAGIC)) { throw new BLangCompilerException("invalid magic number " + Arrays.toString(magic)); } int version = dataInStream.readInt(); if (version != BIRPackageFile.BIR_VERSION) { throw new BLangCompilerException("unsupported program file version " + version); } this.env.constantPool = readConstantPool(dataInStream); int pkgCPIndex = dataInStream.readInt(); return definePackage(dataInStream, pkgCPIndex); } private BPackageSymbol definePackage(DataInputStream dataInStream, int pkgCpIndex) throws IOException { PackageCPEntry pkgCpEntry = (PackageCPEntry) this.env.constantPool[pkgCpIndex]; String orgName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.orgNameCPIndex]).value; String pkgName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.pkgNameCPIndex]).value; String moduleName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.moduleNameCPIndex]).value; String pkgVersion = ((StringCPEntry) this.env.constantPool[pkgCpEntry.versionCPIndex]).value; PackageID pkgId = createPackageID(orgName, pkgName, moduleName, pkgVersion); this.env.pkgSymbol = Symbols.createPackageSymbol(pkgId, this.symTable, COMPILED_SOURCE); defineSymbols(dataInStream, rethrow(this::defineImportPackage)); defineSymbols(dataInStream, rethrow(this::defineConstant)); this.structureTypes = new ArrayList<>(); defineSymbols(dataInStream, rethrow(this::defineTypeDef)); defineSymbols(dataInStream, rethrow(this::definePackageLevelVariables)); readTypeDefBodies(dataInStream); defineSymbols(dataInStream, rethrow(this::defineFunction)); defineSymbols(dataInStream, rethrow(this::defineAnnotations)); defineSymbols(dataInStream, rethrow(this::defineServiceDeclarations)); populateReferencedFunctions(); this.typeReader = null; return this.env.pkgSymbol; } private void populateReferencedFunctions() { for (BStructureTypeSymbol structureTypeSymbol : this.structureTypes) { if (structureTypeSymbol.type.tag == TypeTags.OBJECT) { BObjectType objectType = (BObjectType) structureTypeSymbol.type; for (BType ref : objectType.typeInclusions) { BType typeRef = Types.getReferredType(ref); if (typeRef.tsymbol == null || typeRef.tsymbol.kind != SymbolKind.OBJECT) { continue; } List<BAttachedFunction> attachedFunctions = ((BObjectTypeSymbol) typeRef.tsymbol).attachedFuncs; for (BAttachedFunction function : attachedFunctions) { if (Symbols.isPrivate(function.symbol)) { continue; } String referencedFuncName = function.funcName.value; Name funcName = names.fromString( Symbols.getAttachedFuncSymbolName(structureTypeSymbol.name.value, referencedFuncName)); Scope.ScopeEntry matchingObjFuncSym = objectType.tsymbol.scope.lookup(funcName); if (matchingObjFuncSym == NOT_FOUND_ENTRY) { structureTypeSymbol.attachedFuncs.add(function); ((BObjectTypeSymbol) structureTypeSymbol).referencedFunctions.add(function); } } } } } } private void readTypeDefBodies(DataInputStream dataInStream) throws IOException { dataInStream.readInt(); for (BStructureTypeSymbol structureTypeSymbol : this.structureTypes) { this.currentStructure = structureTypeSymbol; defineSymbols(dataInStream, rethrow(this::defineFunction)); defineSymbols(dataInStream, rethrow(this::readBType)); } this.currentStructure = null; } private CPEntry[] readConstantPool(DataInputStream dataInStream) throws IOException { int constantPoolSize = dataInStream.readInt(); CPEntry[] 
constantPool = new CPEntry[constantPoolSize]; this.env.constantPool = constantPool; for (int i = 0; i < constantPoolSize; i++) { byte cpTag = dataInStream.readByte(); CPEntry.Type cpEntryType = CPEntry.Type.values()[cpTag - 1]; constantPool[i] = readCPEntry(dataInStream, constantPool, cpEntryType, i); } return constantPool; } private CPEntry readCPEntry(DataInputStream dataInStream, CPEntry[] constantPool, CPEntry.Type cpEntryType, int i) throws IOException { switch (cpEntryType) { case CP_ENTRY_INTEGER: return new CPEntry.IntegerCPEntry(dataInStream.readLong()); case CP_ENTRY_FLOAT: return new CPEntry.FloatCPEntry(dataInStream.readDouble()); case CP_ENTRY_BOOLEAN: return new CPEntry.BooleanCPEntry(dataInStream.readBoolean()); case CP_ENTRY_STRING: int length = dataInStream.readInt(); String strValue = null; if (length >= 0) { byte[] bytes = new byte[length]; dataInStream.read(bytes, 0, length); strValue = new String(bytes); } return new CPEntry.StringCPEntry(strValue); case CP_ENTRY_PACKAGE: return new CPEntry.PackageCPEntry(dataInStream.readInt(), dataInStream.readInt(), dataInStream.readInt(), dataInStream.readInt()); case CP_ENTRY_SHAPE: env.unparsedBTypeCPs.put(i, readByteArray(dataInStream)); return null; case CP_ENTRY_BYTE: return new CPEntry.ByteCPEntry(dataInStream.readInt()); default: throw new IllegalStateException("unsupported constant pool entry type: " + cpEntryType.name()); } } private byte[] readByteArray(DataInputStream dataInStream) throws IOException { int length = dataInStream.readInt(); byte[] bytes = new byte[length]; dataInStream.readFully(bytes); return bytes; } private void defineSymbols(DataInputStream dataInStream, Consumer<DataInputStream> symbolDefineFunc) throws IOException { int symbolCount = dataInStream.readInt(); for (int i = 0; i < symbolCount; i++) { symbolDefineFunc.accept(dataInStream); } } private void defineFunction(DataInputStream dataInStream) throws IOException { Location pos = readPosition(dataInStream); String funcName = getStringCPEntryValue(dataInStream); String funcOrigName = getStringCPEntryValue(dataInStream); String workerName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); BInvokableType funcType = (BInvokableType) readBType(dataInStream); BInvokableSymbol invokableSymbol = Symbols.createFunctionSymbol(flags, names.fromString(funcName), names.fromString(funcOrigName), this.env.pkgSymbol.pkgID, funcType, this.env.pkgSymbol, Symbols.isFlagOn(flags, Flags.NATIVE), pos, toOrigin(origin)); invokableSymbol.source = pos.lineRange().filePath(); invokableSymbol.retType = funcType.retType; Scope scopeToDefine = this.env.pkgSymbol.scope; if (this.currentStructure != null) { BType attachedType = Types.getReferredType(this.currentStructure.type); invokableSymbol.owner = attachedType.tsymbol; invokableSymbol.name = names.fromString(Symbols.getAttachedFuncSymbolName(attachedType.tsymbol.name.value, funcName)); if (attachedType.tag == TypeTags.OBJECT || attachedType.tag == TypeTags.RECORD) { scopeToDefine = attachedType.tsymbol.scope; BAttachedFunction attachedFunc = new BAttachedFunction(names.fromString(funcName), invokableSymbol, funcType, symTable.builtinPos); BStructureTypeSymbol structureTypeSymbol = (BStructureTypeSymbol) attachedType.tsymbol; if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcName) || funcName.equals(Names.INIT_FUNCTION_SUFFIX.value)) { structureTypeSymbol.initializerFunc = attachedFunc; } else if (funcName.equals(Names.GENERATED_INIT_SUFFIX.value)) { 
((BObjectTypeSymbol) structureTypeSymbol).generatedInitializerFunc = attachedFunc; } else { structureTypeSymbol.attachedFuncs.add(attachedFunc); } } } dataInStream.skip(dataInStream.readLong()); dataInStream.skip(dataInStream.readLong()); setParamSymbols(invokableSymbol, dataInStream); defineMarkDownDocAttachment(invokableSymbol, readDocBytes(dataInStream)); defineGlobalVarDependencies(invokableSymbol, dataInStream); dataInStream.skip(dataInStream.readLong()); dataInStream.skip(dataInStream.readLong()); scopeToDefine.define(invokableSymbol.name, invokableSymbol); } private void defineGlobalVarDependencies(BInvokableSymbol invokableSymbol, DataInputStream dataInStream) throws IOException { long length = dataInStream.readInt(); for (int i = 0; i < length; i++) { String globalVarName = getStringCPEntryValue(dataInStream.readInt()); invokableSymbol.dependentGlobalVars.add(this.globalVarMap.get(globalVarName)); } } private void defineTypeDef(DataInputStream dataInStream) throws IOException { Location pos = readPosition(dataInStream); String typeDefName = getStringCPEntryValue(dataInStream); String typeDefOrigName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); byte[] docBytes = readDocBytes(dataInStream); BType type = readBType(dataInStream); BTypeReferenceType referenceType = null; boolean hasReferenceType = dataInStream.readBoolean(); if (hasReferenceType) { BTypeSymbol typeSymbol = new BTypeSymbol(SymTag.TYPE_REF, flags, names.fromString(typeDefName), this.env.pkgSymbol.pkgID, type, this.env.pkgSymbol, pos, COMPILED_SOURCE); referenceType = new BTypeReferenceType(type, typeSymbol, flags); } if (type.tag == TypeTags.INVOKABLE) { setInvokableTypeSymbol((BInvokableType) type); } boolean isClass = Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS); flags = isClass ? flags | Flags.CLASS : flags; flags = Symbols.isFlagOn(type.tsymbol.flags, Flags.CLIENT) ? flags | Flags.CLIENT : flags; BSymbol symbol; boolean isEnum = Symbols.isFlagOn(type.tsymbol.flags, Flags.ENUM); if (isClass || isEnum) { symbol = type.tsymbol; symbol.pos = pos; } else { symbol = Symbols.createTypeDefinitionSymbol(flags, names.fromString(typeDefName), this.env.pkgSymbol.pkgID, type, this.env.pkgSymbol, pos, COMPILED_SOURCE); ((BTypeDefinitionSymbol) symbol).referenceType = referenceType; } symbol.originalName = names.fromString(typeDefOrigName); symbol.origin = toOrigin(origin); symbol.flags = flags; defineMarkDownDocAttachment(symbol, docBytes); defineAnnotAttachmentSymbols(dataInStream, (isClass || isEnum || symbol.tag == SymTag.TYPE_DEF) ? 
(Annotatable) symbol : null); if (type.tsymbol.name == Names.EMPTY && type.tag != TypeTags.INVOKABLE) { type.tsymbol.name = symbol.name; type.tsymbol.originalName = symbol.originalName; } if (type.tag == TypeTags.RECORD || type.tag == TypeTags.OBJECT) { if (!isClass) { ((BStructureTypeSymbol) type.tsymbol).typeDefinitionSymbol = (BTypeDefinitionSymbol) symbol; } type.tsymbol.origin = toOrigin(origin); this.structureTypes.add((BStructureTypeSymbol) type.tsymbol); } this.env.pkgSymbol.scope.define(symbol.name, symbol); } private void skipPosition(DataInputStream dataInStream) throws IOException { for (int i = 0; i < 4; i++) { dataInStream.readInt(); } } private void setInvokableTypeSymbol(BInvokableType invokableType) { if (Symbols.isFlagOn(invokableType.flags, Flags.ANY_FUNCTION)) { return; } BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol; List<BVarSymbol> params = new ArrayList<>(invokableType.paramTypes.size()); for (BType paramType : invokableType.paramTypes) { BVarSymbol varSymbol = new BVarSymbol(paramType.flags, Names.EMPTY, this.env.pkgSymbol.pkgID, paramType, null, symTable.builtinPos, COMPILED_SOURCE); params.add(varSymbol); } tsymbol.params = params; if (invokableType.restType != null) { tsymbol.restParam = new BVarSymbol(0, Names.EMPTY, this.env.pkgSymbol.pkgID, invokableType.restType, null, symTable.builtinPos, COMPILED_SOURCE); } tsymbol.returnType = invokableType.retType; } private void defineMarkDownDocAttachment(BSymbol symbol, byte[] docBytes) throws IOException { DataInputStream dataInStream = new DataInputStream(new ByteArrayInputStream(docBytes)); boolean docPresent = dataInStream.readBoolean(); if (!docPresent) { return; } int descCPIndex = dataInStream.readInt(); int retDescCPIndex = dataInStream.readInt(); int paramLength = dataInStream.readInt(); MarkdownDocAttachment markdownDocAttachment = new MarkdownDocAttachment(paramLength); markdownDocAttachment.description = descCPIndex >= 0 ? getStringCPEntryValue(descCPIndex) : null; markdownDocAttachment.returnValueDescription = retDescCPIndex >= 0 ? getStringCPEntryValue(retDescCPIndex) : null; readAndSetParamDocumentation(dataInStream, markdownDocAttachment.parameters, paramLength); int deprecatedDescCPIndex = dataInStream.readInt(); int deprecatedParamLength = dataInStream.readInt(); markdownDocAttachment.deprecatedDocumentation = deprecatedDescCPIndex >= 0 ? getStringCPEntryValue(deprecatedDescCPIndex) : null; readAndSetParamDocumentation(dataInStream, markdownDocAttachment.deprecatedParams, deprecatedParamLength); symbol.markdownDocumentation = markdownDocAttachment; } private void readAndSetParamDocumentation(DataInputStream inputStream, List<MarkdownDocAttachment.Parameter> params, int nParams) throws IOException { for (int i = 0; i < nParams; i++) { int nameCPIndex = inputStream.readInt(); int paramDescCPIndex = inputStream.readInt(); String name = nameCPIndex >= 0 ? getStringCPEntryValue(nameCPIndex) : null; String description = paramDescCPIndex >= 0 ? 
getStringCPEntryValue(paramDescCPIndex) : null; MarkdownDocAttachment.Parameter parameter = new MarkdownDocAttachment.Parameter(name, description); params.add(parameter); } } private BType readBType(DataInputStream dataInStream) throws IOException { int typeCpIndex = dataInStream.readInt(); CPEntry cpEntry = this.env.constantPool[typeCpIndex]; BType type = null; if (cpEntry != null) { type = ((CPEntry.ShapeCPEntry) cpEntry).shape; if (type.tag != TypeTags.INVOKABLE) { return type; } } if (type == null) { byte[] e = env.unparsedBTypeCPs.get(typeCpIndex); type = new BIRTypeReader(new DataInputStream(new ByteArrayInputStream(e))).readType(typeCpIndex); addShapeCP(type, typeCpIndex); } if (type.tag == TypeTags.INVOKABLE) { return createClonedInvokableTypeWithTsymbol((BInvokableType) type); } return type; } private BInvokableType createClonedInvokableTypeWithTsymbol(BInvokableType bInvokableType) { BInvokableType clonedType; if (Symbols.isFlagOn(bInvokableType.flags, Flags.ANY_FUNCTION)) { clonedType = new BInvokableType(null, null, null, null); } else { clonedType = new BInvokableType(bInvokableType.paramTypes, bInvokableType.restType, bInvokableType.retType, null); } clonedType.tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, bInvokableType.flags, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner, symTable.builtinPos, COMPILED_SOURCE); clonedType.flags = bInvokableType.flags; return clonedType; } private void addShapeCP(BType bType, int typeCpIndex) { this.env.constantPool[typeCpIndex] = new CPEntry.ShapeCPEntry(bType); } private void defineAnnotations(DataInputStream dataInStream) throws IOException { BAnnotationSymbol annotationSymbol = defineAnnotation(dataInStream); this.env.pkgSymbol.scope.define(annotationSymbol.name, annotationSymbol); } private BAnnotationSymbol defineAnnotation(DataInputStream dataInStream) throws IOException { int pkgCpIndex = dataInStream.readInt(); PackageID pkgId = getPackageId(pkgCpIndex); String name = getStringCPEntryValue(dataInStream); String originalName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); Location pos = readPosition(dataInStream); int attachPointCount = dataInStream.readInt(); Set<AttachPoint> attachPoints = new HashSet<>(attachPointCount); for (int i = 0; i < attachPointCount; i++) { attachPoints.add(AttachPoint.getAttachmentPoint(getStringCPEntryValue(dataInStream), dataInStream.readBoolean())); } BType annotationType = readBType(dataInStream); BPackageSymbol pkgSymbol = pkgId.equals(env.pkgSymbol.pkgID) ? 
this.env.pkgSymbol : packageCache.getSymbol(pkgId); BAnnotationSymbol annotationSymbol = Symbols.createAnnotationSymbol(flags, attachPoints, names.fromString(name), names.fromString(originalName), pkgId, null, pkgSymbol, pos, toOrigin(origin)); annotationSymbol.type = new BAnnotationType(annotationSymbol); defineMarkDownDocAttachment(annotationSymbol, readDocBytes(dataInStream)); defineAnnotAttachmentSymbols(dataInStream, annotationSymbol); if (annotationType != symTable.noType) { annotationSymbol.attachedType = annotationType; } return annotationSymbol; } private BAnnotationAttachmentSymbol defineAnnotationAttachmentSymbol(DataInputStream dataInStream, BSymbol owner) throws IOException { PackageID pkgId = getPackageId(dataInStream.readInt()); Location pos = readPosition(dataInStream); Name annotTagRef = Names.fromString(getStringCPEntryValue(dataInStream.readInt())); boolean constAnnotation = dataInStream.readBoolean(); if (!constAnnotation) { return new BAnnotationAttachmentSymbol(pkgId, annotTagRef, this.env.pkgSymbol.pkgID, owner, pos, COMPILED_SOURCE, null); } BType constantValType = readBType(dataInStream); BConstantSymbol constantSymbol = new BConstantSymbol(0, Names.EMPTY, Names.EMPTY, this.env.pkgSymbol.pkgID, null, constantValType, owner, pos, COMPILED_SOURCE); constantSymbol.value = readConstLiteralValue(constantValType, dataInStream); constantSymbol.literalType = constantSymbol.value.type; return new BAnnotationAttachmentSymbol.BConstAnnotationAttachmentSymbol(pkgId, annotTagRef, this.env.pkgSymbol.pkgID, owner, pos, COMPILED_SOURCE, constantSymbol, null); } private void defineConstant(DataInputStream dataInStream) throws IOException { String constantName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); Location pos = readPosition(dataInStream); byte[] docBytes = readDocBytes(dataInStream); BType type = readBType(dataInStream); Scope enclScope = this.env.pkgSymbol.scope; BConstantSymbol constantSymbol = new BConstantSymbol(flags, names.fromString(constantName), this.env.pkgSymbol.pkgID, null, type, enclScope.owner, pos, toOrigin(origin)); defineMarkDownDocAttachment(constantSymbol, docBytes); defineAnnotAttachmentSymbols(dataInStream, constantSymbol); dataInStream.readLong(); BType constantValType = readBType(dataInStream); constantSymbol.value = readConstLiteralValue(constantValType, dataInStream); constantSymbol.literalType = constantSymbol.value.type; enclScope.define(constantSymbol.name, constantSymbol); } private BLangConstantValue readConstLiteralValue(BType valueType, DataInputStream dataInStream) throws IOException { switch (valueType.tag) { case TypeTags.INT: return new BLangConstantValue(getIntCPEntryValue(dataInStream), symTable.intType); case TypeTags.BYTE: return new BLangConstantValue(getByteCPEntryValue(dataInStream), symTable.byteType); case TypeTags.FLOAT: return new BLangConstantValue(getFloatCPEntryValue(dataInStream), symTable.floatType); case TypeTags.STRING: return new BLangConstantValue(getStringCPEntryValue(dataInStream), symTable.stringType); case TypeTags.DECIMAL: return new BLangConstantValue(getStringCPEntryValue(dataInStream), symTable.decimalType); case TypeTags.BOOLEAN: return new BLangConstantValue(dataInStream.readBoolean(), symTable.booleanType); case TypeTags.NIL: return new BLangConstantValue(null, symTable.nilType); case TypeTags.RECORD: int size = dataInStream.readInt(); Map<String, BLangConstantValue> keyValuePairs = new LinkedHashMap<>(); for (int i = 0; i < size; i++) { 
String key = getStringCPEntryValue(dataInStream); BType type = readBType(dataInStream); BLangConstantValue value = readConstLiteralValue(type, dataInStream); keyValuePairs.put(key, value); } return new BLangConstantValue(keyValuePairs, valueType); case TypeTags.TUPLE: int tupleSize = dataInStream.readInt(); List<BLangConstantValue> members = new ArrayList<>(tupleSize); for (int i = 0; i < tupleSize; i++) { BType type = readBType(dataInStream); BLangConstantValue value = readConstLiteralValue(type, dataInStream); members.add(value); } return new BLangConstantValue(members, valueType); case TypeTags.INTERSECTION: return readConstLiteralValue(((BIntersectionType) valueType).effectiveType, dataInStream); case TypeTags.TYPEREFDESC: return readConstLiteralValue(Types.getReferredType(valueType), dataInStream); default: throw new RuntimeException("unexpected type: " + valueType); } } private void defineServiceDeclarations(DataInputStream inputStream) throws IOException { String serviceName = getStringCPEntryValue(inputStream); String associatedClassName = getStringCPEntryValue(inputStream); long flags = inputStream.readLong(); byte origin = inputStream.readByte(); Location pos = readPosition(inputStream); BType type = null; if (inputStream.readBoolean()) { type = readBType(inputStream); } List<String> attachPoint = null; if (inputStream.readBoolean()) { attachPoint = new ArrayList<>(); int nSegments = inputStream.readInt(); for (int i = 0; i < nSegments; i++) { attachPoint.add(getStringCPEntryValue(inputStream)); } } String attachPointLiteral = null; if (inputStream.readBoolean()) { attachPointLiteral = getStringCPEntryValue(inputStream); } BSymbol classSymbol = this.env.pkgSymbol.scope.lookup(names.fromString(associatedClassName)).symbol; BServiceSymbol serviceDecl = new BServiceSymbol((BClassSymbol) classSymbol, flags, names.fromString(serviceName), this.env.pkgSymbol.pkgID, type, this.env.pkgSymbol, pos, SymbolOrigin.toOrigin(origin)); int nListeners = inputStream.readInt(); for (int i = 0; i < nListeners; i++) { serviceDecl.addListenerType(readBType(inputStream)); } serviceDecl.setAttachPointStringLiteral(attachPointLiteral); serviceDecl.setAbsResourcePath(attachPoint); this.env.pkgSymbol.scope.define(names.fromString(serviceName), serviceDecl); } private void definePackageLevelVariables(DataInputStream dataInStream) throws IOException { dataInStream.readByte(); String varName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); byte[] docBytes = readDocBytes(dataInStream); BType varType = readBType(dataInStream); Scope enclScope = this.env.pkgSymbol.scope; BVarSymbol varSymbol; if (varType.tag == TypeTags.INVOKABLE) { BInvokableSymbol invokableSymbol = new BInvokableSymbol(SymTag.VARIABLE, flags, names.fromString(varName), this.env.pkgSymbol.pkgID, varType, enclScope.owner, symTable.builtinPos, toOrigin(origin)); invokableSymbol.kind = SymbolKind.FUNCTION; invokableSymbol.retType = ((BInvokableType) invokableSymbol.type).retType; varSymbol = invokableSymbol; } else { varSymbol = new BVarSymbol(flags, names.fromString(varName), this.env.pkgSymbol.pkgID, varType, enclScope.owner, symTable.builtinPos, toOrigin(origin)); if (varType.tsymbol != null && Symbols.isFlagOn(varType.tsymbol.flags, Flags.CLIENT)) { varSymbol.tag = SymTag.ENDPOINT; } } this.globalVarMap.put(varName, varSymbol); defineMarkDownDocAttachment(varSymbol, docBytes); defineAnnotAttachmentSymbols(dataInStream, varSymbol); enclScope.define(varSymbol.name, varSymbol); } 
private void setParamSymbols(BInvokableSymbol invokableSymbol, DataInputStream dataInStream) throws IOException { int requiredParamCount = dataInStream.readInt(); BInvokableType invokableType = (BInvokableType) invokableSymbol.type; for (int i = 0; i < requiredParamCount; i++) { String paramName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); BVarSymbol varSymbol = new BVarSymbol(flags, names.fromString(paramName), this.env.pkgSymbol.pkgID, invokableType.paramTypes.get(i), invokableSymbol, symTable.builtinPos, COMPILED_SOURCE); varSymbol.isDefaultable = ((flags & Flags.OPTIONAL) == Flags.OPTIONAL); defineAnnotAttachmentSymbols(dataInStream, varSymbol); invokableSymbol.params.add(varSymbol); } if (dataInStream.readBoolean()) { String paramName = getStringCPEntryValue(dataInStream); BVarSymbol restParam = new BVarSymbol(0, names.fromString(paramName), this.env.pkgSymbol.pkgID, invokableType.restType, invokableSymbol, symTable.builtinPos, COMPILED_SOURCE); invokableSymbol.restParam = restParam; defineAnnotAttachmentSymbols(dataInStream, restParam); } if (Symbols.isFlagOn(invokableSymbol.retType.flags, Flags.PARAMETERIZED)) { Map<Name, BVarSymbol> paramsMap = new HashMap<>(); for (BVarSymbol param : invokableSymbol.params) { if (paramsMap.put(param.getName(), param) != null) { throw new IllegalStateException("duplicate key: " + param.getName()); } } populateParameterizedType(invokableSymbol.retType, paramsMap, invokableSymbol); } BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol; tsymbol.flags = invokableSymbol.flags; tsymbol.params = invokableSymbol.params; tsymbol.restParam = invokableSymbol.restParam; tsymbol.returnType = invokableSymbol.retType; boolean hasReceiver = dataInStream.readBoolean(); if (hasReceiver) { dataInStream.readByte(); readBType(dataInStream); getStringCPEntryValue(dataInStream); } } private void defineAnnotAttachmentSymbols(DataInputStream dataInStream, Annotatable owner) throws IOException { dataInStream.readLong(); int annotSymbolCount = dataInStream.readInt(); if (annotSymbolCount == 0) { return; } List<BAnnotationAttachmentSymbol> annotationAttachmentSymbols = (List<BAnnotationAttachmentSymbol>) owner.getAnnotations(); for (int j = 0; j < annotSymbolCount; j++) { annotationAttachmentSymbols.add(defineAnnotationAttachmentSymbol(dataInStream, (BSymbol) owner)); } } /** * This method is used for filling the `paramSymbol` field in a parameterized type. Since we want to use the same * symbol of the parameter referred to by the type, we have to wait until the parameter symbols are defined to fill * in the `paramSymbol` field. Only types with constituent types are considered here since those are the only types * which can recursively hold a parameterized type. 
* * @param type The return type of a function, which possibly contains a parameterized type * @param paramsMap A mapping between the parameter names and the parameter symbols of the function * @param invSymbol The symbol of the function */ private void populateParameterizedType(BType type, final Map<Name, BVarSymbol> paramsMap, BInvokableSymbol invSymbol) { if (type == null) { return; } switch (type.tag) { case TypeTags.PARAMETERIZED_TYPE: BParameterizedType varType = (BParameterizedType) type; varType.paramSymbol = paramsMap.get(varType.name); varType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PARAMETERIZED | varType.paramSymbol.flags, varType.paramSymbol.name, varType.paramSymbol.originalName, varType.paramSymbol.pkgID, varType, invSymbol, varType.paramSymbol.pos, VIRTUAL); break; case TypeTags.MAP: case TypeTags.FUTURE: case TypeTags.TYPEDESC: ConstrainedType constrainedType = (ConstrainedType) type; populateParameterizedType((BType) constrainedType.getConstraint(), paramsMap, invSymbol); break; case TypeTags.XML: populateParameterizedType(((BXMLType) type).constraint, paramsMap, invSymbol); break; case TypeTags.ARRAY: populateParameterizedType(((BArrayType) type).eType, paramsMap, invSymbol); break; case TypeTags.TUPLE: BTupleType tupleType = (BTupleType) type; for (BType t : tupleType.tupleTypes) { populateParameterizedType(t, paramsMap, invSymbol); } populateParameterizedType(tupleType.restType, paramsMap, invSymbol); break; case TypeTags.STREAM: BStreamType streamType = (BStreamType) type; populateParameterizedType(streamType.constraint, paramsMap, invSymbol); populateParameterizedType(streamType.completionType, paramsMap, invSymbol); break; case TypeTags.TABLE: BTableType tableType = (BTableType) type; populateParameterizedType(tableType.constraint, paramsMap, invSymbol); populateParameterizedType(tableType.keyTypeConstraint, paramsMap, invSymbol); break; case TypeTags.INVOKABLE: BInvokableType invokableType = (BInvokableType) type; if (Symbols.isFlagOn(invokableType.flags, Flags.ANY_FUNCTION)) { break; } for (BType t : invokableType.paramTypes) { populateParameterizedType(t, paramsMap, invSymbol); } populateParameterizedType(invokableType.restType, paramsMap, invSymbol); populateParameterizedType(invokableType.retType, paramsMap, invSymbol); break; case TypeTags.UNION: BUnionType unionType = (BUnionType) type; for (BType t : unionType.getMemberTypes()) { populateParameterizedType(t, paramsMap, invSymbol); } break; } } private Location readPosition(DataInputStream dataInStream) throws IOException { String cUnitName = getStringCPEntryValue(dataInStream); int sLine = dataInStream.readInt(); int sCol = dataInStream.readInt(); int eLine = dataInStream.readInt(); int eCol = dataInStream.readInt(); return new BLangDiagnosticLocation(cUnitName, sLine, eLine, sCol, eCol); } private String getStringCPEntryValue(DataInputStream dataInStream) throws IOException { int pkgNameCPIndex = dataInStream.readInt(); StringCPEntry stringCPEntry = (StringCPEntry) this.env.constantPool[pkgNameCPIndex]; return stringCPEntry.value; } private String getStringCPEntryValue(int cpIndex) throws IOException { StringCPEntry stringCPEntry = (StringCPEntry) this.env.constantPool[cpIndex]; return stringCPEntry.value; } private long getIntCPEntryValue(DataInputStream dataInStream) throws IOException { int pkgNameCPIndex = dataInStream.readInt(); IntegerCPEntry intCPEntry = (IntegerCPEntry) this.env.constantPool[pkgNameCPIndex]; return intCPEntry.value; } private int getByteCPEntryValue(DataInputStream dataInStream) 
throws IOException { int byteCpIndex = dataInStream.readInt(); ByteCPEntry byteCPEntry = (ByteCPEntry) this.env.constantPool[byteCpIndex]; return byteCPEntry.value; } private String getFloatCPEntryValue(DataInputStream dataInStream) throws IOException { int floatCpIndex = dataInStream.readInt(); FloatCPEntry floatCPEntry = (FloatCPEntry) this.env.constantPool[floatCpIndex]; return Double.toString(floatCPEntry.value); } private PackageID createPackageID(String orgName, String pkgName, String moduleName, String pkgVersion) { if (orgName == null || orgName.isEmpty()) { throw new BLangCompilerException("invalid module name '" + moduleName + "' in compiled package file"); } return new PackageID(names.fromString(orgName), names.fromString(pkgName), names.fromString(moduleName), names.fromString(pkgVersion), null); } /** * This class holds compiled package specific information during the symbol enter phase of the compiled package. * * @since 0.970.0 */ private static class BIRPackageSymbolEnv { PackageID requestedPackageId; Map<Integer, byte[]> unparsedBTypeCPs = new HashMap<>(); BPackageSymbol pkgSymbol; CPEntry[] constantPool; List<UnresolvedType> unresolvedTypes; BIRPackageSymbolEnv() { this.unresolvedTypes = new ArrayList<>(); } } private static class UnresolvedType { String typeSig; Consumer<BType> completer; UnresolvedType(String typeSig, Consumer<BType> completer) { this.typeSig = typeSig; this.completer = completer; } } private class BIRTypeReader { private DataInputStream inputStream; public BIRTypeReader(DataInputStream inputStream) { this.inputStream = inputStream; } private BType readTypeFromCp() throws IOException { return readBType(inputStream); } public BType readType(int cpI) throws IOException { byte tag = inputStream.readByte(); Name name = names.fromString(getStringCPEntryValue(inputStream)); var flags = inputStream.readLong(); int typeFlags = inputStream.readInt(); switch (tag) { case TypeTags.INT: return typeParamAnalyzer.getNominalType(symTable.intType, name, flags); case TypeTags.BYTE: return typeParamAnalyzer.getNominalType(symTable.byteType, name, flags); case TypeTags.FLOAT: return typeParamAnalyzer.getNominalType(symTable.floatType, name, flags); case TypeTags.DECIMAL: return typeParamAnalyzer.getNominalType(symTable.decimalType, name, flags); case TypeTags.STRING: return typeParamAnalyzer.getNominalType(symTable.stringType, name, flags); case TypeTags.BOOLEAN: return typeParamAnalyzer.getNominalType(symTable.booleanType, name, flags); case TypeTags.JSON: return isImmutable(flags) ? getEffectiveImmutableType(symTable.jsonType) : symTable.jsonType; case TypeTags.XML: BType constraintType = readTypeFromCp(); BXMLType mutableXmlType = new BXMLType(constraintType, symTable.xmlType.tsymbol); if (Symbols.isFlagOn(flags, Flags.PARAMETERIZED)) { mutableXmlType.flags |= Flags.PARAMETERIZED; } return isImmutable(flags) ? getEffectiveImmutableType(mutableXmlType) : mutableXmlType; case TypeTags.NIL: return symTable.nilType; case TypeTags.NEVER: return symTable.neverType; case TypeTags.ANYDATA: if (name.getValue().equals(Names.ANYDATA.getValue())) { name = Names.EMPTY; } BType anydataNominalType = typeParamAnalyzer.getNominalType(symTable.anydataType, name, flags); return isImmutable(flags) ? 
getEffectiveImmutableType(anydataNominalType, symTable.anydataType.tsymbol.pkgID, symTable.anydataType.tsymbol.owner) : anydataNominalType; case TypeTags.RECORD: int pkgCpIndex = inputStream.readInt(); PackageID pkgId = getPackageId(pkgCpIndex); String recordName = getStringCPEntryValue(inputStream); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(EnumSet.of(Flag.PUBLIC)), names.fromString(recordName), env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); recordSymbol.flags |= flags; recordSymbol.scope = new Scope(recordSymbol); BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags); recordType.flags |= flags; if (isImmutable(flags)) { recordSymbol.flags |= Flags.READONLY; } recordSymbol.type = recordType; compositeStack.push(recordType); addShapeCP(recordType, cpI); recordType.sealed = inputStream.readBoolean(); recordType.restFieldType = readTypeFromCp(); int recordFields = inputStream.readInt(); for (int i = 0; i < recordFields; i++) { String fieldName = getStringCPEntryValue(inputStream); var fieldFlags = inputStream.readLong(); byte[] docBytes = readDocBytes(inputStream); BType fieldType = readTypeFromCp(); BVarSymbol varSymbol = new BVarSymbol(fieldFlags, names.fromString(fieldName), recordSymbol.pkgID, fieldType, recordSymbol.scope.owner, symTable.builtinPos, COMPILED_SOURCE); defineMarkDownDocAttachment(varSymbol, docBytes); BField structField = new BField(varSymbol.name, varSymbol.pos, varSymbol); recordType.fields.put(structField.name.value, structField); recordSymbol.scope.define(varSymbol.name, varSymbol); } boolean isInitAvailable = inputStream.readByte() == 1; if (isInitAvailable) { String recordInitFuncName = getStringCPEntryValue(inputStream); var recordInitFuncFlags = inputStream.readLong(); BInvokableType recordInitFuncType = (BInvokableType) readTypeFromCp(); Name initFuncName = names.fromString(recordInitFuncName); boolean isNative = Symbols.isFlagOn(recordInitFuncFlags, Flags.NATIVE); BInvokableSymbol recordInitFuncSymbol = Symbols.createFunctionSymbol(recordInitFuncFlags, initFuncName, initFuncName, env.pkgSymbol.pkgID, recordInitFuncType, env.pkgSymbol, isNative, symTable.builtinPos, COMPILED_SOURCE); recordInitFuncSymbol.retType = recordInitFuncType.retType; recordSymbol.initializerFunc = new BAttachedFunction(initFuncName, recordInitFuncSymbol, recordInitFuncType, symTable.builtinPos); recordSymbol.scope.define(initFuncName, recordInitFuncSymbol); } recordType.typeInclusions = readTypeInclusions(); Object poppedRecordType = compositeStack.pop(); assert poppedRecordType == recordType; if (pkgId.equals(env.pkgSymbol.pkgID)) { return recordType; } SymbolEnv pkgEnv = symTable.pkgEnvMap.get(packageCache.getSymbol(pkgId)); return getType(recordType, pkgEnv, names.fromString(recordName)); case TypeTags.TYPEDESC: BTypedescType typedescType = new BTypedescType(null, symTable.typeDesc.tsymbol); typedescType.constraint = readTypeFromCp(); typedescType.flags = flags; return typedescType; case TypeTags.TYPEREFDESC: int pkgIndex = inputStream.readInt(); PackageID pkg = getPackageId(pkgIndex); BPackageSymbol pkgSymbol = pkg.equals(env.pkgSymbol.pkgID) ? 
env.pkgSymbol : packageCache.getSymbol(pkg); String typeDefName = getStringCPEntryValue(inputStream); BTypeSymbol typeSymbol = Symbols.createTypeSymbol(SymTag.TYPE_REF, Flags.asMask(EnumSet.of(Flag.PUBLIC)), names.fromString(typeDefName), pkg, null, pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); boolean nullable = (typeFlags & TypeFlags.NILABLE) == TypeFlags.NILABLE; BTypeReferenceType typeReferenceType = new BTypeReferenceType(null, typeSymbol, flags, nullable); addShapeCP(typeReferenceType, cpI); compositeStack.push(typeReferenceType); typeReferenceType.referredType = readTypeFromCp(); Object poppedRefType = compositeStack.pop(); assert poppedRefType == typeReferenceType; return typeReferenceType; case TypeTags.PARAMETERIZED_TYPE: BParameterizedType type = new BParameterizedType(null, null, null, name, -1); type.paramValueType = readTypeFromCp(); type.flags = flags; type.paramIndex = inputStream.readInt(); return type; case TypeTags.STREAM: BStreamType bStreamType = new BStreamType(TypeTags.STREAM, null, null, symTable.streamType.tsymbol); bStreamType.constraint = readTypeFromCp(); bStreamType.completionType = readTypeFromCp(); bStreamType.flags = flags; return bStreamType; case TypeTags.TABLE: BTableType bTableType = new BTableType(TypeTags.TABLE, null, symTable.tableType.tsymbol, flags); bTableType.constraint = readTypeFromCp(); boolean hasFieldNameList = inputStream.readByte() == 1; if (hasFieldNameList) { int fieldNameListSize = inputStream.readInt(); bTableType.fieldNameList = new ArrayList<>(fieldNameListSize); for (int i = 0; i < fieldNameListSize; i++) { String fieldName = getStringCPEntryValue(inputStream); bTableType.fieldNameList.add(fieldName); } } boolean hasKeyConstraint = inputStream.readByte() == 1; if (hasKeyConstraint) { bTableType.keyTypeConstraint = readTypeFromCp(); if (bTableType.keyTypeConstraint.tsymbol == null) { bTableType.keyTypeConstraint.tsymbol = Symbols.createTypeSymbol(SymTag.TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, bTableType.keyTypeConstraint, env.pkgSymbol.owner, symTable.builtinPos, COMPILED_SOURCE); } } return bTableType; case TypeTags.MAP: BMapType bMapType = new BMapType(TypeTags.MAP, null, symTable.mapType.tsymbol, flags); bMapType.constraint = readTypeFromCp(); return bMapType; case TypeTags.INVOKABLE: BInvokableType bInvokableType = new BInvokableType(null, null, null, null); bInvokableType.flags = flags; if (inputStream.readBoolean()) { return bInvokableType; } int paramCount = inputStream.readInt(); List<BType> paramTypes = new ArrayList<>(paramCount); for (int i = 0; i < paramCount; i++) { paramTypes.add(readTypeFromCp()); } bInvokableType.paramTypes = paramTypes; if (inputStream.readBoolean()) { bInvokableType.restType = readTypeFromCp(); } bInvokableType.retType = readTypeFromCp(); return bInvokableType; case TypeTags.ANY: BType anyNominalType = typeParamAnalyzer.getNominalType(symTable.anyType, name, flags); return isImmutable(flags) ? 
getEffectiveImmutableType(anyNominalType, symTable.anyType.tsymbol.pkgID, symTable.anyType.tsymbol.owner) : anyNominalType; case TypeTags.HANDLE: return symTable.handleType; case TypeTags.READONLY: return symTable.readonlyType; case TypeTags.ENDPOINT: break; case TypeTags.ARRAY: byte state = inputStream.readByte(); int size = inputStream.readInt(); BTypeSymbol arrayTypeSymbol = Symbols.createTypeSymbol(SymTag.ARRAY_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner, symTable.builtinPos, COMPILED_SOURCE); BArrayType bArrayType = new BArrayType(null, arrayTypeSymbol, size, BArrayState.valueOf(state), flags); bArrayType.eType = readTypeFromCp(); return bArrayType; case TypeTags.UNION: boolean isCyclic = inputStream.readByte() == 1; boolean hasName = inputStream.readByte() == 1; PackageID unionsPkgId = env.pkgSymbol.pkgID; Name unionName = Names.EMPTY; if (hasName) { pkgCpIndex = inputStream.readInt(); unionsPkgId = getPackageId(pkgCpIndex); String unionNameStr = getStringCPEntryValue(inputStream); unionName = names.fromString(unionNameStr); } BTypeSymbol unionTypeSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), unionName, unionsPkgId, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); int unionMemberCount = inputStream.readInt(); BUnionType unionType = BUnionType.create(unionTypeSymbol, new LinkedHashSet<>(unionMemberCount)); unionType.name = unionName; addShapeCP(unionType, cpI); compositeStack.push(unionType); unionType.flags = flags; unionType.isCyclic = isCyclic; for (int i = 0; i < unionMemberCount; i++) { unionType.add(readTypeFromCp()); } int unionOriginalMemberCount = inputStream.readInt(); LinkedHashSet<BType> originalMemberTypes = new LinkedHashSet<>(unionOriginalMemberCount); for (int i = 0; i < unionOriginalMemberCount; i++) { originalMemberTypes.add(readTypeFromCp()); } unionType.setOriginalMemberTypes(originalMemberTypes); var poppedUnionType = compositeStack.pop(); assert poppedUnionType == unionType; boolean isEnum = inputStream.readBoolean(); if (isEnum) { readAndSetEnumSymbol(unionType, flags); } if (hasName) { if (unionsPkgId.equals(env.pkgSymbol.pkgID)) { return unionType; } else { pkgEnv = symTable.pkgEnvMap.get(packageCache.getSymbol(unionsPkgId)); if (pkgEnv != null) { BType existingUnionType = getType(unionType, pkgEnv, unionName); if (existingUnionType != symTable.noType) { return existingUnionType; } } } } return unionType; case TypeTags.INTERSECTION: BTypeSymbol intersectionTypeSymbol = Symbols.createTypeSymbol(SymTag.INTERSECTION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); int intersectionMemberCount = inputStream.readInt(); LinkedHashSet<BType> constituentTypes = new LinkedHashSet<>(intersectionMemberCount); for (int i = 0; i < intersectionMemberCount; i++) { constituentTypes.add(readTypeFromCp()); } IntersectableReferenceType effectiveType = (IntersectableReferenceType) readTypeFromCp(); return new BIntersectionType(intersectionTypeSymbol, constituentTypes, effectiveType, flags); case TypeTags.PACKAGE: break; case TypeTags.NONE: return symTable.noType; case TypeTags.VOID: break; case TypeTags.XMLNS: break; case TypeTags.ANNOTATION: break; case TypeTags.SEMANTIC_ERROR: break; case TypeTags.ERROR: pkgCpIndex = inputStream.readInt(); pkgId = getPackageId(pkgCpIndex); BPackageSymbol owner = packageCache.getSymbol(pkgId); BTypeSymbol errorSymbol; if (owner != null) { 
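// The defining module is already in the package cache, so the error type symbol can be owned by it directly; otherwise the current module's symbol is used as a fallback owner below.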
errorSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.EMPTY, owner.pkgID, null, owner, symTable.builtinPos, COMPILED_SOURCE); } else { errorSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); } BErrorType errorType = new BErrorType(errorSymbol); addShapeCP(errorType, cpI); compositeStack.push(errorType); String errorName = getStringCPEntryValue(inputStream); BType detailsType = readTypeFromCp(); errorType.detailType = detailsType; errorType.flags = flags; errorSymbol.type = errorType; errorSymbol.pkgID = pkgId; errorSymbol.originalName = errorSymbol.name = names.fromString(errorName); Object poppedErrorType = compositeStack.pop(); assert poppedErrorType == errorType; if (!env.pkgSymbol.pkgID.equals(PackageID.ANNOTATIONS) && Symbols.isFlagOn(flags, Flags.NATIVE)) { return symTable.errorType; } errorType.typeIdSet = readTypeIdSet(inputStream); return errorType; case TypeTags.ITERATOR: break; case TypeTags.TUPLE: BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner, symTable.builtinPos, COMPILED_SOURCE); BTupleType bTupleType = new BTupleType(tupleTypeSymbol, null); bTupleType.flags = flags; int tupleMemberCount = inputStream.readInt(); List<BType> tupleMemberTypes = new ArrayList<>(tupleMemberCount); for (int i = 0; i < tupleMemberCount; i++) { tupleMemberTypes.add(readTypeFromCp()); } bTupleType.tupleTypes = tupleMemberTypes; if (inputStream.readBoolean()) { bTupleType.restType = readTypeFromCp(); } return bTupleType; case TypeTags.FUTURE: BFutureType bFutureType = new BFutureType(TypeTags.FUTURE, null, symTable.futureType.tsymbol); bFutureType.constraint = readTypeFromCp(); bFutureType.flags = flags; return bFutureType; case TypeTags.FINITE: String finiteTypeName = getStringCPEntryValue(inputStream); var finiteTypeFlags = inputStream.readLong(); BTypeSymbol symbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteTypeFlags, names.fromString(finiteTypeName), env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); symbol.scope = new Scope(symbol); BFiniteType finiteType = new BFiniteType(symbol); finiteType.flags = flags; symbol.type = finiteType; int valueSpaceSize = inputStream.readInt(); for (int i = 0; i < valueSpaceSize; i++) { defineValueSpace(inputStream, finiteType, this); } return finiteType; case TypeTags.OBJECT: boolean service = inputStream.readByte() == 1; pkgCpIndex = inputStream.readInt(); pkgId = getPackageId(pkgCpIndex); String objName = getStringCPEntryValue(inputStream); var objFlags = (inputStream.readBoolean() ? Flags.CLASS : 0) | Flags.PUBLIC; objFlags = inputStream.readBoolean() ? 
objFlags | Flags.CLIENT : objFlags; BObjectTypeSymbol objectSymbol; if (Symbols.isFlagOn(objFlags, Flags.CLASS)) { objectSymbol = Symbols.createClassSymbol(objFlags, names.fromString(objName), env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE, false); } else { objectSymbol = Symbols.createObjectSymbol(objFlags, names.fromString(objName), env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); } objectSymbol.scope = new Scope(objectSymbol); BObjectType objectType; objectType = new BObjectType(objectSymbol); if (service) { objectType.flags |= Flags.SERVICE; objectSymbol.flags |= Flags.SERVICE; } if (isImmutable(flags)) { objectSymbol.flags |= Flags.READONLY; } if (Symbols.isFlagOn(flags, Flags.ANONYMOUS)) { objectSymbol.flags |= Flags.ANONYMOUS; } objectType.flags = flags; objectSymbol.type = objectType; addShapeCP(objectType, cpI); compositeStack.push(objectType); int fieldCount = inputStream.readInt(); for (int i = 0; i < fieldCount; i++) { String fieldName = getStringCPEntryValue(inputStream); var fieldFlags = inputStream.readLong(); var defaultable = inputStream.readBoolean(); byte[] docBytes = readDocBytes(inputStream); BType fieldType = readTypeFromCp(); BVarSymbol objectVarSymbol = new BVarSymbol(fieldFlags, names.fromString(fieldName), objectSymbol.pkgID, fieldType, objectSymbol.scope.owner, symTable.builtinPos, COMPILED_SOURCE); objectVarSymbol.isDefaultable = defaultable; defineMarkDownDocAttachment(objectVarSymbol, docBytes); BField structField = new BField(objectVarSymbol.name, null, objectVarSymbol); objectType.fields.put(structField.name.value, structField); objectSymbol.scope.define(objectVarSymbol.name, objectVarSymbol); } boolean generatedConstructorPresent = inputStream.readBoolean(); if (generatedConstructorPresent) { ignoreAttachedFunc(); } boolean constructorPresent = inputStream.readBoolean(); if (constructorPresent) { ignoreAttachedFunc(); } int funcCount = inputStream.readInt(); for (int i = 0; i < funcCount; i++) { if (isImmutable(objectSymbol.flags) && Symbols.isFlagOn(flags, Flags.ANONYMOUS)) { populateIntersectionTypeReferencedFunctions(inputStream, objectSymbol); } else { ignoreAttachedFunc(); } } objectType.typeInclusions = readTypeInclusions(); objectType.typeIdSet = readTypeIdSet(inputStream); Object poppedObjType = compositeStack.pop(); assert poppedObjType == objectType; if (pkgId.equals(env.pkgSymbol.pkgID)) { return objectType; } pkgEnv = symTable.pkgEnvMap.get(packageCache.getSymbol(pkgId)); return getType(objectType, pkgEnv, names.fromString(objName)); case TypeTags.BYTE_ARRAY: break; case TypeTags.FUNCTION_POINTER: break; case SERVICE_TYPE_TAG: throw new AssertionError(); case TypeTags.SIGNED32_INT: return symTable.signed32IntType; case TypeTags.SIGNED16_INT: return symTable.signed16IntType; case TypeTags.SIGNED8_INT: return symTable.signed8IntType; case TypeTags.UNSIGNED32_INT: return symTable.unsigned32IntType; case TypeTags.UNSIGNED16_INT: return symTable.unsigned16IntType; case TypeTags.UNSIGNED8_INT: return symTable.unsigned8IntType; case TypeTags.CHAR_STRING: return symTable.charStringType; case TypeTags.XML_ELEMENT: return isImmutable(flags) ? getEffectiveImmutableType(symTable.xmlElementType) : symTable.xmlElementType; case TypeTags.XML_PI: return isImmutable(flags) ? getEffectiveImmutableType(symTable.xmlPIType) : symTable.xmlPIType; case TypeTags.XML_COMMENT: return isImmutable(flags) ? 
getEffectiveImmutableType(symTable.xmlCommentType) : symTable.xmlCommentType; case TypeTags.XML_TEXT: return symTable.xmlTextType; } return null; } private BTypeIdSet readTypeIdSet(DataInputStream inputStream) throws IOException { Set<BTypeIdSet.BTypeId> primary = new HashSet<>(); int primaryTypeIdCount = inputStream.readInt(); for (int i = 0; i < primaryTypeIdCount; i++) { primary.add(readTypeId(inputStream)); } Set<BTypeIdSet.BTypeId> secondary = new HashSet<>(); int secondaryTypeIdCount = inputStream.readInt(); for (int i = 0; i < secondaryTypeIdCount; i++) { secondary.add(readTypeId(inputStream)); } return new BTypeIdSet(primary, secondary); } private BTypeIdSet.BTypeId readTypeId(DataInputStream inputStream) throws IOException { int pkgCPIndex = inputStream.readInt(); PackageID packageId = getPackageId(pkgCPIndex); String name = getStringCPEntryValue(inputStream); boolean isPublicTypeId = inputStream.readBoolean(); return new BTypeIdSet.BTypeId(packageId, name, isPublicTypeId); } private void ignoreAttachedFunc() throws IOException { getStringCPEntryValue(inputStream); getStringCPEntryValue(inputStream); inputStream.readLong(); readTypeFromCp(); } private List<BType> readTypeInclusions() throws IOException { int nTypeInclusions = inputStream.readInt(); List<BType> typeInclusions = new ArrayList<>(nTypeInclusions); for (int i = 0; i < nTypeInclusions; i++) { BType inclusion = readTypeFromCp(); typeInclusions.add(inclusion); } return typeInclusions; } private void readAndSetEnumSymbol(BUnionType unionType, long flags) throws IOException { PackageID enumPkgId = getPackageId(inputStream.readInt()); String enumName = getStringCPEntryValue(inputStream); int memberCount = inputStream.readInt(); BSymbol pkgSymbol = packageCache.getSymbol(enumPkgId); if (pkgSymbol == null) { pkgSymbol = env.pkgSymbol; } SymbolEnv enumPkgEnv = symTable.pkgEnvMap.get(pkgSymbol); if (enumPkgEnv == null) { enumPkgEnv = SymbolEnv.createPkgEnv(null, env.pkgSymbol.scope, null); } List<BConstantSymbol> members = new ArrayList<>(); for (int i = 0; i < memberCount; i++) { String memName = getStringCPEntryValue(inputStream); BSymbol sym = symbolResolver.lookupSymbolInMainSpace(enumPkgEnv, names.fromString(memName)); members.add((BConstantSymbol) sym); } unionType.tsymbol = new BEnumSymbol(members, flags, names.fromString(enumName), pkgSymbol.pkgID, unionType, pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); } private void populateIntersectionTypeReferencedFunctions(DataInputStream inputStream, BObjectTypeSymbol objectSymbol) throws IOException { String attachedFuncName = getStringCPEntryValue(inputStream); String attachedFuncOrigName = getStringCPEntryValue(inputStream); var attachedFuncFlags = inputStream.readLong(); if (Symbols.isFlagOn(attachedFuncFlags, Flags.INTERFACE) && Symbols.isFlagOn(attachedFuncFlags, Flags.ATTACHED)) { BInvokableType attachedFuncType = (BInvokableType) readTypeFromCp(); Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName( objectSymbol.name.value, attachedFuncName)); Name funcOrigName = names.fromString(attachedFuncOrigName); BInvokableSymbol attachedFuncSymbol = Symbols.createFunctionSymbol(attachedFuncFlags, funcName, funcOrigName, env.pkgSymbol.pkgID, attachedFuncType, env.pkgSymbol, false, symTable.builtinPos, COMPILED_SOURCE); BAttachedFunction attachedFunction = new BAttachedFunction(names.fromString(attachedFuncName), attachedFuncSymbol, attachedFuncType, symTable.builtinPos); setInvokableTypeSymbol(attachedFuncType); if (!Symbols.isFlagOn(attachedFuncType.flags, 
Flags.ANY_FUNCTION)) { BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) attachedFuncType.tsymbol; attachedFuncSymbol.params = tsymbol.params; attachedFuncSymbol.restParam = tsymbol.restParam; attachedFuncSymbol.retType = tsymbol.returnType; } objectSymbol.referencedFunctions.add(attachedFunction); objectSymbol.attachedFuncs.add(attachedFunction); objectSymbol.scope.define(funcName, attachedFuncSymbol); } } } private BType getType(BType readShape, SymbolEnv pkgEnv, Name name) { BType type = symbolResolver.lookupSymbolInMainSpace(pkgEnv, name).type; if (type != symTable.noType && (!name.value.contains(ANON_PREFIX) || types.isSameBIRShape(readShape, type))) { return type; } if (pkgEnv.node != null) { for (BLangTypeDefinition typeDefinition : ((BLangPackage) pkgEnv.node).typeDefinitions) { BSymbol symbol = typeDefinition.symbol; String typeDefName = typeDefinition.name.value; if (typeDefName.contains(ANON_PREFIX)) { BType anonType = symbol.type; if (types.isSameBIRShape(readShape, anonType)) { return anonType; } } else if (typeDefName.equals(name.value)) { return symbol.type; } } } else { for (Map.Entry<Name, Scope.ScopeEntry> value : pkgEnv.scope.entries.entrySet()) { BSymbol symbol = value.getValue().symbol; if (value.getKey().value.contains(ANON_PREFIX)) { BType anonType = symbol.type; if (types.isSameBIRShape(readShape, anonType)) { return anonType; } } } } return type; } private byte[] readDocBytes(DataInputStream inputStream) throws IOException { int docLength = inputStream.readInt(); byte[] docBytes = new byte[docLength]; int noOfBytesRead = inputStream.read(docBytes); if (docLength != noOfBytesRead) { throw new RuntimeException("failed to read Markdown Documentation"); } return docBytes; } private PackageID getPackageId(int pkgCPIndex) { PackageCPEntry pkgCpEntry = (PackageCPEntry) env.constantPool[pkgCPIndex]; String orgName = ((StringCPEntry) env.constantPool[pkgCpEntry.orgNameCPIndex]).value; String pkgName = ((StringCPEntry) env.constantPool[pkgCpEntry.pkgNameCPIndex]).value; String moduleName = ((StringCPEntry) env.constantPool[pkgCpEntry.moduleNameCPIndex]).value; String version = ((StringCPEntry) env.constantPool[pkgCpEntry.versionCPIndex]).value; return new PackageID(names.fromString(orgName), names.fromString(pkgName), names.fromString(moduleName), names.fromString(version), null); } private void defineValueSpace(DataInputStream dataInStream, BFiniteType finiteType, BIRTypeReader typeReader) throws IOException { BType valueType = typeReader.readTypeFromCp(); dataInStream.readInt(); BLangLiteral litExpr = createLiteralBasedOnType(valueType); switch (valueType.tag) { case TypeTags.INT: int integerCpIndex = dataInStream.readInt(); IntegerCPEntry integerCPEntry = (IntegerCPEntry) this.env.constantPool[integerCpIndex]; litExpr.value = integerCPEntry.value; break; case TypeTags.BYTE: int byteCpIndex = dataInStream.readInt(); ByteCPEntry byteCPEntry = (ByteCPEntry) this.env.constantPool[byteCpIndex]; litExpr.value = byteCPEntry.value; break; case TypeTags.FLOAT: int floatCpIndex = dataInStream.readInt(); FloatCPEntry floatCPEntry = (FloatCPEntry) this.env.constantPool[floatCpIndex]; litExpr.value = Double.toString(floatCPEntry.value); break; case TypeTags.STRING: case TypeTags.DECIMAL: litExpr.value = getStringCPEntryValue(dataInStream); break; case TypeTags.BOOLEAN: litExpr.value = dataInStream.readBoolean(); break; case TypeTags.NIL: litExpr.originalValue = "null"; break; default: throw new UnsupportedOperationException("finite type value is not supported for type: " + 
valueType); } litExpr.setBType(valueType); finiteType.addValue(litExpr); } private BLangLiteral createLiteralBasedOnType(BType valueType) { NodeKind nodeKind = valueType.tag <= TypeTags.DECIMAL ? NodeKind.NUMERIC_LITERAL : NodeKind.LITERAL; return nodeKind == NodeKind.LITERAL ? (BLangLiteral) TreeBuilder.createLiteralExpression() : (BLangLiteral) TreeBuilder.createNumericLiteralExpression(); } private boolean isImmutable(long flags) { return Symbols.isFlagOn(flags, Flags.READONLY); } private BType getEffectiveImmutableType(BType type) { return ImmutableTypeCloner.getEffectiveImmutableType(null, types, type, type.tsymbol.pkgID, type.tsymbol.owner, symTable, null, names); } private BType getEffectiveImmutableType(BType type, PackageID pkgID, BSymbol owner) { return ImmutableTypeCloner.getEffectiveImmutableType(null, types, type, pkgID, owner, symTable, null, names); } }
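// The value-space decoding above follows the BIR reader's general convention: the byte stream
// stores only a constant-pool index, and the literal itself lives in the pool. Below is a
// minimal, self-contained sketch of that indirection; the integerPool array and the
// CpIndexedDecodingSketch class are hypothetical stand-ins for env.constantPool and its
// IntegerCPEntry values, not the compiler's real types.
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

class CpIndexedDecodingSketch {
    // Reads a 4-byte big-endian constant-pool index and resolves it in the pool,
    // mirroring the TypeTags.INT arm of defineValueSpace.
    static long readIntLiteral(DataInputStream in, long[] integerPool) throws IOException {
        int cpIndex = in.readInt();   // the stream carries only the index
        return integerPool[cpIndex];  // the pool carries the actual value
    }

    public static void main(String[] args) throws IOException {
        long[] pool = {0L, 42L};
        byte[] encoded = {0, 0, 0, 1}; // big-endian encoding of index 1
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(encoded))) {
            System.out.println(readIntLiteral(in, pool)); // prints 42
        }
    }
}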
class BIRPackageSymbolEnter { private final PackageCache packageCache; private final SymbolResolver symbolResolver; private final SymbolTable symTable; private final Names names; private final TypeParamAnalyzer typeParamAnalyzer; private final Types types; private BIRTypeReader typeReader; private BIRPackageSymbolEnv env; private List<BStructureTypeSymbol> structureTypes; private BStructureTypeSymbol currentStructure = null; private LinkedList<Object> compositeStack = new LinkedList<>(); private static final int SERVICE_TYPE_TAG = 53; private static final CompilerContext.Key<BIRPackageSymbolEnter> COMPILED_PACKAGE_SYMBOL_ENTER_KEY = new CompilerContext.Key<>(); private Map<String, BVarSymbol> globalVarMap = new HashMap<>(); public static BIRPackageSymbolEnter getInstance(CompilerContext context) { BIRPackageSymbolEnter packageReader = context.get(COMPILED_PACKAGE_SYMBOL_ENTER_KEY); if (packageReader == null) { packageReader = new BIRPackageSymbolEnter(context); } return packageReader; } private BIRPackageSymbolEnter(CompilerContext context) { context.put(COMPILED_PACKAGE_SYMBOL_ENTER_KEY, this); this.packageCache = PackageCache.getInstance(context); this.symbolResolver = SymbolResolver.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.names = Names.getInstance(context); this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context); this.types = Types.getInstance(context); } public BPackageSymbol definePackage(PackageID packageId, byte[] packageBinaryContent) { BPackageSymbol pkgSymbol = definePackage(packageId, new ByteArrayInputStream(packageBinaryContent)); byte[] modifiedPkgBinaryContent = Arrays.copyOfRange( packageBinaryContent, 8, packageBinaryContent.length); pkgSymbol.birPackageFile = new CompiledBinaryFile.BIRPackageFile(modifiedPkgBinaryContent); SymbolEnv builtinEnv = this.symTable.pkgEnvMap.get(symTable.langAnnotationModuleSymbol); SymbolEnv pkgEnv = SymbolEnv.createPkgEnv(null, pkgSymbol.scope, builtinEnv); this.symTable.pkgEnvMap.put(pkgSymbol, pkgEnv); return pkgSymbol; } private BPackageSymbol definePackage(PackageID packageId, InputStream programFileInStream) { try (DataInputStream dataInStream = new DataInputStream(programFileInStream)) { BIRPackageSymbolEnv prevEnv = this.env; this.env = new BIRPackageSymbolEnv(); this.env.requestedPackageId = packageId; BPackageSymbol pkgSymbol = definePackage(dataInStream); this.env = prevEnv; return pkgSymbol; } catch (Throwable e) { throw new BLangCompilerException("failed to load the module '" + packageId.toString() + "' from its BIR" + (e.getMessage() != null ? 
(" due to: " + e.getMessage()) : ""), e); } } private BPackageSymbol definePackage(DataInputStream dataInStream) throws IOException { byte[] magic = new byte[4]; dataInStream.read(magic, 0, 4); if (!Arrays.equals(magic, BIRPackageFile.BIR_MAGIC)) { throw new BLangCompilerException("invalid magic number " + Arrays.toString(magic)); } int version = dataInStream.readInt(); if (version != BIRPackageFile.BIR_VERSION) { throw new BLangCompilerException("unsupported program file version " + version); } this.env.constantPool = readConstantPool(dataInStream); int pkgCPIndex = dataInStream.readInt(); return definePackage(dataInStream, pkgCPIndex); } private BPackageSymbol definePackage(DataInputStream dataInStream, int pkgCpIndex) throws IOException { PackageCPEntry pkgCpEntry = (PackageCPEntry) this.env.constantPool[pkgCpIndex]; String orgName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.orgNameCPIndex]).value; String pkgName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.pkgNameCPIndex]).value; String moduleName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.moduleNameCPIndex]).value; String pkgVersion = ((StringCPEntry) this.env.constantPool[pkgCpEntry.versionCPIndex]).value; PackageID pkgId = createPackageID(orgName, pkgName, moduleName, pkgVersion); this.env.pkgSymbol = Symbols.createPackageSymbol(pkgId, this.symTable, COMPILED_SOURCE); defineSymbols(dataInStream, rethrow(this::defineImportPackage)); defineSymbols(dataInStream, rethrow(this::defineConstant)); this.structureTypes = new ArrayList<>(); defineSymbols(dataInStream, rethrow(this::defineTypeDef)); defineSymbols(dataInStream, rethrow(this::definePackageLevelVariables)); readTypeDefBodies(dataInStream); defineSymbols(dataInStream, rethrow(this::defineFunction)); defineSymbols(dataInStream, rethrow(this::defineAnnotations)); defineSymbols(dataInStream, rethrow(this::defineServiceDeclarations)); populateReferencedFunctions(); this.typeReader = null; return this.env.pkgSymbol; } private void populateReferencedFunctions() { for (BStructureTypeSymbol structureTypeSymbol : this.structureTypes) { if (structureTypeSymbol.type.tag == TypeTags.OBJECT) { BObjectType objectType = (BObjectType) structureTypeSymbol.type; for (BType ref : objectType.typeInclusions) { BType typeRef = Types.getReferredType(ref); if (typeRef.tsymbol == null || typeRef.tsymbol.kind != SymbolKind.OBJECT) { continue; } List<BAttachedFunction> attachedFunctions = ((BObjectTypeSymbol) typeRef.tsymbol).attachedFuncs; for (BAttachedFunction function : attachedFunctions) { if (Symbols.isPrivate(function.symbol)) { continue; } String referencedFuncName = function.funcName.value; Name funcName = names.fromString( Symbols.getAttachedFuncSymbolName(structureTypeSymbol.name.value, referencedFuncName)); Scope.ScopeEntry matchingObjFuncSym = objectType.tsymbol.scope.lookup(funcName); if (matchingObjFuncSym == NOT_FOUND_ENTRY) { structureTypeSymbol.attachedFuncs.add(function); ((BObjectTypeSymbol) structureTypeSymbol).referencedFunctions.add(function); } } } } } } private void readTypeDefBodies(DataInputStream dataInStream) throws IOException { dataInStream.readInt(); for (BStructureTypeSymbol structureTypeSymbol : this.structureTypes) { this.currentStructure = structureTypeSymbol; defineSymbols(dataInStream, rethrow(this::defineFunction)); defineSymbols(dataInStream, rethrow(this::readBType)); } this.currentStructure = null; } private CPEntry[] readConstantPool(DataInputStream dataInStream) throws IOException { int constantPoolSize = dataInStream.readInt(); CPEntry[] 
constantPool = new CPEntry[constantPoolSize]; this.env.constantPool = constantPool; for (int i = 0; i < constantPoolSize; i++) { byte cpTag = dataInStream.readByte(); CPEntry.Type cpEntryType = CPEntry.Type.values()[cpTag - 1]; constantPool[i] = readCPEntry(dataInStream, constantPool, cpEntryType, i); } return constantPool; } private CPEntry readCPEntry(DataInputStream dataInStream, CPEntry[] constantPool, CPEntry.Type cpEntryType, int i) throws IOException { switch (cpEntryType) { case CP_ENTRY_INTEGER: return new CPEntry.IntegerCPEntry(dataInStream.readLong()); case CP_ENTRY_FLOAT: return new CPEntry.FloatCPEntry(dataInStream.readDouble()); case CP_ENTRY_BOOLEAN: return new CPEntry.BooleanCPEntry(dataInStream.readBoolean()); case CP_ENTRY_STRING: int length = dataInStream.readInt(); String strValue = null; if (length >= 0) { byte[] bytes = new byte[length]; dataInStream.read(bytes, 0, length); strValue = new String(bytes); } return new CPEntry.StringCPEntry(strValue); case CP_ENTRY_PACKAGE: return new CPEntry.PackageCPEntry(dataInStream.readInt(), dataInStream.readInt(), dataInStream.readInt(), dataInStream.readInt()); case CP_ENTRY_SHAPE: env.unparsedBTypeCPs.put(i, readByteArray(dataInStream)); return null; case CP_ENTRY_BYTE: return new CPEntry.ByteCPEntry(dataInStream.readInt()); default: throw new IllegalStateException("unsupported constant pool entry type: " + cpEntryType.name()); } } private byte[] readByteArray(DataInputStream dataInStream) throws IOException { int length = dataInStream.readInt(); byte[] bytes = new byte[length]; dataInStream.readFully(bytes); return bytes; } private void defineSymbols(DataInputStream dataInStream, Consumer<DataInputStream> symbolDefineFunc) throws IOException { int symbolCount = dataInStream.readInt(); for (int i = 0; i < symbolCount; i++) { symbolDefineFunc.accept(dataInStream); } } private void defineFunction(DataInputStream dataInStream) throws IOException { Location pos = readPosition(dataInStream); String funcName = getStringCPEntryValue(dataInStream); String funcOrigName = getStringCPEntryValue(dataInStream); String workerName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); BInvokableType funcType = (BInvokableType) readBType(dataInStream); BInvokableSymbol invokableSymbol = Symbols.createFunctionSymbol(flags, names.fromString(funcName), names.fromString(funcOrigName), this.env.pkgSymbol.pkgID, funcType, this.env.pkgSymbol, Symbols.isFlagOn(flags, Flags.NATIVE), pos, toOrigin(origin)); invokableSymbol.source = pos.lineRange().filePath(); invokableSymbol.retType = funcType.retType; Scope scopeToDefine = this.env.pkgSymbol.scope; if (this.currentStructure != null) { BType attachedType = Types.getReferredType(this.currentStructure.type); invokableSymbol.owner = attachedType.tsymbol; invokableSymbol.name = names.fromString(Symbols.getAttachedFuncSymbolName(attachedType.tsymbol.name.value, funcName)); if (attachedType.tag == TypeTags.OBJECT || attachedType.tag == TypeTags.RECORD) { scopeToDefine = attachedType.tsymbol.scope; BAttachedFunction attachedFunc = new BAttachedFunction(names.fromString(funcName), invokableSymbol, funcType, symTable.builtinPos); BStructureTypeSymbol structureTypeSymbol = (BStructureTypeSymbol) attachedType.tsymbol; if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcName) || funcName.equals(Names.INIT_FUNCTION_SUFFIX.value)) { structureTypeSymbol.initializerFunc = attachedFunc; } else if (funcName.equals(Names.GENERATED_INIT_SUFFIX.value)) { 
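// Only object types carry a compiler-generated initializer, so the cast to BObjectTypeSymbol in this branch is safe.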
((BObjectTypeSymbol) structureTypeSymbol).generatedInitializerFunc = attachedFunc; } else { structureTypeSymbol.attachedFuncs.add(attachedFunc); } } } dataInStream.skip(dataInStream.readLong()); dataInStream.skip(dataInStream.readLong()); setParamSymbols(invokableSymbol, dataInStream); defineMarkDownDocAttachment(invokableSymbol, readDocBytes(dataInStream)); defineGlobalVarDependencies(invokableSymbol, dataInStream); dataInStream.skip(dataInStream.readLong()); dataInStream.skip(dataInStream.readLong()); scopeToDefine.define(invokableSymbol.name, invokableSymbol); } private void defineGlobalVarDependencies(BInvokableSymbol invokableSymbol, DataInputStream dataInStream) throws IOException { long length = dataInStream.readInt(); for (int i = 0; i < length; i++) { String globalVarName = getStringCPEntryValue(dataInStream.readInt()); invokableSymbol.dependentGlobalVars.add(this.globalVarMap.get(globalVarName)); } } private void defineTypeDef(DataInputStream dataInStream) throws IOException { Location pos = readPosition(dataInStream); String typeDefName = getStringCPEntryValue(dataInStream); String typeDefOrigName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); byte[] docBytes = readDocBytes(dataInStream); BType type = readBType(dataInStream); BTypeReferenceType referenceType = null; boolean hasReferenceType = dataInStream.readBoolean(); if (hasReferenceType) { BTypeSymbol typeSymbol = new BTypeSymbol(SymTag.TYPE_REF, flags, names.fromString(typeDefName), this.env.pkgSymbol.pkgID, type, this.env.pkgSymbol, pos, COMPILED_SOURCE); referenceType = new BTypeReferenceType(type, typeSymbol, flags); } if (type.tag == TypeTags.INVOKABLE) { setInvokableTypeSymbol((BInvokableType) type); } boolean isClass = Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS); flags = isClass ? flags | Flags.CLASS : flags; flags = Symbols.isFlagOn(type.tsymbol.flags, Flags.CLIENT) ? flags | Flags.CLIENT : flags; BSymbol symbol; boolean isEnum = Symbols.isFlagOn(type.tsymbol.flags, Flags.ENUM); if (isClass || isEnum) { symbol = type.tsymbol; symbol.pos = pos; } else { symbol = Symbols.createTypeDefinitionSymbol(flags, names.fromString(typeDefName), this.env.pkgSymbol.pkgID, type, this.env.pkgSymbol, pos, COMPILED_SOURCE); ((BTypeDefinitionSymbol) symbol).referenceType = referenceType; } symbol.originalName = names.fromString(typeDefOrigName); symbol.origin = toOrigin(origin); symbol.flags = flags; defineMarkDownDocAttachment(symbol, docBytes); defineAnnotAttachmentSymbols(dataInStream, (isClass || isEnum || symbol.tag == SymTag.TYPE_DEF) ? 
(Annotatable) symbol : null); if (type.tsymbol.name == Names.EMPTY && type.tag != TypeTags.INVOKABLE) { type.tsymbol.name = symbol.name; type.tsymbol.originalName = symbol.originalName; } if (type.tag == TypeTags.RECORD || type.tag == TypeTags.OBJECT) { if (!isClass) { ((BStructureTypeSymbol) type.tsymbol).typeDefinitionSymbol = (BTypeDefinitionSymbol) symbol; } type.tsymbol.origin = toOrigin(origin); this.structureTypes.add((BStructureTypeSymbol) type.tsymbol); } this.env.pkgSymbol.scope.define(symbol.name, symbol); } private void skipPosition(DataInputStream dataInStream) throws IOException { for (int i = 0; i < 4; i++) { dataInStream.readInt(); } } private void setInvokableTypeSymbol(BInvokableType invokableType) { if (Symbols.isFlagOn(invokableType.flags, Flags.ANY_FUNCTION)) { return; } BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol; List<BVarSymbol> params = new ArrayList<>(invokableType.paramTypes.size()); for (BType paramType : invokableType.paramTypes) { BVarSymbol varSymbol = new BVarSymbol(paramType.flags, Names.EMPTY, this.env.pkgSymbol.pkgID, paramType, null, symTable.builtinPos, COMPILED_SOURCE); params.add(varSymbol); } tsymbol.params = params; if (invokableType.restType != null) { tsymbol.restParam = new BVarSymbol(0, Names.EMPTY, this.env.pkgSymbol.pkgID, invokableType.restType, null, symTable.builtinPos, COMPILED_SOURCE); } tsymbol.returnType = invokableType.retType; } private void defineMarkDownDocAttachment(BSymbol symbol, byte[] docBytes) throws IOException { DataInputStream dataInStream = new DataInputStream(new ByteArrayInputStream(docBytes)); boolean docPresent = dataInStream.readBoolean(); if (!docPresent) { return; } int descCPIndex = dataInStream.readInt(); int retDescCPIndex = dataInStream.readInt(); int paramLength = dataInStream.readInt(); MarkdownDocAttachment markdownDocAttachment = new MarkdownDocAttachment(paramLength); markdownDocAttachment.description = descCPIndex >= 0 ? getStringCPEntryValue(descCPIndex) : null; markdownDocAttachment.returnValueDescription = retDescCPIndex >= 0 ? getStringCPEntryValue(retDescCPIndex) : null; readAndSetParamDocumentation(dataInStream, markdownDocAttachment.parameters, paramLength); int deprecatedDescCPIndex = dataInStream.readInt(); int deprecatedParamLength = dataInStream.readInt(); markdownDocAttachment.deprecatedDocumentation = deprecatedDescCPIndex >= 0 ? getStringCPEntryValue(deprecatedDescCPIndex) : null; readAndSetParamDocumentation(dataInStream, markdownDocAttachment.deprecatedParams, deprecatedParamLength); symbol.markdownDocumentation = markdownDocAttachment; } private void readAndSetParamDocumentation(DataInputStream inputStream, List<MarkdownDocAttachment.Parameter> params, int nParams) throws IOException { for (int i = 0; i < nParams; i++) { int nameCPIndex = inputStream.readInt(); int paramDescCPIndex = inputStream.readInt(); String name = nameCPIndex >= 0 ? getStringCPEntryValue(nameCPIndex) : null; String description = paramDescCPIndex >= 0 ? 
getStringCPEntryValue(paramDescCPIndex) : null; MarkdownDocAttachment.Parameter parameter = new MarkdownDocAttachment.Parameter(name, description); params.add(parameter); } } private BType readBType(DataInputStream dataInStream) throws IOException { int typeCpIndex = dataInStream.readInt(); CPEntry cpEntry = this.env.constantPool[typeCpIndex]; BType type = null; if (cpEntry != null) { type = ((CPEntry.ShapeCPEntry) cpEntry).shape; if (type.tag != TypeTags.INVOKABLE) { return type; } } if (type == null) { byte[] e = env.unparsedBTypeCPs.get(typeCpIndex); type = new BIRTypeReader(new DataInputStream(new ByteArrayInputStream(e))).readType(typeCpIndex); addShapeCP(type, typeCpIndex); } if (type.tag == TypeTags.INVOKABLE) { return createClonedInvokableTypeWithTsymbol((BInvokableType) type); } return type; } private BInvokableType createClonedInvokableTypeWithTsymbol(BInvokableType bInvokableType) { BInvokableType clonedType; if (Symbols.isFlagOn(bInvokableType.flags, Flags.ANY_FUNCTION)) { clonedType = new BInvokableType(null, null, null, null); } else { clonedType = new BInvokableType(bInvokableType.paramTypes, bInvokableType.restType, bInvokableType.retType, null); } clonedType.tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, bInvokableType.flags, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner, symTable.builtinPos, COMPILED_SOURCE); clonedType.flags = bInvokableType.flags; return clonedType; } private void addShapeCP(BType bType, int typeCpIndex) { this.env.constantPool[typeCpIndex] = new CPEntry.ShapeCPEntry(bType); } private void defineAnnotations(DataInputStream dataInStream) throws IOException { BAnnotationSymbol annotationSymbol = defineAnnotation(dataInStream); this.env.pkgSymbol.scope.define(annotationSymbol.name, annotationSymbol); } private BAnnotationSymbol defineAnnotation(DataInputStream dataInStream) throws IOException { int pkgCpIndex = dataInStream.readInt(); PackageID pkgId = getPackageId(pkgCpIndex); String name = getStringCPEntryValue(dataInStream); String originalName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); Location pos = readPosition(dataInStream); int attachPointCount = dataInStream.readInt(); Set<AttachPoint> attachPoints = new HashSet<>(attachPointCount); for (int i = 0; i < attachPointCount; i++) { attachPoints.add(AttachPoint.getAttachmentPoint(getStringCPEntryValue(dataInStream), dataInStream.readBoolean())); } BType annotationType = readBType(dataInStream); BPackageSymbol pkgSymbol = pkgId.equals(env.pkgSymbol.pkgID) ? 
this.env.pkgSymbol : packageCache.getSymbol(pkgId); BAnnotationSymbol annotationSymbol = Symbols.createAnnotationSymbol(flags, attachPoints, names.fromString(name), names.fromString(originalName), pkgId, null, pkgSymbol, pos, toOrigin(origin)); annotationSymbol.type = new BAnnotationType(annotationSymbol); defineMarkDownDocAttachment(annotationSymbol, readDocBytes(dataInStream)); defineAnnotAttachmentSymbols(dataInStream, annotationSymbol); if (annotationType != symTable.noType) { annotationSymbol.attachedType = annotationType; } return annotationSymbol; } private BAnnotationAttachmentSymbol defineAnnotationAttachmentSymbol(DataInputStream dataInStream, BSymbol owner) throws IOException { PackageID pkgId = getPackageId(dataInStream.readInt()); Location pos = readPosition(dataInStream); Name annotTagRef = Names.fromString(getStringCPEntryValue(dataInStream.readInt())); boolean constAnnotation = dataInStream.readBoolean(); if (!constAnnotation) { return new BAnnotationAttachmentSymbol(pkgId, annotTagRef, this.env.pkgSymbol.pkgID, owner, pos, COMPILED_SOURCE, null); } BType constantValType = readBType(dataInStream); BConstantSymbol constantSymbol = new BConstantSymbol(0, Names.EMPTY, Names.EMPTY, this.env.pkgSymbol.pkgID, null, constantValType, owner, pos, COMPILED_SOURCE); constantSymbol.value = readConstLiteralValue(constantValType, dataInStream); constantSymbol.literalType = constantSymbol.value.type; return new BAnnotationAttachmentSymbol.BConstAnnotationAttachmentSymbol(pkgId, annotTagRef, this.env.pkgSymbol.pkgID, owner, pos, COMPILED_SOURCE, constantSymbol, null); } private void defineConstant(DataInputStream dataInStream) throws IOException { String constantName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); Location pos = readPosition(dataInStream); byte[] docBytes = readDocBytes(dataInStream); BType type = readBType(dataInStream); Scope enclScope = this.env.pkgSymbol.scope; BConstantSymbol constantSymbol = new BConstantSymbol(flags, names.fromString(constantName), this.env.pkgSymbol.pkgID, null, type, enclScope.owner, pos, toOrigin(origin)); defineMarkDownDocAttachment(constantSymbol, docBytes); defineAnnotAttachmentSymbols(dataInStream, constantSymbol); dataInStream.readLong(); BType constantValType = readBType(dataInStream); constantSymbol.value = readConstLiteralValue(constantValType, dataInStream); constantSymbol.literalType = constantSymbol.value.type; enclScope.define(constantSymbol.name, constantSymbol); } private BLangConstantValue readConstLiteralValue(BType valueType, DataInputStream dataInStream) throws IOException { switch (valueType.tag) { case TypeTags.INT: return new BLangConstantValue(getIntCPEntryValue(dataInStream), symTable.intType); case TypeTags.BYTE: return new BLangConstantValue(getByteCPEntryValue(dataInStream), symTable.byteType); case TypeTags.FLOAT: return new BLangConstantValue(getFloatCPEntryValue(dataInStream), symTable.floatType); case TypeTags.STRING: return new BLangConstantValue(getStringCPEntryValue(dataInStream), symTable.stringType); case TypeTags.DECIMAL: return new BLangConstantValue(getStringCPEntryValue(dataInStream), symTable.decimalType); case TypeTags.BOOLEAN: return new BLangConstantValue(dataInStream.readBoolean(), symTable.booleanType); case TypeTags.NIL: return new BLangConstantValue(null, symTable.nilType); case TypeTags.RECORD: int size = dataInStream.readInt(); Map<String, BLangConstantValue> keyValuePairs = new LinkedHashMap<>(); for (int i = 0; i < size; i++) { 
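// Each record-constant field is serialized as a (key, field type, field value) triple; the value decodes recursively through readConstLiteralValue.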
String key = getStringCPEntryValue(dataInStream); BType type = readBType(dataInStream); BLangConstantValue value = readConstLiteralValue(type, dataInStream); keyValuePairs.put(key, value); } return new BLangConstantValue(keyValuePairs, valueType); case TypeTags.TUPLE: int tupleSize = dataInStream.readInt(); List<BLangConstantValue> members = new ArrayList<>(tupleSize); for (int i = 0; i < tupleSize; i++) { BType type = readBType(dataInStream); BLangConstantValue value = readConstLiteralValue(type, dataInStream); members.add(value); } return new BLangConstantValue(members, valueType); case TypeTags.INTERSECTION: return readConstLiteralValue(((BIntersectionType) valueType).effectiveType, dataInStream); case TypeTags.TYPEREFDESC: return readConstLiteralValue(Types.getReferredType(valueType), dataInStream); default: throw new RuntimeException("unexpected type: " + valueType); } } private void defineServiceDeclarations(DataInputStream inputStream) throws IOException { String serviceName = getStringCPEntryValue(inputStream); String associatedClassName = getStringCPEntryValue(inputStream); long flags = inputStream.readLong(); byte origin = inputStream.readByte(); Location pos = readPosition(inputStream); BType type = null; if (inputStream.readBoolean()) { type = readBType(inputStream); } List<String> attachPoint = null; if (inputStream.readBoolean()) { attachPoint = new ArrayList<>(); int nSegments = inputStream.readInt(); for (int i = 0; i < nSegments; i++) { attachPoint.add(getStringCPEntryValue(inputStream)); } } String attachPointLiteral = null; if (inputStream.readBoolean()) { attachPointLiteral = getStringCPEntryValue(inputStream); } BSymbol classSymbol = this.env.pkgSymbol.scope.lookup(names.fromString(associatedClassName)).symbol; BServiceSymbol serviceDecl = new BServiceSymbol((BClassSymbol) classSymbol, flags, names.fromString(serviceName), this.env.pkgSymbol.pkgID, type, this.env.pkgSymbol, pos, SymbolOrigin.toOrigin(origin)); int nListeners = inputStream.readInt(); for (int i = 0; i < nListeners; i++) { serviceDecl.addListenerType(readBType(inputStream)); } serviceDecl.setAttachPointStringLiteral(attachPointLiteral); serviceDecl.setAbsResourcePath(attachPoint); this.env.pkgSymbol.scope.define(names.fromString(serviceName), serviceDecl); } private void definePackageLevelVariables(DataInputStream dataInStream) throws IOException { dataInStream.readByte(); String varName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); byte origin = dataInStream.readByte(); byte[] docBytes = readDocBytes(dataInStream); BType varType = readBType(dataInStream); Scope enclScope = this.env.pkgSymbol.scope; BVarSymbol varSymbol; if (varType.tag == TypeTags.INVOKABLE) { BInvokableSymbol invokableSymbol = new BInvokableSymbol(SymTag.VARIABLE, flags, names.fromString(varName), this.env.pkgSymbol.pkgID, varType, enclScope.owner, symTable.builtinPos, toOrigin(origin)); invokableSymbol.kind = SymbolKind.FUNCTION; invokableSymbol.retType = ((BInvokableType) invokableSymbol.type).retType; varSymbol = invokableSymbol; } else { varSymbol = new BVarSymbol(flags, names.fromString(varName), this.env.pkgSymbol.pkgID, varType, enclScope.owner, symTable.builtinPos, toOrigin(origin)); if (varType.tsymbol != null && Symbols.isFlagOn(varType.tsymbol.flags, Flags.CLIENT)) { varSymbol.tag = SymTag.ENDPOINT; } } this.globalVarMap.put(varName, varSymbol); defineMarkDownDocAttachment(varSymbol, docBytes); defineAnnotAttachmentSymbols(dataInStream, varSymbol); enclScope.define(varSymbol.name, varSymbol); } 
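// A note on setParamSymbols below: a parameter's defaultability is recovered purely from its
// serialized flags via (flags & Flags.OPTIONAL) == Flags.OPTIONAL. For example, a parameter
// written out with flags == 0 decodes as required (isDefaultable == false), while one carrying
// the OPTIONAL bit decodes as defaultable.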
private void setParamSymbols(BInvokableSymbol invokableSymbol, DataInputStream dataInStream) throws IOException { int requiredParamCount = dataInStream.readInt(); BInvokableType invokableType = (BInvokableType) invokableSymbol.type; for (int i = 0; i < requiredParamCount; i++) { String paramName = getStringCPEntryValue(dataInStream); var flags = dataInStream.readLong(); BVarSymbol varSymbol = new BVarSymbol(flags, names.fromString(paramName), this.env.pkgSymbol.pkgID, invokableType.paramTypes.get(i), invokableSymbol, symTable.builtinPos, COMPILED_SOURCE); varSymbol.isDefaultable = ((flags & Flags.OPTIONAL) == Flags.OPTIONAL); defineAnnotAttachmentSymbols(dataInStream, varSymbol); invokableSymbol.params.add(varSymbol); } if (dataInStream.readBoolean()) { String paramName = getStringCPEntryValue(dataInStream); BVarSymbol restParam = new BVarSymbol(0, names.fromString(paramName), this.env.pkgSymbol.pkgID, invokableType.restType, invokableSymbol, symTable.builtinPos, COMPILED_SOURCE); invokableSymbol.restParam = restParam; defineAnnotAttachmentSymbols(dataInStream, restParam); } if (Symbols.isFlagOn(invokableSymbol.retType.flags, Flags.PARAMETERIZED)) { Map<Name, BVarSymbol> paramsMap = new HashMap<>(); for (BVarSymbol param : invokableSymbol.params) { if (paramsMap.put(param.getName(), param) != null) { throw new IllegalStateException("duplicate key: " + param.getName()); } } populateParameterizedType(invokableSymbol.retType, paramsMap, invokableSymbol); } BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol; tsymbol.flags = invokableSymbol.flags; tsymbol.params = invokableSymbol.params; tsymbol.restParam = invokableSymbol.restParam; tsymbol.returnType = invokableSymbol.retType; boolean hasReceiver = dataInStream.readBoolean(); if (hasReceiver) { dataInStream.readByte(); readBType(dataInStream); getStringCPEntryValue(dataInStream); } } private void defineAnnotAttachmentSymbols(DataInputStream dataInStream, Annotatable owner) throws IOException { dataInStream.readLong(); int annotSymbolCount = dataInStream.readInt(); if (annotSymbolCount == 0) { return; } List<BAnnotationAttachmentSymbol> annotationAttachmentSymbols = (List<BAnnotationAttachmentSymbol>) owner.getAnnotations(); for (int j = 0; j < annotSymbolCount; j++) { annotationAttachmentSymbols.add(defineAnnotationAttachmentSymbol(dataInStream, (BSymbol) owner)); } } /** * This method is used for filling the `paramSymbol` field in a parameterized type. Since we want to use the same * symbol of the parameter referred to by the type, we have to wait until the parameter symbols are defined to fill * in the `paramSymbol` field. Only types with constituent types are considered here since those are the only types * which can recursively hold a parameterized type. 
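* For instance, in a dependently-typed signature along the lines of * {@code function get(typedesc<anydata> td = <>) returns td|error} (a hypothetical example, * mirroring lang library functions such as {@code value:cloneWithType}), the * {@code BParameterizedType} in the return type must reuse the exact {@code BVarSymbol} created * for the parameter {@code td}, which is why this pass runs only after the parameter symbols * have been defined.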
* * @param type The return type of a function, which possibly contains a parameterized type * @param paramsMap A mapping between the parameter names and the parameter symbols of the function * @param invSymbol The symbol of the function */ private void populateParameterizedType(BType type, final Map<Name, BVarSymbol> paramsMap, BInvokableSymbol invSymbol) { if (type == null) { return; } switch (type.tag) { case TypeTags.PARAMETERIZED_TYPE: BParameterizedType varType = (BParameterizedType) type; varType.paramSymbol = paramsMap.get(varType.name); varType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PARAMETERIZED | varType.paramSymbol.flags, varType.paramSymbol.name, varType.paramSymbol.originalName, varType.paramSymbol.pkgID, varType, invSymbol, varType.paramSymbol.pos, VIRTUAL); break; case TypeTags.MAP: case TypeTags.FUTURE: case TypeTags.TYPEDESC: ConstrainedType constrainedType = (ConstrainedType) type; populateParameterizedType((BType) constrainedType.getConstraint(), paramsMap, invSymbol); break; case TypeTags.XML: populateParameterizedType(((BXMLType) type).constraint, paramsMap, invSymbol); break; case TypeTags.ARRAY: populateParameterizedType(((BArrayType) type).eType, paramsMap, invSymbol); break; case TypeTags.TUPLE: BTupleType tupleType = (BTupleType) type; for (BType t : tupleType.tupleTypes) { populateParameterizedType(t, paramsMap, invSymbol); } populateParameterizedType(tupleType.restType, paramsMap, invSymbol); break; case TypeTags.STREAM: BStreamType streamType = (BStreamType) type; populateParameterizedType(streamType.constraint, paramsMap, invSymbol); populateParameterizedType(streamType.completionType, paramsMap, invSymbol); break; case TypeTags.TABLE: BTableType tableType = (BTableType) type; populateParameterizedType(tableType.constraint, paramsMap, invSymbol); populateParameterizedType(tableType.keyTypeConstraint, paramsMap, invSymbol); break; case TypeTags.INVOKABLE: BInvokableType invokableType = (BInvokableType) type; if (Symbols.isFlagOn(invokableType.flags, Flags.ANY_FUNCTION)) { break; } for (BType t : invokableType.paramTypes) { populateParameterizedType(t, paramsMap, invSymbol); } populateParameterizedType(invokableType.restType, paramsMap, invSymbol); populateParameterizedType(invokableType.retType, paramsMap, invSymbol); break; case TypeTags.UNION: BUnionType unionType = (BUnionType) type; for (BType t : unionType.getMemberTypes()) { populateParameterizedType(t, paramsMap, invSymbol); } break; } } private Location readPosition(DataInputStream dataInStream) throws IOException { String cUnitName = getStringCPEntryValue(dataInStream); int sLine = dataInStream.readInt(); int sCol = dataInStream.readInt(); int eLine = dataInStream.readInt(); int eCol = dataInStream.readInt(); return new BLangDiagnosticLocation(cUnitName, sLine, eLine, sCol, eCol); } private String getStringCPEntryValue(DataInputStream dataInStream) throws IOException { int pkgNameCPIndex = dataInStream.readInt(); StringCPEntry stringCPEntry = (StringCPEntry) this.env.constantPool[pkgNameCPIndex]; return stringCPEntry.value; } private String getStringCPEntryValue(int cpIndex) throws IOException { StringCPEntry stringCPEntry = (StringCPEntry) this.env.constantPool[cpIndex]; return stringCPEntry.value; } private long getIntCPEntryValue(DataInputStream dataInStream) throws IOException { int pkgNameCPIndex = dataInStream.readInt(); IntegerCPEntry intCPEntry = (IntegerCPEntry) this.env.constantPool[pkgNameCPIndex]; return intCPEntry.value; } private int getByteCPEntryValue(DataInputStream dataInStream) 
throws IOException { int byteCpIndex = dataInStream.readInt(); ByteCPEntry byteCPEntry = (ByteCPEntry) this.env.constantPool[byteCpIndex]; return byteCPEntry.value; } private String getFloatCPEntryValue(DataInputStream dataInStream) throws IOException { int floatCpIndex = dataInStream.readInt(); FloatCPEntry floatCPEntry = (FloatCPEntry) this.env.constantPool[floatCpIndex]; return Double.toString(floatCPEntry.value); } private PackageID createPackageID(String orgName, String pkgName, String moduleName, String pkgVersion) { if (orgName == null || orgName.isEmpty()) { throw new BLangCompilerException("invalid module name '" + moduleName + "' in compiled package file"); } return new PackageID(names.fromString(orgName), names.fromString(pkgName), names.fromString(moduleName), names.fromString(pkgVersion), null); } /** * This class holds compiled package specific information during the symbol enter phase of the compiled package. * * @since 0.970.0 */ private static class BIRPackageSymbolEnv { PackageID requestedPackageId; Map<Integer, byte[]> unparsedBTypeCPs = new HashMap<>(); BPackageSymbol pkgSymbol; CPEntry[] constantPool; List<UnresolvedType> unresolvedTypes; BIRPackageSymbolEnv() { this.unresolvedTypes = new ArrayList<>(); } } private static class UnresolvedType { String typeSig; Consumer<BType> completer; UnresolvedType(String typeSig, Consumer<BType> completer) { this.typeSig = typeSig; this.completer = completer; } } private class BIRTypeReader { private DataInputStream inputStream; public BIRTypeReader(DataInputStream inputStream) { this.inputStream = inputStream; } private BType readTypeFromCp() throws IOException { return readBType(inputStream); } public BType readType(int cpI) throws IOException { byte tag = inputStream.readByte(); Name name = names.fromString(getStringCPEntryValue(inputStream)); var flags = inputStream.readLong(); int typeFlags = inputStream.readInt(); switch (tag) { case TypeTags.INT: return typeParamAnalyzer.getNominalType(symTable.intType, name, flags); case TypeTags.BYTE: return typeParamAnalyzer.getNominalType(symTable.byteType, name, flags); case TypeTags.FLOAT: return typeParamAnalyzer.getNominalType(symTable.floatType, name, flags); case TypeTags.DECIMAL: return typeParamAnalyzer.getNominalType(symTable.decimalType, name, flags); case TypeTags.STRING: return typeParamAnalyzer.getNominalType(symTable.stringType, name, flags); case TypeTags.BOOLEAN: return typeParamAnalyzer.getNominalType(symTable.booleanType, name, flags); case TypeTags.JSON: return isImmutable(flags) ? getEffectiveImmutableType(symTable.jsonType) : symTable.jsonType; case TypeTags.XML: BType constraintType = readTypeFromCp(); BXMLType mutableXmlType = new BXMLType(constraintType, symTable.xmlType.tsymbol); if (Symbols.isFlagOn(flags, Flags.PARAMETERIZED)) { mutableXmlType.flags |= Flags.PARAMETERIZED; } return isImmutable(flags) ? getEffectiveImmutableType(mutableXmlType) : mutableXmlType; case TypeTags.NIL: return symTable.nilType; case TypeTags.NEVER: return symTable.neverType; case TypeTags.ANYDATA: if (name.getValue().equals(Names.ANYDATA.getValue())) { name = Names.EMPTY; } BType anydataNominalType = typeParamAnalyzer.getNominalType(symTable.anydataType, name, flags); return isImmutable(flags) ? 
getEffectiveImmutableType(anydataNominalType, symTable.anydataType.tsymbol.pkgID, symTable.anydataType.tsymbol.owner) : anydataNominalType; case TypeTags.RECORD: int pkgCpIndex = inputStream.readInt(); PackageID pkgId = getPackageId(pkgCpIndex); String recordName = getStringCPEntryValue(inputStream); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(EnumSet.of(Flag.PUBLIC)), names.fromString(recordName), env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); recordSymbol.flags |= flags; recordSymbol.scope = new Scope(recordSymbol); BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags); recordType.flags |= flags; if (isImmutable(flags)) { recordSymbol.flags |= Flags.READONLY; } recordSymbol.type = recordType; compositeStack.push(recordType); addShapeCP(recordType, cpI); recordType.sealed = inputStream.readBoolean(); recordType.restFieldType = readTypeFromCp(); int recordFields = inputStream.readInt(); for (int i = 0; i < recordFields; i++) { String fieldName = getStringCPEntryValue(inputStream); var fieldFlags = inputStream.readLong(); byte[] docBytes = readDocBytes(inputStream); BType fieldType = readTypeFromCp(); BVarSymbol varSymbol = new BVarSymbol(fieldFlags, names.fromString(fieldName), recordSymbol.pkgID, fieldType, recordSymbol.scope.owner, symTable.builtinPos, COMPILED_SOURCE); defineMarkDownDocAttachment(varSymbol, docBytes); BField structField = new BField(varSymbol.name, varSymbol.pos, varSymbol); recordType.fields.put(structField.name.value, structField); recordSymbol.scope.define(varSymbol.name, varSymbol); } boolean isInitAvailable = inputStream.readByte() == 1; if (isInitAvailable) { String recordInitFuncName = getStringCPEntryValue(inputStream); var recordInitFuncFlags = inputStream.readLong(); BInvokableType recordInitFuncType = (BInvokableType) readTypeFromCp(); Name initFuncName = names.fromString(recordInitFuncName); boolean isNative = Symbols.isFlagOn(recordInitFuncFlags, Flags.NATIVE); BInvokableSymbol recordInitFuncSymbol = Symbols.createFunctionSymbol(recordInitFuncFlags, initFuncName, initFuncName, env.pkgSymbol.pkgID, recordInitFuncType, env.pkgSymbol, isNative, symTable.builtinPos, COMPILED_SOURCE); recordInitFuncSymbol.retType = recordInitFuncType.retType; recordSymbol.initializerFunc = new BAttachedFunction(initFuncName, recordInitFuncSymbol, recordInitFuncType, symTable.builtinPos); recordSymbol.scope.define(initFuncName, recordInitFuncSymbol); } recordType.typeInclusions = readTypeInclusions(); Object poppedRecordType = compositeStack.pop(); assert poppedRecordType == recordType; if (pkgId.equals(env.pkgSymbol.pkgID)) { return recordType; } SymbolEnv pkgEnv = symTable.pkgEnvMap.get(packageCache.getSymbol(pkgId)); return getType(recordType, pkgEnv, names.fromString(recordName)); case TypeTags.TYPEDESC: BTypedescType typedescType = new BTypedescType(null, symTable.typeDesc.tsymbol); typedescType.constraint = readTypeFromCp(); typedescType.flags = flags; return typedescType; case TypeTags.TYPEREFDESC: int pkgIndex = inputStream.readInt(); PackageID pkg = getPackageId(pkgIndex); BPackageSymbol pkgSymbol = pkg.equals(env.pkgSymbol.pkgID) ? 
env.pkgSymbol : packageCache.getSymbol(pkg); String typeDefName = getStringCPEntryValue(inputStream); BTypeSymbol typeSymbol = Symbols.createTypeSymbol(SymTag.TYPE_REF, Flags.asMask(EnumSet.of(Flag.PUBLIC)), names.fromString(typeDefName), pkg, null, pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); boolean nullable = (typeFlags & TypeFlags.NILABLE) == TypeFlags.NILABLE; BTypeReferenceType typeReferenceType = new BTypeReferenceType(null, typeSymbol, flags, nullable); addShapeCP(typeReferenceType, cpI); compositeStack.push(typeReferenceType); typeReferenceType.referredType = readTypeFromCp(); Object poppedRefType = compositeStack.pop(); assert poppedRefType == typeReferenceType; return typeReferenceType; case TypeTags.PARAMETERIZED_TYPE: BParameterizedType type = new BParameterizedType(null, null, null, name, -1); type.paramValueType = readTypeFromCp(); type.flags = flags; type.paramIndex = inputStream.readInt(); return type; case TypeTags.STREAM: BStreamType bStreamType = new BStreamType(TypeTags.STREAM, null, null, symTable.streamType.tsymbol); bStreamType.constraint = readTypeFromCp(); bStreamType.completionType = readTypeFromCp(); bStreamType.flags = flags; return bStreamType; case TypeTags.TABLE: BTableType bTableType = new BTableType(TypeTags.TABLE, null, symTable.tableType.tsymbol, flags); bTableType.constraint = readTypeFromCp(); boolean hasFieldNameList = inputStream.readByte() == 1; if (hasFieldNameList) { int fieldNameListSize = inputStream.readInt(); bTableType.fieldNameList = new ArrayList<>(fieldNameListSize); for (int i = 0; i < fieldNameListSize; i++) { String fieldName = getStringCPEntryValue(inputStream); bTableType.fieldNameList.add(fieldName); } } boolean hasKeyConstraint = inputStream.readByte() == 1; if (hasKeyConstraint) { bTableType.keyTypeConstraint = readTypeFromCp(); if (bTableType.keyTypeConstraint.tsymbol == null) { bTableType.keyTypeConstraint.tsymbol = Symbols.createTypeSymbol(SymTag.TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, bTableType.keyTypeConstraint, env.pkgSymbol.owner, symTable.builtinPos, COMPILED_SOURCE); } } return bTableType; case TypeTags.MAP: BMapType bMapType = new BMapType(TypeTags.MAP, null, symTable.mapType.tsymbol, flags); bMapType.constraint = readTypeFromCp(); return bMapType; case TypeTags.INVOKABLE: BInvokableType bInvokableType = new BInvokableType(null, null, null, null); bInvokableType.flags = flags; if (inputStream.readBoolean()) { return bInvokableType; } int paramCount = inputStream.readInt(); List<BType> paramTypes = new ArrayList<>(paramCount); for (int i = 0; i < paramCount; i++) { paramTypes.add(readTypeFromCp()); } bInvokableType.paramTypes = paramTypes; if (inputStream.readBoolean()) { bInvokableType.restType = readTypeFromCp(); } bInvokableType.retType = readTypeFromCp(); return bInvokableType; case TypeTags.ANY: BType anyNominalType = typeParamAnalyzer.getNominalType(symTable.anyType, name, flags); return isImmutable(flags) ? 
getEffectiveImmutableType(anyNominalType, symTable.anyType.tsymbol.pkgID, symTable.anyType.tsymbol.owner) : anyNominalType; case TypeTags.HANDLE: return symTable.handleType; case TypeTags.READONLY: return symTable.readonlyType; case TypeTags.ENDPOINT: break; case TypeTags.ARRAY: byte state = inputStream.readByte(); int size = inputStream.readInt(); BTypeSymbol arrayTypeSymbol = Symbols.createTypeSymbol(SymTag.ARRAY_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner, symTable.builtinPos, COMPILED_SOURCE); BArrayType bArrayType = new BArrayType(null, arrayTypeSymbol, size, BArrayState.valueOf(state), flags); bArrayType.eType = readTypeFromCp(); return bArrayType; case TypeTags.UNION: boolean isCyclic = inputStream.readByte() == 1; boolean hasName = inputStream.readByte() == 1; PackageID unionsPkgId = env.pkgSymbol.pkgID; Name unionName = Names.EMPTY; if (hasName) { pkgCpIndex = inputStream.readInt(); unionsPkgId = getPackageId(pkgCpIndex); String unionNameStr = getStringCPEntryValue(inputStream); unionName = names.fromString(unionNameStr); } BTypeSymbol unionTypeSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), unionName, unionsPkgId, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); int unionMemberCount = inputStream.readInt(); BUnionType unionType = BUnionType.create(unionTypeSymbol, new LinkedHashSet<>(unionMemberCount)); unionType.name = unionName; addShapeCP(unionType, cpI); compositeStack.push(unionType); unionType.flags = flags; unionType.isCyclic = isCyclic; for (int i = 0; i < unionMemberCount; i++) { unionType.add(readTypeFromCp()); } int unionOriginalMemberCount = inputStream.readInt(); LinkedHashSet<BType> originalMemberTypes = new LinkedHashSet<>(unionOriginalMemberCount); for (int i = 0; i < unionOriginalMemberCount; i++) { originalMemberTypes.add(readTypeFromCp()); } unionType.setOriginalMemberTypes(originalMemberTypes); var poppedUnionType = compositeStack.pop(); assert poppedUnionType == unionType; boolean isEnum = inputStream.readBoolean(); if (isEnum) { readAndSetEnumSymbol(unionType, flags); } if (hasName) { if (unionsPkgId.equals(env.pkgSymbol.pkgID)) { return unionType; } else { pkgEnv = symTable.pkgEnvMap.get(packageCache.getSymbol(unionsPkgId)); if (pkgEnv != null) { BType existingUnionType = getType(unionType, pkgEnv, unionName); if (existingUnionType != symTable.noType) { return existingUnionType; } } } } return unionType; case TypeTags.INTERSECTION: BTypeSymbol intersectionTypeSymbol = Symbols.createTypeSymbol(SymTag.INTERSECTION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); int intersectionMemberCount = inputStream.readInt(); LinkedHashSet<BType> constituentTypes = new LinkedHashSet<>(intersectionMemberCount); for (int i = 0; i < intersectionMemberCount; i++) { constituentTypes.add(readTypeFromCp()); } IntersectableReferenceType effectiveType = (IntersectableReferenceType) readTypeFromCp(); return new BIntersectionType(intersectionTypeSymbol, constituentTypes, effectiveType, flags); case TypeTags.PACKAGE: break; case TypeTags.NONE: return symTable.noType; case TypeTags.VOID: break; case TypeTags.XMLNS: break; case TypeTags.ANNOTATION: break; case TypeTags.SEMANTIC_ERROR: break; case TypeTags.ERROR: pkgCpIndex = inputStream.readInt(); pkgId = getPackageId(pkgCpIndex); BPackageSymbol owner = packageCache.getSymbol(pkgId); BTypeSymbol errorSymbol; if (owner != null) { 
errorSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.EMPTY, owner.pkgID, null, owner, symTable.builtinPos, COMPILED_SOURCE); } else { errorSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); } BErrorType errorType = new BErrorType(errorSymbol); addShapeCP(errorType, cpI); compositeStack.push(errorType); String errorName = getStringCPEntryValue(inputStream); BType detailsType = readTypeFromCp(); errorType.detailType = detailsType; errorType.flags = flags; errorSymbol.type = errorType; errorSymbol.pkgID = pkgId; errorSymbol.originalName = errorSymbol.name = names.fromString(errorName); Object poppedErrorType = compositeStack.pop(); assert poppedErrorType == errorType; if (!env.pkgSymbol.pkgID.equals(PackageID.ANNOTATIONS) && Symbols.isFlagOn(flags, Flags.NATIVE)) { return symTable.errorType; } errorType.typeIdSet = readTypeIdSet(inputStream); return errorType; case TypeTags.ITERATOR: break; case TypeTags.TUPLE: BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner, symTable.builtinPos, COMPILED_SOURCE); BTupleType bTupleType = new BTupleType(tupleTypeSymbol, null); bTupleType.flags = flags; int tupleMemberCount = inputStream.readInt(); List<BType> tupleMemberTypes = new ArrayList<>(tupleMemberCount); for (int i = 0; i < tupleMemberCount; i++) { tupleMemberTypes.add(readTypeFromCp()); } bTupleType.tupleTypes = tupleMemberTypes; if (inputStream.readBoolean()) { bTupleType.restType = readTypeFromCp(); } return bTupleType; case TypeTags.FUTURE: BFutureType bFutureType = new BFutureType(TypeTags.FUTURE, null, symTable.futureType.tsymbol); bFutureType.constraint = readTypeFromCp(); bFutureType.flags = flags; return bFutureType; case TypeTags.FINITE: String finiteTypeName = getStringCPEntryValue(inputStream); var finiteTypeFlags = inputStream.readLong(); BTypeSymbol symbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteTypeFlags, names.fromString(finiteTypeName), env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); symbol.scope = new Scope(symbol); BFiniteType finiteType = new BFiniteType(symbol); finiteType.flags = flags; symbol.type = finiteType; int valueSpaceSize = inputStream.readInt(); for (int i = 0; i < valueSpaceSize; i++) { defineValueSpace(inputStream, finiteType, this); } return finiteType; case TypeTags.OBJECT: boolean service = inputStream.readByte() == 1; pkgCpIndex = inputStream.readInt(); pkgId = getPackageId(pkgCpIndex); String objName = getStringCPEntryValue(inputStream); var objFlags = (inputStream.readBoolean() ? Flags.CLASS : 0) | Flags.PUBLIC; objFlags = inputStream.readBoolean() ? 
objFlags | Flags.CLIENT : objFlags; BObjectTypeSymbol objectSymbol; if (Symbols.isFlagOn(objFlags, Flags.CLASS)) { objectSymbol = Symbols.createClassSymbol(objFlags, names.fromString(objName), env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE, false); } else { objectSymbol = Symbols.createObjectSymbol(objFlags, names.fromString(objName), env.pkgSymbol.pkgID, null, env.pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); } objectSymbol.scope = new Scope(objectSymbol); BObjectType objectType; objectType = new BObjectType(objectSymbol); if (service) { objectType.flags |= Flags.SERVICE; objectSymbol.flags |= Flags.SERVICE; } if (isImmutable(flags)) { objectSymbol.flags |= Flags.READONLY; } if (Symbols.isFlagOn(flags, Flags.ANONYMOUS)) { objectSymbol.flags |= Flags.ANONYMOUS; } objectType.flags = flags; objectSymbol.type = objectType; addShapeCP(objectType, cpI); compositeStack.push(objectType); int fieldCount = inputStream.readInt(); for (int i = 0; i < fieldCount; i++) { String fieldName = getStringCPEntryValue(inputStream); var fieldFlags = inputStream.readLong(); var defaultable = inputStream.readBoolean(); byte[] docBytes = readDocBytes(inputStream); BType fieldType = readTypeFromCp(); BVarSymbol objectVarSymbol = new BVarSymbol(fieldFlags, names.fromString(fieldName), objectSymbol.pkgID, fieldType, objectSymbol.scope.owner, symTable.builtinPos, COMPILED_SOURCE); objectVarSymbol.isDefaultable = defaultable; defineMarkDownDocAttachment(objectVarSymbol, docBytes); BField structField = new BField(objectVarSymbol.name, null, objectVarSymbol); objectType.fields.put(structField.name.value, structField); objectSymbol.scope.define(objectVarSymbol.name, objectVarSymbol); } boolean generatedConstructorPresent = inputStream.readBoolean(); if (generatedConstructorPresent) { ignoreAttachedFunc(); } boolean constructorPresent = inputStream.readBoolean(); if (constructorPresent) { ignoreAttachedFunc(); } int funcCount = inputStream.readInt(); for (int i = 0; i < funcCount; i++) { if (isImmutable(objectSymbol.flags) && Symbols.isFlagOn(flags, Flags.ANONYMOUS)) { populateIntersectionTypeReferencedFunctions(inputStream, objectSymbol); } else { ignoreAttachedFunc(); } } objectType.typeInclusions = readTypeInclusions(); objectType.typeIdSet = readTypeIdSet(inputStream); Object poppedObjType = compositeStack.pop(); assert poppedObjType == objectType; if (pkgId.equals(env.pkgSymbol.pkgID)) { return objectType; } pkgEnv = symTable.pkgEnvMap.get(packageCache.getSymbol(pkgId)); return getType(objectType, pkgEnv, names.fromString(objName)); case TypeTags.BYTE_ARRAY: break; case TypeTags.FUNCTION_POINTER: break; case SERVICE_TYPE_TAG: throw new AssertionError(); case TypeTags.SIGNED32_INT: return symTable.signed32IntType; case TypeTags.SIGNED16_INT: return symTable.signed16IntType; case TypeTags.SIGNED8_INT: return symTable.signed8IntType; case TypeTags.UNSIGNED32_INT: return symTable.unsigned32IntType; case TypeTags.UNSIGNED16_INT: return symTable.unsigned16IntType; case TypeTags.UNSIGNED8_INT: return symTable.unsigned8IntType; case TypeTags.CHAR_STRING: return symTable.charStringType; case TypeTags.XML_ELEMENT: return isImmutable(flags) ? getEffectiveImmutableType(symTable.xmlElementType) : symTable.xmlElementType; case TypeTags.XML_PI: return isImmutable(flags) ? getEffectiveImmutableType(symTable.xmlPIType) : symTable.xmlPIType; case TypeTags.XML_COMMENT: return isImmutable(flags) ? 
getEffectiveImmutableType(symTable.xmlCommentType) : symTable.xmlCommentType; case TypeTags.XML_TEXT: return symTable.xmlTextType; } return null; } private BTypeIdSet readTypeIdSet(DataInputStream inputStream) throws IOException { Set<BTypeIdSet.BTypeId> primary = new HashSet<>(); int primaryTypeIdCount = inputStream.readInt(); for (int i = 0; i < primaryTypeIdCount; i++) { primary.add(readTypeId(inputStream)); } Set<BTypeIdSet.BTypeId> secondary = new HashSet<>(); int secondaryTypeIdCount = inputStream.readInt(); for (int i = 0; i < secondaryTypeIdCount; i++) { secondary.add(readTypeId(inputStream)); } return new BTypeIdSet(primary, secondary); } private BTypeIdSet.BTypeId readTypeId(DataInputStream inputStream) throws IOException { int pkgCPIndex = inputStream.readInt(); PackageID packageId = getPackageId(pkgCPIndex); String name = getStringCPEntryValue(inputStream); boolean isPublicTypeId = inputStream.readBoolean(); return new BTypeIdSet.BTypeId(packageId, name, isPublicTypeId); } private void ignoreAttachedFunc() throws IOException { getStringCPEntryValue(inputStream); getStringCPEntryValue(inputStream); inputStream.readLong(); readTypeFromCp(); } private List<BType> readTypeInclusions() throws IOException { int nTypeInclusions = inputStream.readInt(); List<BType> typeInclusions = new ArrayList<>(nTypeInclusions); for (int i = 0; i < nTypeInclusions; i++) { BType inclusion = readTypeFromCp(); typeInclusions.add(inclusion); } return typeInclusions; } private void readAndSetEnumSymbol(BUnionType unionType, long flags) throws IOException { PackageID enumPkgId = getPackageId(inputStream.readInt()); String enumName = getStringCPEntryValue(inputStream); int memberCount = inputStream.readInt(); BSymbol pkgSymbol = packageCache.getSymbol(enumPkgId); if (pkgSymbol == null) { pkgSymbol = env.pkgSymbol; } SymbolEnv enumPkgEnv = symTable.pkgEnvMap.get(pkgSymbol); if (enumPkgEnv == null) { enumPkgEnv = SymbolEnv.createPkgEnv(null, env.pkgSymbol.scope, null); } List<BConstantSymbol> members = new ArrayList<>(); for (int i = 0; i < memberCount; i++) { String memName = getStringCPEntryValue(inputStream); BSymbol sym = symbolResolver.lookupSymbolInMainSpace(enumPkgEnv, names.fromString(memName)); members.add((BConstantSymbol) sym); } unionType.tsymbol = new BEnumSymbol(members, flags, names.fromString(enumName), pkgSymbol.pkgID, unionType, pkgSymbol, symTable.builtinPos, COMPILED_SOURCE); } private void populateIntersectionTypeReferencedFunctions(DataInputStream inputStream, BObjectTypeSymbol objectSymbol) throws IOException { String attachedFuncName = getStringCPEntryValue(inputStream); String attachedFuncOrigName = getStringCPEntryValue(inputStream); var attachedFuncFlags = inputStream.readLong(); if (Symbols.isFlagOn(attachedFuncFlags, Flags.INTERFACE) && Symbols.isFlagOn(attachedFuncFlags, Flags.ATTACHED)) { BInvokableType attachedFuncType = (BInvokableType) readTypeFromCp(); Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName( objectSymbol.name.value, attachedFuncName)); Name funcOrigName = names.fromString(attachedFuncOrigName); BInvokableSymbol attachedFuncSymbol = Symbols.createFunctionSymbol(attachedFuncFlags, funcName, funcOrigName, env.pkgSymbol.pkgID, attachedFuncType, env.pkgSymbol, false, symTable.builtinPos, COMPILED_SOURCE); BAttachedFunction attachedFunction = new BAttachedFunction(names.fromString(attachedFuncName), attachedFuncSymbol, attachedFuncType, symTable.builtinPos); setInvokableTypeSymbol(attachedFuncType); if (!Symbols.isFlagOn(attachedFuncType.flags, 
Flags.ANY_FUNCTION)) { BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) attachedFuncType.tsymbol; attachedFuncSymbol.params = tsymbol.params; attachedFuncSymbol.restParam = tsymbol.restParam; attachedFuncSymbol.retType = tsymbol.returnType; } objectSymbol.referencedFunctions.add(attachedFunction); objectSymbol.attachedFuncs.add(attachedFunction); objectSymbol.scope.define(funcName, attachedFuncSymbol); } } } private BType getType(BType readShape, SymbolEnv pkgEnv, Name name) { BType type = symbolResolver.lookupSymbolInMainSpace(pkgEnv, name).type; if (type != symTable.noType && (!name.value.contains(ANON_PREFIX) || types.isSameBIRShape(readShape, type))) { return type; } if (pkgEnv.node != null) { for (BLangTypeDefinition typeDefinition : ((BLangPackage) pkgEnv.node).typeDefinitions) { BSymbol symbol = typeDefinition.symbol; String typeDefName = typeDefinition.name.value; if (typeDefName.contains(ANON_PREFIX)) { BType anonType = symbol.type; if (types.isSameBIRShape(readShape, anonType)) { return anonType; } } else if (typeDefName.equals(name.value)) { return symbol.type; } } } else { for (Map.Entry<Name, Scope.ScopeEntry> value : pkgEnv.scope.entries.entrySet()) { BSymbol symbol = value.getValue().symbol; if (value.getKey().value.contains(ANON_PREFIX)) { BType anonType = symbol.type; if (types.isSameBIRShape(readShape, anonType)) { return anonType; } } } } return type; } private byte[] readDocBytes(DataInputStream inputStream) throws IOException { int docLength = inputStream.readInt(); byte[] docBytes = new byte[docLength]; int noOfBytesRead = inputStream.read(docBytes); if (docLength != noOfBytesRead) { throw new RuntimeException("failed to read Markdown Documentation"); } return docBytes; } private PackageID getPackageId(int pkgCPIndex) { PackageCPEntry pkgCpEntry = (PackageCPEntry) env.constantPool[pkgCPIndex]; String orgName = ((StringCPEntry) env.constantPool[pkgCpEntry.orgNameCPIndex]).value; String pkgName = ((StringCPEntry) env.constantPool[pkgCpEntry.pkgNameCPIndex]).value; String moduleName = ((StringCPEntry) env.constantPool[pkgCpEntry.moduleNameCPIndex]).value; String version = ((StringCPEntry) env.constantPool[pkgCpEntry.versionCPIndex]).value; return new PackageID(names.fromString(orgName), names.fromString(pkgName), names.fromString(moduleName), names.fromString(version), null); } private void defineValueSpace(DataInputStream dataInStream, BFiniteType finiteType, BIRTypeReader typeReader) throws IOException { BType valueType = typeReader.readTypeFromCp(); dataInStream.readInt(); BLangLiteral litExpr = createLiteralBasedOnType(valueType); switch (valueType.tag) { case TypeTags.INT: int integerCpIndex = dataInStream.readInt(); IntegerCPEntry integerCPEntry = (IntegerCPEntry) this.env.constantPool[integerCpIndex]; litExpr.value = integerCPEntry.value; break; case TypeTags.BYTE: int byteCpIndex = dataInStream.readInt(); ByteCPEntry byteCPEntry = (ByteCPEntry) this.env.constantPool[byteCpIndex]; litExpr.value = byteCPEntry.value; break; case TypeTags.FLOAT: int floatCpIndex = dataInStream.readInt(); FloatCPEntry floatCPEntry = (FloatCPEntry) this.env.constantPool[floatCpIndex]; litExpr.value = Double.toString(floatCPEntry.value); break; case TypeTags.STRING: case TypeTags.DECIMAL: litExpr.value = getStringCPEntryValue(dataInStream); break; case TypeTags.BOOLEAN: litExpr.value = dataInStream.readBoolean(); break; case TypeTags.NIL: litExpr.originalValue = "null"; break; default: throw new UnsupportedOperationException("finite type value is not supported for type: " + 
valueType); } litExpr.setBType(valueType); finiteType.addValue(litExpr); } private BLangLiteral createLiteralBasedOnType(BType valueType) { NodeKind nodeKind = valueType.tag <= TypeTags.DECIMAL ? NodeKind.NUMERIC_LITERAL : NodeKind.LITERAL; return nodeKind == NodeKind.LITERAL ? (BLangLiteral) TreeBuilder.createLiteralExpression() : (BLangLiteral) TreeBuilder.createNumericLiteralExpression(); } private boolean isImmutable(long flags) { return Symbols.isFlagOn(flags, Flags.READONLY); } private BType getEffectiveImmutableType(BType type) { return ImmutableTypeCloner.getEffectiveImmutableType(null, types, type, type.tsymbol.pkgID, type.tsymbol.owner, symTable, null, names); } private BType getEffectiveImmutableType(BType type, PackageID pkgID, BSymbol owner) { return ImmutableTypeCloner.getEffectiveImmutableType(null, types, type, pkgID, owner, symTable, null, names); } }
Let P be (p(col) OR col IS NULL) AND q(col). When an element of col is null, both p(col) and q(col) evaluate to false, so P is false and p(col) AND q(col) is false too. When the element of col is not null, P reduces directly to p(col) AND q(col). So P can always be reduced to p(col) AND q(col), given that a null col makes both p(col) and q(col) false.
public ScalarOperator visitInPredicate(InPredicateOperator predicate, Void context) { ScalarOperator negation = new InPredicateOperator(!predicate.isNotIn(), predicate.getChildren()); if (predicate.getChild(0).isNullable()) { ScalarOperator isNull = new IsNullPredicateOperator(predicate.getChild(0)); return new CompoundPredicateOperator(CompoundType.OR, negation, isNull); } else { return negation; } }
return new CompoundPredicateOperator(CompoundType.OR, negation, isNull);
public ScalarOperator visitInPredicate(InPredicateOperator predicate, Void context) { ScalarOperator negation = new InPredicateOperator(!predicate.isNotIn(), predicate.getChildren()); if (predicate.getChild(0).isNullable()) { ScalarOperator isNull = new IsNullPredicateOperator(predicate.getChild(0)); return new CompoundPredicateOperator(CompoundType.OR, negation, isNull); } else { return negation; } }
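As a sanity check on the reasoning above, here is a minimal standalone Java sketch (not part of the reviewed code; the class name and predicate are made up for illustration) showing the SQL-style three-valued semantics behind the fix: the negation of a filter over a nullable column must OR in an IS NULL check, otherwise rows where the predicate evaluates to null are rejected by both the filter and its negation.

import java.util.Arrays;
import java.util.List;

public class NegationDemo {
    // p(col): col > 10, evaluating to null (unknown) on null input, like SQL.
    static Boolean p(Integer col) {
        return col == null ? null : col > 10;
    }

    public static void main(String[] args) {
        List<Integer> column = Arrays.asList(5, 15, null);
        for (Integer v : column) {
            Boolean eval = p(v);
            // The original filter keeps only rows where p(col) is TRUE.
            boolean keptByFilter = Boolean.TRUE.equals(eval);
            // "NOT p(col) OR col IS NULL" keeps exactly the rows the filter rejects.
            boolean keptByNegation = Boolean.FALSE.equals(eval) || v == null;
            System.out.printf("col=%s filter=%b negation=%b%n", v, keptByFilter, keptByNegation);
        }
    }
}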
class NegateFilterShuttle extends BaseScalarOperatorShuttle { private static NegateFilterShuttle INSTANCE = new NegateFilterShuttle(); public static NegateFilterShuttle getInstance() { return INSTANCE; } public ScalarOperator negateFilter(ScalarOperator scalarOperator) { return scalarOperator.accept(this, null); } @Override public ScalarOperator visitCompoundPredicate(CompoundPredicateOperator predicate, Void context) { ScalarOperator negation; if (CompoundType.NOT == predicate.getCompoundType()) { negation = predicate.getChild(0); if (predicate.getChild(0).isNullable()) { return new CompoundPredicateOperator(CompoundType.OR, negation, new IsNullPredicateOperator(predicate.getChild(0))); } else { return negation; } } else { negation = new CompoundPredicateOperator(CompoundType.NOT, predicate); if (predicate.isNullable()) { return new CompoundPredicateOperator(CompoundType.OR, negation, new IsNullPredicateOperator(predicate)); } else { return negation; } } } @Override public ScalarOperator visitBinaryPredicate(BinaryPredicateOperator predicate, Void context) { ScalarOperator negation; if (BinaryType.EQ_FOR_NULL == predicate.getBinaryType()) { negation = new CompoundPredicateOperator(CompoundType.NOT, predicate); } else { negation = predicate.negative(); } if (predicate.getChild(0).isNullable()) { ScalarOperator isNull = new IsNullPredicateOperator(predicate.getChild(0)); return new CompoundPredicateOperator(CompoundType.OR, negation, isNull); } else { return negation; } } @Override public ScalarOperator visitIsNullPredicate(IsNullPredicateOperator predicate, Void context) { return new IsNullPredicateOperator(!predicate.isNotNull(), predicate.getChild(0)); } }
class NegateFilterShuttle extends BaseScalarOperatorShuttle { private static NegateFilterShuttle INSTANCE = new NegateFilterShuttle(); public static NegateFilterShuttle getInstance() { return INSTANCE; } public ScalarOperator negateFilter(ScalarOperator scalarOperator) { return scalarOperator.accept(this, null); } @Override public ScalarOperator visitCompoundPredicate(CompoundPredicateOperator predicate, Void context) { ScalarOperator negation; if (CompoundType.NOT == predicate.getCompoundType()) { negation = predicate.getChild(0); if (predicate.getChild(0).isNullable()) { return new CompoundPredicateOperator(CompoundType.OR, negation, new IsNullPredicateOperator(predicate.getChild(0))); } else { return negation; } } else { negation = new CompoundPredicateOperator(CompoundType.NOT, predicate); if (predicate.isNullable()) { return new CompoundPredicateOperator(CompoundType.OR, negation, new IsNullPredicateOperator(predicate)); } else { return negation; } } } @Override public ScalarOperator visitBinaryPredicate(BinaryPredicateOperator predicate, Void context) { ScalarOperator negation; if (BinaryType.EQ_FOR_NULL == predicate.getBinaryType()) { negation = new CompoundPredicateOperator(CompoundType.NOT, predicate); } else { negation = predicate.negative(); } if (predicate.getChild(0).isNullable()) { ScalarOperator isNull = new IsNullPredicateOperator(predicate.getChild(0)); return new CompoundPredicateOperator(CompoundType.OR, negation, isNull); } else { return negation; } } @Override public ScalarOperator visitIsNullPredicate(IsNullPredicateOperator predicate, Void context) { return new IsNullPredicateOperator(!predicate.isNotNull(), predicate.getChild(0)); } }
Any reason to have it as 'guessOrgName'? Can we change it to 'orgName'?
private String getOrgName() { String guessOrgName = System.getProperty("user.name"); if (guessOrgName == null) { guessOrgName = "my_org"; } else { guessOrgName = guessOrgName.toLowerCase(Locale.getDefault()); } return guessOrgName; }
String guessOrgName = System.getProperty("user.name");
private String getOrgName() { String orgName = System.getProperty("user.name"); if (orgName == null) { orgName = "my_org"; } else { orgName = orgName.toLowerCase(Locale.getDefault()); } return orgName; }
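For comparison, the same lookup-with-fallback logic can also be written with Optional; this is a hypothetical alternative sketch, not part of the reviewed change:

import java.util.Locale;
import java.util.Optional;

public class OrgNameDemo {
    private static String getOrgName() {
        // Lower-case the current user name, falling back to "my_org" when unset.
        return Optional.ofNullable(System.getProperty("user.name"))
                .map(name -> name.toLowerCase(Locale.getDefault()))
                .orElse("my_org");
    }

    public static void main(String[] args) {
        System.out.println(getOrgName());
    }
}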
class PackagingInitTestCase extends IntegrationTestCase { private ServerInstance ballerinaServer; private String serverZipPath; private Path tempProjectDirectory; @BeforeClass() public void setUp() throws BallerinaTestException, IOException { tempProjectDirectory = Files.createTempDirectory("bal-test-integration-packaging-project-"); serverZipPath = System.getProperty(Constant.SYSTEM_PROP_SERVER_ZIP); } @Test(description = "Test creating a project with a main in a package") public void testInitWithMainInPackage() throws Exception { getNewInstanceOfBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("firstTestWithPackagesMain"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "m\n", "foo\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("tests").resolve("main_test.bal"))); getNewInstanceOfBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Path generatedBalx = projectPath.resolve("target").resolve("foo.balx"); Assert.assertTrue(Files.exists(generatedBalx)); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); runMainFunction(projectPath, "foo"); runMainFunction(projectPath, generatedBalx.toString()); } @Test(description = "Test creating a project with a service in a package") public void testInitWithServiceInPackage() throws Exception { getNewInstanceOfBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("firstTestWithPackagesService"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "s\n", "foo\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Path serviceBalPath = projectPath.resolve("foo").resolve("hello_service.bal"); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(serviceBalPath)); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("tests").resolve("hello_service_test.bal"))); getNewInstanceOfBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Path generatedBalx = projectPath.resolve("target").resolve("foo.balx"); Assert.assertTrue(Files.exists(generatedBalx)); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); runService(projectPath.resolve("foo").resolve("hello_service.bal")); runService(generatedBalx); } @Test(description = "Test creating a project with a service and main in different packages") public void testInitWithMainServiceInDiffPackage() throws Exception { getNewInstanceOfBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("secondTestWithPackages"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "m\n", "foo\n", "s\n", "bar\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); 
Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("main.bal"))); Path serviceBalPath = projectPath.resolve("bar").resolve("hello_service.bal"); Assert.assertTrue(Files.exists(serviceBalPath)); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("tests").resolve("main_test.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("bar").resolve("tests").resolve("hello_service_test.bal"))); getNewInstanceOfBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("foo.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("bar.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("bar").resolve("0.0.1").resolve("bar.zip"))); runMainFunction(projectPath, "foo"); runService(serviceBalPath); runMainFunction(projectPath, projectPath.resolve("target").resolve("foo.balx").toString()); runService(projectPath.resolve("target").resolve("bar.balx")); } @Test(description = "Test creating a project without going to interactive mode") public void testInitWithoutGoingToInteractiveMode() throws Exception { getNewInstanceOfBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("testWithoutPackage"); Files.createDirectories(projectPath); ballerinaServer.runMainWithClientOptions(new String[0], new String[0], getEnvVariables(), "init", projectPath.toString()); Path serviceBalPath = projectPath.resolve("hello_service.bal"); Assert.assertTrue(Files.exists(serviceBalPath)); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina"))); getNewInstanceOfBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Path generatedBalx = projectPath.resolve("target").resolve("hello_service.balx"); Assert.assertTrue(Files.exists(generatedBalx)); runService(serviceBalPath); runService(generatedBalx); } @Test(description = "Test creating a project with a main without a package") public void testInitWithoutPackage() throws Exception { getNewInstanceOfBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("testWithoutPackageForMain"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "m\n", "\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); getNewInstanceOfBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Path generatedBalx = projectPath.resolve("target").resolve("main.balx"); Assert.assertTrue(Files.exists(generatedBalx)); runMainFunction(projectPath, projectPath.resolve("main.bal").toString()); runMainFunction(projectPath, projectPath.resolve("target").resolve("main.balx").toString()); } @Test(description = "Test running init without doing any changes on an already existing project", dependsOnMethods = "testInitWithMainInPackage") public void 
testInitOnExistingProject() throws Exception { getNewInstanceOfBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("firstTestWithPackagesMain"); ballerinaServer.runMainWithClientOptions(new String[0], new String[0], getEnvVariables(), "init", projectPath.toString()); Path packagePath = projectPath.resolve("foo"); Assert.assertTrue(Files.exists(packagePath.resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(packagePath.resolve("tests").resolve("main_test.bal"))); getNewInstanceOfBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("foo.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); runMainFunction(projectPath, "foo"); runMainFunction(projectPath, projectPath.resolve("target").resolve("foo.balx").toString()); } @Test(description = "Test running init on an already existing project and create a new package", dependsOnMethods = "testInitWithMainInPackage") public void testInitOnExistingProjectWithNewPackage() throws Exception { getNewInstanceOfBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("firstTestWithPackagesMain"); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "m\n", "newpkg\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("newpkg").resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("newpkg").resolve("tests").resolve("main_test.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("tests").resolve("main_test.bal"))); getNewInstanceOfBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("newpkg.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("foo.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("newpkg").resolve("0.0.1").resolve("newpkg.zip"))); runMainFunction(projectPath, "newpkg"); runMainFunction(projectPath, projectPath.resolve("target").resolve("newpkg.balx").toString()); } @Test(description = "Test creating a project with invalid options") public void testInitWithInvalidOptions() throws Exception { getNewInstanceOfBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("testsWithoutPackage"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "123\n", "jkl\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina"))); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); } /** * Get new instance of the ballerina server. 
* * @throws BallerinaTestException */ private void getNewInstanceOfBallerinaServer() throws BallerinaTestException { ballerinaServer = new ServerInstance(serverZipPath); } /** * Run and test main function in project. * * @param projectPath path of the project * @param pkg package name or balx file path * @throws BallerinaTestException */ private void runMainFunction(Path projectPath, String pkg) throws BallerinaTestException { getNewInstanceOfBallerinaServer(); String[] clientArgsForRun = {"--sourceroot", projectPath.toString(), pkg}; LogLeecher logLeecher = new LogLeecher("Hello World!"); ballerinaServer.addLogLeecher(logLeecher); ballerinaServer.runMain(clientArgsForRun, getEnvVariables(), "run"); } /** * Run and test service in project. * * @param serviceBalPath path of the service bal file * @throws BallerinaTestException * @throws IOException */ private void runService(Path serviceBalPath) throws BallerinaTestException, IOException { ServerInstance ballerinaServerForService = ServerInstance.initBallerinaServer(); ballerinaServerForService.startBallerinaServer(serviceBalPath.toString()); HttpResponse response = HttpClientRequest.doGet(ballerinaServerForService.getServiceURLHttp("hello/sayHello")); Assert.assertEquals(response.getResponseCode(), 200, "Response code mismatched"); ballerinaServerForService.stopServer(); } /** * Get environment variables and add ballerina_home as an env variable for the tmp directory. * * @return env directory variable array */ private String[] getEnvVariables() { List<String> variables = new ArrayList<>(); Map<String, String> envVarMap = System.getenv(); envVarMap.forEach((key, value) -> variables.add(key + "=" + value)); return variables.toArray(new String[variables.size()]); } @AfterClass private void cleanup() throws Exception { deleteFiles(tempProjectDirectory); } /** * Delete files inside directories. * * @param dirPath directory path * @throws IOException thrown if an issue occurs */ private void deleteFiles(Path dirPath) throws IOException { Files.walk(dirPath) .sorted(Comparator.reverseOrder()) .forEach(path -> { try { Files.delete(path); } catch (IOException e) { Assert.fail(e.getMessage(), e); } }); } /** * Get org-name of user. * * @return org name */ }
class PackagingInitTestCase extends IntegrationTestCase { private String serverZipPath; private Path tempProjectDirectory; @BeforeClass() public void setUp() throws BallerinaTestException, IOException { tempProjectDirectory = Files.createTempDirectory("bal-test-integration-packaging-project-"); serverZipPath = System.getProperty(Constant.SYSTEM_PROP_SERVER_ZIP); } @Test(description = "Test creating a project with a main in a package") public void testInitWithMainInPackage() throws Exception { ServerInstance ballerinaServer = createNewBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("firstTestWithPackagesMain"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "m\n", "foo\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("tests").resolve("main_test.bal"))); ballerinaServer = createNewBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Path generatedBalx = projectPath.resolve("target").resolve("foo.balx"); Assert.assertTrue(Files.exists(generatedBalx)); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); runMainFunction(projectPath, "foo"); runMainFunction(projectPath, generatedBalx.toString()); } @Test(description = "Test creating a project with a service in a package") public void testInitWithServiceInPackage() throws Exception { ServerInstance ballerinaServer = createNewBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("firstTestWithPackagesService"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "s\n", "foo\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Path serviceBalPath = projectPath.resolve("foo").resolve("hello_service.bal"); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(serviceBalPath)); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("tests").resolve("hello_service_test.bal"))); ballerinaServer = createNewBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Path generatedBalx = projectPath.resolve("target").resolve("foo.balx"); Assert.assertTrue(Files.exists(generatedBalx)); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); runService(projectPath.resolve("foo").resolve("hello_service.bal")); runService(generatedBalx); } @Test(description = "Test creating a project with a service and main in different packages") public void testInitWithMainServiceInDiffPackage() throws Exception { ServerInstance ballerinaServer = createNewBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("secondTestWithPackages"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "m\n", "foo\n", "s\n", "bar\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, 
getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("main.bal"))); Path serviceBalPath = projectPath.resolve("bar").resolve("hello_service.bal"); Assert.assertTrue(Files.exists(serviceBalPath)); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("tests").resolve("main_test.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("bar").resolve("tests").resolve("hello_service_test.bal"))); ballerinaServer = createNewBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("foo.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("bar.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("bar").resolve("0.0.1").resolve("bar.zip"))); runMainFunction(projectPath, "foo"); runService(serviceBalPath); runMainFunction(projectPath, projectPath.resolve("target").resolve("foo.balx").toString()); runService(projectPath.resolve("target").resolve("bar.balx")); } @Test(description = "Test creating a project without going to interactive mode") public void testInitWithoutGoingToInteractiveMode() throws Exception { ServerInstance ballerinaServer = createNewBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("testWithoutPackage"); Files.createDirectories(projectPath); ballerinaServer.runMainWithClientOptions(new String[0], new String[0], getEnvVariables(), "init", projectPath.toString()); Path serviceBalPath = projectPath.resolve("hello_service.bal"); Assert.assertTrue(Files.exists(serviceBalPath)); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina"))); ballerinaServer = createNewBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Path generatedBalx = projectPath.resolve("target").resolve("hello_service.balx"); Assert.assertTrue(Files.exists(generatedBalx)); runService(serviceBalPath); runService(generatedBalx); } @Test(description = "Test creating a project with a main without a package") public void testInitWithoutPackage() throws Exception { ServerInstance ballerinaServer = createNewBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("testWithoutPackageForMain"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "m\n", "\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); ballerinaServer = createNewBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Path generatedBalx = projectPath.resolve("target").resolve("main.balx"); Assert.assertTrue(Files.exists(generatedBalx)); runMainFunction(projectPath, projectPath.resolve("main.bal").toString()); runMainFunction(projectPath, projectPath.resolve("target").resolve("main.balx").toString()); } @Test(description = "Test running init without 
doing any changes on an already existing project", dependsOnMethods = "testInitWithMainInPackage") public void testInitOnExistingProject() throws Exception { ServerInstance ballerinaServer = createNewBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("firstTestWithPackagesMain"); ballerinaServer.runMainWithClientOptions(new String[0], new String[0], getEnvVariables(), "init", projectPath.toString()); Path packagePath = projectPath.resolve("foo"); Assert.assertTrue(Files.exists(packagePath.resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); Assert.assertTrue(Files.exists(packagePath.resolve("tests").resolve("main_test.bal"))); ballerinaServer = createNewBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("foo.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); runMainFunction(projectPath, "foo"); runMainFunction(projectPath, projectPath.resolve("target").resolve("foo.balx").toString()); } @Test(description = "Test running init on an already existing project and create a new package", dependsOnMethods = "testInitWithMainInPackage") public void testInitOnExistingProjectWithNewPackage() throws Exception { ServerInstance ballerinaServer = createNewBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("firstTestWithPackagesMain"); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "m\n", "newpkg\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("newpkg").resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("newpkg").resolve("tests").resolve("main_test.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("main.bal"))); Assert.assertTrue(Files.exists(projectPath.resolve("foo").resolve("tests").resolve("main_test.bal"))); ballerinaServer = createNewBallerinaServer(); ballerinaServer.runMain(new String[0], getEnvVariables(), "build", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("newpkg.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve("target").resolve("foo.balx"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("foo").resolve("0.0.1").resolve("foo.zip"))); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina").resolve("repo").resolve(getOrgName()) .resolve("newpkg").resolve("0.0.1").resolve("newpkg.zip"))); runMainFunction(projectPath, "newpkg"); runMainFunction(projectPath, projectPath.resolve("target").resolve("newpkg.balx").toString()); } @Test(description = "Test creating a project with invalid options") public void testInitWithInvalidOptions() throws Exception { ServerInstance ballerinaServer = createNewBallerinaServer(); Path projectPath = tempProjectDirectory.resolve("testsWithoutPackage"); Files.createDirectories(projectPath); String[] clientArgsForInit = {"-i"}; String[] options = {"\n", "\n", "\n", "123\n", "jkl\n", "f\n"}; ballerinaServer.runMainWithClientOptions(clientArgsForInit, options, getEnvVariables(), "init", projectPath.toString()); Assert.assertTrue(Files.exists(projectPath.resolve(".ballerina"))); 
Assert.assertTrue(Files.exists(projectPath.resolve("Ballerina.toml"))); } /** * Get new instance of the ballerina server. * * @return new ballerina server instance * @throws BallerinaTestException */ private ServerInstance createNewBallerinaServer() throws BallerinaTestException { return new ServerInstance(serverZipPath); } /** * Run and test main function in project. * * @param projectPath path of the project * @param pkg package name or balx file path * @throws BallerinaTestException */ private void runMainFunction(Path projectPath, String pkg) throws BallerinaTestException { ServerInstance ballerinaServer = createNewBallerinaServer(); String[] clientArgsForRun = {"--sourceroot", projectPath.toString(), pkg}; LogLeecher logLeecher = new LogLeecher("Hello World!"); ballerinaServer.addLogLeecher(logLeecher); ballerinaServer.runMain(clientArgsForRun, getEnvVariables(), "run"); } /** * Run and test service in project. * * @param serviceBalPath path of the service bal file * @throws BallerinaTestException * @throws IOException */ private void runService(Path serviceBalPath) throws BallerinaTestException, IOException { ServerInstance ballerinaServerForService = ServerInstance.initBallerinaServer(); ballerinaServerForService.startBallerinaServer(serviceBalPath.toString()); HttpResponse response = HttpClientRequest.doGet(ballerinaServerForService.getServiceURLHttp("hello/sayHello")); Assert.assertEquals(response.getResponseCode(), 200, "Response code mismatched"); ballerinaServerForService.stopServer(); } /** * Get environment variables and add ballerina_home as an env variable for the tmp directory. * * @return env directory variable array */ private String[] getEnvVariables() { List<String> variables = new ArrayList<>(); Map<String, String> envVarMap = System.getenv(); envVarMap.forEach((key, value) -> variables.add(key + "=" + value)); return variables.toArray(new String[variables.size()]); } @AfterClass private void cleanup() throws Exception { deleteFiles(tempProjectDirectory); } /** * Delete files inside directories. * * @param dirPath directory path * @throws IOException thrown if an issue occurs */ private void deleteFiles(Path dirPath) throws IOException { Files.walk(dirPath) .sorted(Comparator.reverseOrder()) .forEach(path -> { try { Files.delete(path); } catch (IOException e) { Assert.fail(e.getMessage(), e); } }); } /** * Get org-name of user. * * @return org name */ }
Yes, unfortunately the open-source project I used for POM parsing only parses a BOM file (packaging set to POM). I will be removing that dependency completely due to some other issues; for example, it does not support adding comments to the POM file (which we need for version management in engsys).
static List<BomDependency> parsePomFileContent(Reader responseStream) { List<BomDependency> bomDependencies = new ArrayList<>(); ObjectMapper mapper = new XmlMapper(); try { HashMap<String, Object> value = mapper.readValue(responseStream, HashMap.class); Object packagingProp = value.getOrDefault("packaging", null); if(packagingProp != null && packagingProp.toString().equalsIgnoreCase("pom")) { return parsePomFileContent(responseStream); } HashMap<String, Object> dependenciesTag = (HashMap<String, Object>)value.getOrDefault("dependencies", null); if(dependenciesTag == null) { return null; } ArrayList<HashMap<String, Object>> dependencies = (ArrayList<HashMap<String, Object>>) dependenciesTag.getOrDefault("dependency", null); for(HashMap<String, Object> dependency: dependencies) { String groupId = (String) dependency.getOrDefault("groupId", null); String artifactId = (String) dependency.getOrDefault("artifactId", null); String version = (String) dependency.getOrDefault("version", null); String scope = (String) dependency.getOrDefault("scope", ScopeType.COMPILE.toString()); ScopeType scopeType = ScopeType.COMPILE; switch(scope) { case "test" : scopeType = ScopeType.TEST; break; default: scopeType = ScopeType.COMPILE; } bomDependencies.add(new BomDependency(groupId, artifactId, version, scopeType)); } } catch (IOException exception) { exception.printStackTrace(); } return bomDependencies.stream().distinct().collect(Collectors.toList()); }
ObjectMapper mapper = new XmlMapper();
static List<BomDependency> parsePomFileContent(Reader responseStream) { List<BomDependency> bomDependencies = new ArrayList<>(); ObjectMapper mapper = new XmlMapper(); mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); try { Model value = mapper.readValue(responseStream, Model.class); List<Dependency> dependencies = value.getDependencies(); if(dependencies == null) { return bomDependencies; } for(Dependency dependency : dependencies) { ScopeType scopeType = ScopeType.COMPILE; if("test".equals(dependency.getScope())) { scopeType = ScopeType.TEST; } bomDependencies.add(new BomDependency( dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion(), scopeType)); } } catch (IOException exception) { exception.printStackTrace(); } return bomDependencies.stream().distinct().collect(Collectors.toList()); }
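A minimal, self-contained sketch of the fixed approach above, assuming jackson-dataformat-xml and maven-model are on the classpath (the sample POM string is made up for illustration): binding the XML to Maven's Model avoids both the untyped HashMap casts and the accidental self-recursion in the original version.

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Model;

import java.io.StringReader;

public class PomParseDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical sample POM content, just enough to exercise the binding.
        String pom = "<project>"
                + "<dependencies><dependency>"
                + "<groupId>com.azure</groupId><artifactId>azure-core</artifactId>"
                + "<version>1.0.0</version><scope>test</scope>"
                + "</dependency></dependencies>"
                + "</project>";
        XmlMapper mapper = new XmlMapper();
        // Real POMs carry many elements Model does not declare; ignore them.
        mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
        Model model = mapper.readValue(new StringReader(pom), Model.class);
        for (Dependency d : model.getDependencies()) {
            System.out.println(d.getGroupId() + ":" + d.getArtifactId()
                    + ":" + d.getVersion() + " scope=" + d.getScope());
        }
    }
}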
class Utils { public static final String COMMANDLINE_INPUTDIRECTORY = "inputdir"; public static final String COMMANDLINE_OUTPUTDIRECTORY = "outputdir"; public static final String EMPTY_STRING = ""; public static final String COMMANDLINE_INPUTFILE = "inputfile"; public static final String COMMANDLINE_OUTPUTFILE = "outputfile"; public static final String COMMANDLINE_POMFILE = "pomfile"; public static final String COMMANDLINE_OVERRIDDEN_INPUTDEPENDENCIES_FILE = "inputdependenciesfile"; public static final String COMMANDLINE_REPORTFILE = "reportfile"; public static final String COMMANDLINE_MODE = "mode"; public static final String ANALYZE_MODE = "analyze"; public static final String GENERATE_MODE = "generate"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String BASE_AZURE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient(); public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]"); public static final Pattern STRING_SPLIT_BY_COLON = Pattern.compile("[:]"); public static final Pattern INPUT_DEPENDENCY_PATTERN = Pattern.compile("(.+);(.*)"); public static final String PROJECT_VERSION = "project.version"; public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final HashSet<String> IGNORE_CONFLICT_LIST = new HashSet<>(/*Arrays.asList( "slf4j-api" )*/); public static final String POM_TYPE = "pom"; private static Logger logger = LoggerFactory.getLogger(Utils.class); static void validateNotNullOrEmpty(String argValue, String argName) { if(argValue == null || argValue.isEmpty()) { throw new NullPointerException(String.format("%s can't be null", argName)); } } static void validateNotNullOrEmpty(String[] argValue, String argName) { if(Arrays.stream(argValue).anyMatch(value -> value == null || value.isEmpty())) { throw new IllegalArgumentException(String.format("%s can't be null", argName)); } } static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } static boolean isPublishedArtifact(BomDependency dependency) { try { return getResolvedArtifact(dependency) != null; } catch (Exception ex) { logger.error(ex.toString()); } return false; } static MavenResolvedArtifact getResolvedArtifact(MavenDependency dependency) { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return mavenResolvedArtifact; } static void validateNull(String argValue, String argName) { if(argValue != null) { throw new IllegalArgumentException(String.format("%s should be null", argName)); } } static void validateValues(String argName, String argValue, String ... 
expectedValues) { if(Arrays.stream(expectedValues).noneMatch(a -> a.equals(argValue))) { throw new IllegalArgumentException(String.format("%s must match %s", argName, Arrays.toString(expectedValues))); } } static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) { List<BomDependency> allResolvedDependencies = new ArrayList<>(); for (Dependency dependency : dependencies) { List<BomDependency> resolvedDependencies = getPomFileContent(dependency); if (resolvedDependencies != null) { allResolvedDependencies.addAll(resolvedDependencies); } } return allResolvedDependencies; } static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { try (InputStreamReader reader = new InputStreamReader(response.body())) { return Utils.parsePomFileContent(reader); } catch (IOException ex) { logger.error("Failed to read contents for {}", dependency.toString()); } } return null; }).join(); } static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) { return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId()); } static List<BomDependency> parsePomFileContent(String fileName) { try (FileReader reader = new FileReader(fileName)) { return parsePomFileContent(reader); } catch (IOException exception) { logger.error("Failed to read the contents of the pom file: {}", fileName); } return new ArrayList<>(); } static List<BomDependency> parseBomFileContent(Reader responseStream) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); if(version.equals(PROJECT_VERSION)) { version = model.getVersion(); } } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
class Utils { public static final String COMMANDLINE_INPUTDIRECTORY = "inputdir"; public static final String COMMANDLINE_OUTPUTDIRECTORY = "outputdir"; public static final String COMMANDLINE_MODE = "mode"; public static final String ANALYZE_MODE = "analyze"; public static final String GENERATE_MODE = "generate"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String BASE_AZURE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient(); public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]"); public static final Pattern STRING_SPLIT_BY_COLON = Pattern.compile("[:]"); public static final Pattern INPUT_DEPENDENCY_PATTERN = Pattern.compile("(.+);(.*)"); public static final String PROJECT_VERSION = "project.version"; public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final HashSet<String> IGNORE_CONFLICT_LIST = new HashSet<>(/*Arrays.asList( "slf4j-api" )*/); public static final String POM_TYPE = "pom"; private static Logger logger = LoggerFactory.getLogger(Utils.class); static void validateNotNullOrEmpty(String argValue, String argName) { if(argValue == null || argValue.isEmpty()) { throw new NullPointerException(String.format("%s can't be null", argName)); } } static void validateNotNullOrEmpty(String[] argValue, String argName) { if(Arrays.stream(argValue).anyMatch(value -> value == null || value.isEmpty())) { throw new IllegalArgumentException(String.format("%s can't be null", argName)); } } static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } static boolean isPublishedArtifact(BomDependency dependency) { try { return getResolvedArtifact(dependency) != null; } catch (Exception ex) { logger.error(ex.toString()); } return false; } static MavenResolvedArtifact getResolvedArtifact(MavenDependency dependency) { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return mavenResolvedArtifact; } static void validateNull(String argValue, String argName) { if(argValue != null) { throw new IllegalArgumentException(String.format("%s should be null", argName)); } } static void validateValues(String argName, String argValue, String ... 
expectedValues) { if(Arrays.stream(expectedValues).noneMatch(a -> a.equals(argValue))) { throw new IllegalArgumentException(String.format("%s must match %s", argName, Arrays.toString(expectedValues))); } } static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) { List<BomDependency> allResolvedDependencies = new ArrayList<>(); for (Dependency dependency : dependencies) { List<BomDependency> resolvedDependencies = getPomFileContent(dependency); if (resolvedDependencies != null) { allResolvedDependencies.addAll(resolvedDependencies); } } return allResolvedDependencies; } static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { try (InputStreamReader reader = new InputStreamReader(response.body())) { return Utils.parsePomFileContent(reader); } catch (IOException ex) { logger.error("Failed to read contents for {}", dependency.toString()); } } return null; }).join(); } static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) { return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId()); } static List<BomDependency> parsePomFileContent(String fileName) { try (FileReader reader = new FileReader(fileName)) { return parsePomFileContent(reader); } catch (IOException exception) { logger.error("Failed to read the contents of the pom file: {}", fileName); } return new ArrayList<>(); } static List<BomDependency> parseBomFileContent(Reader responseStream) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); if(version.equals(PROJECT_VERSION)) { version = model.getVersion(); } } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
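For context on the `Utils` record above, here is a minimal, hypothetical sketch of how its line-parsing regexes are meant to be applied. Only the two `Pattern` definitions come from the code; the input strings and the meaning of the captured groups (artifact id, then two version fields) are assumptions for illustration:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PatternSketch {
    // Copied from the Utils class above.
    private static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)");
    private static final Pattern INPUT_DEPENDENCY_PATTERN = Pattern.compile("(.+);(.*)");

    public static void main(String[] args) {
        // Hypothetical versioning line: "groupId:artifactId;versionA;versionB".
        Matcher sdk = SDK_DEPENDENCY_PATTERN.matcher("com.azure:azure-core;1.30.0;1.31.0-beta.1");
        if (sdk.matches()) {
            // Backtracking splits on the two semicolons:
            // group(1)=azure-core, group(2)=1.30.0, group(3)=1.31.0-beta.1
            System.out.printf("artifact=%s a=%s b=%s%n", sdk.group(1), sdk.group(2), sdk.group(3));
        }

        // Hypothetical override line: "artifactId;version" (the version part may be empty).
        Matcher input = INPUT_DEPENDENCY_PATTERN.matcher("azure-core;1.30.0");
        if (input.matches()) {
            System.out.printf("artifact=%s version=%s%n", input.group(1), input.group(2));
        }
    }
}
```

Note that `com.azure:` in the pattern leaves the dots unescaped, so `.` matches any character; for these well-formed inputs the result is the same.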
The recording for this test turned out empty because we are not waiting for the async operation to complete.
```suggestion
}).verifyComplete();
```
public void importPemCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) throws IOException { createCertificateAsyncClient(httpClient, serviceVersion); importPemCertificateRunner((importCertificateOptions) -> { StepVerifier.create(certificateAsyncClient.importCertificate(importCertificateOptions)) .assertNext(importedCertificate -> { assertEquals(importCertificateOptions.isEnabled(), importedCertificate.getProperties().isEnabled()); assertEquals(CertificateContentType.PEM, importedCertificate.getPolicy().getContentType()); }); }); }
});
public void importPemCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) throws IOException { createCertificateAsyncClient(httpClient, serviceVersion); importPemCertificateRunner((importCertificateOptions) -> { StepVerifier.create(certificateAsyncClient.importCertificate(importCertificateOptions)) .assertNext(importedCertificate -> { assertEquals(importCertificateOptions.isEnabled(), importedCertificate.getProperties().isEnabled()); assertEquals(CertificateContentType.PEM, importedCertificate.getPolicy().getContentType()); }).verifyComplete(); }); }
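The record above turns on a subtle Reactor behavior: `StepVerifier.create(...)` only builds a verification scenario, and nothing subscribes to the publisher until a terminal `verify*` call runs. Without `.verifyComplete()`, the `importCertificate` request is never actually sent, which is why the recording came out empty. A minimal self-contained sketch of the difference (the `Mono` here is a made-up stand-in for the service call):

```java
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

public class VerifyCompleteSketch {
    public static void main(String[] args) {
        // The side effect stands in for the HTTP call that should be recorded.
        Mono<String> operation = Mono.fromSupplier(() -> {
            System.out.println("operation executed");
            return "imported";
        });

        // Builds the scenario but never subscribes: nothing prints, no assertion runs.
        // This is the shape of the buggy test before the fix.
        StepVerifier.create(operation)
            .assertNext(value -> { /* never reached */ });

        // verifyComplete() subscribes, blocks until onComplete, and rethrows
        // any assertion failure -- so the request really executes.
        StepVerifier.create(operation)
            .expectNext("imported")
            .verifyComplete();
    }
}
```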
class CertificateAsyncClientTest extends CertificateClientTestBase { private CertificateAsyncClient certificateAsyncClient; @Override protected void beforeTest() { beforeTestSetup(); } private void createCertificateAsyncClient(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion, null); } private void createCertificateAsyncClient(HttpClient httpClient, CertificateServiceVersion serviceVersion, String testTenantId) { HttpPipeline httpPipeline = getHttpPipeline(httpClient, testTenantId); certificateAsyncClient = spy(new CertificateClientBuilder() .vaultUrl(getEndpoint()) .pipeline(httpPipeline) .serviceVersion(serviceVersion) .buildAsyncClient()); if (interceptorManager.isPlaybackMode()) { when(certificateAsyncClient.getDefaultPollingInterval()).thenReturn(Duration.ofMillis(10)); } } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); createCertificateRunner((certificatePolicy) -> { String certName = testResourceNamer.randomName("testCert", 25); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, certificatePolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expected -> { assertEquals(certName, expected.getName()); assertNotNull(expected.getProperties().getCreatedOn()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificateWithMultipleTenants(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion, testResourceNamer.randomUuid()); createCertificateRunner((certificatePolicy) -> { String certName = testResourceNamer.randomName("testCert", 20); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, certificatePolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expected -> { assertEquals(certName, expected.getName()); assertNotNull(expected.getProperties().getCreatedOn()); }).verifyComplete(); }); KeyVaultCredentialPolicy.clearCache(); createCertificateRunner((certificatePolicy) -> { String certName = testResourceNamer.randomName("testCert", 20); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, certificatePolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expected -> { assertEquals(certName, expected.getName()); assertNotNull(expected.getProperties().getCreatedOn()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificateEmptyName(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginCreateCertificate("", 
CertificatePolicy.getDefault())) .verifyErrorSatisfies(e -> assertResponseException(e, HttpResponseException.class, HttpURLConnection.HTTP_BAD_METHOD)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificateNullPolicy(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginCreateCertificate(testResourceNamer.randomName("tempCert", 20), null)) .verifyErrorSatisfies(e -> assertEquals(NullPointerException.class, e.getClass())); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificateNull(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginCreateCertificate(null, null)) .verifyErrorSatisfies(e -> assertEquals(NullPointerException.class, e.getClass())); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); updateCertificateRunner((originalTags, updatedTags) -> { String certName = testResourceNamer.randomName("testCert", 20); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault(), true, originalTags); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(cert -> certificateAsyncClient .updateCertificateProperties(cert.getProperties().setTags(updatedTags)))) .assertNext(cert -> { validateMapResponse(updatedTags, cert.getProperties().getTags()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateDisabledCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); updateDisabledCertificateRunner((originalTags, updatedTags) -> { String certName = testResourceNamer.randomName("testCert", 20); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault(), false, originalTags); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(cert -> certificateAsyncClient .updateCertificateProperties(cert.getProperties().setTags(updatedTags)))) .assertNext(cert -> { validateMapResponse(updatedTags, cert.getProperties().getTags()); assertFalse(cert.getProperties().isEnabled()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificateRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == 
LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expectedCert -> certificateAsyncClient.getCertificate(certificateName) .map(returnedCert -> validatePolicy(expectedCert.getPolicy(), returnedCert.getPolicy()))) .verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateSpecificVersion(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificateSpecificVersionRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expectedCert -> certificateAsyncClient.getCertificateVersion(certificateName, expectedCert.getProperties().getVersion()) .map(returnedCert -> validateCertificate(expectedCert, returnedCert))) .verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.getCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); deleteCertificateRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(ignored -> certificateAsyncClient.beginDeleteCertificate(certificateName) .last().flatMap(asyncPollResponse -> Mono.defer(() -> Mono.just(asyncPollResponse.getValue()))) )) .assertNext(expectedCert -> { assertNotNull(expectedCert.getDeletedOn()); assertNotNull(expectedCert.getRecoveryId()); assertNotNull(expectedCert.getScheduledPurgeDate()); assertEquals(certificateName, expectedCert.getName()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginDeleteCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getDeletedCertificateRunner((certificateName) -> { CertificatePolicy 
initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(ignored -> certificateAsyncClient.beginDeleteCertificate(certificateName) .last().flatMap(asyncPollResponse -> Mono.just(asyncPollResponse.getValue())))) .assertNext(deletedCertificate -> { assertNotNull(deletedCertificate.getDeletedOn()); assertNotNull(deletedCertificate.getRecoveryId()); assertNotNull(deletedCertificate.getScheduledPurgeDate()); assertEquals(certificateName, deletedCertificate.getName()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.getDeletedCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); recoverDeletedKeyRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); AtomicReference<KeyVaultCertificateWithPolicy> createdCertificate = new AtomicReference<>(); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(keyVaultCertificateWithPolicy -> { createdCertificate.set(keyVaultCertificateWithPolicy); return certificateAsyncClient.beginDeleteCertificate(certificateName) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(asyncPollResponse -> Mono.just(asyncPollResponse.getValue())); }) .flatMap(ignored -> certificateAsyncClient.beginRecoverDeletedCertificate(certificateName) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(certAsyncResponse -> Mono.just(certAsyncResponse.getValue())))) .assertNext(recoveredCert -> { assertEquals(certificateName, recoveredCert.getName()); validateCertificate(createdCertificate.get(), recoveredCert); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginRecoverDeletedCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); 
backupCertificateRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(ignored -> certificateAsyncClient.backupCertificate(certificateName))) .assertNext(backupBytes -> { assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.backupCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); restoreCertificateRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); AtomicReference<KeyVaultCertificateWithPolicy> createdCertificate = new AtomicReference<>(); AtomicReference<Byte[]> backup = new AtomicReference<>(); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(keyVaultCertificateWithPolicy -> { createdCertificate.set(keyVaultCertificateWithPolicy); return certificateAsyncClient.backupCertificate(certificateName) .flatMap(backupBytes -> { Byte[] bytes = new Byte[backupBytes.length]; int i = 0; for (Byte bt : backupBytes) { bytes[i] = bt; i++; } backup.set(bytes); return Mono.just(backupBytes); }); })) .assertNext(backupBytes -> { assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); }).verifyComplete(); StepVerifier.create(certificateAsyncClient.beginDeleteCertificate(certificateName) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().then(Mono.defer(() -> certificateAsyncClient.purgeDeletedCertificate(certificateName))) .then(Mono.just("complete"))) .assertNext(input -> assertEquals("complete", input)) .verifyComplete(); sleepInRecordMode(40000); StepVerifier.create(Mono.defer(() -> { byte[] backupBytes = new byte[backup.get().length]; for (int i = 0; i < backup.get().length; i++) { backupBytes[i] = backup.get()[i]; } return certificateAsyncClient.restoreCertificateBackup(backupBytes); })).assertNext(restoredCertificate -> { assertEquals(certificateName, restoredCertificate.getName()); validatePolicy(restoredCertificate.getPolicy(), createdCertificate.get().getPolicy()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateOperation(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificateOperationRunner((certificateName) -> { 
PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, setupPolicy()); AtomicReference<KeyVaultCertificateWithPolicy> expectedCert = new AtomicReference<>(); StepVerifier.create( certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(keyVaultCertificateWithPolicy -> { expectedCert.set(keyVaultCertificateWithPolicy); return certificateAsyncClient.getCertificateOperation(certificateName) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult); })) .assertNext(retrievedCert -> { validateCertificate(expectedCert.get(), retrievedCert); validatePolicy(expectedCert.get().getPolicy(), retrievedCert.getPolicy()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void cancelCertificateOperation(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); cancelCertificateOperationRunner((certName) -> { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()); StepVerifier.create(certPoller.takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.IN_PROGRESS) .last().flatMap(AsyncPollResponse::cancelOperation)) .assertNext(certificateOperation -> { assertTrue(certificateOperation.getCancellationRequested()); }).verifyComplete(); StepVerifier.create(certPoller.takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.USER_CANCELLED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(certificate -> { assertFalse(certificate.getProperties().isEnabled()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificateOperation(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); deleteCertificateOperationRunner((certificateName) -> { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, CertificatePolicy.getDefault()); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(ignored -> certificateAsyncClient.deleteCertificateOperation(certificateName))) .assertNext(certificateOperation -> { assertEquals("completed", certificateOperation.getStatus()); }).verifyComplete(); StepVerifier.create(certificateAsyncClient.deleteCertificateOperation(certificateName)) .verifyErrorSatisfies(e -> { assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); }); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificatePolicy(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificatePolicyRunner((certificateName) -> { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, setupPolicy()); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == 
LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(certificate -> { validatePolicy(setupPolicy(), certificate.getPolicy()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateCertificatePolicy(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); updateCertificatePolicyRunner((certificateName) -> { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, setupPolicy()); AtomicReference<KeyVaultCertificateWithPolicy> createdCert = new AtomicReference<>(); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(keyVaultCertificateWithPolicy -> { keyVaultCertificateWithPolicy.getPolicy().setExportable(false); createdCert.set(keyVaultCertificateWithPolicy); return certificateAsyncClient.updateCertificatePolicy(certificateName, keyVaultCertificateWithPolicy.getPolicy()); })) .assertNext(certificatePolicy -> validatePolicy(createdCert.get().getPolicy(), certificatePolicy)).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreCertificateFromMalformedBackup(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); byte[] keyBackupBytes = "non-existing".getBytes(); StepVerifier.create(certificateAsyncClient.restoreCertificateBackup(keyBackupBytes)) .verifyErrorSatisfies(e -> { assertResponseException(e, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listCertificates(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); listCertificatesRunner((certificatesToList) -> { HashSet<String> certificates = new HashSet<>(certificatesToList); StepVerifier.create( Flux.fromIterable(certificates) .map(certName -> certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED).last()) .last().map(ignored -> certificateAsyncClient.listPropertiesOfCertificates() .map(certificate -> { certificates.remove(certificate.getName()); return Mono.empty(); }))) .assertNext(ignore -> { assertEquals(0, certificates.size()); }); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listPropertiesOfCertificates(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); listPropertiesOfCertificatesRunner((certificatesToList) -> { HashSet<String> certificates = new HashSet<>(certificatesToList); StepVerifier.create( Flux.fromIterable(certificates) .map(certName -> certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED).last()) .last().map(ignored -> certificateAsyncClient.listPropertiesOfCertificates(false) .map(certificate -> { certificates.remove(certificate.getName()); return Mono.empty(); }))) .assertNext(ignore -> { 
assertEquals(0, certificates.size()); }); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createIssuer(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); createIssuerRunner((issuer) -> { StepVerifier.create(certificateAsyncClient.createIssuer(issuer)) .assertNext(createdIssuer -> { assertTrue(issuerCreatedCorrectly(issuer, createdIssuer)); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createIssuerEmptyName(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.createIssuer(new CertificateIssuer("", ""))) .verifyErrorSatisfies(e -> assertResponseException(e, HttpResponseException.class, HttpURLConnection.HTTP_BAD_METHOD)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createIssuerNullProvider(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.createIssuer(new CertificateIssuer("", null))) .verifyErrorSatisfies(e -> assertResponseException(e, HttpResponseException.class, HttpURLConnection.HTTP_BAD_METHOD)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createIssuerNull(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.createIssuer(null)) .verifyErrorSatisfies(e -> assertEquals(NullPointerException.class, e.getClass())); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateIssuer(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificateIssuerRunner((issuer) -> { AtomicReference<CertificateIssuer> certificateIssuer = new AtomicReference<>(); StepVerifier.create(certificateAsyncClient.createIssuer(issuer) .flatMap(createdIssuer -> { certificateIssuer.set(createdIssuer); return certificateAsyncClient.getIssuer(issuer.getName()); })) .assertNext(retrievedIssuer -> { assertTrue(issuerCreatedCorrectly(certificateIssuer.get(), retrievedIssuer)); }); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateIssuerNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.backupCertificate("non-existing")) .verifyErrorSatisfies(e -> { assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificateIssuer(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); deleteCertificateIssuerRunner((issuer) -> { AtomicReference<CertificateIssuer> createdIssuer = new AtomicReference<>(); StepVerifier.create(certificateAsyncClient.createIssuer(issuer) .flatMap(certificateIssuer -> { createdIssuer.set(certificateIssuer); return certificateAsyncClient.deleteIssuer(issuer.getName()); })) 
.assertNext(deletedIssuer -> { assertTrue(issuerCreatedCorrectly(createdIssuer.get(), deletedIssuer)); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificateIssuerNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.backupCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listCertificateIssuers(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); listCertificateIssuersRunner((certificateIssuers) -> { HashMap<String, CertificateIssuer> certificateIssuersToList = new HashMap<>(certificateIssuers); List<IssuerProperties> output = new ArrayList<>(); StepVerifier.create(Flux.fromIterable(certificateIssuers.values()) .flatMap(issuer -> certificateAsyncClient.createIssuerWithResponse(issuer)).last().map(ignored -> certificateAsyncClient.listPropertiesOfIssuers() .map(issuerProperties -> { output.add(issuerProperties); return Mono.empty(); }))) .assertNext(ignore -> assertEquals(certificateIssuersToList.size(), output.size())); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateIssuer(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); updateIssuerRunner((issuerToCreate, issuerToUpdate) -> { StepVerifier.create(certificateAsyncClient.createIssuer(issuerToCreate) .flatMap(createdIssuer -> certificateAsyncClient.updateIssuer(issuerToUpdate))) .assertNext(updatedIssuer -> assertTrue(issuerUpdatedCorrectly(issuerToCreate, updatedIssuer))).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public void setContacts(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); List<CertificateContact> contacts = Arrays.asList(setupContact()); StepVerifier.create(certificateAsyncClient.setContacts(contacts)) .assertNext(contact -> validateContact(setupContact(), contact)) .verifyComplete(); StepVerifier.create(certificateAsyncClient.deleteContacts().then(Mono.just("complete"))) .assertNext(input -> assertEquals("complete", input)).verifyComplete(); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public void listContacts(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); List<CertificateContact> contacts = Arrays.asList(setupContact()); StepVerifier.create(certificateAsyncClient.setContacts(contacts)) .assertNext(contact -> validateContact(setupContact(), contact)) .verifyComplete(); sleepInRecordMode(6000); StepVerifier.create(certificateAsyncClient.listContacts()) .assertNext(contact -> validateContact(setupContact(), contact)) .verifyComplete(); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public void deleteContacts(HttpClient httpClient, 
CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); List<CertificateContact> contacts = Arrays.asList(setupContact()); StepVerifier.create(certificateAsyncClient.setContacts(contacts)) .assertNext(contact -> validateContact(setupContact(), contact)) .verifyComplete(); StepVerifier.create(certificateAsyncClient.deleteContacts()) .assertNext(contact -> { validateContact(setupContact(), contact); }).verifyComplete(); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateOperationNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.getCertificateOperation("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificatePolicyNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.getCertificatePolicy("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listCertificateVersions(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); String certName = testResourceNamer.randomName("testListCertVersion", 25); int versionsToCreate = 5; for (int i = 0; i < versionsToCreate; i++) { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult).then(Mono.just("complete"))).assertNext(input -> assertEquals("complete", input)).verifyComplete(); } AtomicInteger createdVersions = new AtomicInteger(); StepVerifier.create(certificateAsyncClient.listPropertiesOfCertificateVersions(certName) .map(certificateProperties -> { createdVersions.getAndIncrement(); return Mono.just("complete"); }).last()).assertNext(ignored -> assertEquals(versionsToCreate, createdVersions.get())).verifyComplete(); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listDeletedCertificates(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); if (interceptorManager.isLiveMode()) { return; } listDeletedCertificatesRunner((certificates) -> { HashSet<String> certificatesToDelete = new HashSet<>(certificates); for (String certName : certificatesToDelete) { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED).last() .then(Mono.just("complete"))).assertNext(input -> assertEquals("complete", input)).verifyComplete(); } for (String certName : certificates) { PollerFlux<DeletedCertificate, Void> poller = 
certificateAsyncClient.beginDeleteCertificate(certName); StepVerifier.create(poller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last()).assertNext(asyncPollResponse -> assertNotNull(asyncPollResponse.getValue())).verifyComplete(); } sleepInRecordMode(4000); StepVerifier.create(certificateAsyncClient.listDeletedCertificates() .map(deletedCertificate -> { certificatesToDelete.remove(deletedCertificate.getName()); return Mono.just("complete"); }).last()) .assertNext(ignored -> { assertEquals(0, certificatesToDelete.size()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void importCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); importCertificateRunner((importCertificateOptions) -> { StepVerifier.create(certificateAsyncClient.importCertificate(importCertificateOptions)) .assertNext(importedCertificate -> { assertTrue(toHexString(importedCertificate.getProperties().getX509Thumbprint()) .equalsIgnoreCase("7cb8b7539d87ba7215357b9b9049dff2d3fa59ba")); assertEquals(importCertificateOptions.isEnabled(), importedCertificate.getProperties().isEnabled()); X509Certificate x509Certificate = null; try { x509Certificate = loadCerToX509Certificate(importedCertificate); } catch (CertificateException | IOException e) { e.printStackTrace(); fail(); } assertEquals("CN=KeyVaultTest", x509Certificate.getSubjectX500Principal().getName()); assertEquals("CN=Root Agency", x509Certificate.getIssuerX500Principal().getName()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public void mergeCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.mergeCertificate( new MergeCertificateOptions(testResourceNamer.randomName("testCert", 20), Arrays.asList("test".getBytes())))) .verifyErrorSatisfies(e -> assertResponseException(e, HttpResponseException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") }
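Nearly every test in this class uses the same long-running-operation idiom: poll until the operation reports a terminal status, take the last poll response, then fetch the final resource. A hypothetical stand-alone sketch of that `takeUntil(...).last().flatMap(...)` chain, using plain Reactor types instead of azure-core's `PollerFlux`/`AsyncPollResponse` (the `PollResponse` record and its data are invented for illustration):

```java
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

enum Status { IN_PROGRESS, SUCCESSFULLY_COMPLETED }

// Minimal stand-in for azure-core's AsyncPollResponse.
record PollResponse(Status status, String result) {
    Mono<String> getFinalResult() { return Mono.just(result); }
}

public class PollingIdiomSketch {
    public static void main(String[] args) {
        Flux<PollResponse> poller = Flux.just(
            new PollResponse(Status.IN_PROGRESS, null),
            new PollResponse(Status.IN_PROGRESS, null),
            new PollResponse(Status.SUCCESSFULLY_COMPLETED, "certificate"));

        poller
            .takeUntil(pr -> pr.status() == Status.SUCCESSFULLY_COMPLETED) // emit up to and including the terminal response
            .last()                                                        // Mono of that terminal response
            .flatMap(PollResponse::getFinalResult)                         // resolve the final resource
            .subscribe(System.out::println);                               // prints "certificate"
    }
}
```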
class CertificateAsyncClientTest extends CertificateClientTestBase { private CertificateAsyncClient certificateAsyncClient; @Override protected void beforeTest() { beforeTestSetup(); } private void createCertificateAsyncClient(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion, null); } private void createCertificateAsyncClient(HttpClient httpClient, CertificateServiceVersion serviceVersion, String testTenantId) { HttpPipeline httpPipeline = getHttpPipeline(httpClient, testTenantId); certificateAsyncClient = spy(new CertificateClientBuilder() .vaultUrl(getEndpoint()) .pipeline(httpPipeline) .serviceVersion(serviceVersion) .buildAsyncClient()); if (interceptorManager.isPlaybackMode()) { when(certificateAsyncClient.getDefaultPollingInterval()).thenReturn(Duration.ofMillis(10)); } } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); createCertificateRunner((certificatePolicy) -> { String certName = testResourceNamer.randomName("testCert", 25); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, certificatePolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expected -> { assertEquals(certName, expected.getName()); assertNotNull(expected.getProperties().getCreatedOn()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificateWithMultipleTenants(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion, testResourceNamer.randomUuid()); createCertificateRunner((certificatePolicy) -> { String certName = testResourceNamer.randomName("testCert", 20); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, certificatePolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expected -> { assertEquals(certName, expected.getName()); assertNotNull(expected.getProperties().getCreatedOn()); }).verifyComplete(); }); KeyVaultCredentialPolicy.clearCache(); createCertificateRunner((certificatePolicy) -> { String certName = testResourceNamer.randomName("testCert", 20); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, certificatePolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expected -> { assertEquals(certName, expected.getName()); assertNotNull(expected.getProperties().getCreatedOn()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificateEmptyName(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginCreateCertificate("", 
CertificatePolicy.getDefault())) .verifyErrorSatisfies(e -> assertResponseException(e, HttpResponseException.class, HttpURLConnection.HTTP_BAD_METHOD)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificateNullPolicy(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginCreateCertificate(testResourceNamer.randomName("tempCert", 20), null)) .verifyErrorSatisfies(e -> assertEquals(NullPointerException.class, e.getClass())); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createCertificateNull(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginCreateCertificate(null, null)) .verifyErrorSatisfies(e -> assertEquals(NullPointerException.class, e.getClass())); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); updateCertificateRunner((originalTags, updatedTags) -> { String certName = testResourceNamer.randomName("testCert", 20); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault(), true, originalTags); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(cert -> certificateAsyncClient .updateCertificateProperties(cert.getProperties().setTags(updatedTags)))) .assertNext(cert -> { validateMapResponse(updatedTags, cert.getProperties().getTags()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateDisabledCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); updateDisabledCertificateRunner((originalTags, updatedTags) -> { String certName = testResourceNamer.randomName("testCert", 20); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault(), false, originalTags); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(cert -> certificateAsyncClient .updateCertificateProperties(cert.getProperties().setTags(updatedTags)))) .assertNext(cert -> { validateMapResponse(updatedTags, cert.getProperties().getTags()); assertFalse(cert.getProperties().isEnabled()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificateRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == 
LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expectedCert -> certificateAsyncClient.getCertificate(certificateName) .map(returnedCert -> validatePolicy(expectedCert.getPolicy(), returnedCert.getPolicy()))) .verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateSpecificVersion(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificateSpecificVersionRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(expectedCert -> certificateAsyncClient.getCertificateVersion(certificateName, expectedCert.getProperties().getVersion()) .map(returnedCert -> validateCertificate(expectedCert, returnedCert))) .verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.getCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); deleteCertificateRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(ignored -> certificateAsyncClient.beginDeleteCertificate(certificateName) .last().flatMap(asyncPollResponse -> Mono.defer(() -> Mono.just(asyncPollResponse.getValue()))) )) .assertNext(expectedCert -> { assertNotNull(expectedCert.getDeletedOn()); assertNotNull(expectedCert.getRecoveryId()); assertNotNull(expectedCert.getScheduledPurgeDate()); assertEquals(certificateName, expectedCert.getName()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginDeleteCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getDeletedCertificateRunner((certificateName) -> { CertificatePolicy 
initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(ignored -> certificateAsyncClient.beginDeleteCertificate(certificateName) .last().flatMap(asyncPollResponse -> Mono.just(asyncPollResponse.getValue())))) .assertNext(deletedCertificate -> { assertNotNull(deletedCertificate.getDeletedOn()); assertNotNull(deletedCertificate.getRecoveryId()); assertNotNull(deletedCertificate.getScheduledPurgeDate()); assertEquals(certificateName, deletedCertificate.getName()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.getDeletedCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); recoverDeletedKeyRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); AtomicReference<KeyVaultCertificateWithPolicy> createdCertificate = new AtomicReference<>(); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(keyVaultCertificateWithPolicy -> { createdCertificate.set(keyVaultCertificateWithPolicy); return certificateAsyncClient.beginDeleteCertificate(certificateName) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(asyncPollResponse -> Mono.just(asyncPollResponse.getValue())); }) .flatMap(ignored -> certificateAsyncClient.beginRecoverDeletedCertificate(certificateName) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(certAsyncResponse -> Mono.just(certAsyncResponse.getValue())))) .assertNext(recoveredCert -> { assertEquals(certificateName, recoveredCert.getName()); validateCertificate(createdCertificate.get(), recoveredCert); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.beginRecoverDeletedCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); 
backupCertificateRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(ignored -> certificateAsyncClient.backupCertificate(certificateName))) .assertNext(backupBytes -> { assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.backupCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); restoreCertificateRunner((certificateName) -> { CertificatePolicy initialPolicy = setupPolicy(); PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, initialPolicy); AtomicReference<KeyVaultCertificateWithPolicy> createdCertificate = new AtomicReference<>(); AtomicReference<Byte[]> backup = new AtomicReference<>(); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(keyVaultCertificateWithPolicy -> { createdCertificate.set(keyVaultCertificateWithPolicy); return certificateAsyncClient.backupCertificate(certificateName) .flatMap(backupBytes -> { Byte[] bytes = new Byte[backupBytes.length]; int i = 0; for (Byte bt : backupBytes) { bytes[i] = bt; i++; } backup.set(bytes); return Mono.just(backupBytes); }); })) .assertNext(backupBytes -> { assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); }).verifyComplete(); StepVerifier.create(certificateAsyncClient.beginDeleteCertificate(certificateName) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().then(Mono.defer(() -> certificateAsyncClient.purgeDeletedCertificate(certificateName))) .then(Mono.just("complete"))) .assertNext(input -> assertEquals("complete", input)) .verifyComplete(); sleepInRecordMode(40000); StepVerifier.create(Mono.defer(() -> { byte[] backupBytes = new byte[backup.get().length]; for (int i = 0; i < backup.get().length; i++) { backupBytes[i] = backup.get()[i]; } return certificateAsyncClient.restoreCertificateBackup(backupBytes); })).assertNext(restoredCertificate -> { assertEquals(certificateName, restoredCertificate.getName()); validatePolicy(restoredCertificate.getPolicy(), createdCertificate.get().getPolicy()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateOperation(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificateOperationRunner((certificateName) -> { 
PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, setupPolicy()); AtomicReference<KeyVaultCertificateWithPolicy> expectedCert = new AtomicReference<>(); StepVerifier.create( certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(keyVaultCertificateWithPolicy -> { expectedCert.set(keyVaultCertificateWithPolicy); return certificateAsyncClient.getCertificateOperation(certificateName) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult); })) .assertNext(retrievedCert -> { validateCertificate(expectedCert.get(), retrievedCert); validatePolicy(expectedCert.get().getPolicy(), retrievedCert.getPolicy()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void cancelCertificateOperation(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); cancelCertificateOperationRunner((certName) -> { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()); StepVerifier.create(certPoller.takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.IN_PROGRESS) .last().flatMap(AsyncPollResponse::cancelOperation)) .assertNext(certificateOperation -> { assertTrue(certificateOperation.getCancellationRequested()); }).verifyComplete(); StepVerifier.create(certPoller.takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.USER_CANCELLED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(certificate -> { assertFalse(certificate.getProperties().isEnabled()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificateOperation(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); deleteCertificateOperationRunner((certificateName) -> { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, CertificatePolicy.getDefault()); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(ignored -> certificateAsyncClient.deleteCertificateOperation(certificateName))) .assertNext(certificateOperation -> { assertEquals("completed", certificateOperation.getStatus()); }).verifyComplete(); StepVerifier.create(certificateAsyncClient.deleteCertificateOperation(certificateName)) .verifyErrorSatisfies(e -> { assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); }); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificatePolicy(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificatePolicyRunner((certificateName) -> { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, setupPolicy()); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == 
LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult)) .assertNext(certificate -> { validatePolicy(setupPolicy(), certificate.getPolicy()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateCertificatePolicy(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); updateCertificatePolicyRunner((certificateName) -> { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certificateName, setupPolicy()); AtomicReference<KeyVaultCertificateWithPolicy> createdCert = new AtomicReference<>(); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult) .flatMap(keyVaultCertificateWithPolicy -> { keyVaultCertificateWithPolicy.getPolicy().setExportable(false); createdCert.set(keyVaultCertificateWithPolicy); return certificateAsyncClient.updateCertificatePolicy(certificateName, keyVaultCertificateWithPolicy.getPolicy()); })) .assertNext(certificatePolicy -> validatePolicy(createdCert.get().getPolicy(), certificatePolicy)).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreCertificateFromMalformedBackup(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); byte[] keyBackupBytes = "non-existing".getBytes(); StepVerifier.create(certificateAsyncClient.restoreCertificateBackup(keyBackupBytes)) .verifyErrorSatisfies(e -> { assertResponseException(e, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listCertificates(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); listCertificatesRunner((certificatesToList) -> { HashSet<String> certificates = new HashSet<>(certificatesToList); for (String certName : certificates) { StepVerifier.create(certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED).last()) .assertNext(response -> assertNotNull(response.getValue())) .verifyComplete(); } StepVerifier.create(certificateAsyncClient.listPropertiesOfCertificates() .map(certificate -> { certificates.remove(certificate.getName()); return Mono.empty(); }).last()) .assertNext(ignore -> { assertEquals(0, certificates.size()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listPropertiesOfCertificates(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); listPropertiesOfCertificatesRunner((certificatesToList) -> { HashSet<String> certificates = new HashSet<>(certificatesToList); for (String certName : certificates) { StepVerifier.create(certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()) .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED).last()) .assertNext(response -> assertNotNull(response.getValue())) .verifyComplete(); } 
StepVerifier.create(certificateAsyncClient.listPropertiesOfCertificates(false) .map(certificate -> { certificates.remove(certificate.getName()); return Mono.empty(); }).last()) .assertNext(ignore -> { assertEquals(0, certificates.size()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createIssuer(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); createIssuerRunner((issuer) -> { StepVerifier.create(certificateAsyncClient.createIssuer(issuer)) .assertNext(createdIssuer -> { assertTrue(issuerCreatedCorrectly(issuer, createdIssuer)); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createIssuerEmptyName(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.createIssuer(new CertificateIssuer("", ""))) .verifyErrorSatisfies(e -> assertResponseException(e, HttpResponseException.class, HttpURLConnection.HTTP_BAD_METHOD)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createIssuerNullProvider(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.createIssuer(new CertificateIssuer("", null))) .verifyErrorSatisfies(e -> assertResponseException(e, HttpResponseException.class, HttpURLConnection.HTTP_BAD_METHOD)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createIssuerNull(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.createIssuer(null)) .verifyErrorSatisfies(e -> assertEquals(NullPointerException.class, e.getClass())); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateIssuer(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); getCertificateIssuerRunner((issuer) -> { AtomicReference<CertificateIssuer> certificateIssuer = new AtomicReference<>(); StepVerifier.create(certificateAsyncClient.createIssuer(issuer) .flatMap(createdIssuer -> { certificateIssuer.set(createdIssuer); return certificateAsyncClient.getIssuer(issuer.getName()); })) .assertNext(retrievedIssuer -> { assertTrue(issuerCreatedCorrectly(certificateIssuer.get(), retrievedIssuer)); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateIssuerNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.backupCertificate("non-existing")) .verifyErrorSatisfies(e -> { assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificateIssuer(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); deleteCertificateIssuerRunner((issuer) -> { AtomicReference<CertificateIssuer> createdIssuer = new 
AtomicReference<>(); StepVerifier.create(certificateAsyncClient.createIssuer(issuer) .flatMap(certificateIssuer -> { createdIssuer.set(certificateIssuer); return certificateAsyncClient.deleteIssuer(issuer.getName()); })) .assertNext(deletedIssuer -> { assertTrue(issuerCreatedCorrectly(createdIssuer.get(), deletedIssuer)); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteCertificateIssuerNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.backupCertificate("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listCertificateIssuers(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); listCertificateIssuersRunner((certificateIssuers) -> { HashMap<String, CertificateIssuer> certificateIssuersToList = new HashMap<>(certificateIssuers); AtomicInteger count = new AtomicInteger(0); for (CertificateIssuer issuer : certificateIssuers.values()) { StepVerifier.create(certificateAsyncClient.createIssuer(issuer)) .assertNext(certificateIssuer -> { assertNotNull(certificateIssuer.getName()); }).verifyComplete(); } StepVerifier.create(certificateAsyncClient.listPropertiesOfIssuers() .map(issuerProperties -> { if (certificateIssuersToList.containsKey(issuerProperties.getName())) { count.incrementAndGet(); } return Mono.empty(); }).last()) .assertNext(ignore -> assertEquals(certificateIssuersToList.size(), count.get())) .verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateIssuer(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); updateIssuerRunner((issuerToCreate, issuerToUpdate) -> { StepVerifier.create(certificateAsyncClient.createIssuer(issuerToCreate) .flatMap(createdIssuer -> certificateAsyncClient.updateIssuer(issuerToUpdate))) .assertNext(updatedIssuer -> assertTrue(issuerUpdatedCorrectly(issuerToCreate, updatedIssuer))).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public void setContacts(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); List<CertificateContact> contacts = Arrays.asList(setupContact()); StepVerifier.create(certificateAsyncClient.setContacts(contacts)) .assertNext(contact -> validateContact(setupContact(), contact)) .verifyComplete(); StepVerifier.create(certificateAsyncClient.deleteContacts().then(Mono.just("complete"))) .assertNext(input -> assertEquals("complete", input)).verifyComplete(); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public void listContacts(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); List<CertificateContact> contacts = Arrays.asList(setupContact()); StepVerifier.create(certificateAsyncClient.setContacts(contacts)) .assertNext(contact -> validateContact(setupContact(), 
contact)) .verifyComplete(); sleepInRecordMode(6000); StepVerifier.create(certificateAsyncClient.listContacts()) .assertNext(contact -> validateContact(setupContact(), contact)) .verifyComplete(); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public void deleteContacts(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); List<CertificateContact> contacts = Arrays.asList(setupContact()); StepVerifier.create(certificateAsyncClient.setContacts(contacts)) .assertNext(contact -> validateContact(setupContact(), contact)) .verifyComplete(); StepVerifier.create(certificateAsyncClient.deleteContacts()) .assertNext(contact -> { validateContact(setupContact(), contact); }).verifyComplete(); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificateOperationNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.getCertificateOperation("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCertificatePolicyNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.getCertificatePolicy("non-existing")) .verifyErrorSatisfies(e -> assertResponseException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listCertificateVersions(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); String certName = testResourceNamer.randomName("testListCertVersion", 25); int versionsToCreate = 5; for (int i = 0; i < versionsToCreate; i++) { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last().flatMap(AsyncPollResponse::getFinalResult).then(Mono.just("complete"))).assertNext(input -> assertEquals("complete", input)).verifyComplete(); } AtomicInteger createdVersions = new AtomicInteger(); StepVerifier.create(certificateAsyncClient.listPropertiesOfCertificateVersions(certName) .map(certificateProperties -> { createdVersions.getAndIncrement(); return Mono.just("complete"); }).last()).assertNext(ignored -> assertEquals(versionsToCreate, createdVersions.get())).verifyComplete(); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listDeletedCertificates(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); if (interceptorManager.isLiveMode()) { return; } listDeletedCertificatesRunner((certificates) -> { HashSet<String> certificatesToDelete = new HashSet<>(certificates); for (String certName : certificatesToDelete) { PollerFlux<CertificateOperation, KeyVaultCertificateWithPolicy> certPoller = 
certificateAsyncClient.beginCreateCertificate(certName, CertificatePolicy.getDefault()); StepVerifier.create(certPoller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED).last() .then(Mono.just("complete"))).assertNext(input -> assertEquals("complete", input)).verifyComplete(); } for (String certName : certificates) { PollerFlux<DeletedCertificate, Void> poller = certificateAsyncClient.beginDeleteCertificate(certName); StepVerifier.create(poller .takeUntil(apr -> apr.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) .last()).assertNext(asyncPollResponse -> assertNotNull(asyncPollResponse.getValue())).verifyComplete(); } sleepInRecordMode(4000); StepVerifier.create(certificateAsyncClient.listDeletedCertificates() .map(deletedCertificate -> { certificatesToDelete.remove(deletedCertificate.getName()); return Mono.just("complete"); }).last()) .assertNext(ignored -> { assertEquals(0, certificatesToDelete.size()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void importCertificate(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); importCertificateRunner((importCertificateOptions) -> { StepVerifier.create(certificateAsyncClient.importCertificate(importCertificateOptions)) .assertNext(importedCertificate -> { assertTrue(toHexString(importedCertificate.getProperties().getX509Thumbprint()) .equalsIgnoreCase("7cb8b7539d87ba7215357b9b9049dff2d3fa59ba")); assertEquals(importCertificateOptions.isEnabled(), importedCertificate.getProperties().isEnabled()); X509Certificate x509Certificate = null; try { x509Certificate = loadCerToX509Certificate(importedCertificate); } catch (CertificateException | IOException e) { e.printStackTrace(); fail(); } assertEquals("CN=KeyVaultTest", x509Certificate.getSubjectX500Principal().getName()); assertEquals("CN=Root Agency", x509Certificate.getIssuerX500Principal().getName()); }).verifyComplete(); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") public void mergeCertificateNotFound(HttpClient httpClient, CertificateServiceVersion serviceVersion) { createCertificateAsyncClient(httpClient, serviceVersion); StepVerifier.create(certificateAsyncClient.mergeCertificate( new MergeCertificateOptions(testResourceNamer.randomName("testCert", 20), Arrays.asList("test".getBytes())))) .verifyErrorSatisfies(e -> assertResponseException(e, HttpResponseException.class, HttpURLConnection.HTTP_NOT_FOUND)); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") }
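Every certificate test above drives its long-running operation through the same Reactor idiom: poll until the operation reports SUCCESSFULLY_COMPLETED, keep the last poll response, and unwrap the final result. A minimal sketch of that idiom factored into a helper follows; `PollerUtil` and `awaitFinalResult` are names invented for this illustration and are not part of the Azure SDK.

```
import com.azure.core.util.polling.AsyncPollResponse;
import com.azure.core.util.polling.LongRunningOperationStatus;
import com.azure.core.util.polling.PollerFlux;
import reactor.core.publisher.Mono;

// Hypothetical helper capturing the polling chain repeated in the tests:
// takeUntil(...) stops the poll loop at the terminal status, last() keeps
// the final poll response, and getFinalResult() fetches the operation output.
final class PollerUtil {
    private PollerUtil() {
    }

    static <T, U> Mono<U> awaitFinalResult(PollerFlux<T, U> poller) {
        return poller
            .takeUntil(response ->
                response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED)
            .last()
            .flatMap(AsyncPollResponse::getFinalResult);
    }
}
```

With such a helper, each repeated chain would reduce to `StepVerifier.create(PollerUtil.awaitFinalResult(certPoller))`.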
It seems like you decided to populate the `use_portable_job_submission` experiment from the SDK side, right?
public DataflowPipelineJob run(Pipeline pipeline) { if (useUnifiedWorker(options)) { List<String> experiments = options.getExperiments(); if (!experiments.contains("use_runner_v2")) { experiments.add("use_runner_v2"); } if (!experiments.contains("use_unified_worker")) { experiments.add("use_unified_worker"); } if (!experiments.contains("beam_fn_api")) { experiments.add("beam_fn_api"); } if (!experiments.contains("use_portable_job_submission")) { experiments.add("use_portable_job_submission"); } options.setExperiments(ImmutableList.copyOf(experiments)); } logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline); if (containsUnboundedPCollection(pipeline)) { options.setStreaming(true); } LOG.info( "Executing pipeline on the Dataflow Service, which will have billing implications " + "related to Google Compute Engine usage and other Google Cloud Services."); DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class); String workerHarnessContainerImageURL = DataflowRunner.getContainerImageForJob(dataflowOptions); RunnerApi.Environment defaultEnvironmentForDataflow = Environments.createDockerEnvironment(workerHarnessContainerImageURL); SdkComponents portableComponents = SdkComponents.create(); portableComponents.registerEnvironment( defaultEnvironmentForDataflow .toBuilder() .addAllDependencies(getDefaultArtifacts()) .addAllCapabilities(Environments.getJavaCapabilities()) .build()); if (useUnifiedWorker(options)) { pipeline.replaceAll(getPortableOverrides()); } RunnerApi.Pipeline portablePipelineProto = PipelineTranslation.toProto(pipeline, portableComponents, false); LOG.info("Portable pipeline proto:\n{}", TextFormat.printToString(portablePipelineProto)); LOG.info("Staging portable pipeline proto to {}", options.getStagingLocation()); byte[] serializedProtoPipeline = portablePipelineProto.toByteArray(); DataflowPackage stagedPipeline = options.getStager().stageToFile(serializedProtoPipeline, PIPELINE_FILE_NAME); dataflowOptions.setPipelineUrl(stagedPipeline.getLocation()); replaceTransforms(pipeline); SdkComponents dataflowV1Components = SdkComponents.create(); dataflowV1Components.registerEnvironment( defaultEnvironmentForDataflow .toBuilder() .addAllDependencies(getDefaultArtifacts()) .addAllCapabilities(Environments.getJavaCapabilities()) .build()); RunnerApi.Pipeline dataflowV1PipelineProto = PipelineTranslation.toProto(pipeline, dataflowV1Components, true); LOG.info("Dataflow v1 pipeline proto:\n{}", TextFormat.printToString(dataflowV1PipelineProto)); List<DataflowPackage> packages = stageArtifacts(dataflowV1PipelineProto); int randomNum = new Random().nextInt(9000) + 1000; String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssmmm") .withZone(DateTimeZone.UTC) .print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum; maybeRegisterDebuggee(dataflowOptions, requestId); JobSpecification jobSpecification = translator.translate( pipeline, dataflowV1PipelineProto, dataflowV1Components, this, packages); if (!isNullOrEmpty(dataflowOptions.getDataflowWorkerJar()) && !useUnifiedWorker(options)) { List<String> experiments = firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()); if (!experiments.contains("use_staged_dataflow_worker_jar")) { dataflowOptions.setExperiments( ImmutableList.<String>builder() .addAll(experiments) .add("use_staged_dataflow_worker_jar") .build()); } } Job newJob = jobSpecification.getJob(); try { newJob .getEnvironment() .setSdkPipelineOptions( MAPPER.readValue(MAPPER_WITH_MODULES.writeValueAsBytes(options), 
Map.class)); } catch (IOException e) { throw new IllegalArgumentException( "PipelineOptions specified failed to serialize to JSON.", e); } newJob.setClientRequestId(requestId); DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo(); String version = dataflowRunnerInfo.getVersion(); checkState( !"${pom.version}".equals(version), "Unable to submit a job to the Dataflow service with unset version ${pom.version}"); LOG.info("Dataflow SDK version: {}", version); newJob.getEnvironment().setUserAgent((Map) dataflowRunnerInfo.getProperties()); if (!isNullOrEmpty(options.getGcpTempLocation())) { newJob .getEnvironment() .setTempStoragePrefix( dataflowOptions.getPathValidator().verifyPath(options.getGcpTempLocation())); } newJob.getEnvironment().setDataset(options.getTempDatasetId()); if (options.getWorkerRegion() != null) { newJob.getEnvironment().setWorkerRegion(options.getWorkerRegion()); } if (options.getWorkerZone() != null) { newJob.getEnvironment().setWorkerZone(options.getWorkerZone()); } if (options.getFlexRSGoal() == DataflowPipelineOptions.FlexResourceSchedulingGoal.COST_OPTIMIZED) { newJob.getEnvironment().setFlexResourceSchedulingGoal("FLEXRS_COST_OPTIMIZED"); } else if (options.getFlexRSGoal() == DataflowPipelineOptions.FlexResourceSchedulingGoal.SPEED_OPTIMIZED) { newJob.getEnvironment().setFlexResourceSchedulingGoal("FLEXRS_SPEED_OPTIMIZED"); } if (!isNullOrEmpty(dataflowOptions.getMinCpuPlatform())) { List<String> experiments = firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()); List<String> minCpuFlags = experiments.stream() .filter(p -> p.startsWith("min_cpu_platform")) .collect(Collectors.toList()); if (minCpuFlags.isEmpty()) { dataflowOptions.setExperiments( ImmutableList.<String>builder() .addAll(experiments) .add("min_cpu_platform=" + dataflowOptions.getMinCpuPlatform()) .build()); } else { LOG.warn( "Flag min_cpu_platform is defined in both top level PipelineOption, " + "as well as under experiments. 
Proceed using {}.", minCpuFlags.get(0)); } } newJob .getEnvironment() .setExperiments( ImmutableList.copyOf( firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()))); String workerHarnessContainerImage = getContainerImageForJob(options); for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) { workerPool.setWorkerHarnessContainerImage(workerHarnessContainerImage); } configureSdkHarnessContainerImages( options, portablePipelineProto, newJob, workerHarnessContainerImage); newJob.getEnvironment().setVersion(getEnvironmentVersion(options)); if (hooks != null) { hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment()); } if (hasExperiment(options, "upload_graph")) { DataflowPackage stagedGraph = options .getStager() .stageToFile( DataflowPipelineTranslator.jobToString(newJob).getBytes(UTF_8), DATAFLOW_GRAPH_FILE_NAME); newJob.getSteps().clear(); newJob.setStepsLocation(stagedGraph.getLocation()); } if (!isNullOrEmpty(options.getDataflowJobFile()) || !isNullOrEmpty(options.getTemplateLocation())) { boolean isTemplate = !isNullOrEmpty(options.getTemplateLocation()); if (isTemplate) { checkArgument( isNullOrEmpty(options.getDataflowJobFile()), "--dataflowJobFile and --templateLocation are mutually exclusive."); } String fileLocation = firstNonNull(options.getTemplateLocation(), options.getDataflowJobFile()); checkArgument( fileLocation.startsWith("/") || fileLocation.startsWith("gs: "Location must be local or on Cloud Storage, got %s.", fileLocation); ResourceId fileResource = FileSystems.matchNewResource(fileLocation, false /* isDirectory */); String workSpecJson = DataflowPipelineTranslator.jobToString(newJob); try (PrintWriter printWriter = new PrintWriter( new BufferedWriter( new OutputStreamWriter( Channels.newOutputStream(FileSystems.create(fileResource, MimeTypes.TEXT)), UTF_8)))) { printWriter.print(workSpecJson); LOG.info("Printed job specification to {}", fileLocation); } catch (IOException ex) { String error = String.format("Cannot create output file at %s", fileLocation); if (isTemplate) { throw new RuntimeException(error, ex); } else { LOG.warn(error, ex); } } if (isTemplate) { LOG.info("Template successfully created."); return new DataflowTemplateJob(); } } String jobIdToUpdate = null; if (options.isUpdate()) { jobIdToUpdate = getJobIdFromName(options.getJobName()); newJob.setTransformNameMapping(options.getTransformNameMapping()); newJob.setReplaceJobId(jobIdToUpdate); } if (options.getCreateFromSnapshot() != null && !options.getCreateFromSnapshot().isEmpty()) { newJob.setCreatedFromSnapshotId(options.getCreateFromSnapshot()); } Job jobResult; try { LOG.info("v1beta3 job: {}", newJob.toPrettyString()); jobResult = dataflowClient.createJob(newJob); } catch (GoogleJsonResponseException e) { String errorMessages = "Unexpected errors"; if (e.getDetails() != null) { if (Utf8.encodedLength(newJob.toString()) >= CREATE_JOB_REQUEST_LIMIT_BYTES) { errorMessages = "The size of the serialized JSON representation of the pipeline " + "exceeds the allowable limit. 
" + "For more information, please see the documentation on job submission:\n" + "https: } else { errorMessages = e.getDetails().getMessage(); } } throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e); } catch (IOException e) { throw new RuntimeException("Failed to create a workflow job", e); } DataflowPipelineJob dataflowPipelineJob = new DataflowPipelineJob( DataflowClient.create(options), jobResult.getId(), options, jobSpecification.getStepNames(), portablePipelineProto); if (jobResult.getClientRequestId() != null && !jobResult.getClientRequestId().isEmpty() && !jobResult.getClientRequestId().equals(requestId)) { if (options.isUpdate()) { throw new DataflowJobAlreadyUpdatedException( dataflowPipelineJob, String.format( "The job named %s with id: %s has already been updated into job id: %s " + "and cannot be updated again.", newJob.getName(), jobIdToUpdate, jobResult.getId())); } else { throw new DataflowJobAlreadyExistsException( dataflowPipelineJob, String.format( "There is already an active job named %s with id: %s. If you want to submit a" + " second job, try again by setting a different name using --jobName.", newJob.getName(), jobResult.getId())); } } LOG.info( "To access the Dataflow monitoring console, please navigate to {}", MonitoringUtil.getJobMonitoringPageURL( options.getProject(), options.getRegion(), jobResult.getId())); LOG.info("Submitted job: {}", jobResult.getId()); LOG.info( "To cancel the job using the 'gcloud' tool, run:\n> {}", MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId())); return dataflowPipelineJob; }
experiments.add("use_portable_job_submission");
public DataflowPipelineJob run(Pipeline pipeline) { if (useUnifiedWorker(options)) { List<String> experiments = options.getExperiments(); if (!experiments.contains("use_runner_v2")) { experiments.add("use_runner_v2"); } if (!experiments.contains("use_unified_worker")) { experiments.add("use_unified_worker"); } if (!experiments.contains("beam_fn_api")) { experiments.add("beam_fn_api"); } if (!experiments.contains("use_portable_job_submission")) { experiments.add("use_portable_job_submission"); } options.setExperiments(ImmutableList.copyOf(experiments)); } logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline); if (containsUnboundedPCollection(pipeline)) { options.setStreaming(true); } LOG.info( "Executing pipeline on the Dataflow Service, which will have billing implications " + "related to Google Compute Engine usage and other Google Cloud Services."); DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class); String workerHarnessContainerImageURL = DataflowRunner.getContainerImageForJob(dataflowOptions); RunnerApi.Environment defaultEnvironmentForDataflow = Environments.createDockerEnvironment(workerHarnessContainerImageURL); SdkComponents portableComponents = SdkComponents.create(); portableComponents.registerEnvironment( defaultEnvironmentForDataflow .toBuilder() .addAllDependencies(getDefaultArtifacts()) .addAllCapabilities(Environments.getJavaCapabilities()) .build()); if (useUnifiedWorker(options)) { pipeline.replaceAll(getPortableOverrides()); } RunnerApi.Pipeline portablePipelineProto = PipelineTranslation.toProto(pipeline, portableComponents, false); LOG.debug("Portable pipeline proto:\n{}", TextFormat.printToString(portablePipelineProto)); LOG.info("Staging portable pipeline proto to {}", options.getStagingLocation()); byte[] serializedProtoPipeline = portablePipelineProto.toByteArray(); DataflowPackage stagedPipeline = options.getStager().stageToFile(serializedProtoPipeline, PIPELINE_FILE_NAME); dataflowOptions.setPipelineUrl(stagedPipeline.getLocation()); replaceTransforms(pipeline); SdkComponents dataflowV1Components = SdkComponents.create(); dataflowV1Components.registerEnvironment( defaultEnvironmentForDataflow .toBuilder() .addAllDependencies(getDefaultArtifacts()) .addAllCapabilities(Environments.getJavaCapabilities()) .build()); RunnerApi.Pipeline dataflowV1PipelineProto = PipelineTranslation.toProto(pipeline, dataflowV1Components, true); LOG.debug("Dataflow v1 pipeline proto:\n{}", TextFormat.printToString(dataflowV1PipelineProto)); List<DataflowPackage> packages = stageArtifacts(dataflowV1PipelineProto); int randomNum = new Random().nextInt(9000) + 1000; String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssmmm") .withZone(DateTimeZone.UTC) .print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum; maybeRegisterDebuggee(dataflowOptions, requestId); JobSpecification jobSpecification = translator.translate( pipeline, dataflowV1PipelineProto, dataflowV1Components, this, packages); if (!isNullOrEmpty(dataflowOptions.getDataflowWorkerJar()) && !useUnifiedWorker(options)) { List<String> experiments = firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()); if (!experiments.contains("use_staged_dataflow_worker_jar")) { dataflowOptions.setExperiments( ImmutableList.<String>builder() .addAll(experiments) .add("use_staged_dataflow_worker_jar") .build()); } } Job newJob = jobSpecification.getJob(); try { newJob .getEnvironment() .setSdkPipelineOptions( MAPPER.readValue(MAPPER_WITH_MODULES.writeValueAsBytes(options), 
Map.class)); } catch (IOException e) { throw new IllegalArgumentException( "PipelineOptions specified failed to serialize to JSON.", e); } newJob.setClientRequestId(requestId); DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo(); String version = dataflowRunnerInfo.getVersion(); checkState( !"${pom.version}".equals(version), "Unable to submit a job to the Dataflow service with unset version ${pom.version}"); LOG.info("Dataflow SDK version: {}", version); newJob.getEnvironment().setUserAgent((Map) dataflowRunnerInfo.getProperties()); if (!isNullOrEmpty(options.getGcpTempLocation())) { newJob .getEnvironment() .setTempStoragePrefix( dataflowOptions.getPathValidator().verifyPath(options.getGcpTempLocation())); } newJob.getEnvironment().setDataset(options.getTempDatasetId()); if (options.getWorkerRegion() != null) { newJob.getEnvironment().setWorkerRegion(options.getWorkerRegion()); } if (options.getWorkerZone() != null) { newJob.getEnvironment().setWorkerZone(options.getWorkerZone()); } if (options.getFlexRSGoal() == DataflowPipelineOptions.FlexResourceSchedulingGoal.COST_OPTIMIZED) { newJob.getEnvironment().setFlexResourceSchedulingGoal("FLEXRS_COST_OPTIMIZED"); } else if (options.getFlexRSGoal() == DataflowPipelineOptions.FlexResourceSchedulingGoal.SPEED_OPTIMIZED) { newJob.getEnvironment().setFlexResourceSchedulingGoal("FLEXRS_SPEED_OPTIMIZED"); } if (!isNullOrEmpty(dataflowOptions.getMinCpuPlatform())) { List<String> experiments = firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()); List<String> minCpuFlags = experiments.stream() .filter(p -> p.startsWith("min_cpu_platform")) .collect(Collectors.toList()); if (minCpuFlags.isEmpty()) { dataflowOptions.setExperiments( ImmutableList.<String>builder() .addAll(experiments) .add("min_cpu_platform=" + dataflowOptions.getMinCpuPlatform()) .build()); } else { LOG.warn( "Flag min_cpu_platform is defined in both top level PipelineOption, " + "as well as under experiments. 
Proceed using {}.", minCpuFlags.get(0)); } } newJob .getEnvironment() .setExperiments( ImmutableList.copyOf( firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()))); String workerHarnessContainerImage = getContainerImageForJob(options); for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) { workerPool.setWorkerHarnessContainerImage(workerHarnessContainerImage); } configureSdkHarnessContainerImages( options, portablePipelineProto, newJob, workerHarnessContainerImage); newJob.getEnvironment().setVersion(getEnvironmentVersion(options)); if (hooks != null) { hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment()); } if (hasExperiment(options, "upload_graph")) { DataflowPackage stagedGraph = options .getStager() .stageToFile( DataflowPipelineTranslator.jobToString(newJob).getBytes(UTF_8), DATAFLOW_GRAPH_FILE_NAME); newJob.getSteps().clear(); newJob.setStepsLocation(stagedGraph.getLocation()); } if (!isNullOrEmpty(options.getDataflowJobFile()) || !isNullOrEmpty(options.getTemplateLocation())) { boolean isTemplate = !isNullOrEmpty(options.getTemplateLocation()); if (isTemplate) { checkArgument( isNullOrEmpty(options.getDataflowJobFile()), "--dataflowJobFile and --templateLocation are mutually exclusive."); } String fileLocation = firstNonNull(options.getTemplateLocation(), options.getDataflowJobFile()); checkArgument( fileLocation.startsWith("/") || fileLocation.startsWith("gs: "Location must be local or on Cloud Storage, got %s.", fileLocation); ResourceId fileResource = FileSystems.matchNewResource(fileLocation, false /* isDirectory */); String workSpecJson = DataflowPipelineTranslator.jobToString(newJob); try (PrintWriter printWriter = new PrintWriter( new BufferedWriter( new OutputStreamWriter( Channels.newOutputStream(FileSystems.create(fileResource, MimeTypes.TEXT)), UTF_8)))) { printWriter.print(workSpecJson); LOG.info("Printed job specification to {}", fileLocation); } catch (IOException ex) { String error = String.format("Cannot create output file at %s", fileLocation); if (isTemplate) { throw new RuntimeException(error, ex); } else { LOG.warn(error, ex); } } if (isTemplate) { LOG.info("Template successfully created."); return new DataflowTemplateJob(); } } String jobIdToUpdate = null; if (options.isUpdate()) { jobIdToUpdate = getJobIdFromName(options.getJobName()); newJob.setTransformNameMapping(options.getTransformNameMapping()); newJob.setReplaceJobId(jobIdToUpdate); } if (options.getCreateFromSnapshot() != null && !options.getCreateFromSnapshot().isEmpty()) { newJob.setCreatedFromSnapshotId(options.getCreateFromSnapshot()); } Job jobResult; try { jobResult = dataflowClient.createJob(newJob); } catch (GoogleJsonResponseException e) { String errorMessages = "Unexpected errors"; if (e.getDetails() != null) { if (Utf8.encodedLength(newJob.toString()) >= CREATE_JOB_REQUEST_LIMIT_BYTES) { errorMessages = "The size of the serialized JSON representation of the pipeline " + "exceeds the allowable limit. 
" + "For more information, please see the documentation on job submission:\n" + "https: } else { errorMessages = e.getDetails().getMessage(); } } throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e); } catch (IOException e) { throw new RuntimeException("Failed to create a workflow job", e); } DataflowPipelineJob dataflowPipelineJob = new DataflowPipelineJob( DataflowClient.create(options), jobResult.getId(), options, jobSpecification.getStepNames(), portablePipelineProto); if (jobResult.getClientRequestId() != null && !jobResult.getClientRequestId().isEmpty() && !jobResult.getClientRequestId().equals(requestId)) { if (options.isUpdate()) { throw new DataflowJobAlreadyUpdatedException( dataflowPipelineJob, String.format( "The job named %s with id: %s has already been updated into job id: %s " + "and cannot be updated again.", newJob.getName(), jobIdToUpdate, jobResult.getId())); } else { throw new DataflowJobAlreadyExistsException( dataflowPipelineJob, String.format( "There is already an active job named %s with id: %s. If you want to submit a" + " second job, try again by setting a different name using --jobName.", newJob.getName(), jobResult.getId())); } } LOG.info( "To access the Dataflow monitoring console, please navigate to {}", MonitoringUtil.getJobMonitoringPageURL( options.getProject(), options.getRegion(), jobResult.getId())); LOG.info("Submitted job: {}", jobResult.getId()); LOG.info( "To cancel the job using the 'gcloud' tool, run:\n> {}", MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId())); return dataflowPipelineJob; }
class {@link Combine.GroupedValues}
class {@link Combine.GroupedValues}
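On the review question above: yes, the diff shows the runner itself appending `use_portable_job_submission` (alongside the other unified-worker experiments) at the top of `run()`. Below is a sketch of that append-if-absent logic pulled into a helper, under the assumption that duplicate experiments must be avoided; `ExperimentUtil` and `addExperimentsIfMissing` are names invented for this illustration.

```
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.List;

// Illustrative extraction of the experiment population in run(): each
// required experiment is appended only when absent, then the list is frozen
// before being written back to the pipeline options.
final class ExperimentUtil {
    private ExperimentUtil() {
    }

    static List<String> addExperimentsIfMissing(List<String> current, String... required) {
        List<String> experiments = new ArrayList<>(current);
        for (String experiment : required) {
            if (!experiments.contains(experiment)) {
                experiments.add(experiment);
            }
        }
        return ImmutableList.copyOf(experiments);
    }
}
```

The four `if (!experiments.contains(...))` blocks would then collapse to one call passing `"use_runner_v2"`, `"use_unified_worker"`, `"beam_fn_api"`, and `"use_portable_job_submission"`.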
Any particular reason to use 100_000?
private int fluxSequentialMergePrefetch(FeedOptions options, int numberOfPartitions, int pageSize, int fluxConcurrency) { int maxBufferedItemCount = options.getMaxBufferedItemCount(); if (maxBufferedItemCount <= 0) { maxBufferedItemCount = Math.min(Configs.CPU_CNT * numberOfPartitions * pageSize, 100_000); } int fluxPrefetch = Math.max(maxBufferedItemCount / (Math.max(fluxConcurrency * pageSize, 1)), 1); return Math.min(fluxPrefetch, Queues.XS_BUFFER_SIZE); }
maxBufferedItemCount = Math.min(Configs.CPU_CNT * numberOfPartitions * pageSize, 100_000);
private int fluxSequentialMergePrefetch(FeedOptions options, int numberOfPartitions, int pageSize, int fluxConcurrency) { int maxBufferedItemCount = options.getMaxBufferedItemCount(); if (maxBufferedItemCount <= 0) { maxBufferedItemCount = Math.min(Configs.getCPUCnt() * numberOfPartitions * pageSize, 100_000); } int fluxPrefetch = Math.max(maxBufferedItemCount / (Math.max(fluxConcurrency * pageSize, 1)), 1); return Math.min(fluxPrefetch, Queues.XS_BUFFER_SIZE); }
class EmptyPagesFilterTransformer<T extends Resource> implements Function<Flux<DocumentProducer<T>.DocumentProducerFeedResponse>, Flux<FeedResponse<T>>> { private final RequestChargeTracker tracker; private DocumentProducer<T>.DocumentProducerFeedResponse previousPage; public EmptyPagesFilterTransformer( RequestChargeTracker tracker) { if (tracker == null) { throw new IllegalArgumentException("Request Charge Tracker must not be null."); } this.tracker = tracker; this.previousPage = null; } private DocumentProducer<T>.DocumentProducerFeedResponse plusCharge( DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse, double charge) { FeedResponse<T> page = documentProducerFeedResponse.pageResult; Map<String, String> headers = new HashMap<>(page.getResponseHeaders()); double pageCharge = page.getRequestCharge(); pageCharge += charge; headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, String.valueOf(pageCharge)); FeedResponse<T> newPage = BridgeInternal.createFeedResponseWithQueryMetrics(page.getResults(), headers, BridgeInternal.queryMetricsFromFeedResponse(page)); documentProducerFeedResponse.pageResult = newPage; return documentProducerFeedResponse; } private DocumentProducer<T>.DocumentProducerFeedResponse addCompositeContinuationToken( DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse, String compositeContinuationToken) { FeedResponse<T> page = documentProducerFeedResponse.pageResult; Map<String, String> headers = new HashMap<>(page.getResponseHeaders()); headers.put(HttpConstants.HttpHeaders.CONTINUATION, compositeContinuationToken); FeedResponse<T> newPage = BridgeInternal.createFeedResponseWithQueryMetrics(page.getResults(), headers, BridgeInternal.queryMetricsFromFeedResponse(page)); documentProducerFeedResponse.pageResult = newPage; return documentProducerFeedResponse; } private static Map<String, String> headerResponse( double requestCharge) { return Utils.immutableMapOf(HttpConstants.HttpHeaders.REQUEST_CHARGE, String.valueOf(requestCharge)); } @Override public Flux<FeedResponse<T>> apply(Flux<DocumentProducer<T>.DocumentProducerFeedResponse> source) { return source.filter(documentProducerFeedResponse -> { if (documentProducerFeedResponse.pageResult.getResults().isEmpty()) { tracker.addCharge(documentProducerFeedResponse.pageResult.getRequestCharge()); return false; } return true; }).map(documentProducerFeedResponse -> { double charge = tracker.getAndResetCharge(); if (charge > 0) { return new ValueHolder<>(plusCharge(documentProducerFeedResponse, charge)); } else { return new ValueHolder<>(documentProducerFeedResponse); } }).concatWith(Flux.just(new ValueHolder<>(null))).map(heldValue -> { DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse = heldValue.v; ImmutablePair<DocumentProducer<T>.DocumentProducerFeedResponse, DocumentProducer<T>.DocumentProducerFeedResponse> previousCurrent = new ImmutablePair<>( this.previousPage, documentProducerFeedResponse); this.previousPage = documentProducerFeedResponse; return previousCurrent; }).skip(1).map(currentNext -> { DocumentProducer<T>.DocumentProducerFeedResponse current = currentNext.left; DocumentProducer<T>.DocumentProducerFeedResponse next = currentNext.right; String compositeContinuationToken; String backendContinuationToken = current.pageResult.getContinuationToken(); if (backendContinuationToken == null) { if (next == null) { compositeContinuationToken = null; } else { CompositeContinuationToken compositeContinuationTokenDom = new 
CompositeContinuationToken(null, next.sourcePartitionKeyRange.toRange()); compositeContinuationToken = compositeContinuationTokenDom.toJson(); } } else { CompositeContinuationToken compositeContinuationTokenDom = new CompositeContinuationToken( backendContinuationToken, current.sourcePartitionKeyRange.toRange()); compositeContinuationToken = compositeContinuationTokenDom.toJson(); } DocumentProducer<T>.DocumentProducerFeedResponse page; page = current; page = this.addCompositeContinuationToken(page, compositeContinuationToken); return page; }).map(documentProducerFeedResponse -> { return documentProducerFeedResponse.pageResult; }).switchIfEmpty(Flux.defer(() -> { return Flux.just(BridgeInternal.createFeedResponse(Utils.immutableListOf(), headerResponse(tracker.getAndResetCharge()))); })); } }
class EmptyPagesFilterTransformer<T extends Resource> implements Function<Flux<DocumentProducer<T>.DocumentProducerFeedResponse>, Flux<FeedResponse<T>>> { private final RequestChargeTracker tracker; private DocumentProducer<T>.DocumentProducerFeedResponse previousPage; public EmptyPagesFilterTransformer( RequestChargeTracker tracker) { if (tracker == null) { throw new IllegalArgumentException("Request Charge Tracker must not be null."); } this.tracker = tracker; this.previousPage = null; } private DocumentProducer<T>.DocumentProducerFeedResponse plusCharge( DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse, double charge) { FeedResponse<T> page = documentProducerFeedResponse.pageResult; Map<String, String> headers = new HashMap<>(page.getResponseHeaders()); double pageCharge = page.getRequestCharge(); pageCharge += charge; headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, String.valueOf(pageCharge)); FeedResponse<T> newPage = BridgeInternal.createFeedResponseWithQueryMetrics(page.getResults(), headers, BridgeInternal.queryMetricsFromFeedResponse(page)); documentProducerFeedResponse.pageResult = newPage; return documentProducerFeedResponse; } private DocumentProducer<T>.DocumentProducerFeedResponse addCompositeContinuationToken( DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse, String compositeContinuationToken) { FeedResponse<T> page = documentProducerFeedResponse.pageResult; Map<String, String> headers = new HashMap<>(page.getResponseHeaders()); headers.put(HttpConstants.HttpHeaders.CONTINUATION, compositeContinuationToken); FeedResponse<T> newPage = BridgeInternal.createFeedResponseWithQueryMetrics(page.getResults(), headers, BridgeInternal.queryMetricsFromFeedResponse(page)); documentProducerFeedResponse.pageResult = newPage; return documentProducerFeedResponse; } private static Map<String, String> headerResponse( double requestCharge) { return Utils.immutableMapOf(HttpConstants.HttpHeaders.REQUEST_CHARGE, String.valueOf(requestCharge)); } @Override public Flux<FeedResponse<T>> apply(Flux<DocumentProducer<T>.DocumentProducerFeedResponse> source) { return source.filter(documentProducerFeedResponse -> { if (documentProducerFeedResponse.pageResult.getResults().isEmpty()) { tracker.addCharge(documentProducerFeedResponse.pageResult.getRequestCharge()); return false; } return true; }).map(documentProducerFeedResponse -> { double charge = tracker.getAndResetCharge(); if (charge > 0) { return new ValueHolder<>(plusCharge(documentProducerFeedResponse, charge)); } else { return new ValueHolder<>(documentProducerFeedResponse); } }).concatWith(Flux.just(new ValueHolder<>(null))).map(heldValue -> { DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse = heldValue.v; ImmutablePair<DocumentProducer<T>.DocumentProducerFeedResponse, DocumentProducer<T>.DocumentProducerFeedResponse> previousCurrent = new ImmutablePair<>( this.previousPage, documentProducerFeedResponse); this.previousPage = documentProducerFeedResponse; return previousCurrent; }).skip(1).map(currentNext -> { DocumentProducer<T>.DocumentProducerFeedResponse current = currentNext.left; DocumentProducer<T>.DocumentProducerFeedResponse next = currentNext.right; String compositeContinuationToken; String backendContinuationToken = current.pageResult.getContinuationToken(); if (backendContinuationToken == null) { if (next == null) { compositeContinuationToken = null; } else { CompositeContinuationToken compositeContinuationTokenDom = new 
CompositeContinuationToken(null, next.sourcePartitionKeyRange.toRange()); compositeContinuationToken = compositeContinuationTokenDom.toJson(); } } else { CompositeContinuationToken compositeContinuationTokenDom = new CompositeContinuationToken( backendContinuationToken, current.sourcePartitionKeyRange.toRange()); compositeContinuationToken = compositeContinuationTokenDom.toJson(); } DocumentProducer<T>.DocumentProducerFeedResponse page; page = current; page = this.addCompositeContinuationToken(page, compositeContinuationToken); return page; }).map(documentProducerFeedResponse -> { return documentProducerFeedResponse.pageResult; }).switchIfEmpty(Flux.defer(() -> { return Flux.just(BridgeInternal.createFeedResponse(Utils.immutableListOf(), headerResponse(tracker.getAndResetCharge()))); })); } }
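On the `100_000` question: the constant is a ceiling, not a target. The natural buffer estimate `cpuCount * numberOfPartitions * pageSize` can explode for containers with many partitions, so it is clamped, and the clamped value is then divided down into a per-flux page prefetch that is further bounded by Reactor's extra-small queue size. Below is a worked sketch with assumed inputs; the CPU count, partition count, page size, and concurrency are illustrative, and `32` is Reactor's default for `Queues.XS_BUFFER_SIZE`.

```
// Worked example of the prefetch arithmetic above with illustrative numbers;
// the real values come from Configs and FeedOptions at runtime.
public final class PrefetchMath {
    public static void main(String[] args) {
        int cpuCount = 8;            // stand-in for Configs.getCPUCnt()
        int numberOfPartitions = 20;
        int pageSize = 1000;
        int fluxConcurrency = 32;
        int xsBufferSize = 32;       // Reactor's Queues.XS_BUFFER_SIZE default

        // 8 * 20 * 1000 = 160_000 items, so the 100_000 ceiling applies.
        int maxBufferedItemCount =
            Math.min(cpuCount * numberOfPartitions * pageSize, 100_000);

        // 100_000 / (32 * 1000) = 3 pages prefetched per inner flux.
        int fluxPrefetch =
            Math.max(maxBufferedItemCount / Math.max(fluxConcurrency * pageSize, 1), 1);

        // Still capped by the extra-small queue size: min(3, 32) = 3.
        System.out.println(Math.min(fluxPrefetch, xsBufferSize));
    }
}
```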
we can optimize to: ``` final GeneratedRecordEqualiser generatedEqualiser = new EqualiserCodeGenerator( rowTypeInfo.toRowType().getFields().stream() .map(RowType.RowField::getType) .toArray(LogicalType[]::new)) .generateRecordEqualiser("DeduplicateRowEqualiser"); ```
protected Transformation<RowData> translateToPlanInternal(PlannerBase planner) { final ExecEdge inputEdge = getInputEdges().get(0); final Transformation<RowData> inputTransform = (Transformation<RowData>) inputEdge.translateToPlan(planner); final InternalTypeInfo<RowData> rowTypeInfo = (InternalTypeInfo<RowData>) inputTransform.getOutputType(); final OneInputStreamOperator<RowData, RowData> operator; final TableConfig tableConfig = planner.getTableConfig(); final long stateIdleTime = tableConfig.getIdleStateRetention().toMillis(); final boolean isMiniBatchEnabled = tableConfig .getConfiguration() .getBoolean(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED); final EqualiserCodeGenerator equaliserCodeGen = new EqualiserCodeGenerator( rowTypeInfo.toRowType().getFields().stream() .map(RowType.RowField::getType) .toArray(LogicalType[]::new)); GeneratedRecordEqualiser generatedEqualiser = equaliserCodeGen.generateRecordEqualiser("DeduplicateRowEqualiser"); if (isMiniBatchEnabled) { TypeSerializer<RowData> rowSerializer = rowTypeInfo.createSerializer(planner.getExecEnv().getConfig()); ProcTimeMiniBatchDeduplicateKeepLastRowFunction processFunction = new ProcTimeMiniBatchDeduplicateKeepLastRowFunction( rowTypeInfo, rowSerializer, stateIdleTime, generateUpdateBefore, true, false, generatedEqualiser); CountBundleTrigger<RowData> trigger = AggregateUtil.createMiniBatchTrigger(tableConfig); operator = new KeyedMapBundleOperator<>(processFunction, trigger); } else { ProcTimeDeduplicateKeepLastRowFunction processFunction = new ProcTimeDeduplicateKeepLastRowFunction( rowTypeInfo, stateIdleTime, generateUpdateBefore, true, false, generatedEqualiser); operator = new KeyedProcessOperator<>(processFunction); } final OneInputTransformation<RowData, RowData> transform = new OneInputTransformation<>( inputTransform, getDescription(), operator, rowTypeInfo, inputTransform.getParallelism()); if (inputsContainSingleton()) { transform.setParallelism(1); transform.setMaxParallelism(1); } final RowDataKeySelector selector = KeySelectorUtil.getRowDataSelector(uniqueKeys, rowTypeInfo); transform.setStateKeySelector(selector); transform.setStateKeyType(selector.getProducedType()); return transform; }
rowTypeInfo.toRowType().getFields().stream()
protected Transformation<RowData> translateToPlanInternal(PlannerBase planner) { final ExecEdge inputEdge = getInputEdges().get(0); final Transformation<RowData> inputTransform = (Transformation<RowData>) inputEdge.translateToPlan(planner); final InternalTypeInfo<RowData> rowTypeInfo = (InternalTypeInfo<RowData>) inputTransform.getOutputType(); final OneInputStreamOperator<RowData, RowData> operator; final TableConfig tableConfig = planner.getTableConfig(); final long stateIdleTime = tableConfig.getIdleStateRetention().toMillis(); final boolean isMiniBatchEnabled = tableConfig .getConfiguration() .getBoolean(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED); GeneratedRecordEqualiser generatedEqualiser = new EqualiserCodeGenerator(rowTypeInfo.toRowType()) .generateRecordEqualiser("DeduplicateRowEqualiser"); if (isMiniBatchEnabled) { TypeSerializer<RowData> rowSerializer = rowTypeInfo.createSerializer(planner.getExecEnv().getConfig()); ProcTimeMiniBatchDeduplicateKeepLastRowFunction processFunction = new ProcTimeMiniBatchDeduplicateKeepLastRowFunction( rowTypeInfo, rowSerializer, stateIdleTime, generateUpdateBefore, true, false, generatedEqualiser); CountBundleTrigger<RowData> trigger = AggregateUtil.createMiniBatchTrigger(tableConfig); operator = new KeyedMapBundleOperator<>(processFunction, trigger); } else { ProcTimeDeduplicateKeepLastRowFunction processFunction = new ProcTimeDeduplicateKeepLastRowFunction( rowTypeInfo, stateIdleTime, generateUpdateBefore, true, false, generatedEqualiser); operator = new KeyedProcessOperator<>(processFunction); } final OneInputTransformation<RowData, RowData> transform = new OneInputTransformation<>( inputTransform, getDescription(), operator, rowTypeInfo, inputTransform.getParallelism()); if (inputsContainSingleton()) { transform.setParallelism(1); transform.setMaxParallelism(1); } final RowDataKeySelector selector = KeySelectorUtil.getRowDataSelector(uniqueKeys, rowTypeInfo); transform.setStateKeySelector(selector); transform.setStateKeyType(selector.getProducedType()); return transform; }
class StreamExecChangelogNormalize extends ExecNodeBase<RowData> implements StreamExecNode<RowData> { private final int[] uniqueKeys; private final boolean generateUpdateBefore; public StreamExecChangelogNormalize( int[] uniqueKeys, boolean generateUpdateBefore, InputProperty inputProperty, RowType outputType, String description) { super(Collections.singletonList(inputProperty), outputType, description); this.uniqueKeys = uniqueKeys; this.generateUpdateBefore = generateUpdateBefore; } @SuppressWarnings("unchecked") @Override }
class StreamExecChangelogNormalize extends ExecNodeBase<RowData> implements StreamExecNode<RowData> { private final int[] uniqueKeys; private final boolean generateUpdateBefore; public StreamExecChangelogNormalize( int[] uniqueKeys, boolean generateUpdateBefore, InputProperty inputProperty, RowType outputType, String description) { super(Collections.singletonList(inputProperty), outputType, description); this.uniqueKeys = uniqueKeys; this.generateUpdateBefore = generateUpdateBefore; } @SuppressWarnings("unchecked") @Override }
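The diff in this record replaces manual extraction of field LogicalTypes with a constructor that accepts the RowType directly (`new EqualiserCodeGenerator(rowTypeInfo.toRowType())`). A minimal self-contained sketch of the same constructor-overload pattern, using hypothetical stand-ins rather than Flink's actual planner classes:

import java.util.List;

// Hypothetical stand-ins for RowType/LogicalType and the code generator;
// they only illustrate the overload pattern from the diff above.
final class EqualiserOverloadSketch {
    record LogicalType(String name) {}
    record RowType(List<LogicalType> fieldTypes) {}

    static final class EqualiserGenerator {
        // Old call site: caller flattens the row type into a field-type array.
        EqualiserGenerator(LogicalType[] fieldTypes) { /* ... */ }
        // New overload: accept the composite type and extract fields internally,
        // so every call site stays one line.
        EqualiserGenerator(RowType rowType) {
            this(rowType.fieldTypes().toArray(new LogicalType[0]));
        }
    }

    public static void main(String[] args) {
        RowType rowType = new RowType(List.of(new LogicalType("INT"), new LogicalType("STRING")));
        // Before: new EqualiserGenerator(rowType.fieldTypes().toArray(new LogicalType[0]));
        // After:
        new EqualiserGenerator(rowType);
        System.out.println("generator constructed from RowType directly");
    }
}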
The visible type name for the service variable should be `service`
public void globalVariableVisibilityTest() throws BallerinaTestException { debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); globalVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.GLOBAL); Assert.assertEquals(globalVariables.size(), 14); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); globalVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.GLOBAL); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.LOCAL); Assert.assertEquals(globalVariables.size(), 14); debugTestRunner.assertVariable(globalVariables, "nameWithoutType", "Ballerina", "string"); debugTestRunner.assertVariable(globalVariables, "nameWithType", "Ballerina", "string"); debugTestRunner.assertVariable(globalVariables, "nameMap", "map<string> (size = 1)", "map"); debugTestRunner.assertVariable(globalVariables, "nilWithoutType", "()", "nil"); debugTestRunner.assertVariable(globalVariables, "nilWithType", "()", "nil"); debugTestRunner.assertVariable(globalVariables, "RED", "RED", "string"); debugTestRunner.assertVariable(globalVariables, "BLUE", "Blue", "string"); debugTestRunner.assertVariable(globalVariables, "stringValue", "Ballerina", "string"); debugTestRunner.assertVariable(globalVariables, "decimalValue", "100.0", "decimal"); debugTestRunner.assertVariable(globalVariables, "byteValue", "2", "int"); debugTestRunner.assertVariable(globalVariables, "floatValue", "2.0", "float"); debugTestRunner.assertVariable(globalVariables, "jsonVar", "map<json> (size = 2)", "json"); debugTestRunner.assertVariable(globalVariables, " /:@[`{~π_IL", "IL with global var", "string"); debugTestRunner.assertVariable(globalVariables, "port", "9090", "int"); } @Test(dependsOnMethods = "globalVariableVisibilityTest", description = "Variable visibility test for local variables at the last line of main() method") public void localVariableVisibilityTest() { debugTestRunner.assertVariable(localVariables, "varVariable", "()", "nil"); debugTestRunner.assertVariable(localVariables, "booleanVar", "true", "boolean"); debugTestRunner.assertVariable(localVariables, "intVar", "5", "int"); debugTestRunner.assertVariable(localVariables, "floatVar", "-10.0", "float"); debugTestRunner.assertVariable(localVariables, "decimalVar", "3.5", "decimal"); debugTestRunner.assertVariable(localVariables, "stringVar", "foo", "string"); debugTestRunner.assertVariable(localVariables, "xmlVar", "<person gender=\"male\"><firstname>Praveen</firstname><lastname>Nada</lastname></person>", "xml"); debugTestRunner.assertVariable(localVariables, "arrayVar", "any[4]", "array"); debugTestRunner.assertVariable(localVariables, "tupleVar", "tuple[int,string] (size = 2)", "tuple"); debugTestRunner.assertVariable(localVariables, "mapVar", "map<string> (size = 4)", "map"); debugTestRunner.assertVariable(localVariables, "recordVar", " /:@[`{~π_123_ƮέŞŢ_Student", "record"); debugTestRunner.assertVariable(localVariables, "anonRecord", "record {| string city; string country; |}", "record"); debugTestRunner.assertVariable(localVariables, "errorVar", "SimpleErrorType", "error"); debugTestRunner.assertVariable(localVariables, "anonFunctionVar", "isolated function (string,string) returns (string)", "function"); debugTestRunner.assertVariable(localVariables, "futureVar", "future<int>", 
"future"); debugTestRunner.assertVariable(localVariables, "objectVar", "Person_\\ /<>:@[`{~π_ƮέŞŢ", "object"); debugTestRunner.assertVariable(localVariables, "anonObjectVar", "Person_\\ /<>:@[`{~π_ƮέŞŢ", "object"); debugTestRunner.assertVariable(localVariables, "typedescVar", "int", "typedesc"); debugTestRunner.assertVariable(localVariables, "unionVar", "foo", "string"); debugTestRunner.assertVariable(localVariables, "optionalVar", "foo", "string"); debugTestRunner.assertVariable(localVariables, "anyVar", "15.0", "float"); debugTestRunner.assertVariable(localVariables, "anydataVar", "619", "int"); debugTestRunner.assertVariable(localVariables, "byteVar", "128", "int"); debugTestRunner.assertVariable(localVariables, "jsonVar", "map<json> (size = 3)", "json"); debugTestRunner.assertVariable(localVariables, "tableWithKeyVar", "table<Employee> (entries = 3)", "table"); debugTestRunner.assertVariable(localVariables, "tableWithoutKeyVar", "table<Employee> (entries = 3)", "table"); debugTestRunner.assertVariable(localVariables, "oddNumberStream", "stream<int, error>", "stream"); debugTestRunner.assertVariable(localVariables, " /:@[`{~π_var", "IL with special characters in var", "string"); debugTestRunner.assertVariable(localVariables, "üňĩćőđę_var", "IL with unicode characters in var", "string"); debugTestRunner.assertVariable(localVariables, "ĠĿŐΒȂɭ_ /:@[`{~π_json", "map<json> (size = 0)", "json"); debugTestRunner.assertVariable(localVariables, "serviceVar", "service", "object"); } @Test(dependsOnMethods = "globalVariableVisibilityTest", description = "Child variable visibility test for local variables at the last line of main() method") public void localVariableChildrenVisibilityTest() throws BallerinaTestException { Map<String, Variable> xmlChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("xmlVar")); debugTestRunner.assertVariable(xmlChildVariables, "attributes", "map", "map"); debugTestRunner.assertVariable(xmlChildVariables, "children", "XMLSequence (size = 2)", "xml"); Map<String, Variable> xmlAttributesChildVariables = debugTestRunner.fetchChildVariables(xmlChildVariables.get("attributes")); debugTestRunner.assertVariable(xmlAttributesChildVariables, "gender", "male", "string"); Map<String, Variable> xmlChildrenVariables = debugTestRunner.fetchChildVariables(xmlChildVariables.get("children")); debugTestRunner.assertVariable(xmlChildrenVariables, "[0]", "<firstname>Praveen</firstname>", "xml"); debugTestRunner.assertVariable(xmlChildrenVariables, "[1]", "<lastname>Nada</lastname>", "xml"); Map<String, Variable> xmlGrandChildrenVariables = debugTestRunner.fetchChildVariables(xmlChildrenVariables.get("[0]")); debugTestRunner.assertVariable(xmlGrandChildrenVariables, "children", "XMLSequence (size = 1)", "xml"); Map<String, Variable> arrayChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("arrayVar")); debugTestRunner.assertVariable(arrayChildVariables, "[0]", "1", "int"); debugTestRunner.assertVariable(arrayChildVariables, "[1]", "20", "int"); debugTestRunner.assertVariable(arrayChildVariables, "[2]", "-10.0", "float"); debugTestRunner.assertVariable(arrayChildVariables, "[3]", "foo", "string"); Map<String, Variable> byteChildVars = debugTestRunner.fetchChildVariables(localVariables.get("byteArrayVar")); debugTestRunner.assertVariable(byteChildVars, "[0]", "105", "byte"); debugTestRunner.assertVariable(byteChildVars, "[1]", "166", "byte"); Map<String, Variable> tupleChildVariables = 
debugTestRunner.fetchChildVariables(localVariables.get("tupleVar")); debugTestRunner.assertVariable(tupleChildVariables, "[0]", "20", "int"); debugTestRunner.assertVariable(tupleChildVariables, "[1]", "foo", "string"); Map<String, Variable> mapChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("mapVar")); debugTestRunner.assertVariable(mapChildVariables, "city", "Colombo 03", "string"); debugTestRunner.assertVariable(mapChildVariables, "country", "Sri Lanka", "string"); debugTestRunner.assertVariable(mapChildVariables, "line1", "No. 20", "string"); debugTestRunner.assertVariable(mapChildVariables, "line2", "Palm Grove", "string"); Map<String, Variable> studentRecordChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("recordVar")); debugTestRunner.assertVariable(studentRecordChildVariables, "1st_name", "John Doe", "string"); debugTestRunner.assertVariable(studentRecordChildVariables, "grades", "Grades", "record"); debugTestRunner.assertVariable(studentRecordChildVariables, "Ȧɢέ_ /:@[`{~π", "20", "int"); Map<String, Variable> gradesChildVariables = debugTestRunner.fetchChildVariables(studentRecordChildVariables.get("grades")); debugTestRunner.assertVariable(gradesChildVariables, "chemistry", "65", "int"); debugTestRunner.assertVariable(gradesChildVariables, "maths", "80", "int"); debugTestRunner.assertVariable(gradesChildVariables, "physics", "75", "int"); Map<String, Variable> recordChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("anonRecord")); debugTestRunner.assertVariable(recordChildVariables, "city", "London", "string"); debugTestRunner.assertVariable(recordChildVariables, "country", "UK", "string"); Map<String, Variable> errorChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("errorVar")); debugTestRunner.assertVariable(errorChildVariables, "details", "map<(ballerina/lang.value:1.0.0:Cloneable " + "(size = 1)", "map"); debugTestRunner.assertVariable(errorChildVariables, "message", "SimpleErrorType", "string"); Map<String, Variable> errorDetailsChildVariables = debugTestRunner.fetchChildVariables(errorChildVariables.get("details")); debugTestRunner.assertVariable(errorDetailsChildVariables, "message", "Simple error occurred", "string"); Map<String, Variable> futureChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("futureVar")); debugTestRunner.assertVariable(futureChildVariables, "isDone", "true", "boolean"); debugTestRunner.assertVariable(futureChildVariables, "result", "90", "int"); Map<String, Variable> personObjectChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("objectVar")); debugTestRunner.assertVariable(personObjectChildVariables, "1st_name", "John", "string"); debugTestRunner.assertVariable(personObjectChildVariables, "address", "No 20, Palm grove", "string"); debugTestRunner.assertVariable(personObjectChildVariables, "parent", "()", "nil"); debugTestRunner.assertVariable(personObjectChildVariables, "email", "[email protected]", "string"); debugTestRunner.assertVariable(personObjectChildVariables, "Ȧɢέ_ /:@[`{~π", "0", "int"); Map<String, Variable> anonObjectChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("anonObjectVar")); debugTestRunner.assertVariable(anonObjectChildVariables, "1st_name", "John", "string"); debugTestRunner.assertVariable(anonObjectChildVariables, "address", "No 20, Palm grove", "string"); debugTestRunner.assertVariable(anonObjectChildVariables, "parent", "()", "nil"); 
debugTestRunner.assertVariable(anonObjectChildVariables, "email", "[email protected]", "string"); debugTestRunner.assertVariable(anonObjectChildVariables, "Ȧɢέ_ /:@[`{~π", "0", "int"); Map<String, Variable> jsonChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("jsonVar")); debugTestRunner.assertVariable(jsonChildVariables, "color", "red", "string"); debugTestRunner.assertVariable(jsonChildVariables, "name", "apple", "string"); debugTestRunner.assertVariable(jsonChildVariables, "price", "40", "int"); Map<String, Variable> tableWithKeyChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("tableWithKeyVar")); debugTestRunner.assertVariable(tableWithKeyChildVariables, "[0]", "Employee", "record"); debugTestRunner.assertVariable(tableWithKeyChildVariables, "[1]", "Employee", "record"); debugTestRunner.assertVariable(tableWithKeyChildVariables, "[2]", "Employee", "record"); Map<String, Variable> tableWithoutKeyChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("tableWithoutKeyVar")); debugTestRunner.assertVariable(tableWithoutKeyChildVariables, "[0]", "Employee", "record"); debugTestRunner.assertVariable(tableWithoutKeyChildVariables, "[1]", "Employee", "record"); debugTestRunner.assertVariable(tableWithoutKeyChildVariables, "[2]", "Employee", "record"); Map<String, Variable> serviceChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("serviceVar")); debugTestRunner.assertVariable(serviceChildVariables, "i", "5", "int"); } @AfterClass(alwaysRun = true) public void cleanUp() { debugTestRunner.terminateDebugSession(); globalVariables.clear(); localVariables.clear(); } }
debugTestRunner.assertVariable(localVariables, "serviceVar", "service", "object");
public void globalVariableVisibilityTest() throws BallerinaTestException { debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); globalVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.GLOBAL); Assert.assertEquals(globalVariables.size(), 14); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); globalVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.GLOBAL); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.LOCAL); Assert.assertEquals(globalVariables.size(), 14); debugTestRunner.assertVariable(globalVariables, "nameWithoutType", "Ballerina", "string"); debugTestRunner.assertVariable(globalVariables, "nameWithType", "Ballerina", "string"); debugTestRunner.assertVariable(globalVariables, "nameMap", "map<string> (size = 1)", "map"); debugTestRunner.assertVariable(globalVariables, "nilWithoutType", "()", "nil"); debugTestRunner.assertVariable(globalVariables, "nilWithType", "()", "nil"); debugTestRunner.assertVariable(globalVariables, "RED", "RED", "string"); debugTestRunner.assertVariable(globalVariables, "BLUE", "Blue", "string"); debugTestRunner.assertVariable(globalVariables, "stringValue", "Ballerina", "string"); debugTestRunner.assertVariable(globalVariables, "decimalValue", "100.0", "decimal"); debugTestRunner.assertVariable(globalVariables, "byteValue", "2", "int"); debugTestRunner.assertVariable(globalVariables, "floatValue", "2.0", "float"); debugTestRunner.assertVariable(globalVariables, "jsonVar", "map<json> (size = 2)", "json"); debugTestRunner.assertVariable(globalVariables, " /:@[`{~π_IL", "IL with global var", "string"); debugTestRunner.assertVariable(globalVariables, "port", "9090", "int"); } @Test(dependsOnMethods = "globalVariableVisibilityTest", description = "Variable visibility test for local variables at the last line of main() method") public void localVariableVisibilityTest() { debugTestRunner.assertVariable(localVariables, "varVariable", "()", "nil"); debugTestRunner.assertVariable(localVariables, "booleanVar", "true", "boolean"); debugTestRunner.assertVariable(localVariables, "intVar", "5", "int"); debugTestRunner.assertVariable(localVariables, "floatVar", "-10.0", "float"); debugTestRunner.assertVariable(localVariables, "decimalVar", "3.5", "decimal"); debugTestRunner.assertVariable(localVariables, "stringVar", "foo", "string"); debugTestRunner.assertVariable(localVariables, "xmlVar", "<person gender=\"male\"><firstname>Praveen</firstname><lastname>Nada</lastname></person>", "xml"); debugTestRunner.assertVariable(localVariables, "arrayVar", "any[4]", "array"); debugTestRunner.assertVariable(localVariables, "tupleVar", "tuple[int,string] (size = 2)", "tuple"); debugTestRunner.assertVariable(localVariables, "mapVar", "map<string> (size = 4)", "map"); debugTestRunner.assertVariable(localVariables, "recordVar", " /:@[`{~π_123_ƮέŞŢ_Student", "record"); debugTestRunner.assertVariable(localVariables, "anonRecord", "record {| string city; string country; |}", "record"); debugTestRunner.assertVariable(localVariables, "errorVar", "SimpleErrorType", "error"); debugTestRunner.assertVariable(localVariables, "anonFunctionVar", "isolated function (string,string) returns (string)", "function"); debugTestRunner.assertVariable(localVariables, "futureVar", "future<int>", 
"future"); debugTestRunner.assertVariable(localVariables, "objectVar", "Person_\\ /<>:@[`{~π_ƮέŞŢ", "object"); debugTestRunner.assertVariable(localVariables, "anonObjectVar", "Person_\\ /<>:@[`{~π_ƮέŞŢ", "object"); debugTestRunner.assertVariable(localVariables, "typedescVar", "int", "typedesc"); debugTestRunner.assertVariable(localVariables, "unionVar", "foo", "string"); debugTestRunner.assertVariable(localVariables, "optionalVar", "foo", "string"); debugTestRunner.assertVariable(localVariables, "anyVar", "15.0", "float"); debugTestRunner.assertVariable(localVariables, "anydataVar", "619", "int"); debugTestRunner.assertVariable(localVariables, "byteVar", "128", "int"); debugTestRunner.assertVariable(localVariables, "jsonVar", "map<json> (size = 3)", "json"); debugTestRunner.assertVariable(localVariables, "tableWithKeyVar", "table<Employee> (entries = 3)", "table"); debugTestRunner.assertVariable(localVariables, "tableWithoutKeyVar", "table<Employee> (entries = 3)", "table"); debugTestRunner.assertVariable(localVariables, "oddNumberStream", "stream<int, error>", "stream"); debugTestRunner.assertVariable(localVariables, " /:@[`{~π_var", "IL with special characters in var", "string"); debugTestRunner.assertVariable(localVariables, "üňĩćőđę_var", "IL with unicode characters in var", "string"); debugTestRunner.assertVariable(localVariables, "ĠĿŐΒȂɭ_ /:@[`{~π_json", "map<json> (size = 0)", "json"); debugTestRunner.assertVariable(localVariables, "serviceVar", "service", "service"); } @Test(dependsOnMethods = "globalVariableVisibilityTest", description = "Child variable visibility test for local variables at the last line of main() method") public void localVariableChildrenVisibilityTest() throws BallerinaTestException { Map<String, Variable> xmlChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("xmlVar")); debugTestRunner.assertVariable(xmlChildVariables, "attributes", "map", "map"); debugTestRunner.assertVariable(xmlChildVariables, "children", "XMLSequence (size = 2)", "xml"); Map<String, Variable> xmlAttributesChildVariables = debugTestRunner.fetchChildVariables(xmlChildVariables.get("attributes")); debugTestRunner.assertVariable(xmlAttributesChildVariables, "gender", "male", "string"); Map<String, Variable> xmlChildrenVariables = debugTestRunner.fetchChildVariables(xmlChildVariables.get("children")); debugTestRunner.assertVariable(xmlChildrenVariables, "[0]", "<firstname>Praveen</firstname>", "xml"); debugTestRunner.assertVariable(xmlChildrenVariables, "[1]", "<lastname>Nada</lastname>", "xml"); Map<String, Variable> xmlGrandChildrenVariables = debugTestRunner.fetchChildVariables(xmlChildrenVariables.get("[0]")); debugTestRunner.assertVariable(xmlGrandChildrenVariables, "children", "XMLSequence (size = 1)", "xml"); Map<String, Variable> arrayChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("arrayVar")); debugTestRunner.assertVariable(arrayChildVariables, "[0]", "1", "int"); debugTestRunner.assertVariable(arrayChildVariables, "[1]", "20", "int"); debugTestRunner.assertVariable(arrayChildVariables, "[2]", "-10.0", "float"); debugTestRunner.assertVariable(arrayChildVariables, "[3]", "foo", "string"); Map<String, Variable> byteChildVars = debugTestRunner.fetchChildVariables(localVariables.get("byteArrayVar")); debugTestRunner.assertVariable(byteChildVars, "[0]", "105", "byte"); debugTestRunner.assertVariable(byteChildVars, "[1]", "166", "byte"); Map<String, Variable> tupleChildVariables = 
debugTestRunner.fetchChildVariables(localVariables.get("tupleVar")); debugTestRunner.assertVariable(tupleChildVariables, "[0]", "20", "int"); debugTestRunner.assertVariable(tupleChildVariables, "[1]", "foo", "string"); Map<String, Variable> mapChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("mapVar")); debugTestRunner.assertVariable(mapChildVariables, "city", "Colombo 03", "string"); debugTestRunner.assertVariable(mapChildVariables, "country", "Sri Lanka", "string"); debugTestRunner.assertVariable(mapChildVariables, "line1", "No. 20", "string"); debugTestRunner.assertVariable(mapChildVariables, "line2", "Palm Grove", "string"); Map<String, Variable> studentRecordChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("recordVar")); debugTestRunner.assertVariable(studentRecordChildVariables, "1st_name", "John Doe", "string"); debugTestRunner.assertVariable(studentRecordChildVariables, "grades", "Grades", "record"); debugTestRunner.assertVariable(studentRecordChildVariables, "Ȧɢέ_ /:@[`{~π", "20", "int"); Map<String, Variable> gradesChildVariables = debugTestRunner.fetchChildVariables(studentRecordChildVariables.get("grades")); debugTestRunner.assertVariable(gradesChildVariables, "chemistry", "65", "int"); debugTestRunner.assertVariable(gradesChildVariables, "maths", "80", "int"); debugTestRunner.assertVariable(gradesChildVariables, "physics", "75", "int"); Map<String, Variable> recordChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("anonRecord")); debugTestRunner.assertVariable(recordChildVariables, "city", "London", "string"); debugTestRunner.assertVariable(recordChildVariables, "country", "UK", "string"); Map<String, Variable> errorChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("errorVar")); debugTestRunner.assertVariable(errorChildVariables, "details", "map<(ballerina/lang.value:1.0.0:Cloneable " + "(size = 1)", "map"); debugTestRunner.assertVariable(errorChildVariables, "message", "SimpleErrorType", "string"); Map<String, Variable> errorDetailsChildVariables = debugTestRunner.fetchChildVariables(errorChildVariables.get("details")); debugTestRunner.assertVariable(errorDetailsChildVariables, "message", "Simple error occurred", "string"); Map<String, Variable> futureChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("futureVar")); debugTestRunner.assertVariable(futureChildVariables, "isDone", "true", "boolean"); debugTestRunner.assertVariable(futureChildVariables, "result", "90", "int"); Map<String, Variable> personObjectChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("objectVar")); debugTestRunner.assertVariable(personObjectChildVariables, "1st_name", "John", "string"); debugTestRunner.assertVariable(personObjectChildVariables, "address", "No 20, Palm grove", "string"); debugTestRunner.assertVariable(personObjectChildVariables, "parent", "()", "nil"); debugTestRunner.assertVariable(personObjectChildVariables, "email", "[email protected]", "string"); debugTestRunner.assertVariable(personObjectChildVariables, "Ȧɢέ_ /:@[`{~π", "0", "int"); Map<String, Variable> anonObjectChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("anonObjectVar")); debugTestRunner.assertVariable(anonObjectChildVariables, "1st_name", "John", "string"); debugTestRunner.assertVariable(anonObjectChildVariables, "address", "No 20, Palm grove", "string"); debugTestRunner.assertVariable(anonObjectChildVariables, "parent", "()", "nil"); 
debugTestRunner.assertVariable(anonObjectChildVariables, "email", "[email protected]", "string"); debugTestRunner.assertVariable(anonObjectChildVariables, "Ȧɢέ_ /:@[`{~π", "0", "int"); Map<String, Variable> jsonChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("jsonVar")); debugTestRunner.assertVariable(jsonChildVariables, "color", "red", "string"); debugTestRunner.assertVariable(jsonChildVariables, "name", "apple", "string"); debugTestRunner.assertVariable(jsonChildVariables, "price", "40", "int"); Map<String, Variable> tableWithKeyChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("tableWithKeyVar")); debugTestRunner.assertVariable(tableWithKeyChildVariables, "[0]", "Employee", "record"); debugTestRunner.assertVariable(tableWithKeyChildVariables, "[1]", "Employee", "record"); debugTestRunner.assertVariable(tableWithKeyChildVariables, "[2]", "Employee", "record"); Map<String, Variable> tableWithoutKeyChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("tableWithoutKeyVar")); debugTestRunner.assertVariable(tableWithoutKeyChildVariables, "[0]", "Employee", "record"); debugTestRunner.assertVariable(tableWithoutKeyChildVariables, "[1]", "Employee", "record"); debugTestRunner.assertVariable(tableWithoutKeyChildVariables, "[2]", "Employee", "record"); Map<String, Variable> serviceChildVariables = debugTestRunner.fetchChildVariables(localVariables.get("serviceVar")); debugTestRunner.assertVariable(serviceChildVariables, "i", "5", "int"); } @AfterClass(alwaysRun = true) public void cleanUp() { debugTestRunner.terminateDebugSession(); globalVariables.clear(); localVariables.clear(); } }
class VariableVisibilityTest extends BaseTestCase { Pair<BallerinaTestDebugPoint, StoppedEventArguments> debugHitInfo; Map<String, Variable> globalVariables = new HashMap<>(); Map<String, Variable> localVariables = new HashMap<>(); DebugTestRunner debugTestRunner; @BeforeClass public void setup() throws BallerinaTestException { String testProjectName = "variable-tests"; String testModuleFileName = "main.bal"; debugTestRunner = new DebugTestRunner(testProjectName, testModuleFileName, true); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 117)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 198)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 217)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 224)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 231)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 240)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 246)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 253)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 286)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 264)); debugTestRunner.initDebugSession(DebugUtils.DebuggeeExecutionKind.RUN); } @Test(description = "Variable visibility test at the beginning(first line) of the main() method") public void initialVariableVisibilityTest() throws BallerinaTestException { debugHitInfo = debugTestRunner.waitForDebugHit(25000); globalVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.GLOBAL); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.LOCAL); Assert.assertEquals(globalVariables.size(), 14); } @Test(dependsOnMethods = "initialVariableVisibilityTest", description = "Variable visibility test in the middle of the main() method for a new variable") public void newVariableVisibilityTest() throws BallerinaTestException { debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.STEP_OVER); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertTrue(localVariables.containsKey("byteVar")); } @Test(dependsOnMethods = "newVariableVisibilityTest", description = "Variable visibility test in control flows") public void controlFlowVariableVisibilityTest() throws BallerinaTestException { debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 45); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = 
debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 45); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 45); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 45); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 46); } @Test(dependsOnMethods = "controlFlowVariableVisibilityTest", description = "Variable visibility test for global variables")
class VariableVisibilityTest extends BaseTestCase { Pair<BallerinaTestDebugPoint, StoppedEventArguments> debugHitInfo; Map<String, Variable> globalVariables = new HashMap<>(); Map<String, Variable> localVariables = new HashMap<>(); DebugTestRunner debugTestRunner; @BeforeClass public void setup() throws BallerinaTestException { String testProjectName = "variable-tests"; String testModuleFileName = "main.bal"; debugTestRunner = new DebugTestRunner(testProjectName, testModuleFileName, true); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 117)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 198)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 217)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 224)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 231)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 240)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 246)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 253)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 286)); debugTestRunner.addBreakPoint(new BallerinaTestDebugPoint(debugTestRunner.testEntryFilePath, 264)); debugTestRunner.initDebugSession(DebugUtils.DebuggeeExecutionKind.RUN); } @Test(description = "Variable visibility test at the beginning(first line) of the main() method") public void initialVariableVisibilityTest() throws BallerinaTestException { debugHitInfo = debugTestRunner.waitForDebugHit(25000); globalVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.GLOBAL); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), VariableScope.LOCAL); Assert.assertEquals(globalVariables.size(), 14); } @Test(dependsOnMethods = "initialVariableVisibilityTest", description = "Variable visibility test in the middle of the main() method for a new variable") public void newVariableVisibilityTest() throws BallerinaTestException { debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.STEP_OVER); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertTrue(localVariables.containsKey("byteVar")); } @Test(dependsOnMethods = "newVariableVisibilityTest", description = "Variable visibility test in control flows") public void controlFlowVariableVisibilityTest() throws BallerinaTestException { debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 45); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = 
debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 45); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 45); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 45); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); debugTestRunner.resumeProgram(debugHitInfo.getRight(), DebugTestRunner.DebugResumeKind.NEXT_BREAKPOINT); debugHitInfo = debugTestRunner.waitForDebugHit(10000); localVariables = debugTestRunner.fetchVariables(debugHitInfo.getRight(), DebugTestRunner.VariableScope.LOCAL); Assert.assertEquals(localVariables.size(), 46); } @Test(dependsOnMethods = "controlFlowVariableVisibilityTest", description = "Variable visibility test for global variables")
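The fix in this record changes the expected visible type of `serviceVar` from "object" to "service". A tiny self-contained sketch of the assertion shape, with a hypothetical Variable record standing in for the debug-adapter variable type:

import java.util.Map;
import java.util.Objects;

// Minimal sketch of the assertion from the diff above; Variable and
// assertVariable are hypothetical stand-ins, not the test framework's API.
final class VariableAssertSketch {
    record Variable(String value, String type) {}

    static void assertVariable(Map<String, Variable> vars, String name,
                               String expectedValue, String expectedType) {
        Variable v = vars.get(name);
        if (v == null || !Objects.equals(v.value(), expectedValue)
                || !Objects.equals(v.type(), expectedType)) {
            throw new AssertionError(name + " -> " + v);
        }
    }

    public static void main(String[] args) {
        Map<String, Variable> locals = Map.of("serviceVar", new Variable("service", "service"));
        // After the fix, both the value and the visible type name are "service":
        assertVariable(locals, "serviceVar", "service", "service");
        System.out.println("serviceVar type name is 'service'");
    }
}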
BTW I'm already changing these here: https://github.com/apache/flink/pull/18770
static ValidationException lookupDisabled(ObjectIdentifier objectIdentifier) { return new ValidationException( String.format( "The persisted plan does not include all required catalog metadata for table '%s'. " + "However, lookup is disabled because option '%s' = '%s'. " + "Either enable the catalog lookup with '%s' = '%s' / '%s' or " + "regenerate the plan with '%s' != '%s'. " + "Make sure the table is not compiled as a temporary table.", objectIdentifier.asSummaryString(), PLAN_RESTORE_CATALOG_OBJECTS.key(), CatalogPlanRestore.ALL_ENFORCED.name(), PLAN_RESTORE_CATALOG_OBJECTS.key(), IDENTIFIER.name(), CatalogPlanRestore.ALL.name(), PLAN_COMPILE_CATALOG_OBJECTS.key(), CatalogPlanCompilation.IDENTIFIER.name())); }
"The persisted plan does not include all required catalog metadata for table '%s'. "
static ValidationException lookupDisabled(ObjectIdentifier objectIdentifier) { return new ValidationException( String.format( "The persisted plan does not include all required catalog metadata for table '%s'. " + "However, lookup is disabled because option '%s' = '%s'. " + "Either enable the catalog lookup with '%s' = '%s' / '%s' or " + "regenerate the plan with '%s' != '%s'. " + "Make sure the table is not compiled as a temporary table.", objectIdentifier.asSummaryString(), PLAN_RESTORE_CATALOG_OBJECTS.key(), CatalogPlanRestore.ALL_ENFORCED.name(), PLAN_RESTORE_CATALOG_OBJECTS.key(), IDENTIFIER.name(), CatalogPlanRestore.ALL.name(), PLAN_COMPILE_CATALOG_OBJECTS.key(), CatalogPlanCompilation.IDENTIFIER.name())); }
class ContextResolvedTableJsonDeserializer extends StdDeserializer<ContextResolvedTable> { private static final long serialVersionUID = 1L; public ContextResolvedTableJsonDeserializer() { super(ContextResolvedTable.class); } @Override public ContextResolvedTable deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException { final CatalogPlanRestore planRestoreOption = SerdeContext.get(ctx).getConfiguration().get(PLAN_RESTORE_CATALOG_OBJECTS); final CatalogManager catalogManager = SerdeContext.get(ctx).getFlinkContext().getCatalogManager(); final ObjectNode objectNode = jsonParser.readValueAsTree(); final ObjectIdentifier identifier = JsonSerdeUtil.deserializeOptionalField( objectNode, FIELD_NAME_IDENTIFIER, ObjectIdentifier.class, jsonParser.getCodec(), ctx) .orElse(null); ResolvedCatalogTable resolvedCatalogTable = JsonSerdeUtil.deserializeOptionalField( objectNode, FIELD_NAME_CATALOG_TABLE, ResolvedCatalogTable.class, jsonParser.getCodec(), ctx) .orElse(null); if (identifier == null && resolvedCatalogTable == null) { throw new ValidationException( String.format( "The input JSON is invalid because it doesn't contain '%s', nor the '%s'.", FIELD_NAME_IDENTIFIER, FIELD_NAME_CATALOG_TABLE)); } if (identifier == null) { if (isLookupForced(planRestoreOption)) { throw missingIdentifier(); } return ContextResolvedTable.anonymous(resolvedCatalogTable); } Optional<ContextResolvedTable> contextResolvedTableFromCatalog = isLookupEnabled(planRestoreOption) ? catalogManager.getTable(identifier) : Optional.empty(); if (contextResolvedTableFromCatalog.isPresent() && resolvedCatalogTable != null) { ResolvedSchema schemaFromPlan = resolvedCatalogTable.getResolvedSchema(); ResolvedSchema schemaFromCatalog = contextResolvedTableFromCatalog.get().getResolvedSchema(); if (!areResolvedSchemasEqual(schemaFromPlan, schemaFromCatalog)) { throw schemaNotMatching(identifier, schemaFromPlan, schemaFromCatalog); } } if (resolvedCatalogTable == null || isLookupForced(planRestoreOption)) { if (!isLookupEnabled(planRestoreOption)) { throw lookupDisabled(identifier); } return contextResolvedTableFromCatalog.orElseThrow( () -> missingTableFromCatalog(identifier, isLookupForced(planRestoreOption))); } if (contextResolvedTableFromCatalog.isPresent()) { if (objectNode.at("/" + FIELD_NAME_CATALOG_TABLE + "/" + OPTIONS).isMissingNode()) { return contextResolvedTableFromCatalog.get(); } return contextResolvedTableFromCatalog .flatMap(ContextResolvedTable::getCatalog) .map(c -> ContextResolvedTable.permanent(identifier, c, resolvedCatalogTable)) .orElseGet( () -> ContextResolvedTable.temporary(identifier, resolvedCatalogTable)); } return ContextResolvedTable.temporary(identifier, resolvedCatalogTable); } private boolean areResolvedSchemasEqual( ResolvedSchema schemaFromPlan, ResolvedSchema schemaFromCatalog) { List<Column> columnsFromPlan = schemaFromPlan.getColumns(); List<Column> columnsFromCatalog = schemaFromCatalog.getColumns(); if (columnsFromPlan.size() != columnsFromCatalog.size()) { return false; } for (int i = 0; i < columnsFromPlan.size(); i++) { Column columnFromPlan = columnsFromPlan.get(i); Column columnFromCatalog = columnsFromCatalog.get(i); if (!Objects.equals(columnFromPlan.getName(), columnFromCatalog.getName()) || !Objects.equals(columnFromPlan.getClass(), columnFromCatalog.getClass()) || !Objects.equals( columnFromPlan.getDataType(), columnFromCatalog.getDataType())) { return false; } } return Objects.equals(schemaFromPlan.getPrimaryKey(), schemaFromCatalog.getPrimaryKey()); } private 
boolean isLookupForced(CatalogPlanRestore planRestoreOption) { return planRestoreOption == IDENTIFIER; } private boolean isLookupEnabled(CatalogPlanRestore planRestoreOption) { return planRestoreOption != CatalogPlanRestore.ALL_ENFORCED; } static ValidationException missingIdentifier() { return new ValidationException( String.format( "The table cannot be deserialized as no identifier is present in the persisted plan." + "However, lookup is forced by '%s' = '%s'. " + "Either allow restoring the table from the catalog with '%s' = '%s' / '%s' " + "or make sure to not use anonymous tables when generating the plan.", PLAN_RESTORE_CATALOG_OBJECTS.key(), IDENTIFIER.name(), PLAN_RESTORE_CATALOG_OBJECTS.key(), CatalogPlanRestore.ALL.name(), CatalogPlanRestore.ALL_ENFORCED.name())); } static ValidationException schemaNotMatching( ObjectIdentifier objectIdentifier, ResolvedSchema schemaFromPlan, ResolvedSchema schemaFromCatalog) { return new ValidationException( String.format( "The schema of table '%s' from the persisted plan does not match the " + "schema loaded from the catalog: '%s' != '%s'. " + "Make sure the table schema in the catalog is still identical.", objectIdentifier.asSummaryString(), schemaFromPlan, schemaFromCatalog)); } static ValidationException missingTableFromCatalog( ObjectIdentifier identifier, boolean forcedLookup) { String initialReason; if (forcedLookup) { initialReason = String.format( "Cannot resolve table '%s' and catalog lookup is forced because '%s' = '%s'. ", identifier.asSummaryString(), PLAN_RESTORE_CATALOG_OBJECTS.key(), IDENTIFIER); } else { initialReason = String.format( "Cannot resolve table '%s' and the persisted plan does not include " + "all required catalog table metadata. ", identifier.asSummaryString()); } return new ValidationException( initialReason + String.format( "Make sure a registered catalog contains the table when restoring or " + "the table is available as a temporary table. " + "Otherwise regenerate the plan with '%s' != '%s' and make " + "sure the table was not compiled as a temporary table.", PLAN_COMPILE_CATALOG_OBJECTS.key(), CatalogPlanCompilation.IDENTIFIER.name())); } }
class ContextResolvedTableJsonDeserializer extends StdDeserializer<ContextResolvedTable> { private static final long serialVersionUID = 1L; public ContextResolvedTableJsonDeserializer() { super(ContextResolvedTable.class); } @Override public ContextResolvedTable deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException { final CatalogPlanRestore planRestoreOption = SerdeContext.get(ctx).getConfiguration().get(PLAN_RESTORE_CATALOG_OBJECTS); final CatalogManager catalogManager = SerdeContext.get(ctx).getFlinkContext().getCatalogManager(); final ObjectNode objectNode = jsonParser.readValueAsTree(); final ObjectIdentifier identifier = JsonSerdeUtil.deserializeOptionalField( objectNode, FIELD_NAME_IDENTIFIER, ObjectIdentifier.class, jsonParser.getCodec(), ctx) .orElse(null); ResolvedCatalogTable resolvedCatalogTable = JsonSerdeUtil.deserializeOptionalField( objectNode, FIELD_NAME_CATALOG_TABLE, ResolvedCatalogTable.class, jsonParser.getCodec(), ctx) .orElse(null); if (identifier == null && resolvedCatalogTable == null) { throw new ValidationException( String.format( "The input JSON is invalid because it doesn't contain '%s', nor the '%s'.", FIELD_NAME_IDENTIFIER, FIELD_NAME_CATALOG_TABLE)); } if (identifier == null) { if (isLookupForced(planRestoreOption)) { throw missingIdentifier(); } return ContextResolvedTable.anonymous(resolvedCatalogTable); } Optional<ContextResolvedTable> contextResolvedTableFromCatalog = isLookupEnabled(planRestoreOption) ? catalogManager.getTable(identifier) : Optional.empty(); if (contextResolvedTableFromCatalog.isPresent() && resolvedCatalogTable != null) { ResolvedSchema schemaFromPlan = resolvedCatalogTable.getResolvedSchema(); ResolvedSchema schemaFromCatalog = contextResolvedTableFromCatalog.get().getResolvedSchema(); if (!areResolvedSchemasEqual(schemaFromPlan, schemaFromCatalog)) { throw schemaNotMatching(identifier, schemaFromPlan, schemaFromCatalog); } } if (resolvedCatalogTable == null || isLookupForced(planRestoreOption)) { if (!isLookupEnabled(planRestoreOption)) { throw lookupDisabled(identifier); } return contextResolvedTableFromCatalog.orElseThrow( () -> missingTableFromCatalog(identifier, isLookupForced(planRestoreOption))); } if (contextResolvedTableFromCatalog.isPresent()) { if (objectNode.at("/" + FIELD_NAME_CATALOG_TABLE + "/" + OPTIONS).isMissingNode()) { return contextResolvedTableFromCatalog.get(); } return contextResolvedTableFromCatalog .flatMap(ContextResolvedTable::getCatalog) .map(c -> ContextResolvedTable.permanent(identifier, c, resolvedCatalogTable)) .orElseGet( () -> ContextResolvedTable.temporary(identifier, resolvedCatalogTable)); } return ContextResolvedTable.temporary(identifier, resolvedCatalogTable); } private boolean areResolvedSchemasEqual( ResolvedSchema schemaFromPlan, ResolvedSchema schemaFromCatalog) { List<Column> columnsFromPlan = schemaFromPlan.getColumns(); List<Column> columnsFromCatalog = schemaFromCatalog.getColumns(); if (columnsFromPlan.size() != columnsFromCatalog.size()) { return false; } for (int i = 0; i < columnsFromPlan.size(); i++) { Column columnFromPlan = columnsFromPlan.get(i); Column columnFromCatalog = columnsFromCatalog.get(i); if (!Objects.equals(columnFromPlan.getName(), columnFromCatalog.getName()) || !Objects.equals(columnFromPlan.getClass(), columnFromCatalog.getClass()) || !Objects.equals( columnFromPlan.getDataType(), columnFromCatalog.getDataType())) { return false; } } return Objects.equals(schemaFromPlan.getPrimaryKey(), schemaFromCatalog.getPrimaryKey()); } private 
boolean isLookupForced(CatalogPlanRestore planRestoreOption) { return planRestoreOption == IDENTIFIER; } private boolean isLookupEnabled(CatalogPlanRestore planRestoreOption) { return planRestoreOption != CatalogPlanRestore.ALL_ENFORCED; } static ValidationException missingIdentifier() { return new ValidationException( String.format( "The table cannot be deserialized as no identifier is present in the persisted plan." + "However, lookup is forced by '%s' = '%s'. " + "Either allow restoring the table from the catalog with '%s' = '%s' / '%s' " + "or make sure to not use anonymous tables when generating the plan.", PLAN_RESTORE_CATALOG_OBJECTS.key(), IDENTIFIER.name(), PLAN_RESTORE_CATALOG_OBJECTS.key(), CatalogPlanRestore.ALL.name(), CatalogPlanRestore.ALL_ENFORCED.name())); } static ValidationException schemaNotMatching( ObjectIdentifier objectIdentifier, ResolvedSchema schemaFromPlan, ResolvedSchema schemaFromCatalog) { return new ValidationException( String.format( "The schema of table '%s' from the persisted plan does not match the " + "schema loaded from the catalog: '%s' != '%s'. " + "Make sure the table schema in the catalog is still identical.", objectIdentifier.asSummaryString(), schemaFromPlan, schemaFromCatalog)); } static ValidationException missingTableFromCatalog( ObjectIdentifier identifier, boolean forcedLookup) { String initialReason; if (forcedLookup) { initialReason = String.format( "Cannot resolve table '%s' and catalog lookup is forced because '%s' = '%s'. ", identifier.asSummaryString(), PLAN_RESTORE_CATALOG_OBJECTS.key(), IDENTIFIER); } else { initialReason = String.format( "Cannot resolve table '%s' and the persisted plan does not include " + "all required catalog table metadata. ", identifier.asSummaryString()); } return new ValidationException( initialReason + String.format( "Make sure a registered catalog contains the table when restoring or " + "the table is available as a temporary table. " + "Otherwise regenerate the plan with '%s' != '%s' and make " + "sure the table was not compiled as a temporary table.", PLAN_COMPILE_CATALOG_OBJECTS.key(), CatalogPlanCompilation.IDENTIFIER.name())); } }
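The deserializer's branching above hinges on two predicates over CatalogPlanRestore: lookup is forced only for IDENTIFIER, and enabled for everything except ALL_ENFORCED. A self-contained sketch of that decision table, with a simplified enum standing in for Flink's option:

// Simplified stand-in for Flink's CatalogPlanRestore option, only to show
// the forced/enabled decision table used by the deserializer above.
enum CatalogPlanRestore { ALL, ALL_ENFORCED, IDENTIFIER }

final class LookupPolicySketch {
    static boolean isLookupForced(CatalogPlanRestore option) {
        return option == CatalogPlanRestore.IDENTIFIER;
    }

    static boolean isLookupEnabled(CatalogPlanRestore option) {
        return option != CatalogPlanRestore.ALL_ENFORCED;
    }

    public static void main(String[] args) {
        for (CatalogPlanRestore option : CatalogPlanRestore.values()) {
            System.out.printf("%s: forced=%b, enabled=%b%n",
                    option, isLookupForced(option), isLookupEnabled(option));
        }
        // ALL:          forced=false, enabled=true   (plan metadata wins, catalog as fallback)
        // ALL_ENFORCED: forced=false, enabled=false  (plan only; triggers the lookupDisabled error)
        // IDENTIFIER:   forced=true,  enabled=true   (catalog only; plan metadata ignored)
    }
}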
The if condition checks `null != jobItemProgress`, but `jobItemInfo` is used here; it's not as clear as before.
public void init(final ShardingSphereDatabase database, final SQLStatement sqlStatement) { List<InventoryIncrementalJobItemInfo> jobItemInfos = JOB_API.getJobItemInfos(((ShowMigrationStatusStatement) sqlStatement).getJobId()); data = jobItemInfos.stream().map(each -> { MigrationJobItemInfo jobItemInfo = (MigrationJobItemInfo) each; Collection<Object> result = new LinkedList<>(); result.add(jobItemInfo.getShardingItem()); InventoryIncrementalJobItemProgress jobItemProgress = jobItemInfo.getJobItemProgress(); if (null != jobItemProgress) { result.add(jobItemProgress.getDataSourceName()); result.add(jobItemProgress.getStatus()); result.add(jobItemProgress.isActive() ? Boolean.TRUE.toString() : Boolean.FALSE.toString()); result.add(jobItemProgress.getProcessedRecordsCount()); result.add(jobItemInfo.getInventoryFinishedPercentage()); result.add(jobItemInfo.getIncrementalIdleSeconds()); } else { result.add(""); result.add(""); result.add(""); result.add(""); result.add(""); result.add(""); } result.add(jobItemInfo.getErrorMessage()); return result; }).collect(Collectors.toList()).iterator(); }
result.add(jobItemInfo.getIncrementalIdleSeconds());
public void init(final ShardingSphereDatabase database, final SQLStatement sqlStatement) { long currentTimeMillis = System.currentTimeMillis(); List<InventoryIncrementalJobItemInfo> jobItemInfos = JOB_API.getJobItemInfos(((ShowMigrationStatusStatement) sqlStatement).getJobId()); data = jobItemInfos.stream().map(each -> { Collection<Object> result = new LinkedList<>(); result.add(each.getShardingItem()); InventoryIncrementalJobItemProgress jobItemProgress = each.getJobItemProgress(); if (null != jobItemProgress) { result.add(jobItemProgress.getDataSourceName()); result.add(jobItemProgress.getStatus()); result.add(jobItemProgress.isActive() ? Boolean.TRUE.toString() : Boolean.FALSE.toString()); result.add(jobItemProgress.getProcessedRecordsCount()); result.add(each.getInventoryFinishedPercentage()); String incrementalIdleSeconds = ""; if (jobItemProgress.getIncremental().getIncrementalLatestActiveTimeMillis() > 0) { long latestActiveTimeMillis = Math.max(each.getStartTimeMillis(), jobItemProgress.getIncremental().getIncrementalLatestActiveTimeMillis()); incrementalIdleSeconds = String.valueOf(TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis - latestActiveTimeMillis)); } result.add(incrementalIdleSeconds); } else { result.add(""); result.add(""); result.add(""); result.add(""); result.add(""); result.add(""); } result.add(each.getErrorMessage()); return result; }).collect(Collectors.toList()).iterator(); }
class ShowMigrationJobStatusQueryResultSet implements DatabaseDistSQLResultSet { private static final MigrationJobPublicAPI JOB_API = PipelineJobPublicAPIFactory.getMigrationJobPublicAPI(); private Iterator<Collection<Object>> data; @Override @Override public Collection<String> getColumnNames() { return Arrays.asList("item", "data_source", "status", "active", "processed_records_count", "inventory_finished_percentage", "incremental_idle_seconds", "error_message"); } @Override public boolean next() { return data.hasNext(); } @Override public Collection<Object> getRowData() { return data.next(); } @Override public String getType() { return ShowMigrationStatusStatement.class.getName(); } }
class ShowMigrationJobStatusQueryResultSet implements DatabaseDistSQLResultSet { private static final MigrationJobPublicAPI JOB_API = PipelineJobPublicAPIFactory.getMigrationJobPublicAPI(); private Iterator<Collection<Object>> data; @Override @Override public Collection<String> getColumnNames() { return Arrays.asList("item", "data_source", "status", "active", "processed_records_count", "inventory_finished_percentage", "incremental_idle_seconds", "error_message"); } @Override public boolean next() { return data.hasNext(); } @Override public Collection<Object> getRowData() { return data.next(); } @Override public String getType() { return ShowMigrationStatusStatement.class.getName(); } }
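The fix in this record computes the idle time inline: take the later of the job start time and the latest incremental activity time, subtract from now, and convert to seconds, emitting an empty string when no incremental activity has been recorded yet. A self-contained sketch of just that computation:

import java.util.concurrent.TimeUnit;

// Minimal sketch of the incremental-idle-seconds computation from the fix above.
final class IdleSecondsSketch {
    static String incrementalIdleSeconds(long nowMillis, long startTimeMillis,
                                         long latestActiveTimeMillis) {
        if (latestActiveTimeMillis <= 0) {
            return ""; // no incremental activity recorded yet
        }
        // A job restarted after its last activity should not look idle since
        // before the restart, so clamp to the job start time.
        long reference = Math.max(startTimeMillis, latestActiveTimeMillis);
        return String.valueOf(TimeUnit.MILLISECONDS.toSeconds(nowMillis - reference));
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println(incrementalIdleSeconds(now, now - 60_000, now - 15_000)); // "15"
        System.out.println(incrementalIdleSeconds(now, now - 60_000, 0));            // ""
    }
}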
Since I'm writing the scopes to the buffer on the fly, to write the scope count I'll have to start a brand new buffer and write to it. I felt like that's overkill. WDYT?
private void writeTaintTable(ByteBuf buf, TaintTable taintTable) { ByteBuf birbuf = Unpooled.buffer(); birbuf.writeShort(taintTable.rowCount); birbuf.writeShort(taintTable.columnCount); for (Integer paramIndex : taintTable.taintTable.keySet()) { birbuf.writeShort(paramIndex); List<Byte> taintRecord = taintTable.taintTable.get(paramIndex); for (Byte taintStatus : taintRecord) { birbuf.writeByte(taintStatus); } } int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); }
birbuf.writeShort(taintTable.rowCount);
private void writeTaintTable(ByteBuf buf, TaintTable taintTable) { ByteBuf birbuf = Unpooled.buffer(); birbuf.writeShort(taintTable.rowCount); birbuf.writeShort(taintTable.columnCount); birbuf.writeInt(taintTable.taintTable.size()); for (Integer paramIndex : taintTable.taintTable.keySet()) { birbuf.writeShort(paramIndex); List<Byte> taintRecord = taintTable.taintTable.get(paramIndex); birbuf.writeInt(taintRecord.size()); for (Byte taintStatus : taintRecord) { birbuf.writeByte(taintStatus); } } int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); }
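The fix above adds explicit counts (`writeInt(size)`) before each variable-length list so the reader knows how many entries follow, i.e. a standard length-prefix encoding. A self-contained sketch of a symmetric write/read pair using Netty's ByteBuf (assumes netty-buffer on the classpath; the record layout is illustrative, not the BIR format):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

import java.util.ArrayList;
import java.util.List;

// Sketch of the length-prefix pattern from the fix above: write the entry
// count before the entries so the reader knows how many records to consume.
final class LengthPrefixSketch {
    static void write(ByteBuf buf, List<byte[]> records) {
        buf.writeInt(records.size());          // count first ...
        for (byte[] record : records) {
            buf.writeInt(record.length);       // ... then each entry, itself prefixed
            buf.writeBytes(record);
        }
    }

    static List<byte[]> read(ByteBuf buf) {
        int count = buf.readInt();
        List<byte[]> records = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            byte[] record = new byte[buf.readInt()];
            buf.readBytes(record);
            records.add(record);
        }
        return records;
    }

    public static void main(String[] args) {
        ByteBuf buf = Unpooled.buffer();
        write(buf, List.of(new byte[]{1, 2}, new byte[]{3}));
        System.out.println(read(buf).size()); // 2
    }
}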
class BIRBinaryWriter { private final ConstantPool cp = new ConstantPool(); private final BIRNode.BIRPackage birPackage; public BIRBinaryWriter(BIRNode.BIRPackage birPackage) { this.birPackage = birPackage; } public byte[] serialize() { ByteBuf birbuf = Unpooled.buffer(); ByteBuf scopebuf = Unpooled.buffer(); BIRTypeWriter typeWriter = new BIRTypeWriter(birbuf, cp); BIRInstructionWriter insWriter = new BIRInstructionWriter(birbuf, scopebuf, cp, this); int orgCPIndex = addStringCPEntry(birPackage.org.value); int nameCPIndex = addStringCPEntry(birPackage.name.value); int versionCPIndex = addStringCPEntry(birPackage.version.value); int pkgIndex = cp.addCPEntry(new PackageCPEntry(orgCPIndex, nameCPIndex, versionCPIndex)); birbuf.writeInt(pkgIndex); writeImportModuleDecls(birbuf, birPackage.importModules); writeConstants(birbuf, birPackage.constants); writeTypeDefs(birbuf, typeWriter, insWriter, birPackage.typeDefs); writeGlobalVars(birbuf, typeWriter, birPackage.globalVars); writeTypeDefBodies(birbuf, typeWriter, insWriter, birPackage.typeDefs); writeFunctions(birbuf, typeWriter, insWriter, birPackage.functions); writeAnnotations(birbuf, typeWriter, birPackage.annotations); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (DataOutputStream dataOut = new DataOutputStream(baos)) { dataOut.write(cp.serialize()); dataOut.write(birbuf.nioBuffer().array(), 0, birbuf.nioBuffer().limit()); return baos.toByteArray(); } catch (IOException e) { throw new BLangCompilerException("failed to serialize the bir", e); } } private void writeImportModuleDecls(ByteBuf buf, List<BIRNode.BIRImportModule> birImpModList) { buf.writeInt(birImpModList.size()); birImpModList.forEach(impMod -> { buf.writeInt(addStringCPEntry(impMod.org.value)); buf.writeInt(addStringCPEntry(impMod.name.value)); buf.writeInt(addStringCPEntry(impMod.version.value)); }); } /** * Write the type definitions. Only the container will be written, to avoid * cyclic dependencies with global vars. * * @param buf ByteBuf * @param typeWriter Type writer * @param insWriter Instruction writer * @param birTypeDefList Type definitions list */ private void writeTypeDefs(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRTypeDefinition> birTypeDefList) { buf.writeInt(birTypeDefList.size()); birTypeDefList.forEach(typeDef -> writeType(buf, typeWriter, insWriter, typeDef)); } /** * Write the body of the type definitions. 
* * @param buf ByteBuf * @param typeWriter Type writer * @param birTypeDefList Type definitions list */ private void writeTypeDefBodies(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRTypeDefinition> birTypeDefList) { List<BIRTypeDefinition> filtered = birTypeDefList.stream().filter(t -> t.type.tag == TypeTags.OBJECT || t.type.tag == TypeTags.RECORD).collect(Collectors.toList()); filtered.forEach(typeDef -> { writeFunctions(buf, typeWriter, insWriter, typeDef.attachedFuncs); writeReferencedTypes(buf, typeDef.referencedTypes); }); } private void writeReferencedTypes(ByteBuf buf, List<BType> referencedTypes) { buf.writeInt(referencedTypes.size()); referencedTypes.forEach(type -> writeType(buf, type)); } private void writeGlobalVars(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRGlobalVariableDcl> birGlobalVars) { buf.writeInt(birGlobalVars.size()); for (BIRGlobalVariableDcl birGlobalVar : birGlobalVars) { buf.writeByte(birGlobalVar.kind.getValue()); buf.writeInt(addStringCPEntry(birGlobalVar.name.value)); buf.writeInt(birGlobalVar.flags); typeWriter.writeMarkdownDocAttachment(buf, birGlobalVar.markdownDocAttachment); writeType(buf, birGlobalVar.type); } } private void writeType(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, BIRTypeDefinition typeDef) { insWriter.writePosition(typeDef.pos); buf.writeInt(addStringCPEntry(typeDef.name.value)); buf.writeInt(typeDef.flags); buf.writeByte(typeDef.isLabel ? 1 : 0); typeWriter.writeMarkdownDocAttachment(buf, typeDef.markdownDocAttachment); writeType(buf, typeDef.type); } private void writeFunctions(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRNode.BIRFunction> birFunctionList) { buf.writeInt(birFunctionList.size()); birFunctionList.forEach(func -> writeFunction(buf, typeWriter, insWriter, func)); } private void writeFunction(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, BIRNode.BIRFunction birFunction) { insWriter.writePosition(birFunction.pos); buf.writeInt(addStringCPEntry(birFunction.name.value)); buf.writeInt(addStringCPEntry(birFunction.workerName.value)); buf.writeInt(birFunction.flags); writeType(buf, birFunction.type); writeAnnotAttachments(buf, insWriter, birFunction.annotAttachments); buf.writeInt(birFunction.requiredParams.size()); for (BIRParameter parameter : birFunction.requiredParams) { buf.writeInt(addStringCPEntry(parameter.name.value)); buf.writeInt(parameter.flags); } boolean restParamExist = birFunction.restParam != null; buf.writeBoolean(restParamExist); if (restParamExist) { buf.writeInt(addStringCPEntry(birFunction.restParam.name.value)); } boolean hasReceiverType = birFunction.receiver != null; buf.writeBoolean(hasReceiverType); if (hasReceiverType) { buf.writeByte(birFunction.receiver.kind.getValue()); writeType(buf, birFunction.receiver.type); buf.writeInt(addStringCPEntry(birFunction.receiver.name.value)); } writeTaintTable(buf, birFunction.taintTable); typeWriter.writeMarkdownDocAttachment(buf, birFunction.markdownDocAttachment); ByteBuf birbuf = Unpooled.buffer(); ByteBuf scopebuf = Unpooled.buffer(); BIRInstructionWriter funcInsWriter = new BIRInstructionWriter(birbuf, scopebuf, cp, this); birbuf.writeInt(birFunction.argsCount); birbuf.writeBoolean(birFunction.returnVariable != null); if (birFunction.returnVariable != null) { birbuf.writeByte(birFunction.returnVariable.kind.getValue()); writeType(birbuf, birFunction.returnVariable.type); 
birbuf.writeInt(addStringCPEntry(birFunction.returnVariable.name.value)); } birbuf.writeInt(birFunction.parameters.size()); for (BIRNode.BIRFunctionParameter param : birFunction.parameters.keySet()) { birbuf.writeByte(param.kind.getValue()); writeType(birbuf, param.type); birbuf.writeInt(addStringCPEntry(param.name.value)); if (param.kind.equals(VarKind.ARG)) { birbuf.writeInt(addStringCPEntry(param.metaVarName != null ? param.metaVarName : "")); } birbuf.writeBoolean(param.hasDefaultExpr); } birbuf.writeInt(birFunction.localVars.size()); for (BIRNode.BIRVariableDcl localVar : birFunction.localVars) { birbuf.writeByte(localVar.kind.getValue()); writeType(birbuf, localVar.type); birbuf.writeInt(addStringCPEntry(localVar.name.value)); if (localVar.kind.equals(VarKind.ARG)) { birbuf.writeInt(addStringCPEntry(localVar.metaVarName != null ? localVar.metaVarName : "")); } if (localVar.kind.equals(VarKind.LOCAL)) { birbuf.writeInt(addStringCPEntry(localVar.metaVarName != null ? localVar.metaVarName : "")); birbuf.writeInt(addStringCPEntry(localVar.endBB != null ? localVar.endBB.id.value : "")); birbuf.writeInt(addStringCPEntry(localVar.startBB != null ? localVar.startBB.id.value : "")); birbuf.writeInt(localVar.insOffset); } } birbuf.writeBoolean(!birFunction.parameters.isEmpty()); birFunction.parameters.values().forEach(funcInsWriter::writeBBs); funcInsWriter.writeBBs(birFunction.basicBlocks); funcInsWriter.writeErrorTable(birFunction.errorTable); birbuf.writeInt(birFunction.workerChannels.length); for (BIRNode.ChannelDetails details : birFunction.workerChannels) { birbuf.writeInt(addStringCPEntry(details.name)); birbuf.writeBoolean(details.channelInSameStrand); birbuf.writeBoolean(details.send); } writeScopes(buf, scopebuf, funcInsWriter.getScopeCount()); int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeScopes(ByteBuf buf, ByteBuf scopebuf, int scopeCount) { int length = scopebuf.nioBuffer().limit(); buf.writeLong(length + 4); buf.writeInt(scopeCount); buf.writeBytes(scopebuf.nioBuffer().array(), 0, length); } private void writeAnnotations(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRNode.BIRAnnotation> birAnnotationList) { buf.writeInt(birAnnotationList.size()); birAnnotationList.forEach(annotation -> writeAnnotation(buf, typeWriter, annotation)); } private void writeAnnotation(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRAnnotation birAnnotation) { buf.writeInt(addStringCPEntry(birAnnotation.name.value)); buf.writeInt(birAnnotation.flags); buf.writeInt(birAnnotation.attachPoints.size()); for (AttachPoint attachPoint : birAnnotation.attachPoints) { buf.writeInt(addStringCPEntry(attachPoint.point.getValue())); buf.writeBoolean(attachPoint.source); } writeType(buf, birAnnotation.annotationType); typeWriter.writeMarkdownDocAttachment(buf, birAnnotation.markdownDocAttachment); } private void writeConstants(ByteBuf buf, List<BIRNode.BIRConstant> birConstList) { BIRTypeWriter constTypeWriter = new BIRTypeWriter(buf, cp); buf.writeInt(birConstList.size()); birConstList.forEach(constant -> writeConstant(buf, constTypeWriter, constant)); } private void writeConstant(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRConstant birConstant) { buf.writeInt(addStringCPEntry(birConstant.name.value)); buf.writeInt(birConstant.flags); typeWriter.writeMarkdownDocAttachment(buf, birConstant.markdownDocAttachment); writeType(buf, birConstant.type); ByteBuf birbuf = Unpooled.buffer(); writeConstValue(birbuf, 
birConstant.constValue); int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeConstValue(ByteBuf buf, ConstValue constValue) { writeType(buf, constValue.type); switch (constValue.type.tag) { case TypeTags.INT: case TypeTags.SIGNED32_INT: case TypeTags.SIGNED16_INT: case TypeTags.SIGNED8_INT: case TypeTags.UNSIGNED32_INT: case TypeTags.UNSIGNED16_INT: case TypeTags.UNSIGNED8_INT: buf.writeInt(addIntCPEntry((Long) constValue.value)); break; case TypeTags.BYTE: int byteValue = ((Number) constValue.value).intValue(); buf.writeInt(addByteCPEntry(byteValue)); break; case TypeTags.FLOAT: double doubleVal = constValue.value instanceof String ? Double.parseDouble((String) constValue.value) : (Double) constValue.value; buf.writeInt(addFloatCPEntry(doubleVal)); break; case TypeTags.STRING: case TypeTags.CHAR_STRING: case TypeTags.DECIMAL: buf.writeInt(addStringCPEntry((String) constValue.value)); break; case TypeTags.BOOLEAN: buf.writeByte((Boolean) constValue.value ? 1 : 0); break; case TypeTags.NIL: break; case TypeTags.MAP: Map<String, ConstValue> mapConstVal = (Map<String, ConstValue>) constValue.value; buf.writeInt(mapConstVal.size()); mapConstVal.forEach((key, value) -> { buf.writeInt(addStringCPEntry(key)); writeConstValue(buf, value); }); break; default: throw new UnsupportedOperationException( "finite type value is not supported for type: " + constValue.type); } } private int addIntCPEntry(long value) { return cp.addCPEntry(new IntegerCPEntry(value)); } private int addFloatCPEntry(double value) { return cp.addCPEntry(new FloatCPEntry(value)); } private int addStringCPEntry(String value) { return cp.addCPEntry(new StringCPEntry(value)); } private int addByteCPEntry(int value) { return cp.addCPEntry(new ByteCPEntry(value)); } private void writeType(ByteBuf buf, BType type) { buf.writeInt(cp.addShapeCPEntry(type)); } void writeAnnotAttachments(ByteBuf buff, BIRInstructionWriter insWriter, List<BIRAnnotationAttachment> annotAttachments) { ByteBuf annotBuf = Unpooled.buffer(); annotBuf.writeInt(annotAttachments.size()); for (BIRAnnotationAttachment annotAttachment : annotAttachments) { writeAnnotAttachment(annotBuf, insWriter, annotAttachment); } int length = annotBuf.nioBuffer().limit(); buff.writeLong(length); buff.writeBytes(annotBuf.nioBuffer().array(), 0, length); } private void writeAnnotAttachment(ByteBuf annotBuf, BIRInstructionWriter insWriter, BIRAnnotationAttachment annotAttachment) { annotBuf.writeInt(insWriter.addPkgCPEntry(annotAttachment.packageID)); insWriter.writePosition(annotBuf, annotAttachment.pos); annotBuf.writeInt(addStringCPEntry(annotAttachment.annotTagRef.value)); writeAnnotAttachValues(annotBuf, annotAttachment.annotValues); } private void writeAnnotAttachValues(ByteBuf annotBuf, List<BIRAnnotationValue> annotValues) { annotBuf.writeInt(annotValues.size()); for (BIRAnnotationValue annotValue : annotValues) { writeAnnotAttachValue(annotBuf, annotValue); } } private void writeAnnotAttachValue(ByteBuf annotBuf, BIRAnnotationValue annotValue) { if (annotValue.type.tag == TypeTags.ARRAY) { writeType(annotBuf, annotValue.type); BIRAnnotationArrayValue annotArrayValue = (BIRAnnotationArrayValue) annotValue; annotBuf.writeInt(annotArrayValue.annotArrayValue.length); for (BIRAnnotationValue annotValueEntry : annotArrayValue.annotArrayValue) { writeAnnotAttachValue(annotBuf, annotValueEntry); } } else if (annotValue.type.tag == TypeTags.RECORD || annotValue.type.tag == TypeTags.MAP) { 
writeType(annotBuf, annotValue.type); BIRAnnotationRecordValue annotRecValue = (BIRAnnotationRecordValue) annotValue; annotBuf.writeInt(annotRecValue.annotValueEntryMap.size()); for (Map.Entry<String, BIRAnnotationValue> annotValueEntry : annotRecValue.annotValueEntryMap.entrySet()) { annotBuf.writeInt(addStringCPEntry(annotValueEntry.getKey())); writeAnnotAttachValue(annotBuf, annotValueEntry.getValue()); } } else { BIRAnnotationLiteralValue annotLiteralValue = (BIRAnnotationLiteralValue) annotValue; writeConstValue(annotBuf, new ConstValue(annotLiteralValue.value, annotLiteralValue.type)); } } }
class BIRBinaryWriter { private final ConstantPool cp = new ConstantPool(); private final BIRNode.BIRPackage birPackage; public BIRBinaryWriter(BIRNode.BIRPackage birPackage) { this.birPackage = birPackage; } public byte[] serialize() { ByteBuf birbuf = Unpooled.buffer(); BIRTypeWriter typeWriter = new BIRTypeWriter(birbuf, cp); birbuf.writeInt(BIRWriterUtils.addPkgCPEntry(this.birPackage, this.cp)); writeImportModuleDecls(birbuf, birPackage.importModules); writeConstants(birbuf, birPackage.constants); writeTypeDefs(birbuf, typeWriter, birPackage.typeDefs); writeGlobalVars(birbuf, typeWriter, birPackage.globalVars); writeTypeDefBodies(birbuf, typeWriter, birPackage.typeDefs); writeFunctions(birbuf, typeWriter, birPackage.functions); writeAnnotations(birbuf, typeWriter, birPackage.annotations); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (DataOutputStream dataOut = new DataOutputStream(baos)) { dataOut.write(cp.serialize()); dataOut.write(birbuf.nioBuffer().array(), 0, birbuf.nioBuffer().limit()); return baos.toByteArray(); } catch (IOException e) { throw new BLangCompilerException("failed to serialize the bir", e); } } private void writeImportModuleDecls(ByteBuf buf, List<BIRNode.BIRImportModule> birImpModList) { buf.writeInt(birImpModList.size()); birImpModList.forEach(impMod -> { buf.writeInt(addStringCPEntry(impMod.org.value)); buf.writeInt(addStringCPEntry(impMod.name.value)); buf.writeInt(addStringCPEntry(impMod.version.value)); }); } /** * Write the type definitions. Only the container will be written, to avoid * cyclic dependencies with global vars. * * @param buf ByteBuf * @param typeWriter Type writer * @param birTypeDefList Type definitions list */ private void writeTypeDefs(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRTypeDefinition> birTypeDefList) { buf.writeInt(birTypeDefList.size()); birTypeDefList.forEach(typeDef -> writeType(buf, typeWriter, typeDef)); } /** * Write the body of the type definitions. * * @param buf ByteBuf * @param typeWriter Type writer * @param birTypeDefList Type definitions list */ private void writeTypeDefBodies(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRTypeDefinition> birTypeDefList) { List<BIRTypeDefinition> filtered = birTypeDefList.stream().filter(t -> t.type.tag == TypeTags.OBJECT || t.type.tag == TypeTags.RECORD).collect(Collectors.toList()); buf.writeInt(filtered.size()); filtered.forEach(typeDef -> { writeFunctions(buf, typeWriter, typeDef.attachedFuncs); writeReferencedTypes(buf, typeDef.referencedTypes); }); } private void writeReferencedTypes(ByteBuf buf, List<BType> referencedTypes) { buf.writeInt(referencedTypes.size()); referencedTypes.forEach(type -> writeType(buf, type)); } private void writeGlobalVars(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRGlobalVariableDcl> birGlobalVars) { buf.writeInt(birGlobalVars.size()); for (BIRGlobalVariableDcl birGlobalVar : birGlobalVars) { buf.writeByte(birGlobalVar.kind.getValue()); buf.writeInt(addStringCPEntry(birGlobalVar.name.value)); buf.writeInt(birGlobalVar.flags); buf.writeByte(birGlobalVar.origin.value()); typeWriter.writeMarkdownDocAttachment(buf, birGlobalVar.markdownDocAttachment); writeType(buf, birGlobalVar.type); } } private void writeType(ByteBuf buf, BIRTypeWriter typeWriter, BIRTypeDefinition typeDef) { writePosition(buf, typeDef.pos); buf.writeInt(addStringCPEntry(typeDef.name.value)); buf.writeInt(typeDef.flags); buf.writeByte(typeDef.isLabel ? 
1 : 0); buf.writeByte(typeDef.origin.value()); typeWriter.writeMarkdownDocAttachment(buf, typeDef.markdownDocAttachment); writeType(buf, typeDef.type); } private void writeFunctions(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRNode.BIRFunction> birFunctionList) { buf.writeInt(birFunctionList.size()); birFunctionList.forEach(func -> writeFunction(buf, typeWriter, func)); } private void writeFunction(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRFunction birFunction) { writePosition(buf, birFunction.pos); buf.writeInt(addStringCPEntry(birFunction.name.value)); buf.writeInt(addStringCPEntry(birFunction.workerName.value)); buf.writeInt(birFunction.flags); buf.writeByte(birFunction.origin.value()); writeType(buf, birFunction.type); writeAnnotAttachments(buf, birFunction.annotAttachments); buf.writeInt(birFunction.requiredParams.size()); for (BIRParameter parameter : birFunction.requiredParams) { buf.writeInt(addStringCPEntry(parameter.name.value)); buf.writeInt(parameter.flags); } boolean restParamExist = birFunction.restParam != null; buf.writeBoolean(restParamExist); if (restParamExist) { buf.writeInt(addStringCPEntry(birFunction.restParam.name.value)); } boolean hasReceiverType = birFunction.receiver != null; buf.writeBoolean(hasReceiverType); if (hasReceiverType) { buf.writeByte(birFunction.receiver.kind.getValue()); writeType(buf, birFunction.receiver.type); buf.writeInt(addStringCPEntry(birFunction.receiver.name.value)); } writeTaintTable(buf, birFunction.taintTable); typeWriter.writeMarkdownDocAttachment(buf, birFunction.markdownDocAttachment); writeFunctionsGlobalVarDependency(buf, birFunction); ByteBuf birbuf = Unpooled.buffer(); ByteBuf scopebuf = Unpooled.buffer(); BIRInstructionWriter funcInsWriter = new BIRInstructionWriter(birbuf, scopebuf, cp, this); birbuf.writeInt(birFunction.argsCount); birbuf.writeBoolean(birFunction.returnVariable != null); if (birFunction.returnVariable != null) { birbuf.writeByte(birFunction.returnVariable.kind.getValue()); writeType(birbuf, birFunction.returnVariable.type); birbuf.writeInt(addStringCPEntry(birFunction.returnVariable.name.value)); } birbuf.writeInt(birFunction.parameters.size()); for (BIRNode.BIRFunctionParameter param : birFunction.parameters.keySet()) { birbuf.writeByte(param.kind.getValue()); writeType(birbuf, param.type); birbuf.writeInt(addStringCPEntry(param.name.value)); if (param.kind.equals(VarKind.ARG)) { birbuf.writeInt(addStringCPEntry(param.metaVarName != null ? param.metaVarName : "")); } birbuf.writeBoolean(param.hasDefaultExpr); } birbuf.writeInt(birFunction.localVars.size()); for (BIRNode.BIRVariableDcl localVar : birFunction.localVars) { birbuf.writeByte(localVar.kind.getValue()); writeType(birbuf, localVar.type); birbuf.writeInt(addStringCPEntry(localVar.name.value)); if (localVar.kind.equals(VarKind.ARG)) { birbuf.writeInt(addStringCPEntry(localVar.metaVarName != null ? localVar.metaVarName : "")); } if (localVar.kind.equals(VarKind.LOCAL)) { birbuf.writeInt(addStringCPEntry(localVar.metaVarName != null ? localVar.metaVarName : "")); birbuf.writeInt(addStringCPEntry(localVar.endBB != null ? localVar.endBB.id.value : "")); birbuf.writeInt(addStringCPEntry(localVar.startBB != null ? 
localVar.startBB.id.value : "")); birbuf.writeInt(localVar.insOffset); } } birFunction.parameters.values().forEach(funcInsWriter::writeBBs); funcInsWriter.writeBBs(birFunction.basicBlocks); funcInsWriter.writeErrorTable(birFunction.errorTable); birbuf.writeInt(birFunction.workerChannels.length); for (BIRNode.ChannelDetails details : birFunction.workerChannels) { birbuf.writeInt(addStringCPEntry(details.name)); birbuf.writeBoolean(details.channelInSameStrand); birbuf.writeBoolean(details.send); } writeScopes(buf, scopebuf, funcInsWriter.getScopeCount()); int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeFunctionsGlobalVarDependency(ByteBuf buf, BIRNode.BIRFunction birFunction) { buf.writeInt(birFunction.dependentGlobalVars.size()); for (BIRNode.BIRVariableDcl var : birFunction.dependentGlobalVars) { buf.writeInt(addStringCPEntry(var.name.value)); } } private void writeScopes(ByteBuf buf, ByteBuf scopebuf, int scopeCount) { int length = scopebuf.nioBuffer().limit(); buf.writeLong(length + 4); buf.writeInt(scopeCount); buf.writeBytes(scopebuf.nioBuffer().array(), 0, length); } private void writeAnnotations(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRNode.BIRAnnotation> birAnnotationList) { buf.writeInt(birAnnotationList.size()); birAnnotationList.forEach(annotation -> writeAnnotation(buf, typeWriter, annotation)); } private void writeAnnotation(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRAnnotation birAnnotation) { buf.writeInt(addStringCPEntry(birAnnotation.name.value)); buf.writeInt(birAnnotation.flags); buf.writeByte(birAnnotation.origin.value()); writePosition(buf, birAnnotation.pos); buf.writeInt(birAnnotation.attachPoints.size()); for (AttachPoint attachPoint : birAnnotation.attachPoints) { buf.writeInt(addStringCPEntry(attachPoint.point.getValue())); buf.writeBoolean(attachPoint.source); } writeType(buf, birAnnotation.annotationType); typeWriter.writeMarkdownDocAttachment(buf, birAnnotation.markdownDocAttachment); } private void writeConstants(ByteBuf buf, List<BIRNode.BIRConstant> birConstList) { BIRTypeWriter constTypeWriter = new BIRTypeWriter(buf, cp); buf.writeInt(birConstList.size()); birConstList.forEach(constant -> writeConstant(buf, constTypeWriter, constant)); } private void writeConstant(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRConstant birConstant) { buf.writeInt(addStringCPEntry(birConstant.name.value)); buf.writeInt(birConstant.flags); buf.writeByte(birConstant.origin.value()); writePosition(buf, birConstant.pos); typeWriter.writeMarkdownDocAttachment(buf, birConstant.markdownDocAttachment); writeType(buf, birConstant.type); ByteBuf birbuf = Unpooled.buffer(); writeConstValue(birbuf, birConstant.constValue); int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeConstValue(ByteBuf buf, ConstValue constValue) { writeType(buf, constValue.type); switch (constValue.type.tag) { case TypeTags.INT: case TypeTags.SIGNED32_INT: case TypeTags.SIGNED16_INT: case TypeTags.SIGNED8_INT: case TypeTags.UNSIGNED32_INT: case TypeTags.UNSIGNED16_INT: case TypeTags.UNSIGNED8_INT: buf.writeInt(addIntCPEntry((Long) constValue.value)); break; case TypeTags.BYTE: int byteValue = ((Number) constValue.value).intValue(); buf.writeInt(addByteCPEntry(byteValue)); break; case TypeTags.FLOAT: double doubleVal = constValue.value instanceof String ? 
Double.parseDouble((String) constValue.value) : (Double) constValue.value; buf.writeInt(addFloatCPEntry(doubleVal)); break; case TypeTags.STRING: case TypeTags.CHAR_STRING: case TypeTags.DECIMAL: buf.writeInt(addStringCPEntry((String) constValue.value)); break; case TypeTags.BOOLEAN: buf.writeBoolean((Boolean) constValue.value); break; case TypeTags.NIL: break; case TypeTags.MAP: Map<String, ConstValue> mapConstVal = (Map<String, ConstValue>) constValue.value; buf.writeInt(mapConstVal.size()); mapConstVal.forEach((key, value) -> { buf.writeInt(addStringCPEntry(key)); writeConstValue(buf, value); }); break; default: throw new UnsupportedOperationException( "finite type value is not supported for type: " + constValue.type); } } private int addIntCPEntry(long value) { return cp.addCPEntry(new IntegerCPEntry(value)); } private int addFloatCPEntry(double value) { return cp.addCPEntry(new FloatCPEntry(value)); } private int addStringCPEntry(String value) { return cp.addCPEntry(new StringCPEntry(value)); } private int addByteCPEntry(int value) { return cp.addCPEntry(new ByteCPEntry(value)); } private void writeType(ByteBuf buf, BType type) { buf.writeInt(cp.addShapeCPEntry(type)); } void writeAnnotAttachments(ByteBuf buff, List<BIRAnnotationAttachment> annotAttachments) { ByteBuf annotBuf = Unpooled.buffer(); annotBuf.writeInt(annotAttachments.size()); for (BIRAnnotationAttachment annotAttachment : annotAttachments) { writeAnnotAttachment(annotBuf, annotAttachment); } int length = annotBuf.nioBuffer().limit(); buff.writeLong(length); buff.writeBytes(annotBuf.nioBuffer().array(), 0, length); } private void writeAnnotAttachment(ByteBuf annotBuf, BIRAnnotationAttachment annotAttachment) { annotBuf.writeInt(BIRWriterUtils.addPkgCPEntry(annotAttachment.packageID, this.cp)); writePosition(annotBuf, annotAttachment.pos); annotBuf.writeInt(addStringCPEntry(annotAttachment.annotTagRef.value)); writeAnnotAttachValues(annotBuf, annotAttachment.annotValues); } private void writeAnnotAttachValues(ByteBuf annotBuf, List<BIRAnnotationValue> annotValues) { annotBuf.writeInt(annotValues.size()); for (BIRAnnotationValue annotValue : annotValues) { writeAnnotAttachValue(annotBuf, annotValue); } } private void writeAnnotAttachValue(ByteBuf annotBuf, BIRAnnotationValue annotValue) { if (annotValue.type.tag == TypeTags.ARRAY) { writeType(annotBuf, annotValue.type); BIRAnnotationArrayValue annotArrayValue = (BIRAnnotationArrayValue) annotValue; annotBuf.writeInt(annotArrayValue.annotArrayValue.length); for (BIRAnnotationValue annotValueEntry : annotArrayValue.annotArrayValue) { writeAnnotAttachValue(annotBuf, annotValueEntry); } } else if (annotValue.type.tag == TypeTags.RECORD || annotValue.type.tag == TypeTags.MAP) { writeType(annotBuf, annotValue.type); BIRAnnotationRecordValue annotRecValue = (BIRAnnotationRecordValue) annotValue; annotBuf.writeInt(annotRecValue.annotValueEntryMap.size()); for (Map.Entry<String, BIRAnnotationValue> annotValueEntry : annotRecValue.annotValueEntryMap.entrySet()) { annotBuf.writeInt(addStringCPEntry(annotValueEntry.getKey())); writeAnnotAttachValue(annotBuf, annotValueEntry.getValue()); } } else { BIRAnnotationLiteralValue annotLiteralValue = (BIRAnnotationLiteralValue) annotValue; writeConstValue(annotBuf, new ConstValue(annotLiteralValue.value, annotLiteralValue.type)); } } private void writePosition(ByteBuf buf, DiagnosticPos pos) { BIRWriterUtils.writePosition(pos, buf, this.cp); } }
The fact that this method is only used in dev mode is not immediately obvious to me... So, maybe: ```suggestion VertxConfiguration vertxConfiguration = ConfigUtils.configBuilder(true, LaunchMode.current()) ```
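To make the reviewer's point concrete, here is a hedged sketch (not the merged code) contrasting the hard-coded mode with the suggested `LaunchMode.current()`, which derives the mode from the running application instead of asserting it. It reuses the `ConfigUtils.configBuilder(boolean, LaunchMode)`, `withMapping`, and `getConfigMapping` calls visible in the method below; the wrapper class and helper name are hypothetical.

```java
import io.quarkus.runtime.LaunchMode;
import io.quarkus.runtime.configuration.ConfigUtils;
import io.quarkus.vertx.core.runtime.config.VertxConfiguration;
import io.smallrye.config.SmallRyeConfig;

// Hypothetical illustration only; not part of VertxHttpRecorder.
class VertxConfigSketch {
    static VertxConfiguration buildVertxConfig() {
        // LaunchMode.current() states the dev-mode assumption at the call site,
        // whereas a literal LaunchMode.DEVELOPMENT silently relies on this path
        // only ever being reached in dev mode.
        SmallRyeConfig config = ConfigUtils.configBuilder(true, LaunchMode.current())
                .withMapping(VertxConfiguration.class)
                .build();
        return config.getConfigMapping(VertxConfiguration.class);
    }
}
```

Note that the rewrite that eventually landed (see the updated method below) sidesteps the question entirely: it builds the mapping from `ConfigUtils.emptyConfigBuilder().addDiscoveredSources()` and never picks a launch mode at all.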
public static void startServerAfterFailedStart() { if (closeTask != null) { final Handler<RoutingContext> prevHotReplacementHandler = hotReplacementHandler; shutDownDevMode(); hotReplacementHandler = prevHotReplacementHandler; } Supplier<Vertx> supplier = VertxCoreRecorder.getVertx(); Vertx vertx; if (supplier == null) { VertxConfiguration vertxConfiguration = ConfigUtils.configBuilder(true, LaunchMode.DEVELOPMENT) .withMapping(VertxConfiguration.class) .build() .getConfigMapping(VertxConfiguration.class); vertx = VertxCoreRecorder.recoverFailedStart(vertxConfiguration).get(); } else { vertx = supplier.get(); } try { HttpBuildTimeConfig buildConfig = new HttpBuildTimeConfig(); ConfigInstantiator.handleObject(buildConfig); ManagementInterfaceBuildTimeConfig managementBuildTimeConfig = new ManagementInterfaceBuildTimeConfig(); ConfigInstantiator.handleObject(managementBuildTimeConfig); HttpConfiguration config = new HttpConfiguration(); ConfigInstantiator.handleObject(config); ManagementInterfaceConfiguration managementConfig = new ManagementInterfaceConfiguration(); ConfigInstantiator.handleObject(managementConfig); if (config.host == null) { config.host = "localhost"; } Router router = Router.router(vertx); if (hotReplacementHandler != null) { router.route().order(Integer.MIN_VALUE).blockingHandler(hotReplacementHandler); } Handler<HttpServerRequest> root = router; LiveReloadConfig liveReloadConfig = new LiveReloadConfig(); ConfigInstantiator.handleObject(liveReloadConfig); if (liveReloadConfig.password.isPresent() && hotReplacementContext.getDevModeType() == DevModeType.REMOTE_SERVER_SIDE) { root = remoteSyncHandler = new RemoteSyncHandler(liveReloadConfig.password.get(), root, hotReplacementContext); } rootHandler = root; doServerStart(vertx, buildConfig, managementBuildTimeConfig, null, config, managementConfig, LaunchMode.DEVELOPMENT, new Supplier<Integer>() { @Override public Integer get() { return ProcessorInfo.availableProcessors(); } }, null, false); } catch (Exception e) { throw new RuntimeException(e); } }
VertxConfiguration vertxConfiguration = ConfigUtils.configBuilder(true, LaunchMode.DEVELOPMENT)
public static void startServerAfterFailedStart() { if (closeTask != null) { final Handler<RoutingContext> prevHotReplacementHandler = hotReplacementHandler; shutDownDevMode(); hotReplacementHandler = prevHotReplacementHandler; } Supplier<Vertx> supplier = VertxCoreRecorder.getVertx(); Vertx vertx; if (supplier == null) { VertxConfiguration vertxConfiguration = ConfigUtils.emptyConfigBuilder() .addDiscoveredSources() .withMapping(VertxConfiguration.class) .build().getConfigMapping(VertxConfiguration.class); vertx = VertxCoreRecorder.recoverFailedStart(vertxConfiguration).get(); } else { vertx = supplier.get(); } try { HttpBuildTimeConfig buildConfig = new HttpBuildTimeConfig(); ConfigInstantiator.handleObject(buildConfig); ManagementInterfaceBuildTimeConfig managementBuildTimeConfig = new ManagementInterfaceBuildTimeConfig(); ConfigInstantiator.handleObject(managementBuildTimeConfig); HttpConfiguration config = new HttpConfiguration(); ConfigInstantiator.handleObject(config); ManagementInterfaceConfiguration managementConfig = new ManagementInterfaceConfiguration(); ConfigInstantiator.handleObject(managementConfig); if (config.host == null) { config.host = "localhost"; } Router router = Router.router(vertx); if (hotReplacementHandler != null) { router.route().order(Integer.MIN_VALUE).blockingHandler(hotReplacementHandler); } Handler<HttpServerRequest> root = router; LiveReloadConfig liveReloadConfig = new LiveReloadConfig(); ConfigInstantiator.handleObject(liveReloadConfig); if (liveReloadConfig.password.isPresent() && hotReplacementContext.getDevModeType() == DevModeType.REMOTE_SERVER_SIDE) { root = remoteSyncHandler = new RemoteSyncHandler(liveReloadConfig.password.get(), root, hotReplacementContext); } rootHandler = root; doServerStart(vertx, buildConfig, managementBuildTimeConfig, null, config, managementConfig, LaunchMode.DEVELOPMENT, new Supplier<Integer>() { @Override public Integer get() { return ProcessorInfo.availableProcessors(); } }, null, false); } catch (Exception e) { throw new RuntimeException(e); } }
class VertxHttpRecorder { /** * The key that the request start time is stored under */ public static final String REQUEST_START_TIME = "io.quarkus.request-start-time"; public static final String MAX_REQUEST_SIZE_KEY = "io.quarkus.max-request-size"; private static final String DISABLE_WEBSOCKETS_PROP_NAME = "vertx.disableWebsockets"; /** * Order mark for route with priority over the default route (add an offset from this mark) **/ public static final int BEFORE_DEFAULT_ROUTE_ORDER_MARK = 1_000; /** * Default route order (i.e. Static Resources, Servlet) **/ public static final int DEFAULT_ROUTE_ORDER = 10_000; /** * Order mark for route without priority over the default route (add an offset from this mark) **/ public static final int AFTER_DEFAULT_ROUTE_ORDER_MARK = 20_000; private static final Logger LOGGER = Logger.getLogger(VertxHttpRecorder.class.getName()); private static volatile Handler<RoutingContext> hotReplacementHandler; private static volatile HotReplacementContext hotReplacementContext; private static volatile RemoteSyncHandler remoteSyncHandler; private static volatile Runnable closeTask; static volatile Handler<HttpServerRequest> rootHandler; private static volatile Handler<RoutingContext> nonApplicationRedirectHandler; private static volatile int actualHttpPort = -1; private static volatile int actualHttpsPort = -1; private static volatile int actualManagementPort = -1; public static final String GET = "GET"; private static final Handler<HttpServerRequest> ACTUAL_ROOT = new Handler<HttpServerRequest>() { /** JVM system property that disables URI validation, don't use this in production. */ private static final String DISABLE_URI_VALIDATION_PROP_NAME = "vertx.disableURIValidation"; /** * Disables HTTP headers validation, so we can save some processing and save some allocations. 
*/ private final boolean DISABLE_URI_VALIDATION = Boolean.getBoolean(DISABLE_URI_VALIDATION_PROP_NAME); @Override public void handle(HttpServerRequest httpServerRequest) { if (!uriValid(httpServerRequest)) { httpServerRequest.response().setStatusCode(400).end(); return; } httpServerRequest.pause(); Handler<HttpServerRequest> rh = VertxHttpRecorder.rootHandler; if (rh != null) { rh.handle(httpServerRequest); } else { httpServerRequest.resume(); httpServerRequest.response().setStatusCode(503).end(); } } private boolean uriValid(HttpServerRequest httpServerRequest) { if (DISABLE_URI_VALIDATION) { return true; } try { new URI(httpServerRequest.uri()); return true; } catch (URISyntaxException e) { return false; } } }; private static HttpServerOptions httpMainSslServerOptions; private static HttpServerOptions httpMainServerOptions; private static HttpServerOptions httpMainDomainSocketOptions; private static HttpServerOptions httpManagementServerOptions; final HttpBuildTimeConfig httpBuildTimeConfig; final ManagementInterfaceBuildTimeConfig managementBuildTimeConfig; final RuntimeValue<HttpConfiguration> httpConfiguration; final RuntimeValue<ManagementInterfaceConfiguration> managementConfiguration; private static volatile Handler<HttpServerRequest> managementRouter; public VertxHttpRecorder(HttpBuildTimeConfig httpBuildTimeConfig, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, RuntimeValue<HttpConfiguration> httpConfiguration, RuntimeValue<ManagementInterfaceConfiguration> managementConfiguration) { this.httpBuildTimeConfig = httpBuildTimeConfig; this.httpConfiguration = httpConfiguration; this.managementBuildTimeConfig = managementBuildTimeConfig; this.managementConfiguration = managementConfiguration; } public static void setHotReplacement(Handler<RoutingContext> handler, HotReplacementContext hrc) { hotReplacementHandler = handler; hotReplacementContext = hrc; } public static void shutDownDevMode() { if (closeTask != null) { closeTask.run(); closeTask = null; } rootHandler = null; hotReplacementHandler = null; } public RuntimeValue<Router> initializeRouter(final Supplier<Vertx> vertxRuntimeValue) { Vertx vertx = vertxRuntimeValue.get(); Router router = Router.router(vertx); return new RuntimeValue<>(router); } public RuntimeValue<io.vertx.mutiny.ext.web.Router> createMutinyRouter(final RuntimeValue<Router> router) { return new RuntimeValue<>(new io.vertx.mutiny.ext.web.Router(router.getValue())); } public void startServer(Supplier<Vertx> vertx, ShutdownContext shutdown, LaunchMode launchMode, boolean startVirtual, boolean startSocket, Supplier<Integer> ioThreads, List<String> websocketSubProtocols, boolean auxiliaryApplication, boolean disableWebSockets) throws IOException { if (disableWebSockets && !System.getProperties().containsKey(DISABLE_WEBSOCKETS_PROP_NAME)) { System.setProperty(DISABLE_WEBSOCKETS_PROP_NAME, "true"); } if (startVirtual) { initializeVirtual(vertx.get()); shutdown.addShutdownTask(() -> { try { virtualBootstrapChannel.channel().close().sync(); } catch (InterruptedException e) { LOGGER.warn("Unable to close virtualBootstrapChannel"); } finally { virtualBootstrapChannel = null; virtualBootstrap = null; } }); } HttpConfiguration httpConfiguration = this.httpConfiguration.getValue(); ManagementInterfaceConfiguration managementConfig = this.managementConfiguration == null ? 
null : this.managementConfiguration.getValue(); if (startSocket && (httpConfiguration.hostEnabled || httpConfiguration.domainSocketEnabled || managementConfig.hostEnabled || managementConfig.domainSocketEnabled)) { if (closeTask == null) { doServerStart(vertx.get(), httpBuildTimeConfig, managementBuildTimeConfig, managementRouter, httpConfiguration, managementConfig, launchMode, ioThreads, websocketSubProtocols, auxiliaryApplication); if (launchMode != LaunchMode.DEVELOPMENT) { shutdown.addShutdownTask(closeTask); } else { shutdown.addShutdownTask(new Runnable() { @Override public void run() { VertxHttpHotReplacementSetup.handleDevModeRestart(); } }); } } } } public void mountFrameworkRouter(RuntimeValue<Router> mainRouter, RuntimeValue<Router> frameworkRouter, String frameworkPath) { mainRouter.getValue().mountSubRouter(frameworkPath, frameworkRouter.getValue()); } public void finalizeRouter(BeanContainer container, Consumer<Route> defaultRouteHandler, List<Filter> filterList, List<Filter> managementInterfaceFilterList, Supplier<Vertx> vertx, LiveReloadConfig liveReloadConfig, Optional<RuntimeValue<Router>> mainRouterRuntimeValue, RuntimeValue<Router> httpRouterRuntimeValue, RuntimeValue<io.vertx.mutiny.ext.web.Router> mutinyRouter, RuntimeValue<Router> frameworkRouter, RuntimeValue<Router> managementRouter, String rootPath, String nonRootPath, LaunchMode launchMode, boolean requireBodyHandler, Handler<RoutingContext> bodyHandler, GracefulShutdownFilter gracefulShutdownFilter, ShutdownConfig shutdownConfig, Executor executor) { HttpConfiguration httpConfiguration = this.httpConfiguration.getValue(); Router httpRouteRouter = httpRouterRuntimeValue.getValue(); Event<Object> event = Arc.container().beanManager().getEvent(); Filters filters = new Filters(); event.select(Filters.class).fire(filters); filterList.addAll(filters.getFilters()); event.select(Router.class, Default.Literal.INSTANCE).fire(httpRouteRouter); event.select(io.vertx.mutiny.ext.web.Router.class).fire(mutinyRouter.getValue()); for (Filter filter : filterList) { if (filter.getHandler() != null) { if (filter.isFailureHandler()) { httpRouteRouter.route().order(-1 * filter.getPriority()).failureHandler(filter.getHandler()); } else { httpRouteRouter.route().order(-1 * filter.getPriority()).handler(filter.getHandler()); } } } if (defaultRouteHandler != null) { defaultRouteHandler.accept(httpRouteRouter.route().order(DEFAULT_ROUTE_ORDER)); } applyCompression(httpBuildTimeConfig.enableCompression, httpRouteRouter); httpRouteRouter.route().last().failureHandler( new QuarkusErrorHandler(launchMode.isDevOrTest(), httpConfiguration.unhandledErrorContentTypeDefault)); if (requireBodyHandler) { httpRouteRouter.route().order(Integer.MIN_VALUE + 1).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext routingContext) { routingContext.request().resume(); bodyHandler.handle(routingContext); } }); } HttpServerCommonHandlers.enforceMaxBodySize(httpConfiguration.limits, httpRouteRouter); var filtersInConfig = httpConfiguration.filter; HttpServerCommonHandlers.applyFilters(filtersInConfig, httpRouteRouter); HttpServerCommonHandlers.applyHeaders(httpConfiguration.header, httpRouteRouter); Handler<HttpServerRequest> root; if (rootPath.equals("/")) { if (hotReplacementHandler != null) { ClassLoader currentCl = Thread.currentThread().getContextClassLoader(); httpRouteRouter.route().order(Integer.MIN_VALUE).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { 
Thread.currentThread().setContextClassLoader(currentCl); hotReplacementHandler.handle(event); } }); } root = httpRouteRouter; } else { Router mainRouter = mainRouterRuntimeValue.isPresent() ? mainRouterRuntimeValue.get().getValue() : Router.router(vertx.get()); mainRouter.mountSubRouter(rootPath, httpRouteRouter); if (hotReplacementHandler != null) { ClassLoader currentCl = Thread.currentThread().getContextClassLoader(); mainRouter.route().order(Integer.MIN_VALUE).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { Thread.currentThread().setContextClassLoader(currentCl); hotReplacementHandler.handle(event); } }); } root = mainRouter; } warnIfProxyAddressForwardingAllowedWithMultipleHeaders(httpConfiguration.proxy); root = HttpServerCommonHandlers.applyProxy(httpConfiguration.proxy, root, vertx); boolean quarkusWrapperNeeded = false; if (shutdownConfig.isShutdownTimeoutSet()) { gracefulShutdownFilter.next(root); root = gracefulShutdownFilter; quarkusWrapperNeeded = true; } AccessLogConfig accessLog = httpConfiguration.accessLog; if (accessLog.enabled) { AccessLogReceiver receiver; if (accessLog.logToFile) { File outputDir = accessLog.logDirectory.isPresent() ? new File(accessLog.logDirectory.get()) : new File(""); receiver = new DefaultAccessLogReceiver(executor, outputDir, accessLog.baseFileName, accessLog.logSuffix, accessLog.rotate); } else { receiver = new JBossLoggingAccessLogReceiver(accessLog.category); } AccessLogHandler handler = new AccessLogHandler(receiver, accessLog.pattern, getClass().getClassLoader(), accessLog.excludePattern); if (rootPath.equals("/") || nonRootPath.equals("/")) { mainRouterRuntimeValue.orElse(httpRouterRuntimeValue).getValue().route().order(Integer.MIN_VALUE) .handler(handler); } else if (nonRootPath.startsWith(rootPath)) { httpRouteRouter.route().order(Integer.MIN_VALUE).handler(handler); } else if (rootPath.startsWith(nonRootPath)) { frameworkRouter.getValue().route().order(Integer.MIN_VALUE).handler(handler); } else { httpRouteRouter.route().order(Integer.MIN_VALUE).handler(handler); frameworkRouter.getValue().route().order(Integer.MIN_VALUE).handler(handler); } quarkusWrapperNeeded = true; } BiConsumer<Cookie, HttpServerRequest> cookieFunction = null; if (!httpConfiguration.sameSiteCookie.isEmpty()) { cookieFunction = processSameSiteConfig(httpConfiguration.sameSiteCookie); quarkusWrapperNeeded = true; } BiConsumer<Cookie, HttpServerRequest> cookieConsumer = cookieFunction; if (quarkusWrapperNeeded) { Handler<HttpServerRequest> old = root; root = new Handler<HttpServerRequest>() { @Override public void handle(HttpServerRequest event) { old.handle(new QuarkusRequestWrapper(event, cookieConsumer)); } }; } Handler<HttpServerRequest> delegate = root; root = HttpServerCommonHandlers.enforceDuplicatedContext(delegate); if (httpConfiguration.recordRequestStartTime) { httpRouteRouter.route().order(Integer.MIN_VALUE).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { event.put(REQUEST_START_TIME, System.nanoTime()); event.next(); } }); } if (launchMode == LaunchMode.DEVELOPMENT && liveReloadConfig.password.isPresent() && hotReplacementContext.getDevModeType() == DevModeType.REMOTE_SERVER_SIDE) { root = remoteSyncHandler = new RemoteSyncHandler(liveReloadConfig.password.get(), root, hotReplacementContext); } rootHandler = root; if (managementRouter != null && managementRouter.getValue() != null) { var mr = managementRouter.getValue(); mr.route().last().failureHandler( new 
QuarkusErrorHandler(launchMode.isDevOrTest(), httpConfiguration.unhandledErrorContentTypeDefault)); mr.route().order(Integer.MIN_VALUE).handler(createBodyHandlerForManagementInterface()); mr.route().order(Integer.MIN_VALUE).handler(CorsHandler.create().addOrigin("*")); HttpServerCommonHandlers.applyFilters(managementConfiguration.getValue().filter, mr); for (Filter filter : managementInterfaceFilterList) { mr.route().order(filter.getPriority()).handler(filter.getHandler()); } HttpServerCommonHandlers.applyHeaders(managementConfiguration.getValue().header, mr); HttpServerCommonHandlers.enforceMaxBodySize(managementConfiguration.getValue().limits, mr); applyCompression(managementBuildTimeConfig.enableCompression, mr); Handler<HttpServerRequest> handler = HttpServerCommonHandlers.enforceDuplicatedContext(mr); handler = HttpServerCommonHandlers.applyProxy(managementConfiguration.getValue().proxy, handler, vertx); event.select(ManagementInterface.class).fire(new ManagementInterfaceImpl(managementRouter.getValue())); VertxHttpRecorder.managementRouter = handler; } } private void applyCompression(boolean enableCompression, Router httpRouteRouter) { if (enableCompression) { httpRouteRouter.route().order(0).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext ctx) { ctx.response().putHeader(HttpHeaders.CONTENT_ENCODING, HttpHeaders.IDENTITY); ctx.next(); } }); } } private void warnIfProxyAddressForwardingAllowedWithMultipleHeaders(ProxyConfig proxyConfig) { boolean proxyAddressForwardingActivated = proxyConfig.proxyAddressForwarding; boolean forwardedActivated = proxyConfig.allowForwarded; boolean xForwardedActivated = proxyConfig.allowXForwarded.orElse(!forwardedActivated); if (proxyAddressForwardingActivated && forwardedActivated && xForwardedActivated) { LOGGER.warn( "The X-Forwarded-* and Forwarded headers will be considered when determining the proxy address. " + "This configuration can cause a security issue as clients can forge requests and send a " + "forwarded header that is not overwritten by the proxy. 
" + "Please consider use one of these headers just to forward the proxy address in requests."); } } private static CompletableFuture<HttpServer> initializeManagementInterfaceWithDomainSocket(Vertx vertx, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler<HttpServerRequest> managementRouter, ManagementInterfaceConfiguration managementConfig, List<String> websocketSubProtocols) { CompletableFuture<HttpServer> managementInterfaceDomainSocketFuture = new CompletableFuture<>(); if (!managementBuildTimeConfig.enabled || managementRouter == null || managementConfig == null) { managementInterfaceDomainSocketFuture.complete(null); return managementInterfaceDomainSocketFuture; } HttpServerOptions domainSocketOptionsForManagement = createDomainSocketOptionsForManagementInterface( managementBuildTimeConfig, managementConfig, websocketSubProtocols); if (domainSocketOptionsForManagement != null) { vertx.createHttpServer(domainSocketOptionsForManagement) .requestHandler(managementRouter) .listen(ar -> { if (ar.failed()) { managementInterfaceDomainSocketFuture.completeExceptionally( new IllegalStateException( "Unable to start the management interface on the " + domainSocketOptionsForManagement.getHost() + " domain socket", ar.cause())); } else { managementInterfaceDomainSocketFuture.complete(ar.result()); } }); } else { managementInterfaceDomainSocketFuture.complete(null); } return managementInterfaceDomainSocketFuture; } private static CompletableFuture<HttpServer> initializeManagementInterface(Vertx vertx, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler<HttpServerRequest> managementRouter, ManagementInterfaceConfiguration managementConfig, LaunchMode launchMode, List<String> websocketSubProtocols) throws IOException { httpManagementServerOptions = null; CompletableFuture<HttpServer> managementInterfaceFuture = new CompletableFuture<>(); if (!managementBuildTimeConfig.enabled || managementRouter == null || managementConfig == null) { managementInterfaceFuture.complete(null); return managementInterfaceFuture; } HttpServerOptions httpServerOptionsForManagement = createHttpServerOptionsForManagementInterface( managementBuildTimeConfig, managementConfig, launchMode, websocketSubProtocols); httpManagementServerOptions = HttpServerOptionsUtils.createSslOptionsForManagementInterface( managementBuildTimeConfig, managementConfig, launchMode, websocketSubProtocols); if (httpManagementServerOptions != null && httpManagementServerOptions.getKeyCertOptions() == null) { httpManagementServerOptions = httpServerOptionsForManagement; } if (httpManagementServerOptions != null) { vertx.createHttpServer(httpManagementServerOptions) .requestHandler(managementRouter) .listen(ar -> { if (ar.failed()) { managementInterfaceFuture.completeExceptionally( new IllegalStateException("Unable to start the management interface", ar.cause())); } else { actualManagementPort = ar.result().actualPort(); managementInterfaceFuture.complete(ar.result()); } }); } else { managementInterfaceFuture.complete(null); } return managementInterfaceFuture; } private static CompletableFuture<String> initializeMainHttpServer(Vertx vertx, HttpBuildTimeConfig httpBuildTimeConfig, HttpConfiguration httpConfiguration, LaunchMode launchMode, Supplier<Integer> eventLoops, List<String> websocketSubProtocols) throws IOException { if (!httpConfiguration.hostEnabled && !httpConfiguration.domainSocketEnabled) { return CompletableFuture.completedFuture(null); } httpMainServerOptions = 
createHttpServerOptions(httpBuildTimeConfig, httpConfiguration, launchMode, websocketSubProtocols); httpMainDomainSocketOptions = createDomainSocketOptions(httpBuildTimeConfig, httpConfiguration, websocketSubProtocols); HttpServerOptions tmpSslConfig = HttpServerOptionsUtils.createSslOptions(httpBuildTimeConfig, httpConfiguration, launchMode, websocketSubProtocols); if (Arc.container() != null) { List<InstanceHandle<HttpServerOptionsCustomizer>> instances = Arc.container() .listAll(HttpServerOptionsCustomizer.class); for (InstanceHandle<HttpServerOptionsCustomizer> instance : instances) { HttpServerOptionsCustomizer customizer = instance.get(); if (httpMainServerOptions != null) { customizer.customizeHttpServer(httpMainServerOptions); } if (tmpSslConfig != null) { customizer.customizeHttpsServer(tmpSslConfig); } if (httpMainDomainSocketOptions != null) { customizer.customizeDomainSocketServer(httpMainDomainSocketOptions); } } } if (tmpSslConfig != null && tmpSslConfig.getKeyCertOptions() == null) { tmpSslConfig = null; } httpMainSslServerOptions = tmpSslConfig; if (httpConfiguration.insecureRequests != HttpConfiguration.InsecureRequests.ENABLED && httpMainSslServerOptions == null) { throw new IllegalStateException("Cannot set quarkus.http.redirect-insecure-requests without enabling SSL."); } int eventLoopCount = eventLoops.get(); final int ioThreads; if (httpConfiguration.ioThreads.isPresent()) { ioThreads = Math.min(httpConfiguration.ioThreads.getAsInt(), eventLoopCount); } else if (launchMode.isDevOrTest()) { ioThreads = Math.min(2, eventLoopCount); } else { ioThreads = eventLoopCount; } CompletableFuture<String> futureResult = new CompletableFuture<>(); AtomicInteger connectionCount = new AtomicInteger(); vertx.deployVerticle(new Supplier<Verticle>() { @Override public Verticle get() { return new WebDeploymentVerticle(httpMainServerOptions, httpMainSslServerOptions, httpMainDomainSocketOptions, launchMode, httpConfiguration.insecureRequests, httpConfiguration, connectionCount); } }, new DeploymentOptions().setInstances(ioThreads), new Handler<AsyncResult<String>>() { @Override public void handle(AsyncResult<String> event) { if (event.failed()) { Throwable effectiveCause = event.cause(); if (effectiveCause instanceof BindException) { List<Integer> portsUsed = Collections.emptyList(); if ((httpMainSslServerOptions == null) && (httpMainServerOptions != null)) { portsUsed = List.of(httpMainServerOptions.getPort()); } else if ((httpConfiguration.insecureRequests == InsecureRequests.DISABLED) && (httpMainSslServerOptions != null)) { portsUsed = List.of(httpMainSslServerOptions.getPort()); } else if ((httpMainSslServerOptions != null) && (httpConfiguration.insecureRequests == InsecureRequests.ENABLED) && (httpMainServerOptions != null)) { portsUsed = List.of(httpMainServerOptions.getPort(), httpMainSslServerOptions.getPort()); } effectiveCause = new QuarkusBindException((BindException) effectiveCause, portsUsed); } futureResult.completeExceptionally(effectiveCause); } else { futureResult.complete(event.result()); } } }); return futureResult; } private static void doServerStart(Vertx vertx, HttpBuildTimeConfig httpBuildTimeConfig, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler<HttpServerRequest> managementRouter, HttpConfiguration httpConfiguration, ManagementInterfaceConfiguration managementConfig, LaunchMode launchMode, Supplier<Integer> eventLoops, List<String> websocketSubProtocols, boolean auxiliaryApplication) throws IOException { var mainServerFuture = 
initializeMainHttpServer(vertx, httpBuildTimeConfig, httpConfiguration, launchMode, eventLoops, websocketSubProtocols); var managementInterfaceFuture = initializeManagementInterface(vertx, managementBuildTimeConfig, managementRouter, managementConfig, launchMode, websocketSubProtocols); var managementInterfaceDomainSocketFuture = initializeManagementInterfaceWithDomainSocket(vertx, managementBuildTimeConfig, managementRouter, managementConfig, websocketSubProtocols); try { String deploymentIdIfAny = mainServerFuture.get(); HttpServer tmpManagementServer = null; HttpServer tmpManagementServerUsingDomainSocket = null; if (managementRouter != null) { tmpManagementServer = managementInterfaceFuture.get(); tmpManagementServerUsingDomainSocket = managementInterfaceDomainSocketFuture.get(); } HttpServer managementServer = tmpManagementServer; HttpServer managementServerDomainSocket = tmpManagementServerUsingDomainSocket; if (deploymentIdIfAny != null) { VertxCoreRecorder.setWebDeploymentId(deploymentIdIfAny); } closeTask = new Runnable() { @Override public synchronized void run() { if (closeTask == this) { boolean isVertxClose = ((VertxInternal) vertx).closeFuture().future().isComplete(); int count = 0; if (deploymentIdIfAny != null && vertx.deploymentIDs().contains(deploymentIdIfAny)) { count++; } if (managementServer != null && !isVertxClose) { count++; } if (managementServerDomainSocket != null && !isVertxClose) { count++; } CountDownLatch latch = new CountDownLatch(count); var handler = new Handler<AsyncResult<Void>>() { @Override public void handle(AsyncResult<Void> event) { latch.countDown(); } }; if (deploymentIdIfAny != null) { try { vertx.undeploy(deploymentIdIfAny, handler); } catch (Exception e) { if (e instanceof RejectedExecutionException) { LOGGER.debug("Failed to undeploy deployment because a task was rejected (due to shutdown)", e); } else { LOGGER.warn("Failed to undeploy deployment", e); } } } try { if (managementServer != null && !isVertxClose) { managementServer.close(handler); } if (managementServerDomainSocket != null && !isVertxClose) { managementServerDomainSocket.close(handler); } } catch (Exception e) { LOGGER.warn("Unable to shutdown the management interface quietly", e); } try { latch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } } closeTask = null; if (remoteSyncHandler != null) { remoteSyncHandler.close(); remoteSyncHandler = null; } } }; } catch (InterruptedException | ExecutionException e) { throw new RuntimeException("Unable to start HTTP server", e); } setHttpServerTiming(httpConfiguration.insecureRequests, httpMainServerOptions, httpMainSslServerOptions, httpMainDomainSocketOptions, auxiliaryApplication, httpManagementServerOptions); } private static void setHttpServerTiming(InsecureRequests insecureRequests, HttpServerOptions httpServerOptions, HttpServerOptions sslConfig, HttpServerOptions domainSocketOptions, boolean auxiliaryApplication, HttpServerOptions managementConfig) { StringBuilder serverListeningMessage = new StringBuilder("Listening on: "); int socketCount = 0; if (httpServerOptions != null && !InsecureRequests.DISABLED.equals(insecureRequests)) { serverListeningMessage.append(String.format( "http://%s:%s", httpServerOptions.getHost(), httpServerOptions.getPort())); socketCount++; } if (sslConfig != null) { if (socketCount > 0) { serverListeningMessage.append(" and "); } serverListeningMessage.append(String.format("https://%s:%s", sslConfig.getHost(), sslConfig.getPort())); socketCount++; } if (domainSocketOptions != null) { if (socketCount > 0) { serverListeningMessage.append(" and "); } 
serverListeningMessage.append(String.format("unix:%s", domainSocketOptions.getHost())); } if (managementConfig != null) { serverListeningMessage.append( String.format(". Management interface listening on http%s://%s:%s", managementConfig.isSsl() ? "s" : "", managementConfig.getHost(), managementConfig.getPort())); } Timing.setHttpServer(serverListeningMessage.toString(), auxiliaryApplication); } private static HttpServerOptions createHttpServerOptions( HttpBuildTimeConfig buildTimeConfig, HttpConfiguration httpConfiguration, LaunchMode launchMode, List<String> websocketSubProtocols) { if (!httpConfiguration.hostEnabled) { return null; } HttpServerOptions options = new HttpServerOptions(); int port = httpConfiguration.determinePort(launchMode); options.setPort(port == 0 ? -1 : port); HttpServerOptionsUtils.applyCommonOptions(options, buildTimeConfig, httpConfiguration, websocketSubProtocols); return options; } private static HttpServerOptions createHttpServerOptionsForManagementInterface( ManagementInterfaceBuildTimeConfig buildTimeConfig, ManagementInterfaceConfiguration httpConfiguration, LaunchMode launchMode, List<String> websocketSubProtocols) { if (!httpConfiguration.hostEnabled) { return null; } HttpServerOptions options = new HttpServerOptions(); int port = httpConfiguration.determinePort(launchMode); options.setPort(port == 0 ? -1 : port); HttpServerOptionsUtils.applyCommonOptionsForManagementInterface(options, buildTimeConfig, httpConfiguration, websocketSubProtocols); return options; } private static HttpServerOptions createDomainSocketOptions( HttpBuildTimeConfig buildTimeConfig, HttpConfiguration httpConfiguration, List<String> websocketSubProtocols) { if (!httpConfiguration.domainSocketEnabled) { return null; } HttpServerOptions options = new HttpServerOptions(); HttpServerOptionsUtils.applyCommonOptions(options, buildTimeConfig, httpConfiguration, websocketSubProtocols); options.setHost(httpConfiguration.domainSocket); File file = new File(httpConfiguration.domainSocket); if (!file.getParentFile().canWrite()) { LOGGER.warnf( "Unable to write in the domain socket directory (`%s`). Binding to the socket is likely going to fail.", httpConfiguration.domainSocket); } return options; } private static HttpServerOptions createDomainSocketOptionsForManagementInterface( ManagementInterfaceBuildTimeConfig buildTimeConfig, ManagementInterfaceConfiguration httpConfiguration, List<String> websocketSubProtocols) { if (!httpConfiguration.domainSocketEnabled) { return null; } HttpServerOptions options = new HttpServerOptions(); HttpServerOptionsUtils.applyCommonOptionsForManagementInterface(options, buildTimeConfig, httpConfiguration, websocketSubProtocols); options.setHost(httpConfiguration.domainSocket); File file = new File(httpConfiguration.domainSocket); if (!file.getParentFile().canWrite()) { LOGGER.warnf( "Unable to write in the domain socket directory (`%s`). 
Binding to the socket is likely going to fail.", httpConfiguration.domainSocket); } return options; } public void addRoute(RuntimeValue<Router> router, Function<Router, Route> route, Handler<RoutingContext> handler, HandlerType type) { Route vr = route.apply(router.getValue()); if (type == HandlerType.BLOCKING) { vr.blockingHandler(handler, false); } else if (type == HandlerType.FAILURE) { vr.failureHandler(handler); } else { vr.handler(handler); } } public void setNonApplicationRedirectHandler(String nonApplicationPath, String rootPath) { nonApplicationRedirectHandler = new Handler<RoutingContext>() { @Override public void handle(RoutingContext context) { String absoluteURI = context.request().path(); String target = absoluteURI.substring(rootPath.length()); String redirectTo = nonApplicationPath + target; String query = context.request().query(); if (query != null && !query.isEmpty()) { redirectTo += '?' + query; } context.response() .setStatusCode(HttpResponseStatus.MOVED_PERMANENTLY.code()) .putHeader(HttpHeaderNames.LOCATION, redirectTo) .end(); } }; } public Handler<RoutingContext> getNonApplicationRedirectHandler() { return nonApplicationRedirectHandler; } public GracefulShutdownFilter createGracefulShutdownHandler() { return new GracefulShutdownFilter(); } private static class WebDeploymentVerticle extends AbstractVerticle implements Resource { private HttpServer httpServer; private HttpServer httpsServer; private HttpServer domainSocketServer; private final HttpServerOptions httpOptions; private final HttpServerOptions httpsOptions; private final HttpServerOptions domainSocketOptions; private final LaunchMode launchMode; private volatile boolean clearHttpProperty = false; private volatile boolean clearHttpsProperty = false; private volatile PortSystemProperties portSystemProperties; private final HttpConfiguration.InsecureRequests insecureRequests; private final HttpConfiguration quarkusConfig; private final AtomicInteger connectionCount; public WebDeploymentVerticle(HttpServerOptions httpOptions, HttpServerOptions httpsOptions, HttpServerOptions domainSocketOptions, LaunchMode launchMode, InsecureRequests insecureRequests, HttpConfiguration quarkusConfig, AtomicInteger connectionCount) { this.httpOptions = httpOptions; this.httpsOptions = httpsOptions; this.launchMode = launchMode; this.domainSocketOptions = domainSocketOptions; this.insecureRequests = insecureRequests; this.quarkusConfig = quarkusConfig; this.connectionCount = connectionCount; org.crac.Core.getGlobalContext().register(this); } @Override public void start(Promise<Void> startFuture) { final AtomicInteger remainingCount = new AtomicInteger(0); boolean httpServerEnabled = httpOptions != null && insecureRequests != HttpConfiguration.InsecureRequests.DISABLED; if (httpServerEnabled) { remainingCount.incrementAndGet(); } if (httpsOptions != null) { remainingCount.incrementAndGet(); } if (domainSocketOptions != null) { remainingCount.incrementAndGet(); } if (remainingCount.get() == 0) { startFuture .fail(new IllegalArgumentException("Must configure at least one of http, https or unix domain socket")); } if (httpServerEnabled) { httpServer = vertx.createHttpServer(httpOptions); if (insecureRequests == HttpConfiguration.InsecureRequests.ENABLED) { httpServer.requestHandler(ACTUAL_ROOT); } else { httpServer.requestHandler(new Handler<HttpServerRequest>() { @Override public void handle(HttpServerRequest req) { try { String host = req.getHeader(HttpHeaderNames.HOST); if (host == null) { 
req.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end(); } else { int includedPort = host.indexOf(":"); if (includedPort != -1) { host = host.substring(0, includedPort); } req.response() .setStatusCode(301) .putHeader("Location", "https: .end(); } } catch (Exception e) { req.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()).end(); } } }); } setupTcpHttpServer(httpServer, httpOptions, false, startFuture, remainingCount, connectionCount); } if (domainSocketOptions != null) { domainSocketServer = vertx.createHttpServer(domainSocketOptions); domainSocketServer.requestHandler(ACTUAL_ROOT); setupUnixDomainSocketHttpServer(domainSocketServer, domainSocketOptions, startFuture, remainingCount); } if (httpsOptions != null) { httpsServer = vertx.createHttpServer(httpsOptions); httpsServer.requestHandler(ACTUAL_ROOT); setupTcpHttpServer(httpsServer, httpsOptions, true, startFuture, remainingCount, connectionCount); } } private void setupUnixDomainSocketHttpServer(HttpServer httpServer, HttpServerOptions options, Promise<Void> startFuture, AtomicInteger remainingCount) { httpServer.listen(SocketAddress.domainSocketAddress(options.getHost()), event -> { if (event.succeeded()) { if (remainingCount.decrementAndGet() == 0) { startFuture.complete(null); } } else { if (event.cause() != null && event.cause().getMessage() != null && event.cause().getMessage().contains("Permission denied")) { startFuture.fail(new IllegalStateException( String.format( "Unable to bind to Unix domain socket (%s) as the application does not have the permission to write in the directory.", domainSocketOptions.getHost()))); } else if (event.cause() instanceof IllegalArgumentException) { startFuture.fail(new IllegalArgumentException( String.format( "Unable to bind to Unix domain socket. Consider adding the 'io.netty:%s' dependency. See the Quarkus Vert.x reference guide for more details.", Utils.isLinux() ? 
"netty-transport-native-epoll" : "netty-transport-native-kqueue"))); } else { startFuture.fail(event.cause()); } } }); } private void setupTcpHttpServer(HttpServer httpServer, HttpServerOptions options, boolean https, Promise<Void> startFuture, AtomicInteger remainingCount, AtomicInteger currentConnectionCount) { if (quarkusConfig.limits.maxConnections.isPresent() && quarkusConfig.limits.maxConnections.getAsInt() > 0) { final int maxConnections = quarkusConfig.limits.maxConnections.getAsInt(); httpServer.connectionHandler(new Handler<HttpConnection>() { @Override public void handle(HttpConnection event) { int current; do { current = currentConnectionCount.get(); if (current == maxConnections) { LOGGER.debug("Rejecting connection as there are too many active connections"); event.close(); return; } } while (!currentConnectionCount.compareAndSet(current, current + 1)); event.closeHandler(new Handler<Void>() { @Override public void handle(Void event) { LOGGER.debug("Connection closed"); connectionCount.decrementAndGet(); } }); } }); } httpServer.listen(options.getPort(), options.getHost(), new Handler<>() { @Override public void handle(AsyncResult<HttpServer> event) { if (event.cause() != null) { startFuture.fail(event.cause()); } else { int actualPort = event.result().actualPort(); if (https) { actualHttpsPort = actualPort; } else { actualHttpPort = actualPort; } if (actualPort != options.getPort()) { String schema; if (https) { clearHttpsProperty = true; schema = "https"; } else { clearHttpProperty = true; actualHttpPort = actualPort; schema = "http"; } portSystemProperties = new PortSystemProperties(); portSystemProperties.set(schema, actualPort, launchMode); } if (remainingCount.decrementAndGet() == 0) { startFuture.complete(null); } } } }); } @Override public void stop(Promise<Void> stopFuture) { final AtomicInteger remainingCount = new AtomicInteger(0); if (httpServer != null) { remainingCount.incrementAndGet(); } if (httpsServer != null) { remainingCount.incrementAndGet(); } if (domainSocketServer != null) { remainingCount.incrementAndGet(); } Handler<AsyncResult<Void>> handleClose = event -> { if (remainingCount.decrementAndGet() == 0) { if (clearHttpProperty) { String portPropertyName = launchMode == LaunchMode.TEST ? "quarkus.http.test-port" : "quarkus.http.port"; System.clearProperty(portPropertyName); if (launchMode.isDevOrTest()) { System.clearProperty(propertyWithProfilePrefix(portPropertyName)); } } if (clearHttpsProperty) { String portPropertyName = launchMode == LaunchMode.TEST ? "quarkus.http.test-ssl-port" : "quarkus.http.ssl-port"; System.clearProperty(portPropertyName); if (launchMode.isDevOrTest()) { System.clearProperty(propertyWithProfilePrefix(portPropertyName)); } } if (portSystemProperties != null) { portSystemProperties.restore(); } stopFuture.complete(); } }; if (httpServer != null) { httpServer.close(handleClose); } if (httpsServer != null) { httpsServer.close(handleClose); } if (domainSocketServer != null) { domainSocketServer.close(handleClose); } } private String propertyWithProfilePrefix(String portPropertyName) { return "%" + launchMode.getDefaultProfile() + "." + portPropertyName; } @Override public void beforeCheckpoint(org.crac.Context<? extends Resource> context) throws Exception { Promise<Void> p = Promise.promise(); stop(p); CountDownLatch latch = new CountDownLatch(1); p.future().onComplete(event -> latch.countDown()); latch.await(); } @Override public void afterRestore(org.crac.Context<? 
extends Resource> context) throws Exception { Promise<Void> p = Promise.promise(); start(p); CountDownLatch latch = new CountDownLatch(1); p.future().onComplete(event -> latch.countDown()); latch.await(); } } protected static ServerBootstrap virtualBootstrap; protected static ChannelFuture virtualBootstrapChannel; public static VirtualAddress VIRTUAL_HTTP = new VirtualAddress("netty-virtual-http"); private static void initializeVirtual(Vertx vertxRuntime) { if (virtualBootstrap != null) { return; } VertxInternal vertx = (VertxInternal) vertxRuntime; virtualBootstrap = new ServerBootstrap(); virtualBootstrap.group(vertx.getEventLoopGroup()) .channel(VirtualServerChannel.class) .handler(new ChannelInitializer<VirtualServerChannel>() { @Override public void initChannel(VirtualServerChannel ch) throws Exception { } }) .childHandler(new ChannelInitializer<VirtualChannel>() { @Override public void initChannel(VirtualChannel ch) throws Exception { EventLoopContext context = vertx.createEventLoopContext(); VertxHandler<Http1xServerConnection> handler = VertxHandler.create(chctx -> { Http1xServerConnection conn = new Http1xServerConnection( () -> { ContextInternal internal = (ContextInternal) VertxContext .getOrCreateDuplicatedContext(context); setContextSafe(internal, true); return internal; }, null, new HttpServerOptions(), chctx, context, "localhost", null); conn.handler(ACTUAL_ROOT); return conn; }); ch.pipeline().addLast("handler", handler); } }); try { virtualBootstrapChannel = virtualBootstrap.bind(VIRTUAL_HTTP).sync(); } catch (InterruptedException e) { throw new RuntimeException("failed to bind virtual http"); } } public static Handler<HttpServerRequest> getRootHandler() { return ACTUAL_ROOT; } /** * used in the live reload handler to make sure the application has not been changed by another source (e.g. 
reactive * messaging) */ public static Object getCurrentApplicationState() { return rootHandler; } private static Handler<RoutingContext> configureAndGetBody(Optional<MemorySize> maxBodySize, BodyConfig bodyConfig) { BodyHandler bodyHandler = BodyHandler.create(); if (maxBodySize.isPresent()) { bodyHandler.setBodyLimit(maxBodySize.get().asLongValue()); } bodyHandler.setHandleFileUploads(bodyConfig.handleFileUploads); bodyHandler.setUploadsDirectory(bodyConfig.uploadsDirectory); bodyHandler.setDeleteUploadedFilesOnEnd(bodyConfig.deleteUploadedFilesOnEnd); bodyHandler.setMergeFormAttributes(bodyConfig.mergeFormAttributes); bodyHandler.setPreallocateBodyBuffer(bodyConfig.preallocateBodyBuffer); return new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { if (!Context.isOnEventLoopThread()) { ((ConnectionBase) event.request().connection()).channel().eventLoop().execute(new Runnable() { @Override public void run() { try { if (!event.request().isEnded()) { event.request().resume(); if (CAN_HAVE_BODY.contains(event.request().method())) { bodyHandler.handle(event); } else { event.next(); } } else { event.next(); } } catch (Throwable t) { event.fail(t); } } }); } else { if (!event.request().isEnded()) { event.request().resume(); } if (CAN_HAVE_BODY.contains(event.request().method())) { bodyHandler.handle(event); } else { event.next(); } } } }; } public Handler<RoutingContext> createBodyHandler() { Optional<MemorySize> maxBodySize = httpConfiguration.getValue().limits.maxBodySize; return configureAndGetBody(maxBodySize, httpConfiguration.getValue().body); } public Handler<RoutingContext> createBodyHandlerForManagementInterface() { Optional<MemorySize> maxBodySize = managementConfiguration.getValue().limits.maxBodySize; return configureAndGetBody(maxBodySize, managementConfiguration.getValue().body); } private static final List<HttpMethod> CAN_HAVE_BODY = Arrays.asList(HttpMethod.POST, HttpMethod.PUT, HttpMethod.PATCH, HttpMethod.DELETE); private BiConsumer<Cookie, HttpServerRequest> processSameSiteConfig(Map<String, SameSiteCookieConfig> httpConfiguration) { List<BiFunction<Cookie, HttpServerRequest, Boolean>> functions = new ArrayList<>(); BiFunction<Cookie, HttpServerRequest, Boolean> last = null; for (Map.Entry<String, SameSiteCookieConfig> entry : new TreeMap<>(httpConfiguration).entrySet()) { Pattern p = Pattern.compile(entry.getKey(), entry.getValue().caseSensitive ? 0 : Pattern.CASE_INSENSITIVE); BiFunction<Cookie, HttpServerRequest, Boolean> biFunction = new BiFunction<Cookie, HttpServerRequest, Boolean>() { @Override public Boolean apply(Cookie cookie, HttpServerRequest request) { if (p.matcher(cookie.getName()).matches()) { if (entry.getValue().value == CookieSameSite.NONE) { if (entry.getValue().enableClientChecker) { String userAgent = request.getHeader(HttpHeaders.USER_AGENT); if (userAgent != null && SameSiteNoneIncompatibleClientChecker.isSameSiteNoneIncompatible(userAgent)) { return false; } } if (entry.getValue().addSecureForNone) { cookie.setSecure(true); } } cookie.setSameSite(entry.getValue().value); return true; } return false; } }; if (entry.getKey().equals(".*")) { last = biFunction; } else { functions.add(biFunction); } } if (last != null) { functions.add(last); } return new BiConsumer<Cookie, HttpServerRequest>() { @Override public void accept(Cookie cookie, HttpServerRequest request) { for (BiFunction<Cookie, HttpServerRequest, Boolean> i : functions) { if (i.apply(cookie, request)) { return; } } } }; } }
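Note on the `processSameSiteConfig` method just above: it builds one regex-keyed matcher per configured cookie-name pattern and appends the `.*` catch-all last, so specific patterns always win over the default. A self-contained sketch of that first-match dispatch order, using illustrative `Rule`/`resolve` names rather than the real Quarkus types:

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

// Sketch: regex-keyed rules where the ".*" catch-all is appended last,
// mirroring how processSameSiteConfig orders its BiFunctions.
public class SameSiteRuleSketch {

    record Rule(Pattern pattern, String sameSite) {
        boolean matches(String cookieName) {
            return pattern.matcher(cookieName).matches();
        }
    }

    static String resolve(List<Rule> specific, Rule catchAll, String cookieName) {
        List<Rule> ordered = new ArrayList<>(specific);
        if (catchAll != null) {
            ordered.add(catchAll); // catch-all runs only if nothing else matched
        }
        for (Rule rule : ordered) {
            if (rule.matches(cookieName)) {
                return rule.sameSite();
            }
        }
        return null; // no rule matched: leave the cookie untouched
    }

    public static void main(String[] args) {
        Rule session = new Rule(Pattern.compile("session.*"), "Strict");
        Rule catchAll = new Rule(Pattern.compile(".*"), "Lax");
        System.out.println(resolve(List.of(session), catchAll, "session-id")); // Strict
        System.out.println(resolve(List.of(session), catchAll, "tracking"));   // Lax
    }
}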
class VertxHttpRecorder { /** * The key that the request start time is stored under */ public static final String REQUEST_START_TIME = "io.quarkus.request-start-time"; public static final String MAX_REQUEST_SIZE_KEY = "io.quarkus.max-request-size"; private static final String DISABLE_WEBSOCKETS_PROP_NAME = "vertx.disableWebsockets"; /** * Order mark for route with priority over the default route (add an offset from this mark) **/ public static final int BEFORE_DEFAULT_ROUTE_ORDER_MARK = 1_000; /** * Default route order (i.e. Static Resources, Servlet) **/ public static final int DEFAULT_ROUTE_ORDER = 10_000; /** * Order mark for route without priority over the default route (add an offset from this mark) **/ public static final int AFTER_DEFAULT_ROUTE_ORDER_MARK = 20_000; private static final Logger LOGGER = Logger.getLogger(VertxHttpRecorder.class.getName()); private static volatile Handler<RoutingContext> hotReplacementHandler; private static volatile HotReplacementContext hotReplacementContext; private static volatile RemoteSyncHandler remoteSyncHandler; private static volatile Runnable closeTask; static volatile Handler<HttpServerRequest> rootHandler; private static volatile Handler<RoutingContext> nonApplicationRedirectHandler; private static volatile int actualHttpPort = -1; private static volatile int actualHttpsPort = -1; private static volatile int actualManagementPort = -1; public static final String GET = "GET"; private static final Handler<HttpServerRequest> ACTUAL_ROOT = new Handler<HttpServerRequest>() { /** JVM system property that disables URI validation, don't use this in production. */ private static final String DISABLE_URI_VALIDATION_PROP_NAME = "vertx.disableURIValidation"; /** * Disables HTTP headers validation, so we can save some processing and save some allocations. 
*/ private final boolean DISABLE_URI_VALIDATION = Boolean.getBoolean(DISABLE_URI_VALIDATION_PROP_NAME); @Override public void handle(HttpServerRequest httpServerRequest) { if (!uriValid(httpServerRequest)) { httpServerRequest.response().setStatusCode(400).end(); return; } httpServerRequest.pause(); Handler<HttpServerRequest> rh = VertxHttpRecorder.rootHandler; if (rh != null) { rh.handle(httpServerRequest); } else { httpServerRequest.resume(); httpServerRequest.response().setStatusCode(503).end(); } } private boolean uriValid(HttpServerRequest httpServerRequest) { if (DISABLE_URI_VALIDATION) { return true; } try { new URI(httpServerRequest.uri()); return true; } catch (URISyntaxException e) { return false; } } }; private static HttpServerOptions httpMainSslServerOptions; private static HttpServerOptions httpMainServerOptions; private static HttpServerOptions httpMainDomainSocketOptions; private static HttpServerOptions httpManagementServerOptions; final HttpBuildTimeConfig httpBuildTimeConfig; final ManagementInterfaceBuildTimeConfig managementBuildTimeConfig; final RuntimeValue<HttpConfiguration> httpConfiguration; final RuntimeValue<ManagementInterfaceConfiguration> managementConfiguration; private static volatile Handler<HttpServerRequest> managementRouter; public VertxHttpRecorder(HttpBuildTimeConfig httpBuildTimeConfig, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, RuntimeValue<HttpConfiguration> httpConfiguration, RuntimeValue<ManagementInterfaceConfiguration> managementConfiguration) { this.httpBuildTimeConfig = httpBuildTimeConfig; this.httpConfiguration = httpConfiguration; this.managementBuildTimeConfig = managementBuildTimeConfig; this.managementConfiguration = managementConfiguration; } public static void setHotReplacement(Handler<RoutingContext> handler, HotReplacementContext hrc) { hotReplacementHandler = handler; hotReplacementContext = hrc; } public static void shutDownDevMode() { if (closeTask != null) { closeTask.run(); closeTask = null; } rootHandler = null; hotReplacementHandler = null; } public RuntimeValue<Router> initializeRouter(final Supplier<Vertx> vertxRuntimeValue) { Vertx vertx = vertxRuntimeValue.get(); Router router = Router.router(vertx); return new RuntimeValue<>(router); } public RuntimeValue<io.vertx.mutiny.ext.web.Router> createMutinyRouter(final RuntimeValue<Router> router) { return new RuntimeValue<>(new io.vertx.mutiny.ext.web.Router(router.getValue())); } public void startServer(Supplier<Vertx> vertx, ShutdownContext shutdown, LaunchMode launchMode, boolean startVirtual, boolean startSocket, Supplier<Integer> ioThreads, List<String> websocketSubProtocols, boolean auxiliaryApplication, boolean disableWebSockets) throws IOException { if (disableWebSockets && !System.getProperties().containsKey(DISABLE_WEBSOCKETS_PROP_NAME)) { System.setProperty(DISABLE_WEBSOCKETS_PROP_NAME, "true"); } if (startVirtual) { initializeVirtual(vertx.get()); shutdown.addShutdownTask(() -> { try { virtualBootstrapChannel.channel().close().sync(); } catch (InterruptedException e) { LOGGER.warn("Unable to close virtualBootstrapChannel"); } finally { virtualBootstrapChannel = null; virtualBootstrap = null; } }); } HttpConfiguration httpConfiguration = this.httpConfiguration.getValue(); ManagementInterfaceConfiguration managementConfig = this.managementConfiguration == null ? 
null : this.managementConfiguration.getValue(); if (startSocket && (httpConfiguration.hostEnabled || httpConfiguration.domainSocketEnabled || managementConfig.hostEnabled || managementConfig.domainSocketEnabled)) { if (closeTask == null) { doServerStart(vertx.get(), httpBuildTimeConfig, managementBuildTimeConfig, managementRouter, httpConfiguration, managementConfig, launchMode, ioThreads, websocketSubProtocols, auxiliaryApplication); if (launchMode != LaunchMode.DEVELOPMENT) { shutdown.addShutdownTask(closeTask); } else { shutdown.addShutdownTask(new Runnable() { @Override public void run() { VertxHttpHotReplacementSetup.handleDevModeRestart(); } }); } } } } public void mountFrameworkRouter(RuntimeValue<Router> mainRouter, RuntimeValue<Router> frameworkRouter, String frameworkPath) { mainRouter.getValue().mountSubRouter(frameworkPath, frameworkRouter.getValue()); } public void finalizeRouter(BeanContainer container, Consumer<Route> defaultRouteHandler, List<Filter> filterList, List<Filter> managementInterfaceFilterList, Supplier<Vertx> vertx, LiveReloadConfig liveReloadConfig, Optional<RuntimeValue<Router>> mainRouterRuntimeValue, RuntimeValue<Router> httpRouterRuntimeValue, RuntimeValue<io.vertx.mutiny.ext.web.Router> mutinyRouter, RuntimeValue<Router> frameworkRouter, RuntimeValue<Router> managementRouter, String rootPath, String nonRootPath, LaunchMode launchMode, boolean requireBodyHandler, Handler<RoutingContext> bodyHandler, GracefulShutdownFilter gracefulShutdownFilter, ShutdownConfig shutdownConfig, Executor executor) { HttpConfiguration httpConfiguration = this.httpConfiguration.getValue(); Router httpRouteRouter = httpRouterRuntimeValue.getValue(); Event<Object> event = Arc.container().beanManager().getEvent(); Filters filters = new Filters(); event.select(Filters.class).fire(filters); filterList.addAll(filters.getFilters()); event.select(Router.class, Default.Literal.INSTANCE).fire(httpRouteRouter); event.select(io.vertx.mutiny.ext.web.Router.class).fire(mutinyRouter.getValue()); for (Filter filter : filterList) { if (filter.getHandler() != null) { if (filter.isFailureHandler()) { httpRouteRouter.route().order(-1 * filter.getPriority()).failureHandler(filter.getHandler()); } else { httpRouteRouter.route().order(-1 * filter.getPriority()).handler(filter.getHandler()); } } } if (defaultRouteHandler != null) { defaultRouteHandler.accept(httpRouteRouter.route().order(DEFAULT_ROUTE_ORDER)); } applyCompression(httpBuildTimeConfig.enableCompression, httpRouteRouter); httpRouteRouter.route().last().failureHandler( new QuarkusErrorHandler(launchMode.isDevOrTest(), httpConfiguration.unhandledErrorContentTypeDefault)); if (requireBodyHandler) { httpRouteRouter.route().order(Integer.MIN_VALUE + 1).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext routingContext) { routingContext.request().resume(); bodyHandler.handle(routingContext); } }); } HttpServerCommonHandlers.enforceMaxBodySize(httpConfiguration.limits, httpRouteRouter); var filtersInConfig = httpConfiguration.filter; HttpServerCommonHandlers.applyFilters(filtersInConfig, httpRouteRouter); HttpServerCommonHandlers.applyHeaders(httpConfiguration.header, httpRouteRouter); Handler<HttpServerRequest> root; if (rootPath.equals("/")) { if (hotReplacementHandler != null) { ClassLoader currentCl = Thread.currentThread().getContextClassLoader(); httpRouteRouter.route().order(Integer.MIN_VALUE).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { 
Thread.currentThread().setContextClassLoader(currentCl); hotReplacementHandler.handle(event); } }); } root = httpRouteRouter; } else { Router mainRouter = mainRouterRuntimeValue.isPresent() ? mainRouterRuntimeValue.get().getValue() : Router.router(vertx.get()); mainRouter.mountSubRouter(rootPath, httpRouteRouter); if (hotReplacementHandler != null) { ClassLoader currentCl = Thread.currentThread().getContextClassLoader(); mainRouter.route().order(Integer.MIN_VALUE).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { Thread.currentThread().setContextClassLoader(currentCl); hotReplacementHandler.handle(event); } }); } root = mainRouter; } warnIfProxyAddressForwardingAllowedWithMultipleHeaders(httpConfiguration.proxy); root = HttpServerCommonHandlers.applyProxy(httpConfiguration.proxy, root, vertx); boolean quarkusWrapperNeeded = false; if (shutdownConfig.isShutdownTimeoutSet()) { gracefulShutdownFilter.next(root); root = gracefulShutdownFilter; quarkusWrapperNeeded = true; } AccessLogConfig accessLog = httpConfiguration.accessLog; if (accessLog.enabled) { AccessLogReceiver receiver; if (accessLog.logToFile) { File outputDir = accessLog.logDirectory.isPresent() ? new File(accessLog.logDirectory.get()) : new File(""); receiver = new DefaultAccessLogReceiver(executor, outputDir, accessLog.baseFileName, accessLog.logSuffix, accessLog.rotate); } else { receiver = new JBossLoggingAccessLogReceiver(accessLog.category); } AccessLogHandler handler = new AccessLogHandler(receiver, accessLog.pattern, getClass().getClassLoader(), accessLog.excludePattern); if (rootPath.equals("/") || nonRootPath.equals("/")) { mainRouterRuntimeValue.orElse(httpRouterRuntimeValue).getValue().route().order(Integer.MIN_VALUE) .handler(handler); } else if (nonRootPath.startsWith(rootPath)) { httpRouteRouter.route().order(Integer.MIN_VALUE).handler(handler); } else if (rootPath.startsWith(nonRootPath)) { frameworkRouter.getValue().route().order(Integer.MIN_VALUE).handler(handler); } else { httpRouteRouter.route().order(Integer.MIN_VALUE).handler(handler); frameworkRouter.getValue().route().order(Integer.MIN_VALUE).handler(handler); } quarkusWrapperNeeded = true; } BiConsumer<Cookie, HttpServerRequest> cookieFunction = null; if (!httpConfiguration.sameSiteCookie.isEmpty()) { cookieFunction = processSameSiteConfig(httpConfiguration.sameSiteCookie); quarkusWrapperNeeded = true; } BiConsumer<Cookie, HttpServerRequest> cookieConsumer = cookieFunction; if (quarkusWrapperNeeded) { Handler<HttpServerRequest> old = root; root = new Handler<HttpServerRequest>() { @Override public void handle(HttpServerRequest event) { old.handle(new QuarkusRequestWrapper(event, cookieConsumer)); } }; } Handler<HttpServerRequest> delegate = root; root = HttpServerCommonHandlers.enforceDuplicatedContext(delegate); if (httpConfiguration.recordRequestStartTime) { httpRouteRouter.route().order(Integer.MIN_VALUE).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { event.put(REQUEST_START_TIME, System.nanoTime()); event.next(); } }); } if (launchMode == LaunchMode.DEVELOPMENT && liveReloadConfig.password.isPresent() && hotReplacementContext.getDevModeType() == DevModeType.REMOTE_SERVER_SIDE) { root = remoteSyncHandler = new RemoteSyncHandler(liveReloadConfig.password.get(), root, hotReplacementContext); } rootHandler = root; if (managementRouter != null && managementRouter.getValue() != null) { var mr = managementRouter.getValue(); mr.route().last().failureHandler( new 
QuarkusErrorHandler(launchMode.isDevOrTest(), httpConfiguration.unhandledErrorContentTypeDefault)); mr.route().order(Integer.MIN_VALUE).handler(createBodyHandlerForManagementInterface()); mr.route().order(Integer.MIN_VALUE).handler(CorsHandler.create().addOrigin("*")); HttpServerCommonHandlers.applyFilters(managementConfiguration.getValue().filter, mr); for (Filter filter : managementInterfaceFilterList) { mr.route().order(filter.getPriority()).handler(filter.getHandler()); } HttpServerCommonHandlers.applyHeaders(managementConfiguration.getValue().header, mr); HttpServerCommonHandlers.enforceMaxBodySize(managementConfiguration.getValue().limits, mr); applyCompression(managementBuildTimeConfig.enableCompression, mr); Handler<HttpServerRequest> handler = HttpServerCommonHandlers.enforceDuplicatedContext(mr); handler = HttpServerCommonHandlers.applyProxy(managementConfiguration.getValue().proxy, handler, vertx); event.select(ManagementInterface.class).fire(new ManagementInterfaceImpl(managementRouter.getValue())); VertxHttpRecorder.managementRouter = handler; } } private void applyCompression(boolean enableCompression, Router httpRouteRouter) { if (enableCompression) { httpRouteRouter.route().order(0).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext ctx) { ctx.response().putHeader(HttpHeaders.CONTENT_ENCODING, HttpHeaders.IDENTITY); ctx.next(); } }); } } private void warnIfProxyAddressForwardingAllowedWithMultipleHeaders(ProxyConfig proxyConfig) { boolean proxyAddressForwardingActivated = proxyConfig.proxyAddressForwarding; boolean forwardedActivated = proxyConfig.allowForwarded; boolean xForwardedActivated = proxyConfig.allowXForwarded.orElse(!forwardedActivated); if (proxyAddressForwardingActivated && forwardedActivated && xForwardedActivated) { LOGGER.warn( "The X-Forwarded-* and Forwarded headers will be considered when determining the proxy address. " + "This configuration can cause a security issue as clients can forge requests and send a " + "forwarded header that is not overwritten by the proxy. 
" + "Please consider use one of these headers just to forward the proxy address in requests."); } } private static CompletableFuture<HttpServer> initializeManagementInterfaceWithDomainSocket(Vertx vertx, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler<HttpServerRequest> managementRouter, ManagementInterfaceConfiguration managementConfig, List<String> websocketSubProtocols) { CompletableFuture<HttpServer> managementInterfaceDomainSocketFuture = new CompletableFuture<>(); if (!managementBuildTimeConfig.enabled || managementRouter == null || managementConfig == null) { managementInterfaceDomainSocketFuture.complete(null); return managementInterfaceDomainSocketFuture; } HttpServerOptions domainSocketOptionsForManagement = createDomainSocketOptionsForManagementInterface( managementBuildTimeConfig, managementConfig, websocketSubProtocols); if (domainSocketOptionsForManagement != null) { vertx.createHttpServer(domainSocketOptionsForManagement) .requestHandler(managementRouter) .listen(ar -> { if (ar.failed()) { managementInterfaceDomainSocketFuture.completeExceptionally( new IllegalStateException( "Unable to start the management interface on the " + domainSocketOptionsForManagement.getHost() + " domain socket", ar.cause())); } else { managementInterfaceDomainSocketFuture.complete(ar.result()); } }); } else { managementInterfaceDomainSocketFuture.complete(null); } return managementInterfaceDomainSocketFuture; } private static CompletableFuture<HttpServer> initializeManagementInterface(Vertx vertx, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler<HttpServerRequest> managementRouter, ManagementInterfaceConfiguration managementConfig, LaunchMode launchMode, List<String> websocketSubProtocols) throws IOException { httpManagementServerOptions = null; CompletableFuture<HttpServer> managementInterfaceFuture = new CompletableFuture<>(); if (!managementBuildTimeConfig.enabled || managementRouter == null || managementConfig == null) { managementInterfaceFuture.complete(null); return managementInterfaceFuture; } HttpServerOptions httpServerOptionsForManagement = createHttpServerOptionsForManagementInterface( managementBuildTimeConfig, managementConfig, launchMode, websocketSubProtocols); httpManagementServerOptions = HttpServerOptionsUtils.createSslOptionsForManagementInterface( managementBuildTimeConfig, managementConfig, launchMode, websocketSubProtocols); if (httpManagementServerOptions != null && httpManagementServerOptions.getKeyCertOptions() == null) { httpManagementServerOptions = httpServerOptionsForManagement; } if (httpManagementServerOptions != null) { vertx.createHttpServer(httpManagementServerOptions) .requestHandler(managementRouter) .listen(ar -> { if (ar.failed()) { managementInterfaceFuture.completeExceptionally( new IllegalStateException("Unable to start the management interface", ar.cause())); } else { actualManagementPort = ar.result().actualPort(); managementInterfaceFuture.complete(ar.result()); } }); } else { managementInterfaceFuture.complete(null); } return managementInterfaceFuture; } private static CompletableFuture<String> initializeMainHttpServer(Vertx vertx, HttpBuildTimeConfig httpBuildTimeConfig, HttpConfiguration httpConfiguration, LaunchMode launchMode, Supplier<Integer> eventLoops, List<String> websocketSubProtocols) throws IOException { if (!httpConfiguration.hostEnabled && !httpConfiguration.domainSocketEnabled) { return CompletableFuture.completedFuture(null); } httpMainServerOptions = 
createHttpServerOptions(httpBuildTimeConfig, httpConfiguration, launchMode, websocketSubProtocols); httpMainDomainSocketOptions = createDomainSocketOptions(httpBuildTimeConfig, httpConfiguration, websocketSubProtocols); HttpServerOptions tmpSslConfig = HttpServerOptionsUtils.createSslOptions(httpBuildTimeConfig, httpConfiguration, launchMode, websocketSubProtocols); if (Arc.container() != null) { List<InstanceHandle<HttpServerOptionsCustomizer>> instances = Arc.container() .listAll(HttpServerOptionsCustomizer.class); for (InstanceHandle<HttpServerOptionsCustomizer> instance : instances) { HttpServerOptionsCustomizer customizer = instance.get(); if (httpMainServerOptions != null) { customizer.customizeHttpServer(httpMainServerOptions); } if (tmpSslConfig != null) { customizer.customizeHttpsServer(tmpSslConfig); } if (httpMainDomainSocketOptions != null) { customizer.customizeDomainSocketServer(httpMainDomainSocketOptions); } } } if (tmpSslConfig != null && tmpSslConfig.getKeyCertOptions() == null) { tmpSslConfig = null; } httpMainSslServerOptions = tmpSslConfig; if (httpConfiguration.insecureRequests != HttpConfiguration.InsecureRequests.ENABLED && httpMainSslServerOptions == null) { throw new IllegalStateException("Cannot set quarkus.http.redirect-insecure-requests without enabling SSL."); } int eventLoopCount = eventLoops.get(); final int ioThreads; if (httpConfiguration.ioThreads.isPresent()) { ioThreads = Math.min(httpConfiguration.ioThreads.getAsInt(), eventLoopCount); } else if (launchMode.isDevOrTest()) { ioThreads = Math.min(2, eventLoopCount); } else { ioThreads = eventLoopCount; } CompletableFuture<String> futureResult = new CompletableFuture<>(); AtomicInteger connectionCount = new AtomicInteger(); vertx.deployVerticle(new Supplier<Verticle>() { @Override public Verticle get() { return new WebDeploymentVerticle(httpMainServerOptions, httpMainSslServerOptions, httpMainDomainSocketOptions, launchMode, httpConfiguration.insecureRequests, httpConfiguration, connectionCount); } }, new DeploymentOptions().setInstances(ioThreads), new Handler<AsyncResult<String>>() { @Override public void handle(AsyncResult<String> event) { if (event.failed()) { Throwable effectiveCause = event.cause(); if (effectiveCause instanceof BindException) { List<Integer> portsUsed = Collections.emptyList(); if ((httpMainSslServerOptions == null) && (httpMainServerOptions != null)) { portsUsed = List.of(httpMainServerOptions.getPort()); } else if ((httpConfiguration.insecureRequests == InsecureRequests.DISABLED) && (httpMainSslServerOptions != null)) { portsUsed = List.of(httpMainSslServerOptions.getPort()); } else if ((httpMainSslServerOptions != null) && (httpConfiguration.insecureRequests == InsecureRequests.ENABLED) && (httpMainServerOptions != null)) { portsUsed = List.of(httpMainServerOptions.getPort(), httpMainSslServerOptions.getPort()); } effectiveCause = new QuarkusBindException((BindException) effectiveCause, portsUsed); } futureResult.completeExceptionally(effectiveCause); } else { futureResult.complete(event.result()); } } }); return futureResult; } private static void doServerStart(Vertx vertx, HttpBuildTimeConfig httpBuildTimeConfig, ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler<HttpServerRequest> managementRouter, HttpConfiguration httpConfiguration, ManagementInterfaceConfiguration managementConfig, LaunchMode launchMode, Supplier<Integer> eventLoops, List<String> websocketSubProtocols, boolean auxiliaryApplication) throws IOException { var mainServerFuture = 
initializeMainHttpServer(vertx, httpBuildTimeConfig, httpConfiguration, launchMode, eventLoops, websocketSubProtocols); var managementInterfaceFuture = initializeManagementInterface(vertx, managementBuildTimeConfig, managementRouter, managementConfig, launchMode, websocketSubProtocols); var managementInterfaceDomainSocketFuture = initializeManagementInterfaceWithDomainSocket(vertx, managementBuildTimeConfig, managementRouter, managementConfig, websocketSubProtocols); try { String deploymentIdIfAny = mainServerFuture.get(); HttpServer tmpManagementServer = null; HttpServer tmpManagementServerUsingDomainSocket = null; if (managementRouter != null) { tmpManagementServer = managementInterfaceFuture.get(); tmpManagementServerUsingDomainSocket = managementInterfaceDomainSocketFuture.get(); } HttpServer managementServer = tmpManagementServer; HttpServer managementServerDomainSocket = tmpManagementServerUsingDomainSocket; if (deploymentIdIfAny != null) { VertxCoreRecorder.setWebDeploymentId(deploymentIdIfAny); } closeTask = new Runnable() { @Override public synchronized void run() { if (closeTask == this) { boolean isVertxClose = ((VertxInternal) vertx).closeFuture().future().isComplete(); int count = 0; if (deploymentIdIfAny != null && vertx.deploymentIDs().contains(deploymentIdIfAny)) { count++; } if (managementServer != null && !isVertxClose) { count++; } if (managementServerDomainSocket != null && !isVertxClose) { count++; } CountDownLatch latch = new CountDownLatch(count); var handler = new Handler<AsyncResult<Void>>() { @Override public void handle(AsyncResult<Void> event) { latch.countDown(); } }; if (deploymentIdIfAny != null) { try { vertx.undeploy(deploymentIdIfAny, handler); } catch (Exception e) { if (e instanceof RejectedExecutionException) { LOGGER.debug("Failed to undeploy deployment because a task was rejected (due to shutdown)", e); } else { LOGGER.warn("Failed to undeploy deployment", e); } } } try { if (managementServer != null && !isVertxClose) { managementServer.close(handler); } if (managementServerDomainSocket != null && !isVertxClose) { managementServerDomainSocket.close(handler); } } catch (Exception e) { LOGGER.warn("Unable to shutdown the management interface quietly", e); } try { latch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } } closeTask = null; if (remoteSyncHandler != null) { remoteSyncHandler.close(); remoteSyncHandler = null; } } }; } catch (InterruptedException | ExecutionException e) { throw new RuntimeException("Unable to start HTTP server", e); } setHttpServerTiming(httpConfiguration.insecureRequests, httpMainServerOptions, httpMainSslServerOptions, httpMainDomainSocketOptions, auxiliaryApplication, httpManagementServerOptions); } private static void setHttpServerTiming(InsecureRequests insecureRequests, HttpServerOptions httpServerOptions, HttpServerOptions sslConfig, HttpServerOptions domainSocketOptions, boolean auxiliaryApplication, HttpServerOptions managementConfig) { StringBuilder serverListeningMessage = new StringBuilder("Listening on: "); int socketCount = 0; if (httpServerOptions != null && !InsecureRequests.DISABLED.equals(insecureRequests)) { serverListeningMessage.append(String.format( "http: socketCount++; } if (sslConfig != null) { if (socketCount > 0) { serverListeningMessage.append(" and "); } serverListeningMessage.append(String.format("https: socketCount++; } if (domainSocketOptions != null) { if (socketCount > 0) { serverListeningMessage.append(" and "); } 
serverListeningMessage.append(String.format("unix:%s", domainSocketOptions.getHost())); } if (managementConfig != null) { serverListeningMessage.append( String.format(". Management interface listening on http%s: managementConfig.getHost(), managementConfig.getPort())); } Timing.setHttpServer(serverListeningMessage.toString(), auxiliaryApplication); } private static HttpServerOptions createHttpServerOptions( HttpBuildTimeConfig buildTimeConfig, HttpConfiguration httpConfiguration, LaunchMode launchMode, List<String> websocketSubProtocols) { if (!httpConfiguration.hostEnabled) { return null; } HttpServerOptions options = new HttpServerOptions(); int port = httpConfiguration.determinePort(launchMode); options.setPort(port == 0 ? -1 : port); HttpServerOptionsUtils.applyCommonOptions(options, buildTimeConfig, httpConfiguration, websocketSubProtocols); return options; } private static HttpServerOptions createHttpServerOptionsForManagementInterface( ManagementInterfaceBuildTimeConfig buildTimeConfig, ManagementInterfaceConfiguration httpConfiguration, LaunchMode launchMode, List<String> websocketSubProtocols) { if (!httpConfiguration.hostEnabled) { return null; } HttpServerOptions options = new HttpServerOptions(); int port = httpConfiguration.determinePort(launchMode); options.setPort(port == 0 ? -1 : port); HttpServerOptionsUtils.applyCommonOptionsForManagementInterface(options, buildTimeConfig, httpConfiguration, websocketSubProtocols); return options; } private static HttpServerOptions createDomainSocketOptions( HttpBuildTimeConfig buildTimeConfig, HttpConfiguration httpConfiguration, List<String> websocketSubProtocols) { if (!httpConfiguration.domainSocketEnabled) { return null; } HttpServerOptions options = new HttpServerOptions(); HttpServerOptionsUtils.applyCommonOptions(options, buildTimeConfig, httpConfiguration, websocketSubProtocols); options.setHost(httpConfiguration.domainSocket); File file = new File(httpConfiguration.domainSocket); if (!file.getParentFile().canWrite()) { LOGGER.warnf( "Unable to write in the domain socket directory (`%s`). Binding to the socket is likely going to fail.", httpConfiguration.domainSocket); } return options; } private static HttpServerOptions createDomainSocketOptionsForManagementInterface( ManagementInterfaceBuildTimeConfig buildTimeConfig, ManagementInterfaceConfiguration httpConfiguration, List<String> websocketSubProtocols) { if (!httpConfiguration.domainSocketEnabled) { return null; } HttpServerOptions options = new HttpServerOptions(); HttpServerOptionsUtils.applyCommonOptionsForManagementInterface(options, buildTimeConfig, httpConfiguration, websocketSubProtocols); options.setHost(httpConfiguration.domainSocket); File file = new File(httpConfiguration.domainSocket); if (!file.getParentFile().canWrite()) { LOGGER.warnf( "Unable to write in the domain socket directory (`%s`). 
Binding to the socket is likely going to fail.", httpConfiguration.domainSocket); } return options; } public void addRoute(RuntimeValue<Router> router, Function<Router, Route> route, Handler<RoutingContext> handler, HandlerType type) { Route vr = route.apply(router.getValue()); if (type == HandlerType.BLOCKING) { vr.blockingHandler(handler, false); } else if (type == HandlerType.FAILURE) { vr.failureHandler(handler); } else { vr.handler(handler); } } public void setNonApplicationRedirectHandler(String nonApplicationPath, String rootPath) { nonApplicationRedirectHandler = new Handler<RoutingContext>() { @Override public void handle(RoutingContext context) { String absoluteURI = context.request().path(); String target = absoluteURI.substring(rootPath.length()); String redirectTo = nonApplicationPath + target; String query = context.request().query(); if (query != null && !query.isEmpty()) { redirectTo += '?' + query; } context.response() .setStatusCode(HttpResponseStatus.MOVED_PERMANENTLY.code()) .putHeader(HttpHeaderNames.LOCATION, redirectTo) .end(); } }; } public Handler<RoutingContext> getNonApplicationRedirectHandler() { return nonApplicationRedirectHandler; } public GracefulShutdownFilter createGracefulShutdownHandler() { return new GracefulShutdownFilter(); } private static class WebDeploymentVerticle extends AbstractVerticle implements Resource { private HttpServer httpServer; private HttpServer httpsServer; private HttpServer domainSocketServer; private final HttpServerOptions httpOptions; private final HttpServerOptions httpsOptions; private final HttpServerOptions domainSocketOptions; private final LaunchMode launchMode; private volatile boolean clearHttpProperty = false; private volatile boolean clearHttpsProperty = false; private volatile PortSystemProperties portSystemProperties; private final HttpConfiguration.InsecureRequests insecureRequests; private final HttpConfiguration quarkusConfig; private final AtomicInteger connectionCount; public WebDeploymentVerticle(HttpServerOptions httpOptions, HttpServerOptions httpsOptions, HttpServerOptions domainSocketOptions, LaunchMode launchMode, InsecureRequests insecureRequests, HttpConfiguration quarkusConfig, AtomicInteger connectionCount) { this.httpOptions = httpOptions; this.httpsOptions = httpsOptions; this.launchMode = launchMode; this.domainSocketOptions = domainSocketOptions; this.insecureRequests = insecureRequests; this.quarkusConfig = quarkusConfig; this.connectionCount = connectionCount; org.crac.Core.getGlobalContext().register(this); } @Override public void start(Promise<Void> startFuture) { final AtomicInteger remainingCount = new AtomicInteger(0); boolean httpServerEnabled = httpOptions != null && insecureRequests != HttpConfiguration.InsecureRequests.DISABLED; if (httpServerEnabled) { remainingCount.incrementAndGet(); } if (httpsOptions != null) { remainingCount.incrementAndGet(); } if (domainSocketOptions != null) { remainingCount.incrementAndGet(); } if (remainingCount.get() == 0) { startFuture .fail(new IllegalArgumentException("Must configure at least one of http, https or unix domain socket")); } if (httpServerEnabled) { httpServer = vertx.createHttpServer(httpOptions); if (insecureRequests == HttpConfiguration.InsecureRequests.ENABLED) { httpServer.requestHandler(ACTUAL_ROOT); } else { httpServer.requestHandler(new Handler<HttpServerRequest>() { @Override public void handle(HttpServerRequest req) { try { String host = req.getHeader(HttpHeaderNames.HOST); if (host == null) { 
req.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end(); } else { int includedPort = host.indexOf(":"); if (includedPort != -1) { host = host.substring(0, includedPort); } req.response() .setStatusCode(301) .putHeader("Location", "https: .end(); } } catch (Exception e) { req.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()).end(); } } }); } setupTcpHttpServer(httpServer, httpOptions, false, startFuture, remainingCount, connectionCount); } if (domainSocketOptions != null) { domainSocketServer = vertx.createHttpServer(domainSocketOptions); domainSocketServer.requestHandler(ACTUAL_ROOT); setupUnixDomainSocketHttpServer(domainSocketServer, domainSocketOptions, startFuture, remainingCount); } if (httpsOptions != null) { httpsServer = vertx.createHttpServer(httpsOptions); httpsServer.requestHandler(ACTUAL_ROOT); setupTcpHttpServer(httpsServer, httpsOptions, true, startFuture, remainingCount, connectionCount); } } private void setupUnixDomainSocketHttpServer(HttpServer httpServer, HttpServerOptions options, Promise<Void> startFuture, AtomicInteger remainingCount) { httpServer.listen(SocketAddress.domainSocketAddress(options.getHost()), event -> { if (event.succeeded()) { if (remainingCount.decrementAndGet() == 0) { startFuture.complete(null); } } else { if (event.cause() != null && event.cause().getMessage() != null && event.cause().getMessage().contains("Permission denied")) { startFuture.fail(new IllegalStateException( String.format( "Unable to bind to Unix domain socket (%s) as the application does not have the permission to write in the directory.", domainSocketOptions.getHost()))); } else if (event.cause() instanceof IllegalArgumentException) { startFuture.fail(new IllegalArgumentException( String.format( "Unable to bind to Unix domain socket. Consider adding the 'io.netty:%s' dependency. See the Quarkus Vert.x reference guide for more details.", Utils.isLinux() ? 
"netty-transport-native-epoll" : "netty-transport-native-kqueue"))); } else { startFuture.fail(event.cause()); } } }); } private void setupTcpHttpServer(HttpServer httpServer, HttpServerOptions options, boolean https, Promise<Void> startFuture, AtomicInteger remainingCount, AtomicInteger currentConnectionCount) { if (quarkusConfig.limits.maxConnections.isPresent() && quarkusConfig.limits.maxConnections.getAsInt() > 0) { final int maxConnections = quarkusConfig.limits.maxConnections.getAsInt(); httpServer.connectionHandler(new Handler<HttpConnection>() { @Override public void handle(HttpConnection event) { int current; do { current = currentConnectionCount.get(); if (current == maxConnections) { LOGGER.debug("Rejecting connection as there are too many active connections"); event.close(); return; } } while (!currentConnectionCount.compareAndSet(current, current + 1)); event.closeHandler(new Handler<Void>() { @Override public void handle(Void event) { LOGGER.debug("Connection closed"); connectionCount.decrementAndGet(); } }); } }); } httpServer.listen(options.getPort(), options.getHost(), new Handler<>() { @Override public void handle(AsyncResult<HttpServer> event) { if (event.cause() != null) { startFuture.fail(event.cause()); } else { int actualPort = event.result().actualPort(); if (https) { actualHttpsPort = actualPort; } else { actualHttpPort = actualPort; } if (actualPort != options.getPort()) { String schema; if (https) { clearHttpsProperty = true; schema = "https"; } else { clearHttpProperty = true; actualHttpPort = actualPort; schema = "http"; } portSystemProperties = new PortSystemProperties(); portSystemProperties.set(schema, actualPort, launchMode); } if (remainingCount.decrementAndGet() == 0) { startFuture.complete(null); } } } }); } @Override public void stop(Promise<Void> stopFuture) { final AtomicInteger remainingCount = new AtomicInteger(0); if (httpServer != null) { remainingCount.incrementAndGet(); } if (httpsServer != null) { remainingCount.incrementAndGet(); } if (domainSocketServer != null) { remainingCount.incrementAndGet(); } Handler<AsyncResult<Void>> handleClose = event -> { if (remainingCount.decrementAndGet() == 0) { if (clearHttpProperty) { String portPropertyName = launchMode == LaunchMode.TEST ? "quarkus.http.test-port" : "quarkus.http.port"; System.clearProperty(portPropertyName); if (launchMode.isDevOrTest()) { System.clearProperty(propertyWithProfilePrefix(portPropertyName)); } } if (clearHttpsProperty) { String portPropertyName = launchMode == LaunchMode.TEST ? "quarkus.http.test-ssl-port" : "quarkus.http.ssl-port"; System.clearProperty(portPropertyName); if (launchMode.isDevOrTest()) { System.clearProperty(propertyWithProfilePrefix(portPropertyName)); } } if (portSystemProperties != null) { portSystemProperties.restore(); } stopFuture.complete(); } }; if (httpServer != null) { httpServer.close(handleClose); } if (httpsServer != null) { httpsServer.close(handleClose); } if (domainSocketServer != null) { domainSocketServer.close(handleClose); } } private String propertyWithProfilePrefix(String portPropertyName) { return "%" + launchMode.getDefaultProfile() + "." + portPropertyName; } @Override public void beforeCheckpoint(org.crac.Context<? extends Resource> context) throws Exception { Promise<Void> p = Promise.promise(); stop(p); CountDownLatch latch = new CountDownLatch(1); p.future().onComplete(event -> latch.countDown()); latch.await(); } @Override public void afterRestore(org.crac.Context<? 
extends Resource> context) throws Exception { Promise<Void> p = Promise.promise(); start(p); CountDownLatch latch = new CountDownLatch(1); p.future().onComplete(event -> latch.countDown()); latch.await(); } } protected static ServerBootstrap virtualBootstrap; protected static ChannelFuture virtualBootstrapChannel; public static VirtualAddress VIRTUAL_HTTP = new VirtualAddress("netty-virtual-http"); private static void initializeVirtual(Vertx vertxRuntime) { if (virtualBootstrap != null) { return; } VertxInternal vertx = (VertxInternal) vertxRuntime; virtualBootstrap = new ServerBootstrap(); virtualBootstrap.group(vertx.getEventLoopGroup()) .channel(VirtualServerChannel.class) .handler(new ChannelInitializer<VirtualServerChannel>() { @Override public void initChannel(VirtualServerChannel ch) throws Exception { } }) .childHandler(new ChannelInitializer<VirtualChannel>() { @Override public void initChannel(VirtualChannel ch) throws Exception { EventLoopContext context = vertx.createEventLoopContext(); VertxHandler<Http1xServerConnection> handler = VertxHandler.create(chctx -> { Http1xServerConnection conn = new Http1xServerConnection( () -> { ContextInternal internal = (ContextInternal) VertxContext .getOrCreateDuplicatedContext(context); setContextSafe(internal, true); return internal; }, null, new HttpServerOptions(), chctx, context, "localhost", null); conn.handler(ACTUAL_ROOT); return conn; }); ch.pipeline().addLast("handler", handler); } }); try { virtualBootstrapChannel = virtualBootstrap.bind(VIRTUAL_HTTP).sync(); } catch (InterruptedException e) { throw new RuntimeException("failed to bind virtual http"); } } public static Handler<HttpServerRequest> getRootHandler() { return ACTUAL_ROOT; } /** * used in the live reload handler to make sure the application has not been changed by another source (e.g. 
reactive * messaging) */ public static Object getCurrentApplicationState() { return rootHandler; } private static Handler<RoutingContext> configureAndGetBody(Optional<MemorySize> maxBodySize, BodyConfig bodyConfig) { BodyHandler bodyHandler = BodyHandler.create(); if (maxBodySize.isPresent()) { bodyHandler.setBodyLimit(maxBodySize.get().asLongValue()); } bodyHandler.setHandleFileUploads(bodyConfig.handleFileUploads); bodyHandler.setUploadsDirectory(bodyConfig.uploadsDirectory); bodyHandler.setDeleteUploadedFilesOnEnd(bodyConfig.deleteUploadedFilesOnEnd); bodyHandler.setMergeFormAttributes(bodyConfig.mergeFormAttributes); bodyHandler.setPreallocateBodyBuffer(bodyConfig.preallocateBodyBuffer); return new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { if (!Context.isOnEventLoopThread()) { ((ConnectionBase) event.request().connection()).channel().eventLoop().execute(new Runnable() { @Override public void run() { try { if (!event.request().isEnded()) { event.request().resume(); if (CAN_HAVE_BODY.contains(event.request().method())) { bodyHandler.handle(event); } else { event.next(); } } else { event.next(); } } catch (Throwable t) { event.fail(t); } } }); } else { if (!event.request().isEnded()) { event.request().resume(); } if (CAN_HAVE_BODY.contains(event.request().method())) { bodyHandler.handle(event); } else { event.next(); } } } }; } public Handler<RoutingContext> createBodyHandler() { Optional<MemorySize> maxBodySize = httpConfiguration.getValue().limits.maxBodySize; return configureAndGetBody(maxBodySize, httpConfiguration.getValue().body); } public Handler<RoutingContext> createBodyHandlerForManagementInterface() { Optional<MemorySize> maxBodySize = managementConfiguration.getValue().limits.maxBodySize; return configureAndGetBody(maxBodySize, managementConfiguration.getValue().body); } private static final List<HttpMethod> CAN_HAVE_BODY = Arrays.asList(HttpMethod.POST, HttpMethod.PUT, HttpMethod.PATCH, HttpMethod.DELETE); private BiConsumer<Cookie, HttpServerRequest> processSameSiteConfig(Map<String, SameSiteCookieConfig> httpConfiguration) { List<BiFunction<Cookie, HttpServerRequest, Boolean>> functions = new ArrayList<>(); BiFunction<Cookie, HttpServerRequest, Boolean> last = null; for (Map.Entry<String, SameSiteCookieConfig> entry : new TreeMap<>(httpConfiguration).entrySet()) { Pattern p = Pattern.compile(entry.getKey(), entry.getValue().caseSensitive ? 0 : Pattern.CASE_INSENSITIVE); BiFunction<Cookie, HttpServerRequest, Boolean> biFunction = new BiFunction<Cookie, HttpServerRequest, Boolean>() { @Override public Boolean apply(Cookie cookie, HttpServerRequest request) { if (p.matcher(cookie.getName()).matches()) { if (entry.getValue().value == CookieSameSite.NONE) { if (entry.getValue().enableClientChecker) { String userAgent = request.getHeader(HttpHeaders.USER_AGENT); if (userAgent != null && SameSiteNoneIncompatibleClientChecker.isSameSiteNoneIncompatible(userAgent)) { return false; } } if (entry.getValue().addSecureForNone) { cookie.setSecure(true); } } cookie.setSameSite(entry.getValue().value); return true; } return false; } }; if (entry.getKey().equals(".*")) { last = biFunction; } else { functions.add(biFunction); } } if (last != null) { functions.add(last); } return new BiConsumer<Cookie, HttpServerRequest>() { @Override public void accept(Cookie cookie, HttpServerRequest request) { for (BiFunction<Cookie, HttpServerRequest, Boolean> i : functions) { if (i.apply(cookie, request)) { return; } } } }; } }
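The `setupTcpHttpServer` method in the class above enforces the configured connection limit with a compare-and-set loop on an `AtomicInteger`: the connection is rejected when the counter is at the limit, incremented atomically otherwise, and decremented again in the connection's close handler. A minimal standalone sketch of that pattern (the `tryAcquire`/`release` names are illustrative, not Quarkus API):

import java.util.concurrent.atomic.AtomicInteger;

// Sketch of the CAS-based connection limiter used in setupTcpHttpServer.
public class ConnectionLimiterSketch {

    private final AtomicInteger current = new AtomicInteger();
    private final int maxConnections;

    public ConnectionLimiterSketch(int maxConnections) {
        this.maxConnections = maxConnections;
    }

    /** Returns true if the connection may proceed; false means "close it". */
    public boolean tryAcquire() {
        int snapshot;
        do {
            snapshot = current.get();
            if (snapshot >= maxConnections) {
                return false; // at capacity: caller should close the connection
            }
        } while (!current.compareAndSet(snapshot, snapshot + 1));
        return true;
    }

    /** Invoked from the connection's close handler. */
    public void release() {
        current.decrementAndGet();
    }

    public static void main(String[] args) {
        ConnectionLimiterSketch limiter = new ConnectionLimiterSketch(2);
        System.out.println(limiter.tryAcquire()); // true
        System.out.println(limiter.tryAcquire()); // true
        System.out.println(limiter.tryAcquire()); // false, limit reached
        limiter.release();
        System.out.println(limiter.tryAcquire()); // true again
    }
}

The sketch compares with `>=` where the original uses `==`; both hold because the counter never exceeds the limit, but `>=` is the safer invariant to state.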
Will this logic work for subtypes and referred types?
private boolean getTypeEquality(BType typeA, BType typeB) { return typeA.tag == typeB.tag && Objects.equals(typeA.name, typeB.name); }
return typeA.tag == typeB.tag && Objects.equals(typeA.name, typeB.name);
private boolean getTypeEquality(BType typeA, BType typeB) { return types.isAssignable(typeA, typeB) || types.isAssignable(typeB, typeA); }
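The fix replaces tag-and-name identity with mutual assignability (`types.isAssignable` in either direction), which is what makes the check tolerate the subtypes and referred types raised in the review comment: a referred type such as `type MyInt int;` carries a different name but the same shape. A toy illustration of the difference, deliberately not using the Ballerina compiler's `Types` API (all names here are hypothetical):

import java.util.Map;
import java.util.Set;

// Toy model: tag/name identity fails where mutual assignability succeeds.
public class TypeEqualitySketch {

    // A referred type keeps its own name but points at the same shape,
    // so name equality breaks even though the types are interchangeable.
    record Type(String name, String shape) {}

    // Hypothetical assignability: identical shapes, or a declared subtype edge.
    static boolean isAssignable(Type a, Type b, Map<String, Set<String>> subtypes) {
        return a.shape().equals(b.shape())
                || subtypes.getOrDefault(b.shape(), Set.of()).contains(a.shape());
    }

    static boolean oldEquality(Type a, Type b) {
        return a.name().equals(b.name()); // fails for referred types
    }

    static boolean newEquality(Type a, Type b, Map<String, Set<String>> subtypes) {
        return isAssignable(a, b, subtypes) || isAssignable(b, a, subtypes);
    }

    public static void main(String[] args) {
        Type intType = new Type("int", "int");
        Type referred = new Type("MyInt", "int"); // type MyInt int;
        Map<String, Set<String>> subtypes = Map.of();

        System.out.println(oldEquality(intType, referred));           // false
        System.out.println(newEquality(intType, referred, subtypes)); // true
    }
}

Note that the patched check deems two types "equal" whenever either is assignable to the other, which is intentionally permissive for subtype relationships.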
class DataflowAnalyzer extends BLangNodeVisitor { private final SymbolResolver symResolver; private final Names names; private SymbolEnv env; private SymbolTable symTable; private BLangDiagnosticLog dlog; private Types types; private Map<BSymbol, InitStatus> uninitializedVars; private Map<BSymbol, Location> unusedErrorVarsDeclaredWithVar; private Map<BSymbol, Location> unusedLocalVariables; private Map<BSymbol, Set<BSymbol>> globalNodeDependsOn; private Map<BSymbol, Set<BSymbol>> functionToDependency; private boolean flowTerminated = false; private static final CompilerContext.Key<DataflowAnalyzer> DATAFLOW_ANALYZER_KEY = new CompilerContext.Key<>(); private Deque<BSymbol> currDependentSymbolDeque; private final GlobalVariableRefAnalyzer globalVariableRefAnalyzer; private DataflowAnalyzer(CompilerContext context) { context.put(DATAFLOW_ANALYZER_KEY, this); this.symTable = SymbolTable.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.types = Types.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.names = Names.getInstance(context); this.currDependentSymbolDeque = new ArrayDeque<>(); this.globalVariableRefAnalyzer = GlobalVariableRefAnalyzer.getInstance(context); this.unusedLocalVariables = new HashMap<>(); } public static DataflowAnalyzer getInstance(CompilerContext context) { DataflowAnalyzer dataflowAnalyzer = context.get(DATAFLOW_ANALYZER_KEY); if (dataflowAnalyzer == null) { dataflowAnalyzer = new DataflowAnalyzer(context); } return dataflowAnalyzer; } /** * Perform data-flow analysis on a package. * * @param pkgNode Package to perform data-flow analysis. * @return Data-flow analyzed package */ public BLangPackage analyze(BLangPackage pkgNode) { this.uninitializedVars = new LinkedHashMap<>(); this.globalNodeDependsOn = new LinkedHashMap<>(); this.functionToDependency = new HashMap<>(); this.dlog.setCurrentPackageId(pkgNode.packageID); SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol); analyzeNode(pkgNode, pkgEnv); return pkgNode; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.DATAFLOW_ANALYZE)) { return; } Map<BSymbol, Location> prevUnusedErrorVarsDeclaredWithVar = this.unusedErrorVarsDeclaredWithVar; this.unusedErrorVarsDeclaredWithVar = new HashMap<>(); Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); List<TopLevelNode> sortedListOfNodes = new ArrayList<>(pkgNode.globalVars); addModuleInitToSortedNodeList(pkgNode, sortedListOfNodes); addNodesToSortedNodeList(pkgNode, sortedListOfNodes); for (TopLevelNode topLevelNode : sortedListOfNodes) { if (isModuleInitFunction((BLangNode) topLevelNode)) { analyzeModuleInitFunc((BLangFunction) topLevelNode); } else { if (topLevelNode.getKind() == NodeKind.CLASS_DEFN) { BLangClassDefinition classDef = (BLangClassDefinition) topLevelNode; if (classDef.flagSet.contains(Flag.OBJECT_CTOR)) { continue; } } analyzeNode((BLangNode) topLevelNode, env); } } checkForUninitializedGlobalVars(pkgNode.globalVars); pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage)); this.globalVariableRefAnalyzer.analyzeAndReOrder(pkgNode, this.globalNodeDependsOn); this.globalVariableRefAnalyzer.populateFunctionDependencies(this.functionToDependency, pkgNode.globalVars); pkgNode.globalVariableDependencies = globalVariableRefAnalyzer.getGlobalVariablesDependsOn(); checkUnusedImports(pkgNode.imports); 
emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVariables; checkUnusedErrorVarsDeclaredWithVar(); this.unusedErrorVarsDeclaredWithVar = prevUnusedErrorVarsDeclaredWithVar; pkgNode.completedPhases.add(CompilerPhase.DATAFLOW_ANALYZE); } private void addModuleInitToSortedNodeList(BLangPackage pkgNode, List<TopLevelNode> sortedListOfNodes) { for (TopLevelNode node : pkgNode.topLevelNodes) { if (isModuleInitFunction((BLangNode) node)) { sortedListOfNodes.add(node); break; } } } private void addNodesToSortedNodeList(BLangPackage pkgNode, List<TopLevelNode> sortedListOfNodes) { pkgNode.topLevelNodes.forEach(topLevelNode -> { if (!sortedListOfNodes.contains(topLevelNode)) { sortedListOfNodes.add(topLevelNode); } }); } private boolean isModuleInitFunction(BLangNode node) { return node.getKind() == NodeKind.FUNCTION && Names.USER_DEFINED_INIT_SUFFIX.value.equals(((BLangFunction) node).name.value); } private void analyzeModuleInitFunc(BLangFunction funcNode) { Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); this.currDependentSymbolDeque.push(funcNode.symbol); SymbolEnv moduleInitFuncEnv = SymbolEnv.createModuleInitFunctionEnv(funcNode, funcNode.symbol.scope, env); for (BLangAnnotationAttachment bLangAnnotationAttachment : funcNode.annAttachments) { analyzeNode(bLangAnnotationAttachment.expr, env); } analyzeNode(funcNode.body, moduleInitFuncEnv); this.currDependentSymbolDeque.pop(); emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVariables; } private void checkForUninitializedGlobalVars(List<BLangVariable> globalVars) { for (BLangVariable globalVar : globalVars) { if (globalVar.getKind() == NodeKind.VARIABLE && this.uninitializedVars.containsKey(globalVar.symbol)) { this.dlog.error(globalVar.pos, DiagnosticErrorCode.UNINITIALIZED_VARIABLE, globalVar.symbol); } } } @Override public void visit(BLangResourceFunction funcNode) { visit((BLangFunction) funcNode); } @Override public void visit(BLangFunction funcNode) { SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); this.currDependentSymbolDeque.push(funcNode.symbol); funcNode.annAttachments.forEach(bLangAnnotationAttachment -> analyzeNode(bLangAnnotationAttachment.expr, env)); funcNode.requiredParams.forEach(param -> analyzeNode(param, funcEnv)); analyzeNode(funcNode.restParam, funcEnv); if (funcNode.flagSet.contains(Flag.OBJECT_CTOR)) { visitFunctionBodyWithDynamicEnv(funcNode, funcEnv); } else { analyzeBranch(funcNode.body, funcEnv); } this.currDependentSymbolDeque.pop(); emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVariables; } private void visitFunctionBodyWithDynamicEnv(BLangFunction funcNode, SymbolEnv funcEnv) { Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); this.unusedLocalVariables.putAll(prevUnusedLocalVariables); Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars; this.uninitializedVars = copyUninitializedVars(); this.flowTerminated = false; analyzeNode(funcNode.body, funcEnv); this.uninitializedVars = prevUninitializedVars; prevUnusedLocalVariables.keySet().removeIf(bSymbol -> !this.unusedLocalVariables.containsKey(bSymbol)); 
this.unusedLocalVariables.keySet().removeAll(prevUnusedLocalVariables.keySet()); emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVariables; } @Override public void visit(BLangBlockFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); bodyEnv.isModuleInit = env.isModuleInit; for (BLangStatement statement : body.stmts) { analyzeNode(statement, bodyEnv); } } @Override public void visit(BLangExprFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); analyzeNode(body.expr, bodyEnv); } @Override public void visit(BLangExternalFunctionBody body) { } @Override public void visit(BLangBlockStmt blockNode) { SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env); blockNode.stmts.forEach(statement -> analyzeNode(statement, blockEnv)); } @Override public void visit(BLangLetExpression letExpression) { for (BLangLetVariable letVarDeclaration : letExpression.letVarDeclarations) { analyzeNode((BLangNode) letVarDeclaration.definitionNode, letExpression.env); } analyzeNode(letExpression.expr, letExpression.env); } @Override public void visit(BLangCompilationUnit compUnit) { } @Override public void visit(BLangXMLNS xmlnsNode) { } @Override public void visit(BLangService service) { this.currDependentSymbolDeque.push(service.serviceClass.symbol); visit(service.serviceClass); for (BLangExpression attachedExpr : service.attachedExprs) { analyzeNode(attachedExpr, env); } service.annAttachments.forEach(bLangAnnotationAttachment -> analyzeNode(bLangAnnotationAttachment.expr, env)); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangTypeDefinition typeDefinition) { SymbolEnv typeDefEnv; BSymbol symbol = typeDefinition.symbol; if (typeDefinition.symbol.kind == SymbolKind.TYPE_DEF) { symbol = symbol.type.tsymbol; } typeDefEnv = SymbolEnv.createTypeEnv(typeDefinition.typeNode, symbol.scope, env); this.currDependentSymbolDeque.push(symbol); analyzeNode(typeDefinition.typeNode, typeDefEnv); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangClassDefinition classDef) { SymbolEnv preEnv = env; SymbolEnv env = this.env; Map<BSymbol, Location> prevUnusedLocalVariables = null; Map<BSymbol, InitStatus> prevUninitializedVars = null; boolean visitedOCE = false; if (classDef.flagSet.contains(Flag.OBJECT_CTOR) && classDef.oceEnvData.capturedClosureEnv != null && classDef.oceEnvData.capturedClosureEnv.enclEnv != null) { env = classDef.oceEnvData.capturedClosureEnv.enclEnv; prevUnusedLocalVariables = this.unusedLocalVariables; prevUninitializedVars = this.uninitializedVars; this.unusedLocalVariables = new HashMap<>(); this.unusedLocalVariables.putAll(prevUnusedLocalVariables); this.uninitializedVars = copyUninitializedVars(); this.flowTerminated = false; visitedOCE = true; } SymbolEnv objectEnv = SymbolEnv.createClassEnv(classDef, classDef.symbol.scope, env); this.currDependentSymbolDeque.push(classDef.symbol); for (BLangAnnotationAttachment bLangAnnotationAttachment : classDef.annAttachments) { analyzeNode(bLangAnnotationAttachment.expr, env); } classDef.fields.forEach(field -> analyzeNode(field, objectEnv)); classDef.referencedFields.forEach(field -> analyzeNode(field, objectEnv)); if (classDef.initFunction != null) { if (classDef.initFunction.body == null) { Optional<BLangFunction> outerFuncDef = objectEnv.enclPkg.functions.stream() .filter(f -> f.symbol.name.equals((classDef.initFunction).symbol.name)) .findFirst(); outerFuncDef.ifPresent(bLangFunction -> classDef.initFunction = 
bLangFunction); } if (classDef.initFunction.body != null) { Map<BSymbol, Location> prevUnusedLocalVars = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); if (classDef.initFunction.body.getKind() == NodeKind.BLOCK_FUNCTION_BODY) { for (BLangStatement statement : ((BLangBlockFunctionBody) classDef.initFunction.body).stmts) { analyzeNode(statement, objectEnv); } } else if (classDef.initFunction.body.getKind() == NodeKind.EXPR_FUNCTION_BODY) { analyzeNode(((BLangExprFunctionBody) classDef.initFunction.body).expr, objectEnv); } emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVars; } } Stream.concat(classDef.fields.stream(), classDef.referencedFields.stream()) .map(field -> { addTypeDependency(classDef.symbol, field.getBType(), new HashSet<>()); return field; }) .filter(field -> !Symbols.isPrivate(field.symbol)) .forEach(field -> { if (this.uninitializedVars.containsKey(field.symbol)) { this.dlog.error(field.pos, DiagnosticErrorCode.OBJECT_UNINITIALIZED_FIELD, field.symbol); } }); for (BLangFunction function : classDef.functions) { analyzeNode(function, env); } for (BLangType type : classDef.typeRefs) { analyzeNode(type, env); } this.env = preEnv; if (visitedOCE) { this.uninitializedVars = prevUninitializedVars; prevUnusedLocalVariables.keySet().removeIf(bSymbol -> !this.unusedLocalVariables.containsKey(bSymbol)); this.unusedLocalVariables = prevUnusedLocalVariables; } this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangObjectConstructorExpression objectConstructorExpression) { BLangClassDefinition classDef = objectConstructorExpression.classNode; if (classDef.flagSet.contains(Flag.OBJECT_CTOR)) { OCEDynamicEnvironmentData oceData = classDef.oceEnvData; for (BSymbol symbol : oceData.closureFuncSymbols) { this.unusedLocalVariables.remove(symbol); } for (BSymbol symbol : oceData.closureBlockSymbols) { this.unusedLocalVariables.remove(symbol); } } visit(objectConstructorExpression.classNode); visit(objectConstructorExpression.typeInit); addDependency(objectConstructorExpression.getBType().tsymbol, objectConstructorExpression.classNode.symbol); } @Override public void visit(BLangSimpleVariableDef varDefNode) { BLangSimpleVariable var = varDefNode.var; if (var.expr == null) { addUninitializedVar(var); analyzeNode(var.typeNode, env); BVarSymbol symbol = var.symbol; if (var.getKind() == NodeKind.VARIABLE && isLocalVariableDefinedWithNonWildCardBindingPattern(var)) { this.unusedLocalVariables.put(symbol, var.pos); } return; } analyzeNode(var, env); } @Override public void visit(BLangSimpleVariable variable) { BVarSymbol symbol = variable.symbol; analyzeNode(variable.typeNode, env); if (symbol == null) { if (variable.expr != null) { analyzeNode(variable.expr, env); } return; } this.currDependentSymbolDeque.push(symbol); if (variable.typeNode != null && variable.typeNode.getBType() != null) { BType type = variable.typeNode.getBType(); recordGlobalVariableReferenceRelationship(Types.getReferredType(type).tsymbol); } boolean withInModuleVarLetExpr = symbol.owner.tag == SymTag.LET && isGlobalVarSymbol(env.enclVarSym); if (withInModuleVarLetExpr) { BVarSymbol dependentVar = env.enclVarSym; this.currDependentSymbolDeque.push(dependentVar); } try { boolean varWithInferredTypeIncludingError = false; if (variable.isDeclaredWithVar) { varWithInferredTypeIncludingError = addVarIfInferredTypeIncludesError(variable); } if (!varWithInferredTypeIncludingError && isLocalVariableDefinedWithNonWildCardBindingPattern(variable) && 
!isVariableDeclaredForWorkerDeclaration(variable)) { this.unusedLocalVariables.put(symbol, variable.pos); } if (variable.expr != null) { analyzeNode(variable.expr, env); this.uninitializedVars.remove(symbol); return; } long varFlags = symbol.flags; if (Symbols.isFlagOn(varFlags, Flags.CONFIGURABLE) && Symbols.isFlagOn(varFlags, Flags.REQUIRED)) { return; } BSymbol owner = symbol.owner; if (owner.tag != SymTag.PACKAGE && owner.tag != SymTag.OBJECT) { return; } addUninitializedVar(variable); } finally { if (withInModuleVarLetExpr) { this.currDependentSymbolDeque.pop(); } this.currDependentSymbolDeque.pop(); } } private boolean isVariableDeclaredForWorkerDeclaration(BLangSimpleVariable variable) { BLangExpression expr = variable.expr; if (expr == null) { return false; } if (Symbols.isFlagOn(variable.symbol.flags, Flags.WORKER)) { return true; } return expr.getKind() == NodeKind.LAMBDA && ((BLangLambdaFunction) expr).function.flagSet.contains(Flag.WORKER); } @Override public void visit(BLangAssignment assignment) { analyzeNode(assignment.expr, env); checkAssignment(assignment.varRef); } @Override public void visit(BLangCompoundAssignment compoundAssignNode) { analyzeNode(compoundAssignNode.expr, env); analyzeNode(compoundAssignNode.varRef, env); checkAssignment(compoundAssignNode.varRef); this.uninitializedVars.remove(compoundAssignNode.varRef.symbol); } @Override public void visit(BLangBreak breakNode) { terminateFlow(); } @Override public void visit(BLangReturn returnNode) { analyzeNode(returnNode.expr, env); terminateFlow(); } @Override public void visit(BLangXMLNSStatement xmlnsStmt) { analyzeNode(xmlnsStmt.xmlnsDecl, env); } @Override public void visit(BLangIf ifNode) { analyzeNode(ifNode.expr, env); BranchResult ifResult = analyzeBranch(ifNode.body, env); BranchResult elseResult = analyzeBranch(ifNode.elseStmt, env); if (ifResult.flowTerminated) { this.uninitializedVars = elseResult.uninitializedVars; return; } if (elseResult.flowTerminated || ConditionResolver.checkConstCondition(types, symTable, ifNode.expr) == symTable.trueType) { this.uninitializedVars = ifResult.uninitializedVars; return; } this.uninitializedVars = mergeUninitializedVars(ifResult.uninitializedVars, elseResult.uninitializedVars); } @Override public void visit(BLangMatchStatement matchStatement) { analyzeNode(matchStatement.expr, env); if (matchStatement.onFailClause != null) { analyzeNode(matchStatement.onFailClause, env); } Map<BSymbol, InitStatus> uninitVars = new HashMap<>(); BranchResult lastPatternResult = null; for (int i = 0; i < matchStatement.getMatchClauses().size(); i++) { BLangMatchClause matchClause = matchStatement.getMatchClauses().get(i); if (isLastPatternContainsIn(matchClause)) { lastPatternResult = analyzeBranch(matchClause, env); } else { BranchResult result = analyzeBranch(matchClause, env); if (result.flowTerminated) { continue; } uninitVars = mergeUninitializedVars(uninitVars, result.uninitializedVars); } } if (lastPatternResult != null) { uninitVars = mergeUninitializedVars(uninitVars, lastPatternResult.uninitializedVars); this.uninitializedVars = uninitVars; return; } uninitVars = mergeUninitializedVars(new HashMap<>(), this.uninitializedVars); this.uninitializedVars = uninitVars; } @Override public void visit(BLangMatchClause matchClause) { Location pos = matchClause.pos; for (BVarSymbol symbol : matchClause.declaredVars.values()) { if (!isWildCardBindingPattern(symbol)) { this.unusedLocalVariables.put(symbol, pos); } } analyzeNode(matchClause.matchGuard, env); 
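// Every non-wildcard variable bound by this clause was registered above as potentially unused;
// visit(BLangSimpleVarRef) removes a symbol on its first reference, so only bindings that are
// never referenced survive until emitUnusedVariableWarnings runs. A sketch of source this is
// expected to flag (assumed Ballerina syntax):
//   match v { var [x, y] => { io:println(x); } }  // 'y' -> unused variable warning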
analyzeNode(matchClause.blockStmt, env); } @Override public void visit(BLangMatchGuard matchGuard) { analyzeNode(matchGuard.expr, env); } private boolean isLastPatternContainsIn(BLangMatchClause matchClause) { for (BLangMatchPattern pattern : matchClause.matchPatterns) { if (pattern.isLastPattern) { return true; } } return false; } @Override public void visit(BLangMatch match) { analyzeNode(match.expr, env); if (match.onFailClause != null) { analyzeNode(match.onFailClause, env); } Map<BSymbol, InitStatus> uninitVars = new HashMap<>(); BranchResult lastPatternResult = null; for (BLangMatch.BLangMatchBindingPatternClause patternClause : match.patternClauses) { if (patternClause.isLastPattern) { lastPatternResult = analyzeBranch(patternClause, env); } else { BranchResult result = analyzeBranch(patternClause, env); if (result.flowTerminated) { continue; } uninitVars = mergeUninitializedVars(uninitVars, result.uninitializedVars); } } if (lastPatternResult != null) { uninitVars = mergeUninitializedVars(uninitVars, lastPatternResult.uninitializedVars); this.uninitializedVars = uninitVars; return; } uninitVars = mergeUninitializedVars(new HashMap<>(), this.uninitializedVars); this.uninitializedVars = uninitVars; } @Override public void visit(BLangForeach foreach) { BLangExpression collection = foreach.collection; if (isNotRangeExpr(collection)) { populateUnusedVariableMapForMembers(this.unusedLocalVariables, (BLangVariable) foreach.variableDefinitionNode.getVariable()); } analyzeNode(collection, env); analyzeNode(foreach.body, env); if (foreach.onFailClause != null) { analyzeNode(foreach.onFailClause, env); } } @Override public void visit(BLangQueryAction queryAction) { for (BLangNode clause : queryAction.getQueryClauses()) { analyzeNode(clause, env); } } @Override public void visit(BLangWhile whileNode) { Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars; analyzeNode(whileNode.expr, env); BranchResult whileResult = analyzeBranch(whileNode.body, env); if (whileNode.onFailClause != null) { analyzeNode(whileNode.onFailClause, env); } BType constCondition = ConditionResolver.checkConstCondition(types, symTable, whileNode.expr); if (constCondition == symTable.falseType) { this.uninitializedVars = prevUninitializedVars; return; } if (whileResult.flowTerminated || constCondition == symTable.trueType) { this.uninitializedVars = whileResult.uninitializedVars; return; } this.uninitializedVars = mergeUninitializedVars(this.uninitializedVars, whileResult.uninitializedVars); } @Override public void visit(BLangDo doNode) { analyzeNode(doNode.body, env); if (doNode.onFailClause != null) { analyzeNode(doNode.onFailClause, env); } } public void visit(BLangFail failNode) { analyzeNode(failNode.expr, env); } @Override public void visit(BLangLock lockNode) { analyzeNode(lockNode.body, this.env); if (lockNode.onFailClause != null) { analyzeNode(lockNode.onFailClause, env); } } @Override public void visit(BLangTransaction transactionNode) { analyzeNode(transactionNode.transactionBody, env); if (transactionNode.onFailClause != null) { analyzeNode(transactionNode.onFailClause, env); } Name transactionPkgName = names.fromString(Names.DOT.value + Names.TRANSACTION_PACKAGE.value); Name compUnitName = names.fromString(transactionNode.pos.lineRange().filePath()); this.symResolver.resolvePrefixSymbol(env, transactionPkgName, compUnitName); } @Override public void visit(BLangTransactionalExpr transactionalExpr) { } @Override public void visit(BLangCommitExpr commitExpr) { } @Override public void 
visit(BLangRollback rollbackNode) { analyzeNode(rollbackNode.expr, env); } @Override public void visit(BLangTupleDestructure stmt) { analyzeNode(stmt.expr, env); checkAssignment(stmt.varRef); } @Override public void visit(BLangForkJoin forkJoin) { /* ignore */ } @Override public void visit(BLangWorkerSend workerSendNode) { analyzeNode(workerSendNode.expr, env); } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { analyzeNode(syncSendExpr.expr, env); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { } @Override public void visit(BLangLiteral literalExpr) { } @Override public void visit(BLangConstRef constRef) { } @Override public void visit(BLangListConstructorExpr listConstructorExpr) { listConstructorExpr.exprs.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { tableConstructorExpr.recordLiteralList.forEach(expr -> analyzeNode(expr, env)); checkForDuplicateKeys(tableConstructorExpr); } private void checkForDuplicateKeys(BLangTableConstructorExpr tableConstructorExpr) { Set<Integer> keyHashSet = new HashSet<>(); List<String> fieldNames = getFieldNames(tableConstructorExpr); HashMap<Integer, List<BLangExpression>> keyValues = new HashMap<>(); if (!fieldNames.isEmpty()) { for (BLangRecordLiteral literal : tableConstructorExpr.recordLiteralList) { List<BLangExpression> keyArray = createKeyArray(literal, fieldNames); int hashInt = generateHash(keyArray); if (!keyHashSet.add(hashInt) && checkForKeyEquality(keyValues, keyArray, hashInt)) { String fields = String.join(", ", fieldNames); String values = keyArray.stream().map(Object::toString).collect(Collectors.joining(", ")); dlog.error(literal.pos, DiagnosticErrorCode.DUPLICATE_KEY_IN_TABLE_LITERAL, fields, values); } keyValues.put(hashInt, keyArray); } } } private boolean checkForKeyEquality(HashMap<Integer, List<BLangExpression>> keyValues, List<BLangExpression> keyArray, int hash) { List<BLangExpression> existingExpList = keyValues.get(hash); if (existingExpList.size() == keyArray.size()) { boolean isEqual = true; for (int i = 0; i < keyArray.size(); i++) { isEqual = isEqual && equality(keyArray.get(i), existingExpList.get(i)); } return isEqual; } return false; } private int generateHash(List<BLangExpression> keyArray) { int result = 0; for (BLangExpression expr : keyArray) { result = 31 * result + hash(expr); } return result; } public boolean equality(Node nodeA, Node nodeB) { if (nodeA == null || nodeB == null) { return nodeA == nodeB; } if (nodeA.getKind() != nodeB.getKind()) { return false; } boolean isEqual = true; switch (nodeA.getKind()) { case RECORD_LITERAL_EXPR: BLangRecordLiteral recordLiteralA = (BLangRecordLiteral) nodeA; BLangRecordLiteral recordLiteralB = (BLangRecordLiteral) nodeB; for (int i = 0; isEqual && i < recordLiteralA.fields.size(); i++) { RecordLiteralNode.RecordField exprA = recordLiteralA.fields.get(i); RecordLiteralNode.RecordField exprB = recordLiteralB.fields.get(i); isEqual = equality(exprA, exprB); } return isEqual; case RECORD_LITERAL_KEY_VALUE: BLangRecordLiteral.BLangRecordKeyValueField fieldA = (BLangRecordLiteral.BLangRecordKeyValueField) nodeA; BLangRecordLiteral.BLangRecordKeyValueField fieldB = (BLangRecordLiteral.BLangRecordKeyValueField) nodeB; return equality(fieldA.valueExpr, fieldB.valueExpr); case LITERAL: case NUMERIC_LITERAL: BLangLiteral literalA = (BLangLiteral) nodeA; BLangLiteral literalB = (BLangLiteral) nodeB; return Objects.equals(literalA.value, literalB.value); case 
XML_TEXT_LITERAL: BLangXMLTextLiteral textLiteralA = (BLangXMLTextLiteral) nodeA; BLangXMLTextLiteral textLiteralB = (BLangXMLTextLiteral) nodeB; isEqual = equality(textLiteralA.concatExpr, textLiteralB.concatExpr); for (int i = 0; isEqual && i < textLiteralA.textFragments.size(); i++) { BLangExpression exprA = textLiteralA.textFragments.get(i); BLangExpression exprB = textLiteralB.textFragments.get(i); isEqual = equality(exprA, exprB); } return isEqual; case XML_ATTRIBUTE: BLangXMLAttribute attributeA = (BLangXMLAttribute) nodeA; BLangXMLAttribute attributeB = (BLangXMLAttribute) nodeB; return equality(attributeA.name, attributeB.name) && equality(attributeA.value, attributeB.value); case XML_QNAME: BLangXMLQName xmlqNameA = (BLangXMLQName) nodeA; BLangXMLQName xmlqNameB = (BLangXMLQName) nodeB; return equality(xmlqNameA.localname, xmlqNameB.localname) && equality(xmlqNameA.prefix, xmlqNameB.prefix); case XML_ELEMENT_LITERAL: BLangXMLElementLiteral eleLiteralA = (BLangXMLElementLiteral) nodeA; BLangXMLElementLiteral eleLiteralB = (BLangXMLElementLiteral) nodeB; isEqual = equality(eleLiteralA.startTagName, eleLiteralB.startTagName) && equality(eleLiteralA.endTagName, eleLiteralB.endTagName); for (int i = 0; isEqual && i < eleLiteralA.attributes.size(); i++) { BLangExpression exprA = eleLiteralA.attributes.get(i); BLangExpression exprB = eleLiteralB.attributes.get(i); isEqual = equality(exprA, exprB); } for (int i = 0; isEqual && i < eleLiteralA.children.size(); i++) { BLangExpression exprA = eleLiteralA.children.get(i); BLangExpression exprB = eleLiteralB.children.get(i); isEqual = equality(exprA, exprB); } return isEqual; case XML_COMMENT_LITERAL: BLangXMLCommentLiteral commentliteralA = (BLangXMLCommentLiteral) nodeA; BLangXMLCommentLiteral commentliteralB = (BLangXMLCommentLiteral) nodeB; isEqual = equality(commentliteralA.concatExpr, commentliteralB.concatExpr); for (int i = 0; isEqual && i < commentliteralA.textFragments.size(); i++) { BLangExpression exprA = commentliteralA.textFragments.get(i); BLangExpression exprB = commentliteralB.textFragments.get(i); isEqual = equality(exprA, exprB); } return isEqual; case XML_QUOTED_STRING: BLangXMLQuotedString quotedLiteralA = (BLangXMLQuotedString) nodeA; BLangXMLQuotedString quotedLiteralB = (BLangXMLQuotedString) nodeB; isEqual = equality(quotedLiteralA.concatExpr, quotedLiteralB.concatExpr); for (int i = 0; isEqual && i < quotedLiteralA.textFragments.size(); i++) { BLangExpression exprA = quotedLiteralA.textFragments.get(i); BLangExpression exprB = quotedLiteralB.textFragments.get(i); isEqual = equality(exprA, exprB); } return isEqual; case XMLNS: BLangXMLNS xmlnsA = (BLangXMLNS) nodeA; BLangXMLNS xmlnsB = (BLangXMLNS) nodeB; return equality(xmlnsA.prefix, xmlnsB.prefix) && equality(xmlnsA.namespaceURI, xmlnsB.namespaceURI); case XML_PI_LITERAL: BLangXMLProcInsLiteral insLiteralA = (BLangXMLProcInsLiteral) nodeA; BLangXMLProcInsLiteral insLiteralB = (BLangXMLProcInsLiteral) nodeB; isEqual = equality(insLiteralA.target, insLiteralB.target) && equality(insLiteralA.dataConcatExpr, insLiteralB.dataConcatExpr); for (int i = 0; isEqual && i < insLiteralA.dataFragments.size(); i++) { BLangExpression exprA = insLiteralA.dataFragments.get(i); BLangExpression exprB = insLiteralB.dataFragments.get(i); isEqual = equality(exprA, exprB); } return isEqual; case IDENTIFIER: BLangIdentifier identifierA = (BLangIdentifier) nodeA; BLangIdentifier identifierB = (BLangIdentifier) nodeB; return identifierA.value.equals(identifierB.value); case 
SIMPLE_VARIABLE_REF: BLangSimpleVarRef simpleVarRefA = (BLangSimpleVarRef) nodeA; BLangSimpleVarRef simpleVarRefB = (BLangSimpleVarRef) nodeB; return simpleVarRefA.variableName.equals(simpleVarRefB.variableName); case STRING_TEMPLATE_LITERAL: BLangStringTemplateLiteral stringTemplateLiteralA = (BLangStringTemplateLiteral) nodeA; BLangStringTemplateLiteral stringTemplateLiteralB = (BLangStringTemplateLiteral) nodeB; for (int i = 0; isEqual && i < stringTemplateLiteralA.exprs.size(); i++) { BLangExpression exprA = stringTemplateLiteralA.exprs.get(i); BLangExpression exprB = stringTemplateLiteralB.exprs.get(i); isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB); } return isEqual; case LIST_CONSTRUCTOR_EXPR: BLangListConstructorExpr listConstructorExprA = (BLangListConstructorExpr) nodeA; BLangListConstructorExpr listConstructorExprB = (BLangListConstructorExpr) nodeB; for (int i = 0; isEqual && i < listConstructorExprA.exprs.size(); i++) { BLangExpression exprA = listConstructorExprA.exprs.get(i); BLangExpression exprB = listConstructorExprB.exprs.get(i); isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB); } return isEqual; case TABLE_CONSTRUCTOR_EXPR: BLangTableConstructorExpr tableConstructorExprA = (BLangTableConstructorExpr) nodeA; BLangTableConstructorExpr tableConstructorExprB = (BLangTableConstructorExpr) nodeB; for (int i = 0; isEqual && i < tableConstructorExprA.recordLiteralList.size(); i++) { BLangExpression exprA = tableConstructorExprA.recordLiteralList.get(i); BLangExpression exprB = tableConstructorExprB.recordLiteralList.get(i); isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB); } return isEqual; case TYPE_CONVERSION_EXPR: BLangTypeConversionExpr typeConversionExprA = (BLangTypeConversionExpr) nodeA; BLangTypeConversionExpr typeConversionExprB = (BLangTypeConversionExpr) nodeB; return equality(typeConversionExprA.expr, typeConversionExprB.expr); case BINARY_EXPR: BLangBinaryExpr binaryExprA = (BLangBinaryExpr) nodeA; BLangBinaryExpr binaryExprB = (BLangBinaryExpr) nodeB; return equality(binaryExprA.lhsExpr, binaryExprB.lhsExpr) && equality(binaryExprA.rhsExpr, binaryExprB.rhsExpr); case UNARY_EXPR: BLangUnaryExpr unaryExprA = (BLangUnaryExpr) nodeA; BLangUnaryExpr unaryExprB = (BLangUnaryExpr) nodeB; return equality(unaryExprA.expr, unaryExprB.expr); case TYPE_TEST_EXPR: BLangTypeTestExpr typeTestExprA = (BLangTypeTestExpr) nodeA; BLangTypeTestExpr typeTestExprB = (BLangTypeTestExpr) nodeB; return equality(typeTestExprA.expr, typeTestExprB.expr); case TERNARY_EXPR: BLangTernaryExpr ternaryExprA = (BLangTernaryExpr) nodeA; BLangTernaryExpr ternaryExprB = (BLangTernaryExpr) nodeB; return equality(ternaryExprA.expr, ternaryExprB.expr) && equality(ternaryExprA.thenExpr, ternaryExprB.thenExpr) && equality(ternaryExprA.elseExpr, ternaryExprB.elseExpr); case GROUP_EXPR: BLangGroupExpr groupExprA = (BLangGroupExpr) nodeA; BLangGroupExpr groupExprB = (BLangGroupExpr) nodeB; return equality(groupExprA.expression, groupExprB.expression); default: return false; } } public Integer hash(Node node) { int result = 0; if (node == null) { return result; } if (node.getKind() == NodeKind.RECORD_LITERAL_EXPR) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) node; for (RecordLiteralNode.RecordField entry : recordLiteral.fields) { result = 31 * result + hash(entry); } } else if (node.getKind() == NodeKind.RECORD_LITERAL_KEY_VALUE) { BLangRecordLiteral.BLangRecordKeyValueField 
field = (BLangRecordLiteral.BLangRecordKeyValueField) node; result = 31 * result + hash(field.key.expr) + hash(field.valueExpr); } else if (node.getKind() == NodeKind.ARRAY_LITERAL_EXPR) { BLangListConstructorExpr.BLangArrayLiteral arrayLiteral = (BLangListConstructorExpr.BLangArrayLiteral) node; for (BLangExpression expr : arrayLiteral.exprs) { result = 31 * result + hash(expr); } } else if (node.getKind() == NodeKind.LITERAL || node.getKind() == NodeKind.NUMERIC_LITERAL) { BLangLiteral literal = (BLangLiteral) node; result = Objects.hash(literal.value); } else if (node.getKind() == NodeKind.XML_TEXT_LITERAL) { BLangXMLTextLiteral literal = (BLangXMLTextLiteral) node; result = 31 * result + hash(literal.concatExpr); for (BLangExpression expr : literal.textFragments) { result = result * 31 + hash(expr); } } else if (node.getKind() == NodeKind.XML_ATTRIBUTE) { BLangXMLAttribute attribute = (BLangXMLAttribute) node; result = 31 * result + hash(attribute.name) + hash(attribute.value); } else if (node.getKind() == NodeKind.XML_QNAME) { BLangXMLQName xmlqName = (BLangXMLQName) node; result = 31 * result + hash(xmlqName.localname) + hash(xmlqName.prefix); } else if (node.getKind() == NodeKind.XML_COMMENT_LITERAL) { BLangXMLCommentLiteral literal = (BLangXMLCommentLiteral) node; result = 31 * result + hash(literal.concatExpr); for (BLangExpression expr : literal.textFragments) { result = result * 31 + hash(expr); } } else if (node.getKind() == NodeKind.XML_ELEMENT_LITERAL) { BLangXMLElementLiteral literal = (BLangXMLElementLiteral) node; result = 31 * result + hash(literal.startTagName) + hash(literal.endTagName); for (BLangExpression expr : literal.attributes) { result = 31 * result + hash(expr); } for (BLangExpression expr : literal.children) { result = 31 * result + hash(expr); } } else if (node.getKind() == NodeKind.XML_QUOTED_STRING) { BLangXMLQuotedString literal = (BLangXMLQuotedString) node; result = 31 * result + hash(literal.concatExpr); for (BLangExpression expr : literal.textFragments) { result = result * 31 + hash(expr); } } else if (node.getKind() == NodeKind.XMLNS) { BLangXMLNS xmlns = (BLangXMLNS) node; result = result * 31 + hash(xmlns.prefix) + hash(xmlns.namespaceURI); } else if (node.getKind() == NodeKind.XML_PI_LITERAL) { BLangXMLProcInsLiteral literal = (BLangXMLProcInsLiteral) node; result = 31 * result + hash(literal.target) + hash(literal.dataConcatExpr); for (BLangExpression expr : literal.dataFragments) { result = result * 31 + hash(expr); } } else if (node.getKind() == NodeKind.IDENTIFIER) { BLangIdentifier identifier = (BLangIdentifier) node; result = identifier.value.hashCode(); } else if (node.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) node; result = simpleVarRef.variableName.hashCode(); } else if (node.getKind() == NodeKind.STRING_TEMPLATE_LITERAL) { BLangStringTemplateLiteral stringTemplateLiteral = (BLangStringTemplateLiteral) node; for (BLangExpression expr : stringTemplateLiteral.exprs) { result = result * 31 + getTypeHash(stringTemplateLiteral.getBType()) + hash(expr); } } else if (node.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) { BLangListConstructorExpr listConstructorExpr = (BLangListConstructorExpr) node; for (BLangExpression expr : listConstructorExpr.exprs) { result = result * 31 + getTypeHash(listConstructorExpr.getBType()) + hash(expr); } } else if (node.getKind() == NodeKind.TABLE_CONSTRUCTOR_EXPR) { BLangTableConstructorExpr tableConstructorExpr = (BLangTableConstructorExpr) node; for 
(BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { result = result * 31 + getTypeHash(tableConstructorExpr.getBType()) + hash(recordLiteral); } } else if (node.getKind() == NodeKind.TYPE_CONVERSION_EXPR) { BLangTypeConversionExpr typeConversionExpr = (BLangTypeConversionExpr) node; result = 31 * result + hash(typeConversionExpr.expr); } else if (node.getKind() == NodeKind.BINARY_EXPR) { BLangBinaryExpr binaryExpr = (BLangBinaryExpr) node; result = 31 * result + hash(binaryExpr.lhsExpr) + hash(binaryExpr.rhsExpr); } else if (node.getKind() == NodeKind.UNARY_EXPR) { BLangUnaryExpr unaryExpr = (BLangUnaryExpr) node; result = 31 * result + hash(unaryExpr.expr); } else if (node.getKind() == NodeKind.TYPE_TEST_EXPR) { BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) node; result = 31 * result + hash(typeTestExpr.expr); } else if (node.getKind() == NodeKind.TERNARY_EXPR) { BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) node; result = 31 * result + hash(ternaryExpr.expr) + hash(ternaryExpr.thenExpr) + hash(ternaryExpr.elseExpr); } else if (node.getKind() == NodeKind.GROUP_EXPR) { BLangGroupExpr groupExpr = (BLangGroupExpr) node; result = 31 * result + hash(groupExpr.expression); } else { dlog.error(((BLangExpression) node).pos, DiagnosticErrorCode.EXPRESSION_IS_NOT_A_CONSTANT_EXPRESSION); } return result; } private Integer getTypeHash(BType type) { return Objects.hash(type.tag, type.name); } private List<BLangExpression> createKeyArray(BLangRecordLiteral literal, List<String> fieldNames) { Map<String, BLangExpression> fieldMap = new HashMap<>(); for (RecordLiteralNode.RecordField recordField : literal.fields) { if (recordField.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyVal = (BLangRecordLiteral.BLangRecordKeyValueField) recordField; fieldMap.put(keyVal.key.expr.toString(), keyVal.valueExpr); } else if (recordField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField recordVarNameField = (BLangRecordLiteral.BLangRecordVarNameField) recordField; fieldMap.put(recordVarNameField.getVariableName().value, recordVarNameField); } } return fieldNames.stream().map(fieldMap::get).collect(Collectors.toList()); } private List<String> getFieldNames(BLangTableConstructorExpr constructorExpr) { List<String> fieldNames = null; if (Types.getReferredType(constructorExpr.getBType()).tag == TypeTags.TABLE) { fieldNames = ((BTableType) Types.getReferredType(constructorExpr.getBType())).fieldNameList; if (fieldNames != null) { return fieldNames; } } if (constructorExpr.tableKeySpecifier != null && !constructorExpr.tableKeySpecifier.fieldNameIdentifierList.isEmpty()) { BLangTableKeySpecifier tableKeySpecifier = constructorExpr.tableKeySpecifier; return tableKeySpecifier.fieldNameIdentifierList.stream().map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList()); } else { return new ArrayList<>(); } } @Override public void visit(BLangRecordLiteral recordLiteral) { for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; if (keyValuePair.key.computedKey) { analyzeNode(keyValuePair.key.expr, env); } analyzeNode(keyValuePair.valueExpr, env); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { analyzeNode((BLangRecordLiteral.BLangRecordVarNameField) field, env); } else { analyzeNode(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, 
env); } } } @Override public void visit(BLangSimpleVarRef varRefExpr) { this.unusedErrorVarsDeclaredWithVar.remove(varRefExpr.symbol); if (isNotVariableReferenceLVExpr(varRefExpr)) { this.unusedLocalVariables.remove(varRefExpr.symbol); } checkVarRef(varRefExpr.symbol, varRefExpr.pos); } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { if (!fieldAccessExpr.isLValue && isObjectMemberAccessWithSelf(fieldAccessExpr)) { checkVarRef(fieldAccessExpr.symbol, fieldAccessExpr.pos); } analyzeNode(fieldAccessExpr.expr, env); } @Override public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) { if (!nsPrefixedFieldBasedAccess.isLValue && isObjectMemberAccessWithSelf(nsPrefixedFieldBasedAccess)) { checkVarRef(nsPrefixedFieldBasedAccess.symbol, nsPrefixedFieldBasedAccess.pos); } analyzeNode(nsPrefixedFieldBasedAccess.expr, env); } @Override public void visit(BLangIndexBasedAccess indexAccessExpr) { analyzeNode(indexAccessExpr.expr, env); analyzeNode(indexAccessExpr.indexExpr, env); } @Override public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) { tableMultiKeyExpr.multiKeyIndexExprs.forEach(value -> analyzeNode(value, env)); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { analyzeNode(xmlElementAccess.expr, env); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { analyzeNode(xmlNavigation.expr, env); if (xmlNavigation.childIndex != null) { analyzeNode(xmlNavigation.childIndex, env); } } @Override public void visit(BLangInvocation invocationExpr) { analyzeNode(invocationExpr.expr, env); BSymbol symbol = invocationExpr.symbol; this.unusedLocalVariables.remove(symbol); if (!isGlobalVarsInitialized(invocationExpr.pos)) { return; } if (!isFieldsInitializedForSelfArgument(invocationExpr)) { return; } if (!isFieldsInitializedForSelfInvocation(invocationExpr.requiredArgs, invocationExpr.pos)) { return; } if (!isFieldsInitializedForSelfInvocation(invocationExpr.restArgs, invocationExpr.pos)) { return; } checkVarRef(symbol, invocationExpr.pos); invocationExpr.requiredArgs.forEach(expr -> analyzeNode(expr, env)); invocationExpr.restArgs.forEach(expr -> analyzeNode(expr, env)); BSymbol owner = this.env.scope.owner; if (owner.kind == SymbolKind.FUNCTION) { BInvokableSymbol invokableOwnerSymbol = (BInvokableSymbol) owner; Name name = names.fromIdNode(invocationExpr.name); BSymbol dependsOnFunctionSym = symResolver.lookupSymbolInMainSpace(this.env, name); if (symTable.notFoundSymbol != dependsOnFunctionSym) { addDependency(invokableOwnerSymbol, dependsOnFunctionSym); } } else if (symbol != null && symbol.kind == SymbolKind.FUNCTION) { BInvokableSymbol invokableProviderSymbol = (BInvokableSymbol) symbol; BSymbol curDependent = this.currDependentSymbolDeque.peek(); if (curDependent != null && isGlobalVarSymbol(curDependent)) { addDependency(curDependent, invokableProviderSymbol); } } } @Override public void visit(BLangErrorConstructorExpr errorConstructorExpr) { for (BLangExpression positionalArg : errorConstructorExpr.positionalArgs) { analyzeNode(positionalArg, env); } for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) { analyzeNode(namedArg, env); } } @Override public void visit(BLangActionInvocation actionInvocation) { this.visit((BLangInvocation) actionInvocation); } @Override public void visit(BLangQueryExpr queryExpr) { for (BLangNode clause : queryExpr.getQueryClauses()) { analyzeNode(clause, env); } } @Override public void visit(BLangFromClause fromClause) { BLangExpression 
collection = fromClause.collection; if (isNotRangeExpr(collection)) { populateUnusedVariableMapForMembers(this.unusedLocalVariables, (BLangVariable) fromClause.variableDefinitionNode.getVariable()); } analyzeNode(collection, env); } @Override public void visit(BLangJoinClause joinClause) { populateUnusedVariableMapForMembers(this.unusedLocalVariables, (BLangVariable) joinClause.variableDefinitionNode.getVariable()); analyzeNode(joinClause.collection, env); if (joinClause.onClause != null) { analyzeNode((BLangNode) joinClause.onClause, env); } } @Override public void visit(BLangLetClause letClause) { for (BLangLetVariable letVariable : letClause.letVarDeclarations) { analyzeNode((BLangNode) letVariable.definitionNode, env); } } @Override public void visit(BLangWhereClause whereClause) { analyzeNode(whereClause.expression, env); } @Override public void visit(BLangOnClause onClause) { analyzeNode(onClause.lhsExpr, env); analyzeNode(onClause.rhsExpr, env); } @Override public void visit(BLangOrderKey orderKeyClause) { analyzeNode(orderKeyClause.expression, env); } @Override public void visit(BLangOrderByClause orderByClause) { orderByClause.orderByKeyList.forEach(value -> analyzeNode((BLangNode) value, env)); } @Override public void visit(BLangSelectClause selectClause) { analyzeNode(selectClause.expression, env); } @Override public void visit(BLangOnConflictClause onConflictClause) { analyzeNode(onConflictClause.expression, env); } @Override public void visit(BLangLimitClause limitClause) { analyzeNode(limitClause.expression, env); } @Override public void visit(BLangDoClause doClause) { analyzeNode(doClause.body, env); } @Override public void visit(BLangOnFailClause onFailClause) { analyzeNode((BLangVariable) onFailClause.variableDefinitionNode.getVariable(), env); analyzeNode(onFailClause.body, env); } private boolean isFieldsInitializedForSelfArgument(BLangInvocation invocationExpr) { if (invocationExpr.expr == null || !isSelfKeyWordExpr(invocationExpr.expr)) { return true; } StringBuilder uninitializedFields = getUninitializedFieldsForSelfKeyword((BObjectType) ((BLangSimpleVarRef) invocationExpr.expr).symbol.type); if (uninitializedFields.length() != 0) { this.dlog.error(invocationExpr.pos, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_FIELDS, uninitializedFields.toString()); return false; } return true; } private boolean isFieldsInitializedForSelfInvocation(List<BLangExpression> argExpressions, Location location) { for (BLangExpression expr : argExpressions) { if (isSelfKeyWordExpr(expr)) { StringBuilder uninitializedFields = getUninitializedFieldsForSelfKeyword((BObjectType) ((BLangSimpleVarRef) expr).symbol.type); if (uninitializedFields.length() != 0) { this.dlog.error(location, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_FIELDS, uninitializedFields.toString()); return false; } } } return true; } private boolean isGlobalVarsInitialized(Location pos) { if (env.isModuleInit) { boolean isFirstUninitializedField = true; StringBuilder uninitializedFields = new StringBuilder(); for (BSymbol symbol : this.uninitializedVars.keySet()) { if (isFirstUninitializedField) { uninitializedFields = new StringBuilder(symbol.getName().value); isFirstUninitializedField = false; } else { uninitializedFields.append(", ").append(symbol.getName().value); } } if (uninitializedFields.length() != 0) { this.dlog.error(pos, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_VARIABLES, uninitializedFields.toString()); return false; } } return true; } private boolean isSelfKeyWordExpr(BLangExpression expr) { return 
expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && Names.SELF.value.equals(((BLangSimpleVarRef) expr).getVariableName().getValue()); } private StringBuilder getUninitializedFieldsForSelfKeyword(BObjectType objType) { boolean isFirstUninitializedField = true; StringBuilder uninitializedFields = new StringBuilder(); for (BField field : objType.fields.values()) { if (this.uninitializedVars.containsKey(field.symbol)) { if (isFirstUninitializedField) { uninitializedFields = new StringBuilder(field.symbol.getName().value); isFirstUninitializedField = false; } else { uninitializedFields.append(", ").append(field.symbol.getName().value); } } } return uninitializedFields; } private boolean isGlobalVarSymbol(BSymbol symbol) { if (symbol == null) { return false; } else if (symbol.owner == null) { return false; } else if (symbol.owner.tag != SymTag.PACKAGE) { return false; } return isVariableOrConstant(symbol); } private boolean isVariableOrConstant(BSymbol symbol) { if (symbol == null) { return false; } return ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) || ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT); } /** * Registers a dependent symbol against its provider symbol. * E.g., for global {@code int a = b}, {@code a} depends on {@code b}. * For {@code function foo() { return b + 1; }}, where {@code b} is a global var, {@code foo} depends on {@code b}. * * @param dependent the dependent symbol. * @param provider the symbol which provides a value. */ private void addDependency(BSymbol dependent, BSymbol provider) { if (provider == null || dependent == null || dependent.pkgID != provider.pkgID) { return; } Set<BSymbol> providers = globalNodeDependsOn.computeIfAbsent(dependent, s -> new LinkedHashSet<>()); providers.add(provider); addFunctionToGlobalVarDependency(dependent, provider); } private void addFunctionToGlobalVarDependency(BSymbol dependent, BSymbol provider) { if (dependent.kind != SymbolKind.FUNCTION && !isGlobalVarSymbol(dependent)) { return; } if (isVariableOrConstant(provider) && !isGlobalVarSymbol(provider)) { return; } Set<BSymbol> providers = this.functionToDependency.computeIfAbsent(dependent, s -> new HashSet<>()); providers.add(provider); } @Override public void visit(BLangTypeInit typeInitExpr) { typeInitExpr.argsExpr.forEach(argExpr -> analyzeNode(argExpr, env)); if (this.currDependentSymbolDeque.peek() != null) { addDependency(this.currDependentSymbolDeque.peek(), Types.getReferredType(typeInitExpr.getBType()).tsymbol); } } @Override public void visit(BLangTernaryExpr ternaryExpr) { analyzeNode(ternaryExpr.expr, env); analyzeNode(ternaryExpr.thenExpr, env); analyzeNode(ternaryExpr.elseExpr, env); } @Override public void visit(BLangWaitExpr waitExpr) { analyzeNode(waitExpr.getExpression(), env); } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { } @Override public void visit(BLangWaitForAllExpr waitForAllExpr) { waitForAllExpr.keyValuePairs.forEach(keyValue -> { BLangExpression expr = keyValue.valueExpr != null ? 
keyValue.valueExpr : keyValue.keyExpr; analyzeNode(expr, env); }); } @Override public void visit(BLangBinaryExpr binaryExpr) { analyzeNode(binaryExpr.lhsExpr, env); analyzeNode(binaryExpr.rhsExpr, env); } @Override public void visit(BLangElvisExpr elvisExpr) { analyzeNode(elvisExpr.lhsExpr, env); analyzeNode(elvisExpr.rhsExpr, env); } @Override public void visit(BLangGroupExpr groupExpr) { analyzeNode(groupExpr.expression, env); } @Override public void visit(BLangUnaryExpr unaryExpr) { analyzeNode(unaryExpr.expr, env); } @Override public void visit(BLangTypeConversionExpr conversionExpr) { analyzeNode(conversionExpr.expr, env); } @Override public void visit(BLangXMLAttribute xmlAttribute) { analyzeNode(xmlAttribute.value, env); } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { xmlElementLiteral.children.forEach(expr -> analyzeNode(expr, env)); xmlElementLiteral.attributes.forEach(expr -> analyzeNode(expr, env)); xmlElementLiteral.inlineNamespaces.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { xmlTextLiteral.textFragments.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { xmlCommentLiteral.textFragments.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { xmlProcInsLiteral.dataFragments.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { xmlQuotedString.textFragments.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { stringTemplateLiteral.exprs.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { for (BLangLiteral string : rawTemplateLiteral.strings) { analyzeNode(string, env); } for (BLangExpression expr : rawTemplateLiteral.insertions) { analyzeNode(expr, env); } } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { BLangFunction funcNode = bLangLambdaFunction.function; SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); visitFunctionBodyWithDynamicEnv(funcNode, funcEnv); } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { analyzeNode(bLangVarArgsExpression.expr, env); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { analyzeNode(bLangNamedArgsExpression.expr, env); } @Override public void visit(BLangIsAssignableExpr assignableExpr) { } @Override public void visit(BLangMatchExpression matchExpression) { analyzeNode(matchExpression.expr, env); matchExpression.patternClauses.forEach(pattern -> analyzeNode(pattern, env)); } @Override public void visit(BLangMatchExprPatternClause matchExprPatternClause) { analyzeNode(matchExprPatternClause.expr, env); } @Override public void visit(BLangCheckedExpr checkedExpr) { analyzeNode(checkedExpr.expr, env); } @Override public void visit(BLangCheckPanickedExpr checkPanicExpr) { analyzeNode(checkPanicExpr.expr, env); } @Override public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { bLangXMLSequenceLiteral.xmlItems.forEach(xml -> analyzeNode(xml, env)); } @Override public void visit(BLangExpressionStmt exprStmtNode) { analyzeNode(exprStmtNode.expr, env); } @Override public void visit(BLangAnnotation annotationNode) { } @Override public void visit(BLangAnnotationAttachment annAttachmentNode) { } @Override public void 
visit(BLangRetry retryNode) { analyzeNode(retryNode.retryBody, env); if (retryNode.onFailClause != null) { analyzeNode(retryNode.onFailClause, env); } } @Override public void visit(BLangRetryTransaction retryTransaction) { analyzeNode(retryTransaction.transaction, env); } @Override public void visit(BLangContinue continueNode) { terminateFlow(); } @Override public void visit(BLangTypedescExpr accessExpr) { } @Override public void visit(BLangXMLQName xmlQName) { } @Override public void visit(BLangArrowFunction bLangArrowFunction) { for (ClosureVarSymbol closureVarSymbol : bLangArrowFunction.closureVarSymbols) { BSymbol symbol = closureVarSymbol.bSymbol; if (this.uninitializedVars.containsKey(symbol)) { this.dlog.error(closureVarSymbol.diagnosticLocation, DiagnosticErrorCode.USAGE_OF_UNINITIALIZED_VARIABLE, symbol); } this.unusedErrorVarsDeclaredWithVar.remove(symbol); this.unusedLocalVariables.remove(symbol); } } @Override public void visit(BLangValueType valueType) { } @Override public void visit(BLangConstant constant) { boolean validVariable = constant.symbol != null; if (validVariable) { this.currDependentSymbolDeque.push(constant.symbol); } try { analyzeNode(constant.expr, env); } finally { if (validVariable) { this.currDependentSymbolDeque.pop(); } } } @Override public void visit(BLangArrayType arrayType) { analyzeNode(arrayType.getElementType(), env); } @Override public void visit(BLangBuiltInRefTypeNode builtInRefType) { } @Override public void visit(BLangConstrainedType constrainedType) { analyzeNode(constrainedType.constraint, env); } @Override public void visit(BLangStreamType streamType) { analyzeNode(streamType.constraint, env); analyzeNode(streamType.error, env); } @Override public void visit(BLangTableTypeNode tableType) { analyzeNode(tableType.constraint, env); if (tableType.tableKeyTypeConstraint != null) { analyzeNode(tableType.tableKeyTypeConstraint.keyType, env); } } @Override public void visit(BLangUserDefinedType userDefinedType) { if (this.currDependentSymbolDeque.isEmpty()) { return; } BType resolvedType = Types.getReferredType(userDefinedType.getBType()); if (resolvedType == symTable.semanticError) { return; } BTypeSymbol tsymbol = resolvedType.tsymbol; recordGlobalVariableReferenceRelationship(tsymbol); } @Override public void visit(BLangFunctionTypeNode functionTypeNode) { if (functionTypeNode.flagSet.contains(Flag.ANY_FUNCTION)) { return; } functionTypeNode.params.forEach(param -> analyzeNode(param.typeNode, env)); analyzeNode(functionTypeNode.returnTypeNode, env); } @Override public void visit(BLangUnionTypeNode unionTypeNode) { unionTypeNode.memberTypeNodes.forEach(typeNode -> analyzeNode(typeNode, env)); } @Override public void visit(BLangIntersectionTypeNode intersectionTypeNode) { for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) { analyzeNode(constituentTypeNode, env); } } @Override public void visit(BLangObjectTypeNode objectTypeNode) { } @Override public void visit(BLangRecordTypeNode recordTypeNode) { BTypeSymbol tsymbol = Types.getReferredType(recordTypeNode.getBType()).tsymbol; for (TypeNode type : recordTypeNode.getTypeReferences()) { BLangType bLangType = (BLangType) type; analyzeNode(bLangType, env); recordGlobalVariableReferenceRelationship( Types.getReferredType(bLangType.getBType()).tsymbol); } for (BLangSimpleVariable field : recordTypeNode.fields) { addTypeDependency(tsymbol, Types.getReferredType(field.getBType()), new HashSet<>()); analyzeNode(field, env); recordGlobalVariableReferenceRelationship(field.symbol); 
} } private void addTypeDependency(BTypeSymbol dependentTypeSymbol, BType providerType, Set<BType> unresolvedTypes) { if (unresolvedTypes.contains(providerType)) { return; } unresolvedTypes.add(providerType); switch (providerType.tag) { case TypeTags.UNION: for (BType memberType : ((BUnionType) providerType).getMemberTypes()) { BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(memberType); addTypeDependency(dependentTypeSymbol, effectiveType, unresolvedTypes); } break; case TypeTags.ARRAY: addTypeDependency(dependentTypeSymbol, types.getTypeWithEffectiveIntersectionTypes(((BArrayType) providerType).getElementType()), unresolvedTypes); break; case TypeTags.MAP: addTypeDependency(dependentTypeSymbol, types.getTypeWithEffectiveIntersectionTypes(((BMapType) providerType).getConstraint()), unresolvedTypes); break; case TypeTags.TYPEREFDESC: addTypeDependency(dependentTypeSymbol, Types.getReferredType(providerType), unresolvedTypes); break; default: addDependency(dependentTypeSymbol, providerType.tsymbol); } } @Override public void visit(BLangFiniteTypeNode finiteTypeNode) { finiteTypeNode.valueSpace.forEach(value -> analyzeNode(value, env)); } @Override public void visit(BLangTupleTypeNode tupleTypeNode) { tupleTypeNode.memberTypeNodes.forEach(type -> analyzeNode(type, env)); } @Override public void visit(BLangMarkdownDocumentationLine bLangMarkdownDocumentationLine) { } @Override public void visit(BLangMarkdownParameterDocumentation bLangDocumentationParameter) { } @Override public void visit(BLangMarkdownReturnParameterDocumentation bLangMarkdownReturnParameterDocumentation) { } @Override public void visit(BLangMarkdownDocumentation bLangMarkdownDocumentation) { } @Override public void visit(BLangTestablePackage testablePkgNode) { } @Override public void visit(BLangImportPackage importPkgNode) { } @Override public void visit(BLangIdentifier identifierNode) { } @Override public void visit(BLangPanic panicNode) { analyzeNode(panicNode.expr, env); terminateFlow(); } @Override public void visit(BLangTrapExpr trapExpr) { analyzeNode(trapExpr.expr, env); } public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { if (this.currDependentSymbolDeque.peek() != null) { addDependency(this.currDependentSymbolDeque.peek(), Types.getReferredType(serviceConstructorExpr.getBType()).tsymbol); } addDependency(Types.getReferredType(serviceConstructorExpr.getBType()).tsymbol, serviceConstructorExpr.serviceNode.symbol); analyzeNode(serviceConstructorExpr.serviceNode, env); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { analyzeNode(typeTestExpr.expr, env); analyzeNode(typeTestExpr.typeNode, env); } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { analyzeNode(annotAccessExpr.expr, env); } @Override public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) { } @Override public void visit(BLangErrorType errorType) { } @Override public void visit(BLangRecordDestructure recordDestructure) { analyzeNode(recordDestructure.expr, env); checkAssignment(recordDestructure.varRef); } @Override public void visit(BLangErrorDestructure errorDestructure) { analyzeNode(errorDestructure.expr, env); checkAssignment(errorDestructure.varRef); } @Override public void visit(BLangTupleVarRef tupleVarRefExpr) { tupleVarRefExpr.expressions.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangRecordVarRef varRefExpr) { varRefExpr.recordRefFields.forEach(expr -> analyzeNode(expr.variableReference, env)); } @Override public void 
visit(BLangErrorVarRef varRefExpr) { analyzeNode(varRefExpr.message, env); if (varRefExpr.cause != null) { analyzeNode(varRefExpr.cause, env); } for (BLangNamedArgsExpression args : varRefExpr.detail) { analyzeNode(args.expr, env); } analyzeNode(varRefExpr.restVar, env); } @Override public void visit(BLangTupleVariable bLangTupleVariable) { analyzeNode(bLangTupleVariable.typeNode, env); populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangTupleVariable); this.currDependentSymbolDeque.push(bLangTupleVariable.symbol); analyzeNode(bLangTupleVariable.expr, env); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangTupleVariableDef bLangTupleVariableDef) { analyzeNode(bLangTupleVariableDef.var, env); } @Override public void visit(BLangRecordVariable bLangRecordVariable) { analyzeNode(bLangRecordVariable.typeNode, env); populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangRecordVariable); this.currDependentSymbolDeque.push(bLangRecordVariable.symbol); analyzeNode(bLangRecordVariable.expr, env); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangRecordVariableDef bLangRecordVariableDef) { analyzeNode(bLangRecordVariableDef.var, env); } @Override public void visit(BLangErrorVariable bLangErrorVariable) { analyzeNode(bLangErrorVariable.typeNode, env); populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangErrorVariable); this.currDependentSymbolDeque.push(bLangErrorVariable.symbol); analyzeNode(bLangErrorVariable.expr, env); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangErrorVariableDef bLangErrorVariableDef) { analyzeNode(bLangErrorVariableDef.errorVariable, env); } @Override public void visit(BLangMatchStaticBindingPatternClause bLangMatchStaticBindingPatternClause) { analyzeNode(bLangMatchStaticBindingPatternClause.body, env); } @Override public void visit(BLangMatchStructuredBindingPatternClause bLangMatchStructuredBindingPatternClause) { analyzeNode(bLangMatchStructuredBindingPatternClause.body, env); } private void addUninitializedVar(BLangVariable variable) { if (!this.uninitializedVars.containsKey(variable.symbol)) { this.uninitializedVars.put(variable.symbol, InitStatus.UN_INIT); } } /** * Analyzes a branch and returns the set of uninitialized variables for that branch. * This method does not update the current uninitialized-variables set; callers decide how branch * results are merged (e.g., {@code visit(BLangIf)} keeps only the if-branch result when the else * branch terminates flow). * * @param node Branch node to be analyzed * @param env Symbol environment * @return Result of the branch. 
*/ private BranchResult analyzeBranch(BLangNode node, SymbolEnv env) { Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars; boolean prevFlowTerminated = this.flowTerminated; this.uninitializedVars = copyUninitializedVars(); this.flowTerminated = false; analyzeNode(node, env); BranchResult branchResult = new BranchResult(this.uninitializedVars, this.flowTerminated); this.uninitializedVars = prevUninitializedVars; this.flowTerminated = prevFlowTerminated; return branchResult; } private Map<BSymbol, InitStatus> copyUninitializedVars() { return new HashMap<>(this.uninitializedVars); } private void analyzeNode(BLangNode node, SymbolEnv env) { SymbolEnv prevEnv = this.env; this.env = env; if (node != null) { node.accept(this); } this.env = prevEnv; } private Map<BSymbol, InitStatus> mergeUninitializedVars(Map<BSymbol, InitStatus> firstUninitVars, Map<BSymbol, InitStatus> secondUninitVars) { List<BSymbol> intersection = new ArrayList<>(firstUninitVars.keySet()); intersection.retainAll(secondUninitVars.keySet()); return Stream.concat(firstUninitVars.entrySet().stream(), secondUninitVars.entrySet().stream()) .collect(Collectors.toMap(entry -> entry.getKey(), entry -> intersection.contains(entry.getKey()) ? entry.getValue() : InitStatus.PARTIAL_INIT, (a, b) -> { if (a == InitStatus.PARTIAL_INIT || b == InitStatus.PARTIAL_INIT) { return InitStatus.PARTIAL_INIT; } return InitStatus.UN_INIT; })); } private void checkVarRef(BSymbol symbol, Location pos) { recordGlobalVariableReferenceRelationship(symbol); InitStatus initStatus = this.uninitializedVars.get(symbol); if (initStatus == null) { return; } if (initStatus == InitStatus.UN_INIT) { this.dlog.error(pos, DiagnosticErrorCode.USAGE_OF_UNINITIALIZED_VARIABLE, symbol); return; } this.dlog.error(pos, DiagnosticErrorCode.PARTIALLY_INITIALIZED_VARIABLE, symbol); } private void recordGlobalVariableReferenceRelationship(BSymbol symbol) { if (this.env.scope == null) { return; } boolean globalVarSymbol = isGlobalVarSymbol(symbol); BSymbol ownerSymbol = this.env.scope.owner; boolean isInPkgLevel = ownerSymbol.getKind() == SymbolKind.PACKAGE; if (isInPkgLevel && (globalVarSymbol || symbol instanceof BTypeSymbol) || (ownerSymbol.tag == SymTag.LET && globalVarSymbol)) { BSymbol dependent = this.currDependentSymbolDeque.peek(); addDependency(dependent, symbol); } else if (ownerSymbol.kind == SymbolKind.FUNCTION && globalVarSymbol) { BInvokableSymbol invokableOwnerSymbol = (BInvokableSymbol) ownerSymbol; addDependency(invokableOwnerSymbol, symbol); } else if (ownerSymbol.kind == SymbolKind.OBJECT && globalVarSymbol) { addDependency(ownerSymbol, symbol); } else if (ownerSymbol.kind == SymbolKind.RECORD && globalVarSymbol) { addDependency(ownerSymbol, symbol); } } private boolean isObjectMemberAccessWithSelf(BLangAccessExpression fieldAccessExpr) { if (fieldAccessExpr.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } return Names.SELF.value.equals(((BLangSimpleVarRef) fieldAccessExpr.expr).variableName.value); } private void checkAssignment(BLangExpression varRef) { NodeKind kind = varRef.getKind(); switch (kind) { case RECORD_VARIABLE_REF: BLangRecordVarRef recordVarRef = (BLangRecordVarRef) varRef; recordVarRef.recordRefFields.forEach(field -> checkAssignment(field.variableReference)); if (recordVarRef.restParam != null) { checkAssignment((BLangExpression) recordVarRef.restParam); } return; case TUPLE_VARIABLE_REF: BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) varRef; tupleVarRef.expressions.forEach(this::checkAssignment); if 
(tupleVarRef.restParam != null) { checkAssignment((BLangExpression) tupleVarRef.restParam); } return; case ERROR_VARIABLE_REF: BLangErrorVarRef errorVarRef = (BLangErrorVarRef) varRef; if (errorVarRef.message != null) { checkAssignment(errorVarRef.message); } if (errorVarRef.cause != null) { checkAssignment(errorVarRef.cause); } for (BLangNamedArgsExpression expression : errorVarRef.detail) { checkAssignment(expression); this.uninitializedVars.remove(((BLangVariableReference) expression.expr).symbol); } if (errorVarRef.restVar != null) { checkAssignment(errorVarRef.restVar); } return; case INDEX_BASED_ACCESS_EXPR: case FIELD_BASED_ACCESS_EXPR: BLangAccessExpression accessExpr = (BLangAccessExpression) varRef; BLangExpression expr = accessExpr.expr; BType type = Types.getReferredType(expr.getBType()); if (isObjectMemberAccessWithSelf(accessExpr)) { BObjectType objectType = (BObjectType) type; BSymbol symbol = accessExpr.symbol; if (this.uninitializedVars.containsKey(symbol)) { this.uninitializedVars.remove(symbol); return; } String fieldName = ((BLangFieldBasedAccess) varRef).field.value; checkFinalEntityUpdate(varRef.pos, fieldName, objectType.fields.get(fieldName).symbol); return; } if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { checkFinalObjectFieldUpdate((BLangFieldBasedAccess) accessExpr); } analyzeNode(expr, env); if (kind == NodeKind.INDEX_BASED_ACCESS_EXPR) { analyzeNode(((BLangIndexBasedAccess) varRef).indexExpr, env); } return; default: break; } if (kind != NodeKind.SIMPLE_VARIABLE_REF && kind != NodeKind.XML_ATTRIBUTE_ACCESS_EXPR) { return; } if (kind == NodeKind.SIMPLE_VARIABLE_REF) { BSymbol symbol = ((BLangSimpleVarRef) varRef).symbol; checkFinalEntityUpdate(varRef.pos, varRef, symbol); BSymbol owner = this.currDependentSymbolDeque.peek(); addFunctionToGlobalVarDependency(owner, ((BLangSimpleVarRef) varRef).symbol); } this.uninitializedVars.remove(((BLangVariableReference) varRef).symbol); } private void checkFinalObjectFieldUpdate(BLangFieldBasedAccess fieldAccess) { BLangExpression expr = fieldAccess.expr; BType exprType = Types.getReferredType(expr.getBType()); if (types.isSubTypeOfBaseType(exprType, TypeTags.OBJECT) && isFinalFieldInAllObjects(fieldAccess.pos, exprType, fieldAccess.field.value)) { dlog.error(fieldAccess.pos, DiagnosticErrorCode.CANNOT_UPDATE_FINAL_OBJECT_FIELD, fieldAccess.symbol); } } private boolean isFinalFieldInAllObjects(Location pos, BType btype, String fieldName) { BType type = Types.getReferredType(btype); if (type.tag == TypeTags.OBJECT) { BField field = ((BObjectType) type).fields.get(fieldName); if (field != null) { return Symbols.isFlagOn(field.symbol.flags, Flags.FINAL); } BObjectTypeSymbol objTypeSymbol = (BObjectTypeSymbol) type.tsymbol; Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objTypeSymbol.name.value, fieldName)); BSymbol funcSymbol = symResolver.resolveObjectMethod(pos, env, funcName, objTypeSymbol); return funcSymbol != null; } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isFinalFieldInAllObjects(pos, memberType, fieldName)) { return false; } } return true; } private void checkFinalEntityUpdate(Location pos, Object field, BSymbol symbol) { if (symbol == null || !Symbols.isFlagOn(symbol.flags, Flags.FINAL)) { return; } if (!this.uninitializedVars.containsKey(symbol)) { dlog.error(pos, DiagnosticErrorCode.CANNOT_ASSIGN_VALUE_FINAL, symbol); return; } InitStatus initStatus = this.uninitializedVars.get(symbol); if (initStatus == InitStatus.PARTIAL_INIT) { dlog.error(pos, 
DiagnosticErrorCode.CANNOT_ASSIGN_VALUE_TO_POTENTIALLY_INITIALIZED_FINAL, symbol); } } private void terminateFlow() { this.flowTerminated = true; } private void checkUnusedImports(List<BLangImportPackage> imports) { for (BLangImportPackage importStmt : imports) { if (importStmt.symbol == null || importStmt.symbol.isUsed || Names.IGNORE.value.equals(importStmt.alias.value)) { continue; } dlog.error(importStmt.alias.pos, DiagnosticErrorCode.UNUSED_MODULE_PREFIX, importStmt.alias.value); } } private void checkUnusedErrorVarsDeclaredWithVar() { for (Map.Entry<BSymbol, Location> entry : this.unusedErrorVarsDeclaredWithVar.entrySet()) { this.dlog.error(entry.getValue(), DiagnosticErrorCode.UNUSED_VARIABLE_WITH_INFERRED_TYPE_INCLUDING_ERROR, entry.getKey().name); } } private void emitUnusedVariableWarnings(Map<BSymbol, Location> unusedLocalVariables) { for (Map.Entry<BSymbol, Location> entry : unusedLocalVariables.entrySet()) { this.dlog.warning(entry.getValue(), DiagnosticWarningCode.UNUSED_LOCAL_VARIABLE, entry.getKey().name); } } private boolean addVarIfInferredTypeIncludesError(BLangSimpleVariable variable) { BType typeIntersection = types.getTypeIntersection(Types.IntersectionContext.compilerInternalIntersectionContext(), variable.getBType(), symTable.errorType, env); if (typeIntersection != null && typeIntersection != symTable.semanticError && typeIntersection != symTable.noType) { unusedErrorVarsDeclaredWithVar.put(variable.symbol, variable.pos); return true; } return false; } private boolean isLocalVariableDefinedWithNonWildCardBindingPattern(BLangSimpleVariable variable) { if (isWildCardBindingPattern(variable)) { return false; } return isLocalVariable(variable.symbol); } private boolean isWildCardBindingPattern(BLangSimpleVariable variable) { return Names.IGNORE.value.equals(variable.name.value); } private boolean isWildCardBindingPattern(BVarSymbol symbol) { return Names.IGNORE == symbol.name; } private boolean isLocalVariable(BVarSymbol symbol) { if (symbol == null) { return false; } BSymbol owner = symbol.owner; if (owner == null || owner.tag == SymTag.PACKAGE) { return false; } if (owner.tag == SymTag.LET) { return true; } if (owner.tag != SymTag.FUNCTION) { return false; } long flags = symbol.flags; SymbolKind kind = symbol.kind; if (kind == SymbolKind.PATH_PARAMETER || kind == SymbolKind.PATH_REST_PARAMETER) { return false; } return !Symbols.isFlagOn(flags, Flags.REQUIRED_PARAM) && !Symbols.isFlagOn(flags, Flags.DEFAULTABLE_PARAM) && !Symbols.isFlagOn(flags, Flags.INCLUDED) && !Symbols.isFlagOn(flags, Flags.REST_PARAM); } private void populateUnusedVariableMapForNonSimpleBindingPatternVariables( Map<BSymbol, Location> unusedLocalVariables, BLangVariable variable) { if (!isLocalVariable(variable.symbol)) { return; } populateUnusedVariableMapForMembers(unusedLocalVariables, variable); } private void populateUnusedVariableMapForMembers(Map<BSymbol, Location> unusedLocalVariables, BLangVariable variable) { if (variable == null) { return; } switch (variable.getKind()) { case VARIABLE: BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable; if (!isWildCardBindingPattern(simpleVariable)) { unusedLocalVariables.put(simpleVariable.symbol, simpleVariable.pos); } break; case RECORD_VARIABLE: BLangRecordVariable recordVariable = (BLangRecordVariable) variable; for (BLangRecordVariable.BLangRecordVariableKeyValue member : recordVariable.variableList) { populateUnusedVariableMapForMembers(unusedLocalVariables, member.valueBindingPattern); } 
populateUnusedVariableMapForMembers(unusedLocalVariables, (BLangVariable) recordVariable.restParam); break; case TUPLE_VARIABLE: BLangTupleVariable tupleVariable = (BLangTupleVariable) variable; for (BLangVariable memberVariable : tupleVariable.memberVariables) { populateUnusedVariableMapForMembers(unusedLocalVariables, memberVariable); } populateUnusedVariableMapForMembers(unusedLocalVariables, tupleVariable.restVariable); break; case ERROR_VARIABLE: BLangErrorVariable errorVariable = (BLangErrorVariable) variable; populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.message); populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.cause); for (BLangErrorVariable.BLangErrorDetailEntry member : errorVariable.detail) { populateUnusedVariableMapForMembers(unusedLocalVariables, member.valueBindingPattern); } populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.restDetail); break; } } private boolean isNotVariableReferenceLVExpr(BLangSimpleVarRef varRefExpr) { if (!varRefExpr.isLValue) { return true; } BLangNode parent = varRefExpr.parent; return parent != null && parent.getKind() != NodeKind.ASSIGNMENT; } private boolean isNotRangeExpr(BLangExpression collection) { if (collection.getKind() != NodeKind.BINARY_EXPR) { return true; } OperatorKind opKind = ((BLangBinaryExpr) collection).opKind; return opKind != OperatorKind.HALF_OPEN_RANGE && opKind != OperatorKind.CLOSED_RANGE; } private enum InitStatus { UN_INIT, PARTIAL_INIT } private class BranchResult { Map<BSymbol, InitStatus> uninitializedVars; boolean flowTerminated; BranchResult(Map<BSymbol, InitStatus> uninitializedVars, boolean flowTerminated) { this.uninitializedVars = uninitializedVars; this.flowTerminated = flowTerminated; } } }
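// Editorial note: a minimal sketch (hypothetical Ballerina, not from the source tree) of the
// diagnostics the analysis above produces. Given: int a; if (cond) { a = 1; } int b = a;
// 'a' is first tracked as InitStatus.UN_INIT by addUninitializedVar(...); after the branch merge
// in mergeUninitializedVars(...) it is downgraded to InitStatus.PARTIAL_INIT, because it is
// missing from one branch's uninitialized set but present in the other's; the read in
// 'int b = a;' then makes checkVarRef(...) report PARTIALLY_INITIALIZED_VARIABLE (a read of a
// variable never assigned on any path would report USAGE_OF_UNINITIALIZED_VARIABLE instead).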
class DataflowAnalyzer extends BLangNodeVisitor { private final SymbolResolver symResolver; private final Names names; private SymbolEnv env; private SymbolTable symTable; private BLangDiagnosticLog dlog; private Types types; private Map<BSymbol, InitStatus> uninitializedVars; private Map<BSymbol, Location> unusedErrorVarsDeclaredWithVar; private Map<BSymbol, Location> unusedLocalVariables; private Map<BSymbol, Set<BSymbol>> globalNodeDependsOn; private Map<BSymbol, Set<BSymbol>> functionToDependency; private boolean flowTerminated = false; private static final CompilerContext.Key<DataflowAnalyzer> DATAFLOW_ANALYZER_KEY = new CompilerContext.Key<>(); private Deque<BSymbol> currDependentSymbolDeque; private final GlobalVariableRefAnalyzer globalVariableRefAnalyzer; private DataflowAnalyzer(CompilerContext context) { context.put(DATAFLOW_ANALYZER_KEY, this); this.symTable = SymbolTable.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.types = Types.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.names = Names.getInstance(context); this.currDependentSymbolDeque = new ArrayDeque<>(); this.globalVariableRefAnalyzer = GlobalVariableRefAnalyzer.getInstance(context); this.unusedLocalVariables = new HashMap<>(); } public static DataflowAnalyzer getInstance(CompilerContext context) { DataflowAnalyzer dataflowAnalyzer = context.get(DATAFLOW_ANALYZER_KEY); if (dataflowAnalyzer == null) { dataflowAnalyzer = new DataflowAnalyzer(context); } return dataflowAnalyzer; } /** * Perform data-flow analysis on a package. * * @param pkgNode Package to perform data-flow analysis. * @return Data-flow analyzed package */ public BLangPackage analyze(BLangPackage pkgNode) { this.uninitializedVars = new LinkedHashMap<>(); this.globalNodeDependsOn = new LinkedHashMap<>(); this.functionToDependency = new HashMap<>(); this.dlog.setCurrentPackageId(pkgNode.packageID); SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol); analyzeNode(pkgNode, pkgEnv); return pkgNode; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.DATAFLOW_ANALYZE)) { return; } Map<BSymbol, Location> prevUnusedErrorVarsDeclaredWithVar = this.unusedErrorVarsDeclaredWithVar; this.unusedErrorVarsDeclaredWithVar = new HashMap<>(); Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); List<TopLevelNode> sortedListOfNodes = new ArrayList<>(pkgNode.globalVars); addModuleInitToSortedNodeList(pkgNode, sortedListOfNodes); addNodesToSortedNodeList(pkgNode, sortedListOfNodes); for (TopLevelNode topLevelNode : sortedListOfNodes) { if (isModuleInitFunction((BLangNode) topLevelNode)) { analyzeModuleInitFunc((BLangFunction) topLevelNode); } else { if (topLevelNode.getKind() == NodeKind.CLASS_DEFN) { BLangClassDefinition classDef = (BLangClassDefinition) topLevelNode; if (classDef.flagSet.contains(Flag.OBJECT_CTOR)) { continue; } } analyzeNode((BLangNode) topLevelNode, env); } } checkForUninitializedGlobalVars(pkgNode.globalVars); pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage)); this.globalVariableRefAnalyzer.analyzeAndReOrder(pkgNode, this.globalNodeDependsOn); this.globalVariableRefAnalyzer.populateFunctionDependencies(this.functionToDependency, pkgNode.globalVars); pkgNode.globalVariableDependencies = globalVariableRefAnalyzer.getGlobalVariablesDependsOn(); checkUnusedImports(pkgNode.imports); 
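// Unused-local warnings are deferred to this point: every reference visited above removes its
// symbol from unusedLocalVariables, so only the surviving entries are reported, once per package.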
emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVariables; checkUnusedErrorVarsDeclaredWithVar(); this.unusedErrorVarsDeclaredWithVar = prevUnusedErrorVarsDeclaredWithVar; pkgNode.completedPhases.add(CompilerPhase.DATAFLOW_ANALYZE); } private void addModuleInitToSortedNodeList(BLangPackage pkgNode, List<TopLevelNode> sortedListOfNodes) { for (TopLevelNode node : pkgNode.topLevelNodes) { if (isModuleInitFunction((BLangNode) node)) { sortedListOfNodes.add(node); break; } } } private void addNodesToSortedNodeList(BLangPackage pkgNode, List<TopLevelNode> sortedListOfNodes) { pkgNode.topLevelNodes.forEach(topLevelNode -> { if (!sortedListOfNodes.contains(topLevelNode)) { sortedListOfNodes.add(topLevelNode); } }); } private boolean isModuleInitFunction(BLangNode node) { return node.getKind() == NodeKind.FUNCTION && Names.USER_DEFINED_INIT_SUFFIX.value.equals(((BLangFunction) node).name.value); } private void analyzeModuleInitFunc(BLangFunction funcNode) { Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); this.currDependentSymbolDeque.push(funcNode.symbol); SymbolEnv moduleInitFuncEnv = SymbolEnv.createModuleInitFunctionEnv(funcNode, funcNode.symbol.scope, env); for (BLangAnnotationAttachment bLangAnnotationAttachment : funcNode.annAttachments) { analyzeNode(bLangAnnotationAttachment.expr, env); } analyzeNode(funcNode.body, moduleInitFuncEnv); this.currDependentSymbolDeque.pop(); emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVariables; } private void checkForUninitializedGlobalVars(List<BLangVariable> globalVars) { for (BLangVariable globalVar : globalVars) { if (globalVar.getKind() == NodeKind.VARIABLE && this.uninitializedVars.containsKey(globalVar.symbol)) { this.dlog.error(globalVar.pos, DiagnosticErrorCode.UNINITIALIZED_VARIABLE, globalVar.symbol); } } } @Override public void visit(BLangResourceFunction funcNode) { visit((BLangFunction) funcNode); } @Override public void visit(BLangFunction funcNode) { SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); this.currDependentSymbolDeque.push(funcNode.symbol); funcNode.annAttachments.forEach(bLangAnnotationAttachment -> analyzeNode(bLangAnnotationAttachment.expr, env)); funcNode.requiredParams.forEach(param -> analyzeNode(param, funcEnv)); analyzeNode(funcNode.restParam, funcEnv); if (funcNode.flagSet.contains(Flag.OBJECT_CTOR)) { visitFunctionBodyWithDynamicEnv(funcNode, funcEnv); } else { analyzeBranch(funcNode.body, funcEnv); } this.currDependentSymbolDeque.pop(); emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVariables; } private void visitFunctionBodyWithDynamicEnv(BLangFunction funcNode, SymbolEnv funcEnv) { Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); this.unusedLocalVariables.putAll(prevUnusedLocalVariables); Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars; this.uninitializedVars = copyUninitializedVars(); this.flowTerminated = false; analyzeNode(funcNode.body, funcEnv); this.uninitializedVars = prevUninitializedVars; prevUnusedLocalVariables.keySet().removeIf(bSymbol -> !this.unusedLocalVariables.containsKey(bSymbol)); 
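// Two-step reconciliation between the enclosing scope's map and this body's map: the removeIf
// above keeps in prevUnusedLocalVariables only the symbols still unused after analyzing the body,
// and the removeAll that follows drops exactly those shared symbols from the inner map, so each
// unused variable is warned about once, in the scope that owns it.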
this.unusedLocalVariables.keySet().removeAll(prevUnusedLocalVariables.keySet()); emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVariables; } @Override public void visit(BLangBlockFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); bodyEnv.isModuleInit = env.isModuleInit; for (BLangStatement statement : body.stmts) { analyzeNode(statement, bodyEnv); } } @Override public void visit(BLangExprFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); analyzeNode(body.expr, bodyEnv); } @Override public void visit(BLangExternalFunctionBody body) { } @Override public void visit(BLangBlockStmt blockNode) { SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env); blockNode.stmts.forEach(statement -> analyzeNode(statement, blockEnv)); } @Override public void visit(BLangLetExpression letExpression) { for (BLangLetVariable letVarDeclaration : letExpression.letVarDeclarations) { analyzeNode((BLangNode) letVarDeclaration.definitionNode, letExpression.env); } analyzeNode(letExpression.expr, letExpression.env); } @Override public void visit(BLangCompilationUnit compUnit) { } @Override public void visit(BLangXMLNS xmlnsNode) { } @Override public void visit(BLangService service) { this.currDependentSymbolDeque.push(service.serviceClass.symbol); visit(service.serviceClass); for (BLangExpression attachedExpr : service.attachedExprs) { analyzeNode(attachedExpr, env); } service.annAttachments.forEach(bLangAnnotationAttachment -> analyzeNode(bLangAnnotationAttachment.expr, env)); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangTypeDefinition typeDefinition) { SymbolEnv typeDefEnv; BSymbol symbol = typeDefinition.symbol; if (typeDefinition.symbol.kind == SymbolKind.TYPE_DEF) { symbol = symbol.type.tsymbol; } typeDefEnv = SymbolEnv.createTypeEnv(typeDefinition.typeNode, symbol.scope, env); this.currDependentSymbolDeque.push(symbol); analyzeNode(typeDefinition.typeNode, typeDefEnv); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangClassDefinition classDef) { SymbolEnv preEnv = env; SymbolEnv env = this.env; Map<BSymbol, Location> prevUnusedLocalVariables = null; Map<BSymbol, InitStatus> prevUninitializedVars = null; boolean visitedOCE = false; if (classDef.flagSet.contains(Flag.OBJECT_CTOR) && classDef.oceEnvData.capturedClosureEnv != null && classDef.oceEnvData.capturedClosureEnv.enclEnv != null) { env = classDef.oceEnvData.capturedClosureEnv.enclEnv; prevUnusedLocalVariables = this.unusedLocalVariables; prevUninitializedVars = this.uninitializedVars; this.unusedLocalVariables = new HashMap<>(); this.unusedLocalVariables.putAll(prevUnusedLocalVariables); this.uninitializedVars = copyUninitializedVars(); this.flowTerminated = false; visitedOCE = true; } SymbolEnv objectEnv = SymbolEnv.createClassEnv(classDef, classDef.symbol.scope, env); this.currDependentSymbolDeque.push(classDef.symbol); for (BLangAnnotationAttachment bLangAnnotationAttachment : classDef.annAttachments) { analyzeNode(bLangAnnotationAttachment.expr, env); } classDef.fields.forEach(field -> analyzeNode(field, objectEnv)); classDef.referencedFields.forEach(field -> analyzeNode(field, objectEnv)); if (classDef.initFunction != null) { if (classDef.initFunction.body == null) { Optional<BLangFunction> outerFuncDef = objectEnv.enclPkg.functions.stream() .filter(f -> f.symbol.name.equals((classDef.initFunction).symbol.name)) .findFirst(); outerFuncDef.ifPresent(bLangFunction -> classDef.initFunction = 
bLangFunction); } if (classDef.initFunction.body != null) { Map<BSymbol, Location> prevUnusedLocalVars = this.unusedLocalVariables; this.unusedLocalVariables = new HashMap<>(); if (classDef.initFunction.body.getKind() == NodeKind.BLOCK_FUNCTION_BODY) { for (BLangStatement statement : ((BLangBlockFunctionBody) classDef.initFunction.body).stmts) { analyzeNode(statement, objectEnv); } } else if (classDef.initFunction.body.getKind() == NodeKind.EXPR_FUNCTION_BODY) { analyzeNode(((BLangExprFunctionBody) classDef.initFunction.body).expr, objectEnv); } emitUnusedVariableWarnings(this.unusedLocalVariables); this.unusedLocalVariables = prevUnusedLocalVars; } } Stream.concat(classDef.fields.stream(), classDef.referencedFields.stream()) .map(field -> { addTypeDependency(classDef.symbol, field.getBType(), new HashSet<>()); return field; }) .filter(field -> !Symbols.isPrivate(field.symbol)) .forEach(field -> { if (this.uninitializedVars.containsKey(field.symbol)) { this.dlog.error(field.pos, DiagnosticErrorCode.OBJECT_UNINITIALIZED_FIELD, field.symbol); } }); for (BLangFunction function : classDef.functions) { analyzeNode(function, env); } for (BLangType type : classDef.typeRefs) { analyzeNode(type, env); } this.env = preEnv; if (visitedOCE) { this.uninitializedVars = prevUninitializedVars; prevUnusedLocalVariables.keySet().removeIf(bSymbol -> !this.unusedLocalVariables.containsKey(bSymbol)); this.unusedLocalVariables = prevUnusedLocalVariables; } this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangObjectConstructorExpression objectConstructorExpression) { BLangClassDefinition classDef = objectConstructorExpression.classNode; if (classDef.flagSet.contains(Flag.OBJECT_CTOR)) { OCEDynamicEnvironmentData oceData = classDef.oceEnvData; for (BSymbol symbol : oceData.closureFuncSymbols) { this.unusedLocalVariables.remove(symbol); } for (BSymbol symbol : oceData.closureBlockSymbols) { this.unusedLocalVariables.remove(symbol); } } visit(objectConstructorExpression.classNode); visit(objectConstructorExpression.typeInit); addDependency(objectConstructorExpression.getBType().tsymbol, objectConstructorExpression.classNode.symbol); } @Override public void visit(BLangSimpleVariableDef varDefNode) { BLangSimpleVariable var = varDefNode.var; if (var.expr == null) { addUninitializedVar(var); analyzeNode(var.typeNode, env); BVarSymbol symbol = var.symbol; if (var.getKind() == NodeKind.VARIABLE && isLocalVariableDefinedWithNonWildCardBindingPattern(var)) { this.unusedLocalVariables.put(symbol, var.pos); } return; } analyzeNode(var, env); } @Override public void visit(BLangSimpleVariable variable) { BVarSymbol symbol = variable.symbol; analyzeNode(variable.typeNode, env); if (symbol == null) { if (variable.expr != null) { analyzeNode(variable.expr, env); } return; } this.currDependentSymbolDeque.push(symbol); if (variable.typeNode != null && variable.typeNode.getBType() != null) { BType type = variable.typeNode.getBType(); recordGlobalVariableReferenceRelationship(Types.getReferredType(type).tsymbol); } boolean withInModuleVarLetExpr = symbol.owner.tag == SymTag.LET && isGlobalVarSymbol(env.enclVarSym); if (withInModuleVarLetExpr) { BVarSymbol dependentVar = env.enclVarSym; this.currDependentSymbolDeque.push(dependentVar); } try { boolean varWithInferredTypeIncludingError = false; if (variable.isDeclaredWithVar) { varWithInferredTypeIncludingError = addVarIfInferredTypeIncludesError(variable); } if (!varWithInferredTypeIncludingError && isLocalVariableDefinedWithNonWildCardBindingPattern(variable) && 
!isVariableDeclaredForWorkerDeclaration(variable)) { this.unusedLocalVariables.put(symbol, variable.pos); } if (variable.expr != null) { analyzeNode(variable.expr, env); this.uninitializedVars.remove(symbol); return; } long varFlags = symbol.flags; if (Symbols.isFlagOn(varFlags, Flags.CONFIGURABLE) && Symbols.isFlagOn(varFlags, Flags.REQUIRED)) { return; } BSymbol owner = symbol.owner; if (owner.tag != SymTag.PACKAGE && owner.tag != SymTag.OBJECT) { return; } addUninitializedVar(variable); } finally { if (withInModuleVarLetExpr) { this.currDependentSymbolDeque.pop(); } this.currDependentSymbolDeque.pop(); } } private boolean isVariableDeclaredForWorkerDeclaration(BLangSimpleVariable variable) { BLangExpression expr = variable.expr; if (expr == null) { return false; } if (Symbols.isFlagOn(variable.symbol.flags, Flags.WORKER)) { return true; } return expr.getKind() == NodeKind.LAMBDA && ((BLangLambdaFunction) expr).function.flagSet.contains(Flag.WORKER); } @Override public void visit(BLangAssignment assignment) { analyzeNode(assignment.expr, env); checkAssignment(assignment.varRef); } @Override public void visit(BLangCompoundAssignment compoundAssignNode) { analyzeNode(compoundAssignNode.expr, env); analyzeNode(compoundAssignNode.varRef, env); checkAssignment(compoundAssignNode.varRef); this.uninitializedVars.remove(compoundAssignNode.varRef.symbol); } @Override public void visit(BLangBreak breakNode) { terminateFlow(); } @Override public void visit(BLangReturn returnNode) { analyzeNode(returnNode.expr, env); terminateFlow(); } @Override public void visit(BLangXMLNSStatement xmlnsStmt) { analyzeNode(xmlnsStmt.xmlnsDecl, env); } @Override public void visit(BLangIf ifNode) { analyzeNode(ifNode.expr, env); BranchResult ifResult = analyzeBranch(ifNode.body, env); BranchResult elseResult = analyzeBranch(ifNode.elseStmt, env); if (ifResult.flowTerminated) { this.uninitializedVars = elseResult.uninitializedVars; return; } if (elseResult.flowTerminated || ConditionResolver.checkConstCondition(types, symTable, ifNode.expr) == symTable.trueType) { this.uninitializedVars = ifResult.uninitializedVars; return; } this.uninitializedVars = mergeUninitializedVars(ifResult.uninitializedVars, elseResult.uninitializedVars); } @Override public void visit(BLangMatchStatement matchStatement) { analyzeNode(matchStatement.expr, env); if (matchStatement.onFailClause != null) { analyzeNode(matchStatement.onFailClause, env); } Map<BSymbol, InitStatus> uninitVars = new HashMap<>(); BranchResult lastPatternResult = null; for (int i = 0; i < matchStatement.getMatchClauses().size(); i++) { BLangMatchClause matchClause = matchStatement.getMatchClauses().get(i); if (isLastPatternContainsIn(matchClause)) { lastPatternResult = analyzeBranch(matchClause, env); } else { BranchResult result = analyzeBranch(matchClause, env); if (result.flowTerminated) { continue; } uninitVars = mergeUninitializedVars(uninitVars, result.uninitializedVars); } } if (lastPatternResult != null) { uninitVars = mergeUninitializedVars(uninitVars, lastPatternResult.uninitializedVars); this.uninitializedVars = uninitVars; return; } uninitVars = mergeUninitializedVars(new HashMap<>(), this.uninitializedVars); this.uninitializedVars = uninitVars; } @Override public void visit(BLangMatchClause matchClause) { Location pos = matchClause.pos; for (BVarSymbol symbol : matchClause.declaredVars.values()) { if (!isWildCardBindingPattern(symbol)) { this.unusedLocalVariables.put(symbol, pos); } } analyzeNode(matchClause.matchGuard, env); 
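// Variables bound by the match patterns were registered as unused at the clause position above;
// visiting the guard and the clause body below removes any that are actually referenced.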
analyzeNode(matchClause.blockStmt, env); } @Override public void visit(BLangMatchGuard matchGuard) { analyzeNode(matchGuard.expr, env); } private boolean isLastPatternContainsIn(BLangMatchClause matchClause) { for (BLangMatchPattern pattern : matchClause.matchPatterns) { if (pattern.isLastPattern) { return true; } } return false; } @Override public void visit(BLangMatch match) { analyzeNode(match.expr, env); if (match.onFailClause != null) { analyzeNode(match.onFailClause, env); } Map<BSymbol, InitStatus> uninitVars = new HashMap<>(); BranchResult lastPatternResult = null; for (BLangMatch.BLangMatchBindingPatternClause patternClause : match.patternClauses) { if (patternClause.isLastPattern) { lastPatternResult = analyzeBranch(patternClause, env); } else { BranchResult result = analyzeBranch(patternClause, env); if (result.flowTerminated) { continue; } uninitVars = mergeUninitializedVars(uninitVars, result.uninitializedVars); } } if (lastPatternResult != null) { uninitVars = mergeUninitializedVars(uninitVars, lastPatternResult.uninitializedVars); this.uninitializedVars = uninitVars; return; } uninitVars = mergeUninitializedVars(new HashMap<>(), this.uninitializedVars); this.uninitializedVars = uninitVars; } @Override public void visit(BLangForeach foreach) { BLangExpression collection = foreach.collection; if (isNotRangeExpr(collection)) { populateUnusedVariableMapForMembers(this.unusedLocalVariables, (BLangVariable) foreach.variableDefinitionNode.getVariable()); } analyzeNode(collection, env); analyzeNode(foreach.body, env); if (foreach.onFailClause != null) { analyzeNode(foreach.onFailClause, env); } } @Override public void visit(BLangQueryAction queryAction) { for (BLangNode clause : queryAction.getQueryClauses()) { analyzeNode(clause, env); } } @Override public void visit(BLangWhile whileNode) { Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars; analyzeNode(whileNode.expr, env); BranchResult whileResult = analyzeBranch(whileNode.body, env); if (whileNode.onFailClause != null) { analyzeNode(whileNode.onFailClause, env); } BType constCondition = ConditionResolver.checkConstCondition(types, symTable, whileNode.expr); if (constCondition == symTable.falseType) { this.uninitializedVars = prevUninitializedVars; return; } if (whileResult.flowTerminated || constCondition == symTable.trueType) { this.uninitializedVars = whileResult.uninitializedVars; return; } this.uninitializedVars = mergeUninitializedVars(this.uninitializedVars, whileResult.uninitializedVars); } @Override public void visit(BLangDo doNode) { analyzeNode(doNode.body, env); if (doNode.onFailClause != null) { analyzeNode(doNode.onFailClause, env); } } public void visit(BLangFail failNode) { analyzeNode(failNode.expr, env); } @Override public void visit(BLangLock lockNode) { analyzeNode(lockNode.body, this.env); if (lockNode.onFailClause != null) { analyzeNode(lockNode.onFailClause, env); } } @Override public void visit(BLangTransaction transactionNode) { analyzeNode(transactionNode.transactionBody, env); if (transactionNode.onFailClause != null) { analyzeNode(transactionNode.onFailClause, env); } Name transactionPkgName = names.fromString(Names.DOT.value + Names.TRANSACTION_PACKAGE.value); Name compUnitName = names.fromString(transactionNode.pos.lineRange().filePath()); this.symResolver.resolvePrefixSymbol(env, transactionPkgName, compUnitName); } @Override public void visit(BLangTransactionalExpr transactionalExpr) { } @Override public void visit(BLangCommitExpr commitExpr) { } @Override public void 
visit(BLangRollback rollbackNode) { analyzeNode(rollbackNode.expr, env); } @Override public void visit(BLangTupleDestructure stmt) { analyzeNode(stmt.expr, env); checkAssignment(stmt.varRef); } @Override public void visit(BLangForkJoin forkJoin) { /* ignore */ } @Override public void visit(BLangWorkerSend workerSendNode) { analyzeNode(workerSendNode.expr, env); } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { analyzeNode(syncSendExpr.expr, env); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { } @Override public void visit(BLangLiteral literalExpr) { } @Override public void visit(BLangConstRef constRef) { } @Override public void visit(BLangListConstructorExpr listConstructorExpr) { listConstructorExpr.exprs.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { tableConstructorExpr.recordLiteralList.forEach(expr -> analyzeNode(expr, env)); checkForDuplicateKeys(tableConstructorExpr); } private void checkForDuplicateKeys(BLangTableConstructorExpr tableConstructorExpr) { Set<Integer> keyHashSet = new HashSet<>(); List<String> fieldNames = getFieldNames(tableConstructorExpr); HashMap<Integer, List<BLangExpression>> keyValues = new HashMap<>(); if (!fieldNames.isEmpty()) { for (BLangRecordLiteral literal : tableConstructorExpr.recordLiteralList) { List<BLangExpression> keyArray = createKeyArray(literal, fieldNames); int hashInt = generateHash(keyArray); if (!keyHashSet.add(hashInt) && checkForKeyEquality(keyValues, keyArray, hashInt)) { String fields = String.join(", ", fieldNames); String values = keyArray.stream().map(Object::toString).collect(Collectors.joining(", ")); dlog.error(literal.pos, DiagnosticErrorCode.DUPLICATE_KEY_IN_TABLE_LITERAL, fields, values); } keyValues.put(hashInt, keyArray); } } } private boolean checkForKeyEquality(HashMap<Integer, List<BLangExpression>> keyValues, List<BLangExpression> keyArray, int hash) { List<BLangExpression> existingExpList = keyValues.get(hash); boolean isEqual = false; if (existingExpList.size() == keyArray.size()) { isEqual = true; for (int i = 0; i < keyArray.size(); i++) { isEqual = isEqual && equality(keyArray.get(i), existingExpList.get(i)); } } return isEqual; } private int generateHash(List<BLangExpression> keyArray) { int result = 0; for (BLangExpression expr : keyArray) { result = 31 * result + hash(expr); } return result; } public boolean equality(Node nodeA, Node nodeB) { if (nodeA == null || nodeB == null) { return nodeA == nodeB; } if (nodeA.getKind() != nodeB.getKind()) { return false; } boolean isEqual = true; switch (nodeA.getKind()) { case RECORD_LITERAL_EXPR: BLangRecordLiteral recordLiteralA = (BLangRecordLiteral) nodeA; BLangRecordLiteral recordLiteralB = (BLangRecordLiteral) nodeB; for (int i = 0; isEqual && i < recordLiteralA.fields.size(); i++) { RecordLiteralNode.RecordField exprA = recordLiteralA.fields.get(i); RecordLiteralNode.RecordField exprB = recordLiteralB.fields.get(i); isEqual = equality(exprA, exprB); } return isEqual; case RECORD_LITERAL_KEY_VALUE: BLangRecordLiteral.BLangRecordKeyValueField fieldA = (BLangRecordLiteral.BLangRecordKeyValueField) nodeA; BLangRecordLiteral.BLangRecordKeyValueField fieldB = (BLangRecordLiteral.BLangRecordKeyValueField) nodeB; return equality(fieldA.valueExpr, fieldB.valueExpr); case LITERAL: case NUMERIC_LITERAL: BLangLiteral literalA = (BLangLiteral) nodeA; BLangLiteral literalB = (BLangLiteral) nodeB; return Objects.equals(literalA.value, literalB.value); case 
XML_TEXT_LITERAL: BLangXMLTextLiteral textLiteralA = (BLangXMLTextLiteral) nodeA; BLangXMLTextLiteral textLiteralB = (BLangXMLTextLiteral) nodeB; isEqual = equality(textLiteralA.concatExpr, textLiteralB.concatExpr); for (int i = 0; isEqual && i < textLiteralA.textFragments.size(); i++) { BLangExpression exprA = textLiteralA.textFragments.get(i); BLangExpression exprB = textLiteralB.textFragments.get(i); isEqual = equality(exprA, exprB); } return isEqual; case XML_ATTRIBUTE: BLangXMLAttribute attributeA = (BLangXMLAttribute) nodeA; BLangXMLAttribute attributeB = (BLangXMLAttribute) nodeB; return equality(attributeA.name, attributeB.name) && equality(attributeA.value, attributeB.value); case XML_QNAME: BLangXMLQName xmlqNameA = (BLangXMLQName) nodeA; BLangXMLQName xmlqNameB = (BLangXMLQName) nodeB; return equality(xmlqNameA.localname, xmlqNameB.localname) && equality(xmlqNameA.prefix, xmlqNameB.prefix); case XML_ELEMENT_LITERAL: BLangXMLElementLiteral eleLiteralA = (BLangXMLElementLiteral) nodeA; BLangXMLElementLiteral eleLiteralB = (BLangXMLElementLiteral) nodeB; isEqual = equality(eleLiteralA.startTagName, eleLiteralB.startTagName) && equality(eleLiteralA.endTagName, eleLiteralB.endTagName); for (int i = 0; isEqual && i < eleLiteralA.attributes.size(); i++) { BLangExpression exprA = eleLiteralA.attributes.get(i); BLangExpression exprB = eleLiteralB.attributes.get(i); isEqual = equality(exprA, exprB); } for (int i = 0; isEqual && i < eleLiteralA.children.size(); i++) { BLangExpression exprA = eleLiteralA.children.get(i); BLangExpression exprB = eleLiteralB.children.get(i); isEqual = equality(exprA, exprB); } return isEqual; case XML_COMMENT_LITERAL: BLangXMLCommentLiteral commentliteralA = (BLangXMLCommentLiteral) nodeA; BLangXMLCommentLiteral commentliteralB = (BLangXMLCommentLiteral) nodeB; isEqual = equality(commentliteralA.concatExpr, commentliteralB.concatExpr); for (int i = 0; isEqual && i < commentliteralA.textFragments.size(); i++) { BLangExpression exprA = commentliteralA.textFragments.get(i); BLangExpression exprB = commentliteralB.textFragments.get(i); isEqual = equality(exprA, exprB); } return isEqual; case XML_QUOTED_STRING: BLangXMLQuotedString quotedLiteralA = (BLangXMLQuotedString) nodeA; BLangXMLQuotedString quotedLiteralB = (BLangXMLQuotedString) nodeB; isEqual = equality(quotedLiteralA.concatExpr, quotedLiteralB.concatExpr); for (int i = 0; isEqual && i < quotedLiteralA.textFragments.size(); i++) { BLangExpression exprA = quotedLiteralA.textFragments.get(i); BLangExpression exprB = quotedLiteralB.textFragments.get(i); isEqual = equality(exprA, exprB); } return isEqual; case XMLNS: BLangXMLNS xmlnsA = (BLangXMLNS) nodeA; BLangXMLNS xmlnsB = (BLangXMLNS) nodeB; return equality(xmlnsA.prefix, xmlnsB.prefix) && equality(xmlnsA.namespaceURI, xmlnsB.namespaceURI); case XML_PI_LITERAL: BLangXMLProcInsLiteral insLiteralA = (BLangXMLProcInsLiteral) nodeA; BLangXMLProcInsLiteral insLiteralB = (BLangXMLProcInsLiteral) nodeB; isEqual = equality(insLiteralA.target, insLiteralB.target) && equality(insLiteralA.dataConcatExpr, insLiteralB.dataConcatExpr); for (int i = 0; isEqual && i < insLiteralA.dataFragments.size(); i++) { BLangExpression exprA = insLiteralA.dataFragments.get(i); BLangExpression exprB = insLiteralB.dataFragments.get(i); isEqual = equality(exprA, exprB); } return isEqual; case IDENTIFIER: BLangIdentifier identifierA = (BLangIdentifier) nodeA; BLangIdentifier identifierB = (BLangIdentifier) nodeB; return identifierA.value.equals(identifierB.value); case 
SIMPLE_VARIABLE_REF: BLangSimpleVarRef simpleVarRefA = (BLangSimpleVarRef) nodeA; BLangSimpleVarRef simpleVarRefB = (BLangSimpleVarRef) nodeB; BSymbol symbolA = simpleVarRefA.symbol; BSymbol symbolB = simpleVarRefB.symbol; if (symbolA != null && symbolB != null && (Symbols.isFlagOn(symbolA.flags, Flags.CONSTANT) && Symbols.isFlagOn(symbolB.flags, Flags.CONSTANT))) { return (((BConstantSymbol) symbolA).value).value .equals((((BConstantSymbol) symbolB).value).value); } else { return simpleVarRefA.variableName.equals(simpleVarRefB.variableName); } case STRING_TEMPLATE_LITERAL: BLangStringTemplateLiteral stringTemplateLiteralA = (BLangStringTemplateLiteral) nodeA; BLangStringTemplateLiteral stringTemplateLiteralB = (BLangStringTemplateLiteral) nodeB; for (int i = 0; isEqual && i < stringTemplateLiteralA.exprs.size(); i++) { BLangExpression exprA = stringTemplateLiteralA.exprs.get(i); BLangExpression exprB = stringTemplateLiteralB.exprs.get(i); isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB); } return isEqual; case LIST_CONSTRUCTOR_EXPR: BLangListConstructorExpr listConstructorExprA = (BLangListConstructorExpr) nodeA; BLangListConstructorExpr listConstructorExprB = (BLangListConstructorExpr) nodeB; for (int i = 0; isEqual && i < listConstructorExprA.exprs.size(); i++) { BLangExpression exprA = listConstructorExprA.exprs.get(i); BLangExpression exprB = listConstructorExprB.exprs.get(i); isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB); } return isEqual; case TABLE_CONSTRUCTOR_EXPR: BLangTableConstructorExpr tableConstructorExprA = (BLangTableConstructorExpr) nodeA; BLangTableConstructorExpr tableConstructorExprB = (BLangTableConstructorExpr) nodeB; for (int i = 0; isEqual && i < tableConstructorExprA.recordLiteralList.size(); i++) { BLangExpression exprA = tableConstructorExprA.recordLiteralList.get(i); BLangExpression exprB = tableConstructorExprB.recordLiteralList.get(i); isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB); } return isEqual; case TYPE_CONVERSION_EXPR: BLangTypeConversionExpr typeConversionExprA = (BLangTypeConversionExpr) nodeA; BLangTypeConversionExpr typeConversionExprB = (BLangTypeConversionExpr) nodeB; return equality(typeConversionExprA.expr, typeConversionExprB.expr); case BINARY_EXPR: BLangBinaryExpr binaryExprA = (BLangBinaryExpr) nodeA; BLangBinaryExpr binaryExprB = (BLangBinaryExpr) nodeB; return equality(binaryExprA.lhsExpr, binaryExprB.lhsExpr) && equality(binaryExprA.rhsExpr, binaryExprB.rhsExpr); case UNARY_EXPR: BLangUnaryExpr unaryExprA = (BLangUnaryExpr) nodeA; BLangUnaryExpr unaryExprB = (BLangUnaryExpr) nodeB; return equality(unaryExprA.expr, unaryExprB.expr); case TYPE_TEST_EXPR: BLangTypeTestExpr typeTestExprA = (BLangTypeTestExpr) nodeA; BLangTypeTestExpr typeTestExprB = (BLangTypeTestExpr) nodeB; return equality(typeTestExprA.expr, typeTestExprB.expr); case TERNARY_EXPR: BLangTernaryExpr ternaryExprA = (BLangTernaryExpr) nodeA; BLangTernaryExpr ternaryExprB = (BLangTernaryExpr) nodeB; return equality(ternaryExprA.expr, ternaryExprB.expr) && equality(ternaryExprA.thenExpr, ternaryExprB.thenExpr) && equality(ternaryExprA.elseExpr, ternaryExprB.elseExpr); case GROUP_EXPR: BLangGroupExpr groupExprA = (BLangGroupExpr) nodeA; BLangGroupExpr groupExprB = (BLangGroupExpr) nodeB; return equality(groupExprA.expression, groupExprB.expression); default: return false; } } public Integer hash(Node node) { int result = 0; if (node == null) { return result; 
} if (node.getKind() == NodeKind.RECORD_LITERAL_EXPR) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) node; for (RecordLiteralNode.RecordField entry : recordLiteral.fields) { result = 31 * result + hash(entry); } } else if (node.getKind() == NodeKind.RECORD_LITERAL_KEY_VALUE) { BLangRecordLiteral.BLangRecordKeyValueField field = (BLangRecordLiteral.BLangRecordKeyValueField) node; result = 31 * result + hash(field.key.expr) + hash(field.valueExpr); } else if (node.getKind() == NodeKind.ARRAY_LITERAL_EXPR) { BLangListConstructorExpr.BLangArrayLiteral arrayLiteral = (BLangListConstructorExpr.BLangArrayLiteral) node; for (BLangExpression expr : arrayLiteral.exprs) { result = 31 * result + hash(expr); } } else if (node.getKind() == NodeKind.LITERAL || node.getKind() == NodeKind.NUMERIC_LITERAL) { BLangLiteral literal = (BLangLiteral) node; result = Objects.hash(literal.value); } else if (node.getKind() == NodeKind.XML_TEXT_LITERAL) { BLangXMLTextLiteral literal = (BLangXMLTextLiteral) node; result = 31 * result + hash(literal.concatExpr); for (BLangExpression expr : literal.textFragments) { result = result * 31 + hash(expr); } } else if (node.getKind() == NodeKind.XML_ATTRIBUTE) { BLangXMLAttribute attribute = (BLangXMLAttribute) node; result = 31 * result + hash(attribute.name) + hash(attribute.value); } else if (node.getKind() == NodeKind.XML_QNAME) { BLangXMLQName xmlqName = (BLangXMLQName) node; result = 31 * result + hash(xmlqName.localname) + hash(xmlqName.prefix); } else if (node.getKind() == NodeKind.XML_COMMENT_LITERAL) { BLangXMLCommentLiteral literal = (BLangXMLCommentLiteral) node; result = 31 * result + hash(literal.concatExpr); for (BLangExpression expr : literal.textFragments) { result = result * 31 + hash(expr); } } else if (node.getKind() == NodeKind.XML_ELEMENT_LITERAL) { BLangXMLElementLiteral literal = (BLangXMLElementLiteral) node; result = 31 * result + hash(literal.startTagName) + hash(literal.endTagName); for (BLangExpression expr : literal.attributes) { result = 31 * result + hash(expr); } for (BLangExpression expr : literal.children) { result = 31 * result + hash(expr); } } else if (node.getKind() == NodeKind.XML_QUOTED_STRING) { BLangXMLQuotedString literal = (BLangXMLQuotedString) node; result = 31 * result + hash(literal.concatExpr); for (BLangExpression expr : literal.textFragments) { result = result * 31 + hash(expr); } } else if (node.getKind() == NodeKind.XMLNS) { BLangXMLNS xmlns = (BLangXMLNS) node; result = result * 31 + hash(xmlns.prefix) + hash(xmlns.namespaceURI); } else if (node.getKind() == NodeKind.XML_PI_LITERAL) { BLangXMLProcInsLiteral literal = (BLangXMLProcInsLiteral) node; result = 31 * result + hash(literal.target) + hash(literal.dataConcatExpr); for (BLangExpression expr : literal.dataFragments) { result = result * 31 + hash(expr); } } else if (node.getKind() == NodeKind.IDENTIFIER) { BLangIdentifier identifier = (BLangIdentifier) node; result = identifier.value.hashCode(); } else if (node.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) node; BSymbol symbol = simpleVarRef.symbol; if (symbol != null && Symbols.isFlagOn(symbol.flags, Flags.CONSTANT)) { BConstantSymbol constantSymbol = (BConstantSymbol) symbol; result = Objects.hash(constantSymbol.value.value); } else { result = simpleVarRef.variableName.hashCode(); } } else if (node.getKind() == NodeKind.STRING_TEMPLATE_LITERAL) { BLangStringTemplateLiteral stringTemplateLiteral = (BLangStringTemplateLiteral) node; for (BLangExpression 
expr : stringTemplateLiteral.exprs) { result = result * 31 + getTypeHash(stringTemplateLiteral.getBType()) + hash(expr); } } else if (node.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) { BLangListConstructorExpr listConstructorExpr = (BLangListConstructorExpr) node; for (BLangExpression expr : listConstructorExpr.exprs) { result = result * 31 + getTypeHash(listConstructorExpr.getBType()) + hash(expr); } } else if (node.getKind() == NodeKind.TABLE_CONSTRUCTOR_EXPR) { BLangTableConstructorExpr tableConstructorExpr = (BLangTableConstructorExpr) node; for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { result = result * 31 + getTypeHash(tableConstructorExpr.getBType()) + hash(recordLiteral); } } else if (node.getKind() == NodeKind.TYPE_CONVERSION_EXPR) { BLangTypeConversionExpr typeConversionExpr = (BLangTypeConversionExpr) node; result = 31 * result + hash(typeConversionExpr.expr); } else if (node.getKind() == NodeKind.BINARY_EXPR) { BLangBinaryExpr binaryExpr = (BLangBinaryExpr) node; result = 31 * result + hash(binaryExpr.lhsExpr) + hash(binaryExpr.rhsExpr); } else if (node.getKind() == NodeKind.UNARY_EXPR) { BLangUnaryExpr unaryExpr = (BLangUnaryExpr) node; result = 31 * result + hash(unaryExpr.expr); } else if (node.getKind() == NodeKind.TYPE_TEST_EXPR) { BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) node; result = 31 * result + hash(typeTestExpr.expr); } else if (node.getKind() == NodeKind.TERNARY_EXPR) { BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) node; result = 31 * result + hash(ternaryExpr.expr) + hash(ternaryExpr.thenExpr) + hash(ternaryExpr.elseExpr); } else if (node.getKind() == NodeKind.GROUP_EXPR) { BLangGroupExpr groupExpr = (BLangGroupExpr) node; result = 31 * result + hash(groupExpr.expression); } else { dlog.error(((BLangExpression) node).pos, DiagnosticErrorCode.EXPRESSION_IS_NOT_A_CONSTANT_EXPRESSION); } return result; } private Integer getTypeHash(BType type) { return Objects.hash(type.tag, type.name); } private List<BLangExpression> createKeyArray(BLangRecordLiteral literal, List<String> fieldNames) { Map<String, BLangExpression> fieldMap = new HashMap<>(); for (RecordLiteralNode.RecordField recordField : literal.fields) { if (recordField.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyVal = (BLangRecordLiteral.BLangRecordKeyValueField) recordField; fieldMap.put(keyVal.key.expr.toString(), keyVal.valueExpr); } else if (recordField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField recordVarNameField = (BLangRecordLiteral.BLangRecordVarNameField) recordField; fieldMap.put(recordVarNameField.getVariableName().value, recordVarNameField); } } return fieldNames.stream().map(fieldMap::get).collect(Collectors.toList()); } private List<String> getFieldNames(BLangTableConstructorExpr constructorExpr) { List<String> fieldNames = null; if (Types.getReferredType(constructorExpr.getBType()).tag == TypeTags.TABLE) { fieldNames = ((BTableType) Types.getReferredType(constructorExpr.getBType())).fieldNameList; if (fieldNames != null) { return fieldNames; } } if (constructorExpr.tableKeySpecifier != null && !constructorExpr.tableKeySpecifier.fieldNameIdentifierList.isEmpty()) { BLangTableKeySpecifier tableKeySpecifier = constructorExpr.tableKeySpecifier; return tableKeySpecifier.fieldNameIdentifierList.stream().map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList()); } else { return new ArrayList<>(); } } @Override public void visit(BLangRecordLiteral 
recordLiteral) { for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; if (keyValuePair.key.computedKey) { analyzeNode(keyValuePair.key.expr, env); } analyzeNode(keyValuePair.valueExpr, env); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { analyzeNode((BLangRecordLiteral.BLangRecordVarNameField) field, env); } else { analyzeNode(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env); } } } @Override public void visit(BLangSimpleVarRef varRefExpr) { this.unusedErrorVarsDeclaredWithVar.remove(varRefExpr.symbol); if (isNotVariableReferenceLVExpr(varRefExpr)) { this.unusedLocalVariables.remove(varRefExpr.symbol); } checkVarRef(varRefExpr.symbol, varRefExpr.pos); } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { if (!fieldAccessExpr.isLValue && isObjectMemberAccessWithSelf(fieldAccessExpr)) { checkVarRef(fieldAccessExpr.symbol, fieldAccessExpr.pos); } analyzeNode(fieldAccessExpr.expr, env); } @Override public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) { if (!nsPrefixedFieldBasedAccess.isLValue && isObjectMemberAccessWithSelf(nsPrefixedFieldBasedAccess)) { checkVarRef(nsPrefixedFieldBasedAccess.symbol, nsPrefixedFieldBasedAccess.pos); } analyzeNode(nsPrefixedFieldBasedAccess.expr, env); } @Override public void visit(BLangIndexBasedAccess indexAccessExpr) { analyzeNode(indexAccessExpr.expr, env); analyzeNode(indexAccessExpr.indexExpr, env); } @Override public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) { tableMultiKeyExpr.multiKeyIndexExprs.forEach(value -> analyzeNode(value, env)); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { analyzeNode(xmlElementAccess.expr, env); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { analyzeNode(xmlNavigation.expr, env); if (xmlNavigation.childIndex != null) { analyzeNode(xmlNavigation.childIndex, env); } } @Override public void visit(BLangInvocation invocationExpr) { analyzeNode(invocationExpr.expr, env); BSymbol symbol = invocationExpr.symbol; this.unusedLocalVariables.remove(symbol); if (!isGlobalVarsInitialized(invocationExpr.pos)) { return; } if (!isFieldsInitializedForSelfArgument(invocationExpr)) { return; } if (!isFieldsInitializedForSelfInvocation(invocationExpr.requiredArgs, invocationExpr.pos)) { return; } if (!isFieldsInitializedForSelfInvocation(invocationExpr.restArgs, invocationExpr.pos)) { return; } checkVarRef(symbol, invocationExpr.pos); invocationExpr.requiredArgs.forEach(expr -> analyzeNode(expr, env)); invocationExpr.restArgs.forEach(expr -> analyzeNode(expr, env)); BSymbol owner = this.env.scope.owner; if (owner.kind == SymbolKind.FUNCTION) { BInvokableSymbol invokableOwnerSymbol = (BInvokableSymbol) owner; Name name = names.fromIdNode(invocationExpr.name); BSymbol dependsOnFunctionSym = symResolver.lookupSymbolInMainSpace(this.env, name); if (symTable.notFoundSymbol != dependsOnFunctionSym) { addDependency(invokableOwnerSymbol, dependsOnFunctionSym); } } else if (symbol != null && symbol.kind == SymbolKind.FUNCTION) { BInvokableSymbol invokableProviderSymbol = (BInvokableSymbol) symbol; BSymbol curDependent = this.currDependentSymbolDeque.peek(); if (curDependent != null && isGlobalVarSymbol(curDependent)) { addDependency(curDependent, invokableProviderSymbol); } } } @Override public void visit(BLangErrorConstructorExpr 
errorConstructorExpr) { for (BLangExpression positionalArg : errorConstructorExpr.positionalArgs) { analyzeNode(positionalArg, env); } for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) { analyzeNode(namedArg, env); } } @Override public void visit(BLangActionInvocation actionInvocation) { this.visit((BLangInvocation) actionInvocation); } @Override public void visit(BLangQueryExpr queryExpr) { for (BLangNode clause : queryExpr.getQueryClauses()) { analyzeNode(clause, env); } } @Override public void visit(BLangFromClause fromClause) { BLangExpression collection = fromClause.collection; if (isNotRangeExpr(collection)) { populateUnusedVariableMapForMembers(this.unusedLocalVariables, (BLangVariable) fromClause.variableDefinitionNode.getVariable()); } analyzeNode(collection, env); } @Override public void visit(BLangJoinClause joinClause) { populateUnusedVariableMapForMembers(this.unusedLocalVariables, (BLangVariable) joinClause.variableDefinitionNode.getVariable()); analyzeNode(joinClause.collection, env); if (joinClause.onClause != null) { analyzeNode((BLangNode) joinClause.onClause, env); } } @Override public void visit(BLangLetClause letClause) { for (BLangLetVariable letVariable : letClause.letVarDeclarations) { analyzeNode((BLangNode) letVariable.definitionNode, env); } } @Override public void visit(BLangWhereClause whereClause) { analyzeNode(whereClause.expression, env); } @Override public void visit(BLangOnClause onClause) { analyzeNode(onClause.lhsExpr, env); analyzeNode(onClause.rhsExpr, env); } @Override public void visit(BLangOrderKey orderKeyClause) { analyzeNode(orderKeyClause.expression, env); } @Override public void visit(BLangOrderByClause orderByClause) { orderByClause.orderByKeyList.forEach(value -> analyzeNode((BLangNode) value, env)); } @Override public void visit(BLangSelectClause selectClause) { analyzeNode(selectClause.expression, env); } @Override public void visit(BLangOnConflictClause onConflictClause) { analyzeNode(onConflictClause.expression, env); } @Override public void visit(BLangLimitClause limitClause) { analyzeNode(limitClause.expression, env); } @Override public void visit(BLangDoClause doClause) { analyzeNode(doClause.body, env); } @Override public void visit(BLangOnFailClause onFailClause) { analyzeNode((BLangVariable) onFailClause.variableDefinitionNode.getVariable(), env); analyzeNode(onFailClause.body, env); } private boolean isFieldsInitializedForSelfArgument(BLangInvocation invocationExpr) { if (invocationExpr.expr == null || !isSelfKeyWordExpr(invocationExpr.expr)) { return true; } StringBuilder uninitializedFields = getUninitializedFieldsForSelfKeyword((BObjectType) ((BLangSimpleVarRef) invocationExpr.expr).symbol.type); if (uninitializedFields.length() != 0) { this.dlog.error(invocationExpr.pos, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_FIELDS, uninitializedFields.toString()); return false; } return true; } private boolean isFieldsInitializedForSelfInvocation(List<BLangExpression> argExpressions, Location location) { for (BLangExpression expr : argExpressions) { if (isSelfKeyWordExpr(expr)) { StringBuilder uninitializedFields = getUninitializedFieldsForSelfKeyword((BObjectType) ((BLangSimpleVarRef) expr).symbol.type); if (uninitializedFields.length() != 0) { this.dlog.error(location, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_FIELDS, uninitializedFields.toString()); return false; } } } return true; } private boolean isGlobalVarsInitialized(Location pos) { if (env.isModuleInit) { boolean isFirstUninitializedField = true; 
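// During module init, the loop below builds a comma-separated list of module-level variables that
// are still uninitialized, for the CONTAINS_UNINITIALIZED_VARIABLES diagnostic.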
StringBuilder uninitializedFields = new StringBuilder(); for (BSymbol symbol : this.uninitializedVars.keySet()) { if (isFirstUninitializedField) { uninitializedFields = new StringBuilder(symbol.getName().value); isFirstUninitializedField = false; } else { uninitializedFields.append(", ").append(symbol.getName().value); } } if (uninitializedFields.length() != 0) { this.dlog.error(pos, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_VARIABLES, uninitializedFields.toString()); return false; } } return true; } private boolean isSelfKeyWordExpr(BLangExpression expr) { return expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && Names.SELF.value.equals(((BLangSimpleVarRef) expr).getVariableName().getValue()); } private StringBuilder getUninitializedFieldsForSelfKeyword(BObjectType objType) { boolean isFirstUninitializedField = true; StringBuilder uninitializedFields = new StringBuilder(); for (BField field : objType.fields.values()) { if (this.uninitializedVars.containsKey(field.symbol)) { if (isFirstUninitializedField) { uninitializedFields = new StringBuilder(field.symbol.getName().value); isFirstUninitializedField = false; } else { uninitializedFields.append(", ").append(field.symbol.getName().value); } } } return uninitializedFields; } private boolean isGlobalVarSymbol(BSymbol symbol) { if (symbol == null) { return false; } else if (symbol.owner == null) { return false; } else if (symbol.owner.tag != SymTag.PACKAGE) { return false; } return isVariableOrConstant(symbol); } private boolean isVariableOrConstant(BSymbol symbol) { if (symbol == null) { return false; } return ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) || ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT); } /** * Registers the dependent symbol's dependency on the provider symbol. * For a module-level declaration {@code int a = b;}, a depends on b. * For {@code function foo() { return b + 1; }} where b is a global variable, foo depends on b. * * @param dependent the dependent symbol. * @param provider the symbol which provides a value. */ private void addDependency(BSymbol dependent, BSymbol provider) { if (provider == null || dependent == null || dependent.pkgID != provider.pkgID) { return; } Set<BSymbol> providers = globalNodeDependsOn.computeIfAbsent(dependent, s -> new LinkedHashSet<>()); providers.add(provider); addFunctionToGlobalVarDependency(dependent, provider); } private void addFunctionToGlobalVarDependency(BSymbol dependent, BSymbol provider) { if (dependent.kind != SymbolKind.FUNCTION && !isGlobalVarSymbol(dependent)) { return; } if (isVariableOrConstant(provider) && !isGlobalVarSymbol(provider)) { return; } Set<BSymbol> providers = this.functionToDependency.computeIfAbsent(dependent, s -> new HashSet<>()); providers.add(provider); } @Override public void visit(BLangTypeInit typeInitExpr) { typeInitExpr.argsExpr.forEach(argExpr -> analyzeNode(argExpr, env)); if (this.currDependentSymbolDeque.peek() != null) { addDependency(this.currDependentSymbolDeque.peek(), Types.getReferredType(typeInitExpr.getBType()).tsymbol); } } @Override public void visit(BLangTernaryExpr ternaryExpr) { analyzeNode(ternaryExpr.expr, env); analyzeNode(ternaryExpr.thenExpr, env); analyzeNode(ternaryExpr.elseExpr, env); } @Override public void visit(BLangWaitExpr waitExpr) { analyzeNode(waitExpr.getExpression(), env); } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { } @Override public void visit(BLangWaitForAllExpr waitForAllExpr) { waitForAllExpr.keyValuePairs.forEach(keyValue -> { BLangExpression expr = keyValue.valueExpr != null ? 
keyValue.valueExpr : keyValue.keyExpr; analyzeNode(expr, env); }); } @Override public void visit(BLangBinaryExpr binaryExpr) { analyzeNode(binaryExpr.lhsExpr, env); analyzeNode(binaryExpr.rhsExpr, env); } @Override public void visit(BLangElvisExpr elvisExpr) { analyzeNode(elvisExpr.lhsExpr, env); analyzeNode(elvisExpr.rhsExpr, env); } @Override public void visit(BLangGroupExpr groupExpr) { analyzeNode(groupExpr.expression, env); } @Override public void visit(BLangUnaryExpr unaryExpr) { analyzeNode(unaryExpr.expr, env); } @Override public void visit(BLangTypeConversionExpr conversionExpr) { analyzeNode(conversionExpr.expr, env); } @Override public void visit(BLangXMLAttribute xmlAttribute) { analyzeNode(xmlAttribute.value, env); } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { xmlElementLiteral.children.forEach(expr -> analyzeNode(expr, env)); xmlElementLiteral.attributes.forEach(expr -> analyzeNode(expr, env)); xmlElementLiteral.inlineNamespaces.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { xmlTextLiteral.textFragments.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { xmlCommentLiteral.textFragments.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { xmlProcInsLiteral.dataFragments.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { xmlQuotedString.textFragments.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { stringTemplateLiteral.exprs.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { for (BLangLiteral string : rawTemplateLiteral.strings) { analyzeNode(string, env); } for (BLangExpression expr : rawTemplateLiteral.insertions) { analyzeNode(expr, env); } } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { BLangFunction funcNode = bLangLambdaFunction.function; SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); visitFunctionBodyWithDynamicEnv(funcNode, funcEnv); } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { analyzeNode(bLangVarArgsExpression.expr, env); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { analyzeNode(bLangNamedArgsExpression.expr, env); } @Override public void visit(BLangIsAssignableExpr assignableExpr) { } @Override public void visit(BLangMatchExpression matchExpression) { analyzeNode(matchExpression.expr, env); matchExpression.patternClauses.forEach(pattern -> analyzeNode(pattern, env)); } @Override public void visit(BLangMatchExprPatternClause matchExprPatternClause) { analyzeNode(matchExprPatternClause.expr, env); } @Override public void visit(BLangCheckedExpr checkedExpr) { analyzeNode(checkedExpr.expr, env); } @Override public void visit(BLangCheckPanickedExpr checkPanicExpr) { analyzeNode(checkPanicExpr.expr, env); } @Override public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { bLangXMLSequenceLiteral.xmlItems.forEach(xml -> analyzeNode(xml, env)); } @Override public void visit(BLangExpressionStmt exprStmtNode) { analyzeNode(exprStmtNode.expr, env); } @Override public void visit(BLangAnnotation annotationNode) { } @Override public void visit(BLangAnnotationAttachment annAttachmentNode) { } @Override public void 
visit(BLangRetry retryNode) { analyzeNode(retryNode.retryBody, env); if (retryNode.onFailClause != null) { analyzeNode(retryNode.onFailClause, env); } } @Override public void visit(BLangRetryTransaction retryTransaction) { analyzeNode(retryTransaction.transaction, env); } @Override public void visit(BLangContinue continueNode) { terminateFlow(); } @Override public void visit(BLangTypedescExpr accessExpr) { } @Override public void visit(BLangXMLQName xmlQName) { } @Override public void visit(BLangArrowFunction bLangArrowFunction) { for (ClosureVarSymbol closureVarSymbol : bLangArrowFunction.closureVarSymbols) { BSymbol symbol = closureVarSymbol.bSymbol; if (this.uninitializedVars.containsKey(symbol)) { this.dlog.error(closureVarSymbol.diagnosticLocation, DiagnosticErrorCode.USAGE_OF_UNINITIALIZED_VARIABLE, symbol); } this.unusedErrorVarsDeclaredWithVar.remove(symbol); this.unusedLocalVariables.remove(symbol); } } @Override public void visit(BLangValueType valueType) { } @Override public void visit(BLangConstant constant) { boolean validVariable = constant.symbol != null; if (validVariable) { this.currDependentSymbolDeque.push(constant.symbol); } try { analyzeNode(constant.expr, env); } finally { if (validVariable) { this.currDependentSymbolDeque.pop(); } } } @Override public void visit(BLangArrayType arrayType) { analyzeNode(arrayType.getElementType(), env); } @Override public void visit(BLangBuiltInRefTypeNode builtInRefType) { } @Override public void visit(BLangConstrainedType constrainedType) { analyzeNode(constrainedType.constraint, env); } @Override public void visit(BLangStreamType streamType) { analyzeNode(streamType.constraint, env); analyzeNode(streamType.error, env); } @Override public void visit(BLangTableTypeNode tableType) { analyzeNode(tableType.constraint, env); if (tableType.tableKeyTypeConstraint != null) { analyzeNode(tableType.tableKeyTypeConstraint.keyType, env); } } @Override public void visit(BLangUserDefinedType userDefinedType) { if (this.currDependentSymbolDeque.isEmpty()) { return; } BType resolvedType = Types.getReferredType(userDefinedType.getBType()); if (resolvedType == symTable.semanticError) { return; } BTypeSymbol tsymbol = resolvedType.tsymbol; recordGlobalVariableReferenceRelationship(tsymbol); } @Override public void visit(BLangFunctionTypeNode functionTypeNode) { if (functionTypeNode.flagSet.contains(Flag.ANY_FUNCTION)) { return; } functionTypeNode.params.forEach(param -> analyzeNode(param.typeNode, env)); analyzeNode(functionTypeNode.returnTypeNode, env); } @Override public void visit(BLangUnionTypeNode unionTypeNode) { unionTypeNode.memberTypeNodes.forEach(typeNode -> analyzeNode(typeNode, env)); } @Override public void visit(BLangIntersectionTypeNode intersectionTypeNode) { for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) { analyzeNode(constituentTypeNode, env); } } @Override public void visit(BLangObjectTypeNode objectTypeNode) { } @Override public void visit(BLangRecordTypeNode recordTypeNode) { BTypeSymbol tsymbol = Types.getReferredType(recordTypeNode.getBType()).tsymbol; for (TypeNode type : recordTypeNode.getTypeReferences()) { BLangType bLangType = (BLangType) type; analyzeNode(bLangType, env); recordGlobalVariableReferenceRelationship( Types.getReferredType(bLangType.getBType()).tsymbol); } for (BLangSimpleVariable field : recordTypeNode.fields) { addTypeDependency(tsymbol, Types.getReferredType(field.getBType()), new HashSet<>()); analyzeNode(field, env); recordGlobalVariableReferenceRelationship(field.symbol); 
} } private void addTypeDependency(BTypeSymbol dependentTypeSymbol, BType providerType, Set<BType> unresolvedTypes) { if (unresolvedTypes.contains(providerType)) { return; } unresolvedTypes.add(providerType); switch (providerType.tag) { case TypeTags.UNION: for (BType memberType : ((BUnionType) providerType).getMemberTypes()) { BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(memberType); addTypeDependency(dependentTypeSymbol, effectiveType, unresolvedTypes); } break; case TypeTags.ARRAY: addTypeDependency(dependentTypeSymbol, types.getTypeWithEffectiveIntersectionTypes(((BArrayType) providerType).getElementType()), unresolvedTypes); break; case TypeTags.MAP: addTypeDependency(dependentTypeSymbol, types.getTypeWithEffectiveIntersectionTypes(((BMapType) providerType).getConstraint()), unresolvedTypes); break; case TypeTags.TYPEREFDESC: addTypeDependency(dependentTypeSymbol, Types.getReferredType(providerType), unresolvedTypes); break; default: addDependency(dependentTypeSymbol, providerType.tsymbol); } } @Override public void visit(BLangFiniteTypeNode finiteTypeNode) { finiteTypeNode.valueSpace.forEach(value -> analyzeNode(value, env)); } @Override public void visit(BLangTupleTypeNode tupleTypeNode) { tupleTypeNode.memberTypeNodes.forEach(type -> analyzeNode(type, env)); } @Override public void visit(BLangMarkdownDocumentationLine bLangMarkdownDocumentationLine) { } @Override public void visit(BLangMarkdownParameterDocumentation bLangDocumentationParameter) { } @Override public void visit(BLangMarkdownReturnParameterDocumentation bLangMarkdownReturnParameterDocumentation) { } @Override public void visit(BLangMarkdownDocumentation bLangMarkdownDocumentation) { } @Override public void visit(BLangTestablePackage testablePkgNode) { } @Override public void visit(BLangImportPackage importPkgNode) { } @Override public void visit(BLangIdentifier identifierNode) { } @Override public void visit(BLangPanic panicNode) { analyzeNode(panicNode.expr, env); terminateFlow(); } @Override public void visit(BLangTrapExpr trapExpr) { analyzeNode(trapExpr.expr, env); } public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { if (this.currDependentSymbolDeque.peek() != null) { addDependency(this.currDependentSymbolDeque.peek(), Types.getReferredType(serviceConstructorExpr.getBType()).tsymbol); } addDependency(Types.getReferredType(serviceConstructorExpr.getBType()).tsymbol, serviceConstructorExpr.serviceNode.symbol); analyzeNode(serviceConstructorExpr.serviceNode, env); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { analyzeNode(typeTestExpr.expr, env); analyzeNode(typeTestExpr.typeNode, env); } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { analyzeNode(annotAccessExpr.expr, env); } @Override public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) { } @Override public void visit(BLangErrorType errorType) { } @Override public void visit(BLangRecordDestructure recordDestructure) { analyzeNode(recordDestructure.expr, env); checkAssignment(recordDestructure.varRef); } @Override public void visit(BLangErrorDestructure errorDestructure) { analyzeNode(errorDestructure.expr, env); checkAssignment(errorDestructure.varRef); } @Override public void visit(BLangTupleVarRef tupleVarRefExpr) { tupleVarRefExpr.expressions.forEach(expr -> analyzeNode(expr, env)); } @Override public void visit(BLangRecordVarRef varRefExpr) { varRefExpr.recordRefFields.forEach(expr -> analyzeNode(expr.variableReference, env)); } @Override public void 
visit(BLangErrorVarRef varRefExpr) { analyzeNode(varRefExpr.message, env); if (varRefExpr.cause != null) { analyzeNode(varRefExpr.cause, env); } for (BLangNamedArgsExpression args : varRefExpr.detail) { analyzeNode(args.expr, env); } analyzeNode(varRefExpr.restVar, env); } @Override public void visit(BLangTupleVariable bLangTupleVariable) { analyzeNode(bLangTupleVariable.typeNode, env); populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangTupleVariable); this.currDependentSymbolDeque.push(bLangTupleVariable.symbol); analyzeNode(bLangTupleVariable.expr, env); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangTupleVariableDef bLangTupleVariableDef) { analyzeNode(bLangTupleVariableDef.var, env); } @Override public void visit(BLangRecordVariable bLangRecordVariable) { analyzeNode(bLangRecordVariable.typeNode, env); populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangRecordVariable); this.currDependentSymbolDeque.push(bLangRecordVariable.symbol); analyzeNode(bLangRecordVariable.expr, env); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangRecordVariableDef bLangRecordVariableDef) { analyzeNode(bLangRecordVariableDef.var, env); } @Override public void visit(BLangErrorVariable bLangErrorVariable) { analyzeNode(bLangErrorVariable.typeNode, env); populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangErrorVariable); this.currDependentSymbolDeque.push(bLangErrorVariable.symbol); analyzeNode(bLangErrorVariable.expr, env); this.currDependentSymbolDeque.pop(); } @Override public void visit(BLangErrorVariableDef bLangErrorVariableDef) { analyzeNode(bLangErrorVariableDef.errorVariable, env); } @Override public void visit(BLangMatchStaticBindingPatternClause bLangMatchStaticBindingPatternClause) { analyzeNode(bLangMatchStaticBindingPatternClause.body, env); } @Override public void visit(BLangMatchStructuredBindingPatternClause bLangMatchStructuredBindingPatternClause) { analyzeNode(bLangMatchStructuredBindingPatternClause.body, env); } private void addUninitializedVar(BLangVariable variable) { if (!this.uninitializedVars.containsKey(variable.symbol)) { this.uninitializedVars.put(variable.symbol, InitStatus.UN_INIT); } } /** * Analyze a branch and returns the set of uninitialized variables for that branch. * This method will not update the current uninitialized variables set. * * @param node Branch node to be analyzed * @param env Symbol environment * @return Result of the branch. 
*/ private BranchResult analyzeBranch(BLangNode node, SymbolEnv env) { Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars; boolean prevFlowTerminated = this.flowTerminated; this.uninitializedVars = copyUninitializedVars(); this.flowTerminated = false; analyzeNode(node, env); BranchResult brachResult = new BranchResult(this.uninitializedVars, this.flowTerminated); this.uninitializedVars = prevUninitializedVars; this.flowTerminated = prevFlowTerminated; return brachResult; } private Map<BSymbol, InitStatus> copyUninitializedVars() { return new HashMap<>(this.uninitializedVars); } private void analyzeNode(BLangNode node, SymbolEnv env) { SymbolEnv prevEnv = this.env; this.env = env; if (node != null) { node.accept(this); } this.env = prevEnv; } private Map<BSymbol, InitStatus> mergeUninitializedVars(Map<BSymbol, InitStatus> firstUninitVars, Map<BSymbol, InitStatus> secondUninitVars) { List<BSymbol> intersection = new ArrayList<>(firstUninitVars.keySet()); intersection.retainAll(secondUninitVars.keySet()); return Stream.concat(firstUninitVars.entrySet().stream(), secondUninitVars.entrySet().stream()) .collect(Collectors.toMap(entry -> entry.getKey(), entry -> intersection.contains(entry.getKey()) ? entry.getValue() : InitStatus.PARTIAL_INIT, (a, b) -> { if (a == InitStatus.PARTIAL_INIT || b == InitStatus.PARTIAL_INIT) { return InitStatus.PARTIAL_INIT; } return InitStatus.UN_INIT; })); } private void checkVarRef(BSymbol symbol, Location pos) { recordGlobalVariableReferenceRelationship(symbol); InitStatus initStatus = this.uninitializedVars.get(symbol); if (initStatus == null) { return; } if (initStatus == InitStatus.UN_INIT) { this.dlog.error(pos, DiagnosticErrorCode.USAGE_OF_UNINITIALIZED_VARIABLE, symbol); return; } this.dlog.error(pos, DiagnosticErrorCode.PARTIALLY_INITIALIZED_VARIABLE, symbol); } private void recordGlobalVariableReferenceRelationship(BSymbol symbol) { if (this.env.scope == null) { return; } boolean globalVarSymbol = isGlobalVarSymbol(symbol); BSymbol ownerSymbol = this.env.scope.owner; boolean isInPkgLevel = ownerSymbol.getKind() == SymbolKind.PACKAGE; if (isInPkgLevel && (globalVarSymbol || symbol instanceof BTypeSymbol) || (ownerSymbol.tag == SymTag.LET && globalVarSymbol)) { BSymbol dependent = this.currDependentSymbolDeque.peek(); addDependency(dependent, symbol); } else if (ownerSymbol.kind == SymbolKind.FUNCTION && globalVarSymbol) { BInvokableSymbol invokableOwnerSymbol = (BInvokableSymbol) ownerSymbol; addDependency(invokableOwnerSymbol, symbol); } else if (ownerSymbol.kind == SymbolKind.OBJECT && globalVarSymbol) { addDependency(ownerSymbol, symbol); } else if (ownerSymbol.kind == SymbolKind.RECORD && globalVarSymbol) { addDependency(ownerSymbol, symbol); } } private boolean isObjectMemberAccessWithSelf(BLangAccessExpression fieldAccessExpr) { if (fieldAccessExpr.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } return Names.SELF.value.equals(((BLangSimpleVarRef) fieldAccessExpr.expr).variableName.value); } private void checkAssignment(BLangExpression varRef) { NodeKind kind = varRef.getKind(); switch (kind) { case RECORD_VARIABLE_REF: BLangRecordVarRef recordVarRef = (BLangRecordVarRef) varRef; recordVarRef.recordRefFields.forEach(field -> checkAssignment(field.variableReference)); if (recordVarRef.restParam != null) { checkAssignment((BLangExpression) recordVarRef.restParam); } return; case TUPLE_VARIABLE_REF: BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) varRef; tupleVarRef.expressions.forEach(this::checkAssignment); if 
(tupleVarRef.restParam != null) { checkAssignment((BLangExpression) tupleVarRef.restParam); } return; case ERROR_VARIABLE_REF: BLangErrorVarRef errorVarRef = (BLangErrorVarRef) varRef; if (errorVarRef.message != null) { checkAssignment(errorVarRef.message); } if (errorVarRef.cause != null) { checkAssignment(errorVarRef.cause); } for (BLangNamedArgsExpression expression : errorVarRef.detail) { checkAssignment(expression); this.uninitializedVars.remove(((BLangVariableReference) expression.expr).symbol); } if (errorVarRef.restVar != null) { checkAssignment(errorVarRef.restVar); } return; case INDEX_BASED_ACCESS_EXPR: case FIELD_BASED_ACCESS_EXPR: BLangAccessExpression accessExpr = (BLangAccessExpression) varRef; BLangExpression expr = accessExpr.expr; BType type = Types.getReferredType(expr.getBType()); if (isObjectMemberAccessWithSelf(accessExpr)) { BObjectType objectType = (BObjectType) type; BSymbol symbol = accessExpr.symbol; if (this.uninitializedVars.containsKey(symbol)) { this.uninitializedVars.remove(symbol); return; } String fieldName = ((BLangFieldBasedAccess) varRef).field.value; checkFinalEntityUpdate(varRef.pos, fieldName, objectType.fields.get(fieldName).symbol); return; } if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { checkFinalObjectFieldUpdate((BLangFieldBasedAccess) accessExpr); } analyzeNode(expr, env); if (kind == NodeKind.INDEX_BASED_ACCESS_EXPR) { analyzeNode(((BLangIndexBasedAccess) varRef).indexExpr, env); } return; default: break; } if (kind != NodeKind.SIMPLE_VARIABLE_REF && kind != NodeKind.XML_ATTRIBUTE_ACCESS_EXPR) { return; } if (kind == NodeKind.SIMPLE_VARIABLE_REF) { BSymbol symbol = ((BLangSimpleVarRef) varRef).symbol; checkFinalEntityUpdate(varRef.pos, varRef, symbol); BSymbol owner = this.currDependentSymbolDeque.peek(); addFunctionToGlobalVarDependency(owner, ((BLangSimpleVarRef) varRef).symbol); } this.uninitializedVars.remove(((BLangVariableReference) varRef).symbol); } private void checkFinalObjectFieldUpdate(BLangFieldBasedAccess fieldAccess) { BLangExpression expr = fieldAccess.expr; BType exprType = Types.getReferredType(expr.getBType()); if (types.isSubTypeOfBaseType(exprType, TypeTags.OBJECT) && isFinalFieldInAllObjects(fieldAccess.pos, exprType, fieldAccess.field.value)) { dlog.error(fieldAccess.pos, DiagnosticErrorCode.CANNOT_UPDATE_FINAL_OBJECT_FIELD, fieldAccess.symbol); } } private boolean isFinalFieldInAllObjects(Location pos, BType btype, String fieldName) { BType type = Types.getReferredType(btype); if (type.tag == TypeTags.OBJECT) { BField field = ((BObjectType) type).fields.get(fieldName); if (field != null) { return Symbols.isFlagOn(field.symbol.flags, Flags.FINAL); } BObjectTypeSymbol objTypeSymbol = (BObjectTypeSymbol) type.tsymbol; Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objTypeSymbol.name.value, fieldName)); BSymbol funcSymbol = symResolver.resolveObjectMethod(pos, env, funcName, objTypeSymbol); return funcSymbol != null; } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isFinalFieldInAllObjects(pos, memberType, fieldName)) { return false; } } return true; } private void checkFinalEntityUpdate(Location pos, Object field, BSymbol symbol) { if (symbol == null || !Symbols.isFlagOn(symbol.flags, Flags.FINAL)) { return; } if (!this.uninitializedVars.containsKey(symbol)) { dlog.error(pos, DiagnosticErrorCode.CANNOT_ASSIGN_VALUE_FINAL, symbol); return; } InitStatus initStatus = this.uninitializedVars.get(symbol); if (initStatus == InitStatus.PARTIAL_INIT) { dlog.error(pos, 
DiagnosticErrorCode.CANNOT_ASSIGN_VALUE_TO_POTENTIALLY_INITIALIZED_FINAL, symbol); } } private void terminateFlow() { this.flowTerminated = true; } private void checkUnusedImports(List<BLangImportPackage> imports) { for (BLangImportPackage importStmt : imports) { if (importStmt.symbol == null || importStmt.symbol.isUsed || Names.IGNORE.value.equals(importStmt.alias.value)) { continue; } dlog.error(importStmt.alias.pos, DiagnosticErrorCode.UNUSED_MODULE_PREFIX, importStmt.alias.value); } } private void checkUnusedErrorVarsDeclaredWithVar() { for (Map.Entry<BSymbol, Location> entry : this.unusedErrorVarsDeclaredWithVar.entrySet()) { this.dlog.error(entry.getValue(), DiagnosticErrorCode.UNUSED_VARIABLE_WITH_INFERRED_TYPE_INCLUDING_ERROR, entry.getKey().name); } } private void emitUnusedVariableWarnings(Map<BSymbol, Location> unusedLocalVariables) { for (Map.Entry<BSymbol, Location> entry : unusedLocalVariables.entrySet()) { this.dlog.warning(entry.getValue(), DiagnosticWarningCode.UNUSED_LOCAL_VARIABLE, entry.getKey().name); } } private boolean addVarIfInferredTypeIncludesError(BLangSimpleVariable variable) { BType typeIntersection = types.getTypeIntersection(Types.IntersectionContext.compilerInternalIntersectionContext(), variable.getBType(), symTable.errorType, env); if (typeIntersection != null && typeIntersection != symTable.semanticError && typeIntersection != symTable.noType) { unusedErrorVarsDeclaredWithVar.put(variable.symbol, variable.pos); return true; } return false; } private boolean isLocalVariableDefinedWithNonWildCardBindingPattern(BLangSimpleVariable variable) { if (isWildCardBindingPattern(variable)) { return false; } return isLocalVariable(variable.symbol); } private boolean isWildCardBindingPattern(BLangSimpleVariable variable) { return Names.IGNORE.value.equals(variable.name.value); } private boolean isWildCardBindingPattern(BVarSymbol symbol) { return Names.IGNORE == symbol.name; } private boolean isLocalVariable(BVarSymbol symbol) { if (symbol == null) { return false; } BSymbol owner = symbol.owner; if (owner == null || owner.tag == SymTag.PACKAGE) { return false; } if (owner.tag == SymTag.LET) { return true; } if (owner.tag != SymTag.FUNCTION) { return false; } long flags = symbol.flags; SymbolKind kind = symbol.kind; if (kind == SymbolKind.PATH_PARAMETER || kind == SymbolKind.PATH_REST_PARAMETER) { return false; } return !Symbols.isFlagOn(flags, Flags.REQUIRED_PARAM) && !Symbols.isFlagOn(flags, Flags.DEFAULTABLE_PARAM) && !Symbols.isFlagOn(flags, Flags.INCLUDED) && !Symbols.isFlagOn(flags, Flags.REST_PARAM); } private void populateUnusedVariableMapForNonSimpleBindingPatternVariables( Map<BSymbol, Location> unusedLocalVariables, BLangVariable variable) { if (!isLocalVariable(variable.symbol)) { return; } populateUnusedVariableMapForMembers(unusedLocalVariables, variable); } private void populateUnusedVariableMapForMembers(Map<BSymbol, Location> unusedLocalVariables, BLangVariable variable) { if (variable == null) { return; } switch (variable.getKind()) { case VARIABLE: BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable; if (!isWildCardBindingPattern(simpleVariable)) { unusedLocalVariables.put(simpleVariable.symbol, simpleVariable.pos); } break; case RECORD_VARIABLE: BLangRecordVariable recordVariable = (BLangRecordVariable) variable; for (BLangRecordVariable.BLangRecordVariableKeyValue member : recordVariable.variableList) { populateUnusedVariableMapForMembers(unusedLocalVariables, member.valueBindingPattern); } 
populateUnusedVariableMapForMembers(unusedLocalVariables, (BLangVariable) recordVariable.restParam); break; case TUPLE_VARIABLE: BLangTupleVariable tupleVariable = (BLangTupleVariable) variable; for (BLangVariable memberVariable : tupleVariable.memberVariables) { populateUnusedVariableMapForMembers(unusedLocalVariables, memberVariable); } populateUnusedVariableMapForMembers(unusedLocalVariables, tupleVariable.restVariable); break; case ERROR_VARIABLE: BLangErrorVariable errorVariable = (BLangErrorVariable) variable; populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.message); populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.cause); for (BLangErrorVariable.BLangErrorDetailEntry member : errorVariable.detail) { populateUnusedVariableMapForMembers(unusedLocalVariables, member.valueBindingPattern); } populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.restDetail); break; } } private boolean isNotVariableReferenceLVExpr(BLangSimpleVarRef varRefExpr) { if (!varRefExpr.isLValue) { return true; } BLangNode parent = varRefExpr.parent; return parent != null && parent.getKind() != NodeKind.ASSIGNMENT; } private boolean isNotRangeExpr(BLangExpression collection) { if (collection.getKind() != NodeKind.BINARY_EXPR) { return true; } OperatorKind opKind = ((BLangBinaryExpr) collection).opKind; return opKind != OperatorKind.HALF_OPEN_RANGE && opKind != OperatorKind.CLOSED_RANGE; } private enum InitStatus { UN_INIT, PARTIAL_INIT } private class BranchResult { Map<BSymbol, InitStatus> uninitializedVars; boolean flowTerminated; BranchResult(Map<BSymbol, InitStatus> uninitializedVars, boolean flowTerminated) { this.uninitializedVars = uninitializedVars; this.flowTerminated = flowTerminated; } } }
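Aside for readers following the analyzeBranch/mergeUninitializedVars logic above: checkAssignment removes a variable from a branch's uninitialized-vars map once it is assigned, so merging two branch results forms a small lattice over InitStatus. A variable assigned on every branch is absent from both maps and stays absent; one assigned on only some branches becomes PARTIAL_INIT; one assigned on no branch stays UN_INIT. Below is a minimal standalone sketch of that merge; the class, method, and variable names are illustrative and not part of the compiler.

import java.util.HashMap;
import java.util.Map;

// Standalone sketch of the branch-merge lattice used by mergeUninitializedVars.
public class InitMergeSketch {
    enum InitStatus { UN_INIT, PARTIAL_INIT }

    static Map<String, InitStatus> merge(Map<String, InitStatus> first, Map<String, InitStatus> second) {
        Map<String, InitStatus> merged = new HashMap<>();
        for (Map.Entry<String, InitStatus> e : first.entrySet()) {
            if (!second.containsKey(e.getKey())) {
                // Present in the first map only, i.e. initialized in the second branch.
                merged.put(e.getKey(), InitStatus.PARTIAL_INIT);
            } else if (e.getValue() == InitStatus.PARTIAL_INIT
                    || second.get(e.getKey()) == InitStatus.PARTIAL_INIT) {
                merged.put(e.getKey(), InitStatus.PARTIAL_INIT);
            } else {
                // Uninitialized on both paths.
                merged.put(e.getKey(), InitStatus.UN_INIT);
            }
        }
        for (String key : second.keySet()) {
            if (!first.containsKey(key)) {
                // Present in the second map only, i.e. initialized in the first branch.
                merged.put(key, InitStatus.PARTIAL_INIT);
            }
        }
        return merged;
    }

    public static void main(String[] args) {
        Map<String, InitStatus> thenBranch = new HashMap<>();
        thenBranch.put("a", InitStatus.UN_INIT); // 'a' assigned only in the else branch
        thenBranch.put("b", InitStatus.UN_INIT); // 'b' assigned in neither branch
        Map<String, InitStatus> elseBranch = new HashMap<>();
        elseBranch.put("b", InitStatus.UN_INIT);
        // Prints a=PARTIAL_INIT, b=UN_INIT (map iteration order may vary).
        System.out.println(merge(thenBranch, elseBranch));
    }
}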
The directories should point to `balo` and `caches` here, right?
public void testCleanCommand() { CleanCommand cleanCommand = new CleanCommand(Paths.get(System.getProperty("user.dir")), false); new CommandLine(cleanCommand).parse("--sourceroot", this.testResources.resolve("valid-project").toString()); cleanCommand.execute(); Path bin = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME + File.separator + ProjectDirConstants.BIN_DIR_NAME); Path balo = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME + File.separator + ProjectDirConstants.BIN_DIR_NAME); Path caches = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME + File.separator + ProjectDirConstants.BIN_DIR_NAME); Assert.assertFalse(Files.exists(bin), "Check if bin directory is deleted"); Assert.assertFalse(Files.exists(balo), "Check if balo directory is deleted"); Assert.assertFalse(Files.exists(caches), "Check if caches directory is deleted"); }
+ File.separator + ProjectDirConstants.BIN_DIR_NAME);
public void testCleanCommand() { CleanCommand cleanCommand = new CleanCommand(Paths.get(System.getProperty("user.dir")), false); new CommandLine(cleanCommand).parse("--sourceroot", this.testResources.resolve("valid-project").toString()); cleanCommand.execute(); Path bin = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME + File.separator + ProjectDirConstants.BIN_DIR_NAME); Path balo = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME + File.separator + ProjectDirConstants.TARGET_BALO_DIRECTORY); Path caches = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME + File.separator + ProjectDirConstants.CACHES_DIR_NAME); Assert.assertFalse(Files.exists(bin), "Check if bin directory is deleted"); Assert.assertFalse(Files.exists(balo), "Check if balo directory is deleted"); Assert.assertFalse(Files.exists(caches), "Check if caches directory is deleted"); }
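As the review comment above points out, the original test copy-pasted the bin path, so the balo and caches assertions re-checked the bin directory and could pass vacuously. A table-driven variant avoids that class of slip; this is an illustrative sketch only, assuming the same test-class fields and imports, with the constant names taken from the corrected method above:

// Derive each expected target subdirectory from its own constant and assert
// in one loop, so a copy-pasted path cannot silently re-check bin.
Path target = this.testResources.resolve("valid-project")
        .resolve(ProjectDirConstants.TARGET_DIR_NAME);
String[] cleanedDirs = {
        ProjectDirConstants.BIN_DIR_NAME,
        ProjectDirConstants.TARGET_BALO_DIRECTORY,
        ProjectDirConstants.CACHES_DIR_NAME
};
for (String dir : cleanedDirs) {
    Assert.assertFalse(Files.exists(target.resolve(dir)),
            "Check if " + dir + " directory is deleted");
}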
class BuildCommandTest extends CommandTest { private Path moduleBalo; private Path tplModuleBalo; private Path testResources; @BeforeClass public void setup() throws IOException { super.setup(); try { this.testResources = super.tmpDir.resolve("build-test-resources"); URI testResourcesURI = getClass().getClassLoader().getResource("test-resources").toURI(); Files.walkFileTree(Paths.get(testResourcesURI), new BuildCommandTest.Copy(Paths.get(testResourcesURI), this.testResources)); } catch (URISyntaxException e) { Assert.fail("error loading resources"); } } @Test(description = "Build non .bal file") public void testNonBalFileBuild() throws IOException { Path nonBalFilePath = this.testResources.resolve("non-bal-file"); BuildCommand buildCommand = new BuildCommand(nonBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("hello_world.txt"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: invalid Ballerina source path. It should either be a name of a module " + "in a Ballerina project or a file with a '.bal' extension. Use -a or " + "--all to build or compile all modules.\n" + "\n" + "USAGE:\n" + " ballerina build {<ballerina-file> | <module-name> | -a | --all}\n" + "\n" + "For more information try --help\n"); } @Test(description = "Build a valid ballerina file") public void testBuildBalFile() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true, validBalFilePath); new CommandLine(buildCommand).parse("hello_world.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\thello_world.jar\n"); Assert.assertTrue(Files.exists(this.testResources .resolve("valid-bal-file") .resolve("hello_world.jar"))); Files.delete(this.testResources .resolve("valid-bal-file") .resolve("hello_world.jar")); readOutput(true); } @Test(description = "Build a valid ballerina file with output flag") public void testBuildBalFileWithOutputFlag() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-o", "foo.jar", "hello_world.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\tfoo.jar\n"); Assert.assertTrue(Files.exists(this.testResources.resolve("valid-bal-file").resolve("foo.jar"))); long executableSize = Files.size(this.testResources.resolve("valid-bal-file").resolve("foo.jar")); Files.delete(this.testResources.resolve("valid-bal-file").resolve("foo.jar")); buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-o", "bar", "hello_world.bal"); buildCommand.execute(); buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\tbar.jar\n"); Assert.assertTrue(Files.exists(this.testResources.resolve("valid-bal-file").resolve("bar.jar"))); Assert.assertEquals(Files.size(this.testResources.resolve("valid-bal-file").resolve("bar.jar")), 
executableSize); Files.delete(this.testResources.resolve("valid-bal-file").resolve("bar.jar")); buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); Path helloExecutableTmpDir = Files.createTempDirectory("hello_executable-"); new CommandLine(buildCommand).parse("-o", helloExecutableTmpDir.toAbsolutePath().toString(), "hello_world.bal"); buildCommand.execute(); buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\t" + helloExecutableTmpDir.toAbsolutePath().resolve("hello_world.jar") + "\n"); Assert.assertTrue(Files.exists(helloExecutableTmpDir.toAbsolutePath().resolve("hello_world.jar"))); buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-o", helloExecutableTmpDir.toAbsolutePath().resolve("hippo.jar").toString(), "hello_world.bal"); buildCommand.execute(); buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\t" + helloExecutableTmpDir.toAbsolutePath().resolve("hippo.jar") + "\n"); Assert.assertTrue(Files.exists(helloExecutableTmpDir.toAbsolutePath().resolve("hippo.jar"))); deleteDirectory(helloExecutableTmpDir); readOutput(true); } @Test(description = "Build a valid ballerina file by passing invalid source root path and absolute bal file path") public void testBuildBalFileWithAbsolutePath() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(this.testResources, printStream, printStream, false, true, validBalFilePath); new CommandLine(buildCommand).parse(validBalFilePath.resolve("hello_world.bal").toAbsolutePath().toString()); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "" + "\n" + "Generating executables\n" + "" + "\thello_world.jar\n"); Assert.assertTrue(Files.exists(this.testResources .resolve("valid-bal-file") .resolve("hello_world.jar"))); Files.delete(this.testResources .resolve("valid-bal-file") .resolve("hello_world.jar")); readOutput(true); } @Test(description = "Build a valid ballerina file with invalid source root and bal file name") public void testBuildBalFileWithInvalidSourceRoot() throws IOException { BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("oo"), printStream, printStream, false, true); new CommandLine(buildCommand).parse("hello_world.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: '" + this.testResources.resolve("oo").resolve("hello_world.bal").toString() + "' Ballerina file does not exist.\n"); } @Test(description = "Build non existing bal file with a valid source root path") public void testNonExistingBalFile() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("xyz.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: '" + validBalFilePath.resolve("xyz.bal").toString() + "' Ballerina file does not exist.\n"); } @Test(description = "Build a bal file without passing bal file name as 
arg") public void testBuildBalFileWithNoArg() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse(); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: 'build' command requires a module name or a Ballerina file to " + "build/compile. Use '-a' or '--all' to build/compile all the modules of " + "the project.\n" + "\n" + "USAGE:\n" + " ballerina build {<ballerina-file> | <module-name> | -a | --all}\n"); } @Test(description = "Build all modules with passing arguments") public void testBuildAllWithArg() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-project"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-a", "hello2"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertTrue(buildLog.contains("too many arguments.\n")); } @Test(description = "Build bal file with no entry") public void testBuildBalFileWithNoEntry() throws IOException { Path sourceRoot = this.testResources.resolve("valid-bal-file-with-no-entry"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("hello_world.bal"); String exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "error: no entry points found in '" + sourceRoot.resolve("hello_world.bal").toString() + "'."); } @Test(description = "Build a valid ballerina file with toml") public void testBuildBalFileWithToml() throws IOException { Path sourceRoot = this.testResources.resolve("single-bal-file-with-toml"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true, sourceRoot); new CommandLine(buildCommand).parse("hello_world.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "" + "\n" + "Generating executables\n" + "" + "\thello_world.jar\n"); Assert.assertTrue(Files.exists(sourceRoot.resolve("hello_world.jar"))); Files.delete(sourceRoot.resolve("hello_world.jar")); readOutput(true); } @Test(description = "Build a ballerina project with no modules.") public void testBuildBalProjWithNoModules() throws IOException { Path sourceRoot = this.testResources.resolve("project-with-no-modules"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-a"); String exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "cannot find module(s) to build/compile as 'src' " + "directory is missing. modules should be placed inside " + "an 'src' directory of the project."); } @Test(description = "Build a ballerina project with non existing module.") public void testBuildBalProjectWithInvalidModule() throws IOException { Path sourceRoot = this.testResources.resolve("project-with-no-modules"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("xyz"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: invalid Ballerina source path. 
It should either be a name of a module " + "in a Ballerina project or a file with a '.bal' extension. Use -a or --all " + "to build or compile all modules.\n" + "\n" + "USAGE:\n" + " ballerina build {<ballerina-file> | <module-name> | -a | --all}\n" + "\n" + "For more information try --help\n"); } @Test(description = "Build a ballerina project with non existing module.") public void testBuildBalProjectToml() throws IOException { Path sourceRoot = this.testResources.resolve("ballerina-toml"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("foo", "--skip-tests"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\tbar/foo:1.2.0\n" + "\nCreating balos\n" + "\ttarget" + File.separator + "balo" + File.separator + "foo-" + ProgramFileConstants.IMPLEMENTATION_VERSION + "-any-1.2.0.balo\n" + "\n" + "Generating executables\n" + "\ttarget" + File.separator + "bin" + File.separator + "foo.jar\n"); String tomlContent = ""; Files.write(sourceRoot.resolve("Ballerina.toml"), tomlContent.getBytes(), StandardOpenOption.TRUNCATE_EXISTING); buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("foo"); String exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "invalid Ballerina.toml file: organization name and the version of the project " + "is missing. example: \n" + "[project]\n" + "org-name=\"my_org\"\n" + "version=\"1.0.0\"\n"); tomlContent = "[project]"; Files.write(sourceRoot.resolve("Ballerina.toml"), tomlContent.getBytes(), StandardOpenOption.TRUNCATE_EXISTING); exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "invalid Ballerina.toml file: cannot find 'org-name' under [project]"); tomlContent = "[project]\norg-name=\"bar\""; Files.write(sourceRoot.resolve("Ballerina.toml"), tomlContent.getBytes(), StandardOpenOption.TRUNCATE_EXISTING); exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "invalid Ballerina.toml file: cannot find 'version' under [project]"); tomlContent = "[project]\norg-name=\"bar\"\nversion=\"a.b.c\""; Files.write(sourceRoot.resolve("Ballerina.toml"), tomlContent.getBytes(), StandardOpenOption.TRUNCATE_EXISTING); exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "invalid Ballerina.toml file: 'version' under [project] is not semver"); readOutput(true); } @Test(description = "Test Build Command in a Project") public void testBuildCommand() throws IOException { Path libs = this.testResources.resolve("valid-project").resolve("libs"); Files.createDirectory(libs); zipFile(libs.resolve("toml4j.jar").toFile(), "toml.class"); zipFile(libs.resolve("swagger.jar").toFile(), "swagger.class"); zipFile(libs.resolve("json.jar").toFile(), "json.class"); String[] compileArgs = {"--all", "--skip-tests"}; BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("valid-project"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); Path target = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME); Assert.assertTrue(Files.exists(target), "Check if target directory is created"); Assert.assertTrue(Files.exists(target.resolve(ProjectDirConstants.TARGET_BALO_DIRECTORY)), "Check if balo directory exists"); String baloName = "mymodule-" + 
ProgramFileConstants.IMPLEMENTATION_VERSION + "-java8-0.1.0.balo"; this.moduleBalo = target.resolve(ProjectDirConstants.TARGET_BALO_DIRECTORY) .resolve(baloName); Assert.assertTrue(Files.exists(this.moduleBalo), "Check if balo file exists"); String tplBaloName = "mytemplate-" + ProgramFileConstants.IMPLEMENTATION_VERSION + "-any-0.1.0.balo"; this.tplModuleBalo = target.resolve(ProjectDirConstants.TARGET_BALO_DIRECTORY) .resolve(tplBaloName); Assert.assertTrue(Files.exists(this.tplModuleBalo), "Check if template balo file exists"); Path lockFile = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.LOCK_FILE_NAME); Assert.assertTrue(Files.exists(lockFile), "Check if lock file is created"); readOutput(true); } @Test(description = "Build a valid ballerina file with relative path") public void testBuildWithRelativePath() throws IOException { String buildPath = "relative" + File.separator + "testDir" + File.separator + ".." + File.separator + "testBal" + File.separator + "hello_world.bal"; Path sourceRoot = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true, sourceRoot); new CommandLine(buildCommand).parse(buildPath); buildCommand.execute(); Assert.assertTrue(Files.exists(sourceRoot.resolve("hello_world.jar"))); readOutput(true); } private static void zipFile(File file, String contentFile) { try { ZipOutputStream out = new ZipOutputStream(new FileOutputStream(file)); ZipEntry e = new ZipEntry(contentFile); out.putNextEntry(e); StringBuilder sb = new StringBuilder(); sb.append("Test String"); byte[] data = sb.toString().getBytes(); out.write(data, 0, data.length); out.closeEntry(); out.close(); } catch (FileNotFoundException ex) { System.err.format("The file %s does not exist", file.getName()); } catch (IOException ex) { System.err.format("I/O error: " + ex); } } @Test(description = "Test Build Command in a Project which use dependency jar which include stored jar.") public void testBuildCommandWithStoredJarDependency() throws IOException { String[] compileArgs = {"--all", "--skip-tests"}; BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("stored-jar-dependency-project"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); Path target = this.testResources.resolve("stored-jar-dependency-project") .resolve(ProjectDirConstants.TARGET_DIR_NAME); Assert.assertTrue(Files.exists(target), "Check if target directory is created"); Path executablePath = target.resolve(ProjectDirConstants.BIN_DIR_NAME).resolve("mymodule.jar"); JarFile jarFile = new JarFile(executablePath.toFile()); Enumeration e = jarFile.entries(); while (e.hasMoreElements()) { JarEntry je = (JarEntry) e.nextElement(); String name = je.getName(); int method = je.getMethod(); if (name.endsWith(BLANG_COMPILED_JAR_EXT)) { Assert.assertEquals(method, ZipEntry.STORED); } } readOutput(true); } @Test(dependsOnMethods = {"testBuildCommand"}) public void testBuildOutput() throws IOException { Path bin = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME) .resolve(ProjectDirConstants.BIN_DIR_NAME); Assert.assertTrue(Files.exists(bin)); Path myModuleJar = bin.resolve("mymodule" + BLANG_COMPILED_JAR_EXT); Assert.assertTrue(Files.exists(myModuleJar)); JarFile jar = new JarFile(myModuleJar.toFile()); Assert.assertNotNull(jar.getJarEntry("resources/testOrg/mymodule/resource.txt")); 
Assert.assertNotNull(jar.getJarEntry("resources/testOrg/mymodule/myresource/insideDirectory.txt")); } @Test(dependsOnMethods = {"testBuildOutput", "testBuildCommand"}) public void testBaloContents() throws IOException { URI baloZip = URI.create("jar:" + moduleBalo.toUri().toString()); FileSystems.newFileSystem(baloZip, Collections.emptyMap()) .getRootDirectories() .forEach(root -> { try (Stream<Path> stream = Files.list(root)) { Path metadata = root.resolve(ProjectDirConstants.BALO_METADATA_DIR_NAME); Assert.assertTrue(Files.exists(metadata)); Assert.assertTrue(Files.isDirectory(metadata)); Path baloToml = metadata.resolve(ProjectDirConstants.BALO_METADATA_FILE); Assert.assertTrue(Files.exists(baloToml)); Path moduleToml = metadata.resolve(ProjectDirConstants.BALO_MODULE_METADATA_FILE); Assert.assertTrue(Files.exists(moduleToml)); String moduleTomlContent = new String(Files.readAllBytes(moduleToml)); String baloTomlContent = new String(Files.readAllBytes(baloToml)); Module module = new Toml().read(moduleTomlContent).to(Module.class); BaloToml balo = new Toml().read(baloTomlContent).to(BaloToml.class); Assert.assertEquals(module.module_version, "0.1.0"); Assert.assertEquals(balo.balo_version, "1.0.0"); Path srcDir = root.resolve(ProjectDirConstants.SOURCE_DIR_NAME); Assert.assertTrue(Files.exists(srcDir)); Path moduleDir = srcDir.resolve("mymodule"); Assert.assertTrue(Files.exists(moduleDir)); Path mainBal = moduleDir.resolve("main.bal"); Assert.assertTrue(Files.exists(mainBal)); Path moduleMD = moduleDir.resolve("Module.md"); Assert.assertFalse(Files.exists(moduleMD)); Path testDir = moduleDir.resolve("tests"); Assert.assertFalse(Files.exists(testDir)); Path resourceDirInModule = moduleDir.resolve(ProjectDirConstants.RESOURCE_DIR_NAME); Assert.assertFalse(Files.exists(resourceDirInModule)); Path resourceDir = root.resolve(ProjectDirConstants.RESOURCE_DIR_NAME); Assert.assertTrue(Files.exists(resourceDir)); Path resourceDirContent = resourceDir.resolve("resource.txt"); Assert.assertTrue(Files.exists(resourceDirContent)); Path docsDir = root.resolve(ProjectDirConstants.BALO_DOC_DIR_NAME); Assert.assertTrue(Files.exists(docsDir)); Path moduleMdInBalo = docsDir.resolve(ProjectDirConstants.MODULE_MD_FILE_NAME); Assert.assertTrue(Files.exists(moduleMdInBalo)); Path platformLibDir = root.resolve(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME); Assert.assertTrue(Files.exists(platformLibDir)); Path jarFile = platformLibDir.resolve("toml4j.jar"); Assert.assertTrue(Files.exists(jarFile)); } catch (IOException ex) { throw new AssertionError("Error while reading balo content"); } }); } @Test(dependsOnMethods = {"testBuildCommand"}) public void testTemplateBaloContents() throws IOException { URI baloZip = URI.create("jar:" + tplModuleBalo.toUri().toString()); FileSystems.newFileSystem(baloZip, Collections.emptyMap()) .getRootDirectories() .forEach(root -> { try (Stream<Path> stream = Files.list(root)) { Path metadata = root.resolve(ProjectDirConstants.BALO_METADATA_DIR_NAME); Assert.assertTrue(Files.exists(metadata)); Assert.assertTrue(Files.isDirectory(metadata)); Path baloToml = metadata.resolve(ProjectDirConstants.BALO_METADATA_FILE); Assert.assertTrue(Files.exists(baloToml)); Path moduleToml = metadata.resolve(ProjectDirConstants.BALO_MODULE_METADATA_FILE); Assert.assertTrue(Files.exists(moduleToml)); String moduleTomlContent = new String(Files.readAllBytes(moduleToml)); String baloTomlContent = new String(Files.readAllBytes(baloToml)); Module module = new
Toml().read(moduleTomlContent).to(Module.class); BaloToml balo = new Toml().read(baloTomlContent).to(BaloToml.class); Assert.assertEquals(module.module_version, "0.1.0"); Assert.assertEquals(balo.balo_version, "1.0.0"); Path srcDir = root.resolve(ProjectDirConstants.SOURCE_DIR_NAME); Assert.assertTrue(Files.exists(srcDir)); Path moduleDir = srcDir.resolve("mytemplate"); Assert.assertTrue(Files.exists(moduleDir)); Path testDir = moduleDir.resolve("tests"); Assert.assertTrue(Files.exists(testDir)); Path testFile = testDir.resolve("main_test.bal"); Assert.assertTrue(Files.exists(testFile)); Path mainBal = moduleDir.resolve("main.bal"); Assert.assertTrue(Files.exists(mainBal)); Path moduleMD = moduleDir.resolve("Module.md"); Assert.assertFalse(Files.exists(moduleMD)); Path resourceDirInModule = moduleDir.resolve(ProjectDirConstants.RESOURCE_DIR_NAME); Assert.assertFalse(Files.exists(resourceDirInModule)); Path resourceDir = root.resolve(ProjectDirConstants.RESOURCE_DIR_NAME); Assert.assertTrue(Files.exists(resourceDir)); Path resourceDirContent = resourceDir.resolve("resource.txt"); Assert.assertTrue(Files.exists(resourceDirContent)); Path docsDir = root.resolve(ProjectDirConstants.BALO_DOC_DIR_NAME); Assert.assertTrue(Files.exists(docsDir)); Path moduleMdInBalo = docsDir.resolve(ProjectDirConstants.MODULE_MD_FILE_NAME); Assert.assertTrue(Files.exists(moduleMdInBalo)); Path platformLibDir = root.resolve(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME); Assert.assertFalse(Files.exists(platformLibDir)); Path jarFile = platformLibDir.resolve("toml4j.jar"); Assert.assertFalse(Files.exists(jarFile)); } catch (IOException ex) { throw new AssertionError("Error while reading balo content"); } }); } @Test(dependsOnMethods = {"testBuildCommand"}) public void testTargetCacheDirectory() throws IOException { Path cache = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME) .resolve(ProjectDirConstants.CACHES_DIR_NAME); } @Test(description = "Test the cleaning of target resources in the build command.", dependsOnMethods = {"testBuildCommand"}) public void testTargetClean() throws IOException { String[] compileArgs = {"mymodule", "--skip-tests"}; Path target = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME); BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("valid-project"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); Path executablePath = target.resolve(ProjectDirConstants.BIN_DIR_NAME).resolve("mytemplate.jar"); Assert.assertTrue(Files.exists(executablePath), "Check if executables of other modules are not deleted during a single module build"); } @Test(description = "Test Build Command for a single file.") public void testBuildCommandSingleFile() throws IOException { String[] compileArgs = {"hello_world.bal"}; BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("valid-bal-file"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); readOutput(); Assert.assertFalse(Files.exists(tmpDir.resolve(ProjectDirConstants.TARGET_DIR_NAME)), "Check if target directory is not created"); Path lockFile = tmpDir.resolve(ProjectDirConstants.LOCK_FILE_NAME); Assert.assertFalse(Files.exists(lockFile), "Check if lock file is created"); Path execJar = Paths.get("hello_world.jar"); Assert.assertTrue(Files.exists(execJar), "Check if jar gets created"); Files.delete(execJar); } 
@Test(description = "Test Build Command for a single file with output flag.") public void testBuildCommandSingleFileWithOutput() throws IOException { String[] compileArgs = {"-osample.jar", "hello_world.bal"}; BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("valid-bal-file"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); readOutput(); Assert.assertFalse(Files.exists(tmpDir.resolve(ProjectDirConstants.TARGET_DIR_NAME)), "Check if target directory is not created"); Path lockFile = tmpDir.resolve(ProjectDirConstants.LOCK_FILE_NAME); Assert.assertFalse(Files.exists(lockFile), "Check if lock file is created"); Path execJar = this.testResources.resolve("valid-bal-file").resolve("sample.jar"); Assert.assertTrue(Files.exists(execJar), "Check if jar gets created"); } @Test(description = "Test the --skip-tests flag in the build command to ensure it avoids compiling tests") public void testBuildWithSkipTests() throws IOException { Path projectWithTestErrors = this.testResources.resolve("project-with-test-errors"); BuildCommand buildCommand = new BuildCommand(projectWithTestErrors, printStream, printStream, false, true); new CommandLine(buildCommand).parse("--skip-tests", "-a"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\ttestOrg/module1:0.1.0\n" + "\nCreating balos\n" + "\ttarget/balo/module1-" + ProgramFileConstants.IMPLEMENTATION_VERSION + "-any-0.1.0.balo\n" + "\nGenerating executables\n" + "\ttarget/bin/module1.jar\n"); } static class Copy extends SimpleFileVisitor<Path> { private Path fromPath; private Path toPath; private StandardCopyOption copyOption; public Copy(Path fromPath, Path toPath, StandardCopyOption copyOption) { this.fromPath = fromPath; this.toPath = toPath; this.copyOption = copyOption; } public Copy(Path fromPath, Path toPath) { this(fromPath, toPath, StandardCopyOption.REPLACE_EXISTING); } @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { Path targetPath = toPath.resolve(fromPath.relativize(dir).toString()); if (!Files.exists(targetPath)) { Files.createDirectory(targetPath); } return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.copy(file, toPath.resolve(fromPath.relativize(file).toString()), copyOption); return FileVisitResult.CONTINUE; } } }
class BuildCommandTest extends CommandTest { private Path moduleBalo; private Path tplModuleBalo; private Path testResources; @BeforeClass public void setup() throws IOException { super.setup(); try { this.testResources = super.tmpDir.resolve("build-test-resources"); URI testResourcesURI = getClass().getClassLoader().getResource("test-resources").toURI(); Files.walkFileTree(Paths.get(testResourcesURI), new BuildCommandTest.Copy(Paths.get(testResourcesURI), this.testResources)); } catch (URISyntaxException e) { Assert.fail("error loading resources"); } } @Test(description = "Build non .bal file") public void testNonBalFileBuild() throws IOException { Path nonBalFilePath = this.testResources.resolve("non-bal-file"); BuildCommand buildCommand = new BuildCommand(nonBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("hello_world.txt"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: invalid Ballerina source path. It should either be a name of a module " + "in a Ballerina project or a file with a '.bal' extension. Use -a or " + "--all to build or compile all modules.\n" + "\n" + "USAGE:\n" + " ballerina build {<ballerina-file> | <module-name> | -a | --all}\n" + "\n" + "For more information try --help\n"); } @Test(description = "Build a valid ballerina file") public void testBuildBalFile() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true, validBalFilePath); new CommandLine(buildCommand).parse("hello_world.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\thello_world.jar\n"); Assert.assertTrue(Files.exists(this.testResources .resolve("valid-bal-file") .resolve("hello_world.jar"))); Files.delete(this.testResources .resolve("valid-bal-file") .resolve("hello_world.jar")); readOutput(true); } @Test(description = "Build a valid ballerina file with output flag") public void testBuildBalFileWithOutputFlag() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-o", "foo.jar", "hello_world.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\tfoo.jar\n"); Assert.assertTrue(Files.exists(this.testResources.resolve("valid-bal-file").resolve("foo.jar"))); long executableSize = Files.size(this.testResources.resolve("valid-bal-file").resolve("foo.jar")); Files.delete(this.testResources.resolve("valid-bal-file").resolve("foo.jar")); buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-o", "bar", "hello_world.bal"); buildCommand.execute(); buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\tbar.jar\n"); Assert.assertTrue(Files.exists(this.testResources.resolve("valid-bal-file").resolve("bar.jar"))); Assert.assertEquals(Files.size(this.testResources.resolve("valid-bal-file").resolve("bar.jar")), 
executableSize); Files.delete(this.testResources.resolve("valid-bal-file").resolve("bar.jar")); buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); Path helloExecutableTmpDir = Files.createTempDirectory("hello_executable-"); new CommandLine(buildCommand).parse("-o", helloExecutableTmpDir.toAbsolutePath().toString(), "hello_world.bal"); buildCommand.execute(); buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\t" + helloExecutableTmpDir.toAbsolutePath().resolve("hello_world.jar") + "\n"); Assert.assertTrue(Files.exists(helloExecutableTmpDir.toAbsolutePath().resolve("hello_world.jar"))); buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-o", helloExecutableTmpDir.toAbsolutePath().resolve("hippo.jar").toString(), "hello_world.bal"); buildCommand.execute(); buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "\n" + "Generating executables\n" + "\t" + helloExecutableTmpDir.toAbsolutePath().resolve("hippo.jar") + "\n"); Assert.assertTrue(Files.exists(helloExecutableTmpDir.toAbsolutePath().resolve("hippo.jar"))); deleteDirectory(helloExecutableTmpDir); readOutput(true); } @Test(description = "Build a valid ballerina file by passing invalid source root path and absolute bal file path") public void testBuildBalFileWithAbsolutePath() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(this.testResources, printStream, printStream, false, true, validBalFilePath); new CommandLine(buildCommand).parse(validBalFilePath.resolve("hello_world.bal").toAbsolutePath().toString()); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "" + "\n" + "Generating executables\n" + "" + "\thello_world.jar\n"); Assert.assertTrue(Files.exists(this.testResources .resolve("valid-bal-file") .resolve("hello_world.jar"))); Files.delete(this.testResources .resolve("valid-bal-file") .resolve("hello_world.jar")); readOutput(true); } @Test(description = "Build a valid ballerina file with invalid source root and bal file name") public void testBuildBalFileWithInvalidSourceRoot() throws IOException { BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("oo"), printStream, printStream, false, true); new CommandLine(buildCommand).parse("hello_world.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: '" + this.testResources.resolve("oo").resolve("hello_world.bal").toString() + "' Ballerina file does not exist.\n"); } @Test(description = "Build non existing bal file with a valid source root path") public void testNonExistingBalFile() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("xyz.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: '" + validBalFilePath.resolve("xyz.bal").toString() + "' Ballerina file does not exist.\n"); } @Test(description = "Build a bal file without passing bal file name as 
arg") public void testBuildBalFileWithNoArg() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse(); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: 'build' command requires a module name or a Ballerina file to " + "build/compile. Use '-a' or '--all' to build/compile all the modules of " + "the project.\n" + "\n" + "USAGE:\n" + " ballerina build {<ballerina-file> | <module-name> | -a | --all}\n"); } @Test(description = "Build all modules with passing arguments") public void testBuildAllWithArg() throws IOException { Path validBalFilePath = this.testResources.resolve("valid-project"); BuildCommand buildCommand = new BuildCommand(validBalFilePath, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-a", "hello2"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertTrue(buildLog.contains("too many arguments.\n")); } @Test(description = "Build bal file with no entry") public void testBuildBalFileWithNoEntry() throws IOException { Path sourceRoot = this.testResources.resolve("valid-bal-file-with-no-entry"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("hello_world.bal"); String exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "error: no entry points found in '" + sourceRoot.resolve("hello_world.bal").toString() + "'."); } @Test(description = "Build a valid ballerina file with toml") public void testBuildBalFileWithToml() throws IOException { Path sourceRoot = this.testResources.resolve("single-bal-file-with-toml"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true, sourceRoot); new CommandLine(buildCommand).parse("hello_world.bal"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\thello_world.bal\n" + "" + "\n" + "Generating executables\n" + "" + "\thello_world.jar\n"); Assert.assertTrue(Files.exists(sourceRoot.resolve("hello_world.jar"))); Files.delete(sourceRoot.resolve("hello_world.jar")); readOutput(true); } @Test(description = "Build a ballerina project with no modules.") public void testBuildBalProjWithNoModules() throws IOException { Path sourceRoot = this.testResources.resolve("project-with-no-modules"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("-a"); String exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "cannot find module(s) to build/compile as 'src' " + "directory is missing. modules should be placed inside " + "an 'src' directory of the project."); } @Test(description = "Build a ballerina project with non existing module.") public void testBuildBalProjectWithInvalidModule() throws IOException { Path sourceRoot = this.testResources.resolve("project-with-no-modules"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("xyz"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "ballerina: invalid Ballerina source path. 
It should either be a name of a module " + "in a Ballerina project or a file with a '.bal' extension. Use -a or --all " + "to build or compile all modules.\n" + "\n" + "USAGE:\n" + " ballerina build {<ballerina-file> | <module-name> | -a | --all}\n" + "\n" + "For more information try --help\n"); } @Test(description = "Build a ballerina project with non existing module.") public void testBuildBalProjectToml() throws IOException { Path sourceRoot = this.testResources.resolve("ballerina-toml"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("foo", "--skip-tests"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\tbar/foo:1.2.0\n" + "\nCreating balos\n" + "\ttarget" + File.separator + "balo" + File.separator + "foo-" + ProgramFileConstants.IMPLEMENTATION_VERSION + "-any-1.2.0.balo\n" + "\n" + "Generating executables\n" + "\ttarget" + File.separator + "bin" + File.separator + "foo.jar\n"); String tomlContent = ""; Files.write(sourceRoot.resolve("Ballerina.toml"), tomlContent.getBytes(), StandardOpenOption.TRUNCATE_EXISTING); buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true); new CommandLine(buildCommand).parse("foo"); String exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "invalid Ballerina.toml file: organization name and the version of the project " + "is missing. example: \n" + "[project]\n" + "org-name=\"my_org\"\n" + "version=\"1.0.0\"\n"); tomlContent = "[project]"; Files.write(sourceRoot.resolve("Ballerina.toml"), tomlContent.getBytes(), StandardOpenOption.TRUNCATE_EXISTING); exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "invalid Ballerina.toml file: cannot find 'org-name' under [project]"); tomlContent = "[project]\norg-name=\"bar\""; Files.write(sourceRoot.resolve("Ballerina.toml"), tomlContent.getBytes(), StandardOpenOption.TRUNCATE_EXISTING); exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "invalid Ballerina.toml file: cannot find 'version' under [project]"); tomlContent = "[project]\norg-name=\"bar\"\nversion=\"a.b.c\""; Files.write(sourceRoot.resolve("Ballerina.toml"), tomlContent.getBytes(), StandardOpenOption.TRUNCATE_EXISTING); exMsg = executeAndGetException(buildCommand); Assert.assertEquals(exMsg, "invalid Ballerina.toml file: 'version' under [project] is not semver"); readOutput(true); } @Test(description = "Test Build Command in a Project") public void testBuildCommand() throws IOException { Path libs = this.testResources.resolve("valid-project").resolve("libs"); Files.createDirectory(libs); zipFile(libs.resolve("toml4j.jar").toFile(), "toml.class"); zipFile(libs.resolve("swagger.jar").toFile(), "swagger.class"); zipFile(libs.resolve("json.jar").toFile(), "json.class"); String[] compileArgs = {"--all", "--skip-tests"}; BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("valid-project"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); Path target = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME); Assert.assertTrue(Files.exists(target), "Check if target directory is created"); Assert.assertTrue(Files.exists(target.resolve(ProjectDirConstants.TARGET_BALO_DIRECTORY)), "Check if balo directory exists"); String baloName = "mymodule-" + 
ProgramFileConstants.IMPLEMENTATION_VERSION + "-java8-0.1.0.balo"; this.moduleBalo = target.resolve(ProjectDirConstants.TARGET_BALO_DIRECTORY) .resolve(baloName); Assert.assertTrue(Files.exists(this.moduleBalo), "Check if balo file exists"); String tplBaloName = "mytemplate-" + ProgramFileConstants.IMPLEMENTATION_VERSION + "-any-0.1.0.balo"; this.tplModuleBalo = target.resolve(ProjectDirConstants.TARGET_BALO_DIRECTORY) .resolve(tplBaloName); Assert.assertTrue(Files.exists(this.tplModuleBalo), "Check if template balo file exists"); Path lockFile = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.LOCK_FILE_NAME); Assert.assertTrue(Files.exists(lockFile), "Check if lock file is created"); readOutput(true); } @Test(description = "Build a valid ballerina file with relative path") public void testBuildWithRelativePath() throws IOException { String buildPath = "relative" + File.separator + "testDir" + File.separator + ".." + File.separator + "testBal" + File.separator + "hello_world.bal"; Path sourceRoot = this.testResources.resolve("valid-bal-file"); BuildCommand buildCommand = new BuildCommand(sourceRoot, printStream, printStream, false, true, sourceRoot); new CommandLine(buildCommand).parse(buildPath); buildCommand.execute(); Assert.assertTrue(Files.exists(sourceRoot.resolve("hello_world.jar"))); readOutput(true); } private static void zipFile(File file, String contentFile) { try { ZipOutputStream out = new ZipOutputStream(new FileOutputStream(file)); ZipEntry e = new ZipEntry(contentFile); out.putNextEntry(e); StringBuilder sb = new StringBuilder(); sb.append("Test String"); byte[] data = sb.toString().getBytes(); out.write(data, 0, data.length); out.closeEntry(); out.close(); } catch (FileNotFoundException ex) { System.err.format("The file %s does not exist", file.getName()); } catch (IOException ex) { System.err.format("I/O error: " + ex); } } @Test(description = "Test Build Command in a Project which use dependency jar which include stored jar.") public void testBuildCommandWithStoredJarDependency() throws IOException { String[] compileArgs = {"--all", "--skip-tests"}; BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("stored-jar-dependency-project"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); Path target = this.testResources.resolve("stored-jar-dependency-project") .resolve(ProjectDirConstants.TARGET_DIR_NAME); Assert.assertTrue(Files.exists(target), "Check if target directory is created"); Path executablePath = target.resolve(ProjectDirConstants.BIN_DIR_NAME).resolve("mymodule.jar"); JarFile jarFile = new JarFile(executablePath.toFile()); Enumeration e = jarFile.entries(); while (e.hasMoreElements()) { JarEntry je = (JarEntry) e.nextElement(); String name = je.getName(); int method = je.getMethod(); if (name.endsWith(BLANG_COMPILED_JAR_EXT)) { Assert.assertEquals(method, ZipEntry.STORED); } } readOutput(true); } @Test(dependsOnMethods = {"testBuildCommand"}) public void testBuildOutput() throws IOException { Path bin = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME) .resolve(ProjectDirConstants.BIN_DIR_NAME); Assert.assertTrue(Files.exists(bin)); Path myModuleJar = bin.resolve("mymodule" + BLANG_COMPILED_JAR_EXT); Assert.assertTrue(Files.exists(myModuleJar)); JarFile jar = new JarFile(myModuleJar.toFile()); Assert.assertNotNull(jar.getJarEntry("resources/testOrg/mymodule/resource.txt")); 
Assert.assertNotNull(jar.getJarEntry("resources/testOrg/mymodule/myresource/insideDirectory.txt")); } @Test(dependsOnMethods = {"testBuildOutput", "testBuildCommand"}) public void testBaloContents() throws IOException { URI baloZip = URI.create("jar:" + moduleBalo.toUri().toString()); FileSystems.newFileSystem(baloZip, Collections.emptyMap()) .getRootDirectories() .forEach(root -> { try (Stream<Path> stream = Files.list(root)) { Path metadata = root.resolve(ProjectDirConstants.BALO_METADATA_DIR_NAME); Assert.assertTrue(Files.exists(metadata)); Assert.assertTrue(Files.isDirectory(metadata)); Path baloToml = metadata.resolve(ProjectDirConstants.BALO_METADATA_FILE); Assert.assertTrue(Files.exists(baloToml)); Path moduleToml = metadata.resolve(ProjectDirConstants.BALO_MODULE_METADATA_FILE); Assert.assertTrue(Files.exists(moduleToml)); String moduleTomlContent = new String(Files.readAllBytes(moduleToml)); String baloTomlContent = new String(Files.readAllBytes(baloToml)); Module module = new Toml().read(moduleTomlContent).to(Module.class); BaloToml balo = new Toml().read(baloTomlContent).to(BaloToml.class); Assert.assertEquals(module.module_version, "0.1.0"); Assert.assertEquals(balo.balo_version, "1.0.0"); Path srcDir = root.resolve(ProjectDirConstants.SOURCE_DIR_NAME); Assert.assertTrue(Files.exists(srcDir)); Path moduleDir = srcDir.resolve("mymodule"); Assert.assertTrue(Files.exists(moduleDir)); Path mainBal = moduleDir.resolve("main.bal"); Assert.assertTrue(Files.exists(mainBal)); Path moduleMD = moduleDir.resolve("Module.md"); Assert.assertFalse(Files.exists(moduleMD)); Path testDir = moduleDir.resolve("tests"); Assert.assertFalse(Files.exists(testDir)); Path resourceDirInModule = moduleDir.resolve(ProjectDirConstants.RESOURCE_DIR_NAME); Assert.assertFalse(Files.exists(resourceDirInModule)); Path resourceDir = root.resolve(ProjectDirConstants.RESOURCE_DIR_NAME); Assert.assertTrue(Files.exists(resourceDir)); Path resourceDirContent = resourceDir.resolve("resource.txt"); Assert.assertTrue(Files.exists(resourceDirContent)); Path docsDir = root.resolve(ProjectDirConstants.BALO_DOC_DIR_NAME); Assert.assertTrue(Files.exists(docsDir)); Path moduleMdInBalo = docsDir.resolve(ProjectDirConstants.MODULE_MD_FILE_NAME); Assert.assertTrue(Files.exists(moduleMdInBalo)); Path platformLibDir = root.resolve(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME); Assert.assertTrue(Files.exists(platformLibDir)); Path jarFile = platformLibDir.resolve("toml4j.jar"); Assert.assertTrue(Files.exists(jarFile)); } catch (IOException ex) { throw new AssertionError("Error while reading balo content"); } }); } @Test(dependsOnMethods = {"testBuildCommand"}) public void testTemplateBaloContents() throws IOException { URI baloZip = URI.create("jar:" + tplModuleBalo.toUri().toString()); FileSystems.newFileSystem(baloZip, Collections.emptyMap()) .getRootDirectories() .forEach(root -> { try (Stream<Path> stream = Files.list(root)) { Path metadata = root.resolve(ProjectDirConstants.BALO_METADATA_DIR_NAME); Assert.assertTrue(Files.exists(metadata)); Assert.assertTrue(Files.isDirectory(metadata)); Path baloToml = metadata.resolve(ProjectDirConstants.BALO_METADATA_FILE); Assert.assertTrue(Files.exists(baloToml)); Path moduleToml = metadata.resolve(ProjectDirConstants.BALO_MODULE_METADATA_FILE); Assert.assertTrue(Files.exists(moduleToml)); String moduleTomlContent = new String(Files.readAllBytes(moduleToml)); String baloTomlContent = new String(Files.readAllBytes(baloToml)); Module module = new
Toml().read(moduleTomlContent).to(Module.class); BaloToml balo = new Toml().read(baloTomlContent).to(BaloToml.class); Assert.assertEquals(module.module_version, "0.1.0"); Assert.assertEquals(balo.balo_version, "1.0.0"); Path srcDir = root.resolve(ProjectDirConstants.SOURCE_DIR_NAME); Assert.assertTrue(Files.exists(srcDir)); Path moduleDir = srcDir.resolve("mytemplate"); Assert.assertTrue(Files.exists(moduleDir)); Path testDir = moduleDir.resolve("tests"); Assert.assertTrue(Files.exists(testDir)); Path testFile = testDir.resolve("main_test.bal"); Assert.assertTrue(Files.exists(testFile)); Path mainBal = moduleDir.resolve("main.bal"); Assert.assertTrue(Files.exists(mainBal)); Path moduleMD = moduleDir.resolve("Module.md"); Assert.assertFalse(Files.exists(moduleMD)); Path resourceDirInModule = moduleDir.resolve(ProjectDirConstants.RESOURCE_DIR_NAME); Assert.assertFalse(Files.exists(resourceDirInModule)); Path resourceDir = root.resolve(ProjectDirConstants.RESOURCE_DIR_NAME); Assert.assertTrue(Files.exists(resourceDir)); Path resourceDirContent = resourceDir.resolve("resource.txt"); Assert.assertTrue(Files.exists(resourceDirContent)); Path docsDir = root.resolve(ProjectDirConstants.BALO_DOC_DIR_NAME); Assert.assertTrue(Files.exists(docsDir)); Path moduleMdInBalo = docsDir.resolve(ProjectDirConstants.MODULE_MD_FILE_NAME); Assert.assertTrue(Files.exists(moduleMdInBalo)); Path platformLibDir = root.resolve(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME); Assert.assertFalse(Files.exists(platformLibDir)); Path jarFile = platformLibDir.resolve("toml4j.jar"); Assert.assertFalse(Files.exists(jarFile)); } catch (IOException ex) { throw new AssertionError("Error while reading balo content"); } }); } @Test(dependsOnMethods = {"testBuildCommand"}) public void testTargetCacheDirectory() throws IOException { Path cache = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME) .resolve(ProjectDirConstants.CACHES_DIR_NAME); } @Test(description = "Test the cleaning of target resources in the build command.", dependsOnMethods = {"testBuildCommand"}) public void testTargetClean() throws IOException { String[] compileArgs = {"mymodule", "--skip-tests"}; Path target = this.testResources.resolve("valid-project").resolve(ProjectDirConstants.TARGET_DIR_NAME); BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("valid-project"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); Path executablePath = target.resolve(ProjectDirConstants.BIN_DIR_NAME).resolve("mytemplate.jar"); Assert.assertTrue(Files.exists(executablePath), "Check if executables of other modules are not deleted during a single module build"); } @Test(description = "Test Build Command for a single file.") public void testBuildCommandSingleFile() throws IOException { String[] compileArgs = {"hello_world.bal"}; BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("valid-bal-file"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); readOutput(); Assert.assertFalse(Files.exists(tmpDir.resolve(ProjectDirConstants.TARGET_DIR_NAME)), "Check if target directory is not created"); Path lockFile = tmpDir.resolve(ProjectDirConstants.LOCK_FILE_NAME); Assert.assertFalse(Files.exists(lockFile), "Check if lock file is created"); Path execJar = Paths.get("hello_world.jar"); Assert.assertTrue(Files.exists(execJar), "Check if jar gets created"); Files.delete(execJar); } 
@Test(description = "Test Build Command for a single file with output flag.") public void testBuildCommandSingleFileWithOutput() throws IOException { String[] compileArgs = {"-osample.jar", "hello_world.bal"}; BuildCommand buildCommand = new BuildCommand(this.testResources.resolve("valid-bal-file"), printStream, printStream, false, true); new CommandLine(buildCommand).parse(compileArgs); buildCommand.execute(); readOutput(); Assert.assertFalse(Files.exists(tmpDir.resolve(ProjectDirConstants.TARGET_DIR_NAME)), "Check if target directory is not created"); Path lockFile = tmpDir.resolve(ProjectDirConstants.LOCK_FILE_NAME); Assert.assertFalse(Files.exists(lockFile), "Check if lock file is created"); Path execJar = this.testResources.resolve("valid-bal-file").resolve("sample.jar"); Assert.assertTrue(Files.exists(execJar), "Check if jar gets created"); } @Test(description = "Test the --skip-tests flag in the build command to ensure it avoids compiling tests") public void testBuildWithSkipTests() throws IOException { Path projectWithTestErrors = this.testResources.resolve("project-with-test-errors"); BuildCommand buildCommand = new BuildCommand(projectWithTestErrors, printStream, printStream, false, true); new CommandLine(buildCommand).parse("--skip-tests", "-a"); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), "Compiling source\n" + "\ttestOrg/module1:0.1.0\n" + "\nCreating balos\n" + "\ttarget/balo/module1-" + ProgramFileConstants.IMPLEMENTATION_VERSION + "-any-0.1.0.balo\n" + "\nGenerating executables\n" + "\ttarget/bin/module1.jar\n"); } static class Copy extends SimpleFileVisitor<Path> { private Path fromPath; private Path toPath; private StandardCopyOption copyOption; public Copy(Path fromPath, Path toPath, StandardCopyOption copyOption) { this.fromPath = fromPath; this.toPath = toPath; this.copyOption = copyOption; } public Copy(Path fromPath, Path toPath) { this(fromPath, toPath, StandardCopyOption.REPLACE_EXISTING); } @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { Path targetPath = toPath.resolve(fromPath.relativize(dir).toString()); if (!Files.exists(targetPath)) { Files.createDirectory(targetPath); } return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.copy(file, toPath.resolve(fromPath.relativize(file).toString()), copyOption); return FileVisitResult.CONTINUE; } } }
Can we do a substring instead? Let's also extract the substring logic out into a method.
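A minimal sketch of the helper this comment asks for, matching the extraction shown in the revised method below; it assumes BTupleType.toString() always yields a bracket-delimited form such as "[int,string]":

// Hedged sketch: pull the bracket-stripping logic into one helper and take a
// single substring over the whole string. Unlike replace("[", "").replace("]", ""),
// this drops only the outermost brackets, so nested bracketed types
// (e.g. "int[]") survive intact.
private static String removeBracketsFromStringFormatOfTuple(BTupleType tupleType) {
    String stringValue = tupleType.toString();
    return "(" + stringValue.substring(1, stringValue.length() - 1) + ")";
}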
public static Object call(BFunctionPointer<Object, Object> func, Object... args) { BFunctionType functionType = (BFunctionType) func.getType(); List<Type> paramTypes = new LinkedList<>(); List<Type> argTypes = new LinkedList<>(); List<Object> argsList = new java.util.ArrayList<>(List.of(Scheduler.getStrand())); if (checkIsValidPositionalArgs(args, argsList, functionType, paramTypes, argTypes) || checkIsValidRestArgs(args, argsList, functionType, paramTypes, argTypes)) { Type restType = functionType.restType != null ? ((BArrayType) functionType.restType).getElementType() : null; throw ErrorCreator.createError( getModulePrefixedReason(FUNCTION_LANG_LIB, INCOMPATIBLE_ARGUMENTS), BLangExceptionHelper.getErrorDetails(RuntimeErrors.INCOMPATIBLE_ARGUMENTS, new BTupleType(paramTypes, restType, 0, false) .toString().replace("[", "").replace("]", ""), new BTupleType(argTypes).toString().replace("[", "").replace("]", ""))); } return func.asyncCall(argsList.toArray(), METADATA); }
new BTupleType(argTypes).toString().replace("[", "").replace("]", "")));
public static Object call(BFunctionPointer<Object, Object> func, Object... args) { BFunctionType functionType = (BFunctionType) func.getType(); List<Type> paramTypes = new LinkedList<>(); List<Type> argTypes = new LinkedList<>(); List<Object> argsList = new java.util.ArrayList<>(List.of(Scheduler.getStrand())); if (checkIsValidPositionalArgs(args, argsList, functionType, paramTypes, argTypes) || checkIsValidRestArgs(args, argsList, functionType, paramTypes, argTypes)) { Type restType = functionType.restType != null ? ((BArrayType) functionType.restType).getElementType() : null; throw ErrorCreator.createError( getModulePrefixedReason(FUNCTION_LANG_LIB, INCOMPATIBLE_ARGUMENTS), BLangExceptionHelper.getErrorDetails(RuntimeErrors.INCOMPATIBLE_ARGUMENTS, removeBracketsFromStringFormatOfTuple(new BTupleType(argTypes)), removeBracketsFromStringFormatOfTuple(new BTupleType(paramTypes, restType, 0, false)))); } return func.asyncCall(argsList.toArray(), METADATA); }
class Call { private static final StrandMetadata METADATA = new StrandMetadata(BALLERINA_BUILTIN_PKG_PREFIX, FUNCTION_LANG_LIB, "1.0.0", "call"); private static boolean checkIsValidPositionalArgs(Object[] args, List<Object> argsList, BFunctionType functionType, List<Type> paramTypes, List<Type> argTypes) { boolean errored = false; Parameter[] parameters = functionType.parameters; int numOfParams = parameters.length; int numOfArgs = args.length; for (int i = 0; i < numOfParams; i++) { Parameter parameter = parameters[i]; Type paramType = parameter.type; paramTypes.add(paramType); if (i < numOfArgs) { Object arg = args[i]; Type argType = TypeChecker.getType(arg); argTypes.add(argType); if (!TypeChecker.checkIsType(null, arg, argType, paramType)) { errored = true; } argsList.add(arg); argsList.add(true); } else if (parameter.isDefault) { argsList.add(0); argsList.add(false); } else { errored = true; } } return errored; } private static boolean checkIsValidRestArgs(Object[] args, List<Object> argsList, BFunctionType functionType, List<Type> paramTypes, List<Type> argTypes) { boolean errored = false; int numOfArgs = args.length; int numOfRestArgs = Math.max(numOfArgs - functionType.parameters.length, 0); BArrayType restType = (BArrayType) functionType.restType; if (restType != null) { ListInitialValueEntry.ExpressionEntry[] initialValues = new ListInitialValueEntry.ExpressionEntry[numOfRestArgs]; Type elementType = restType.getElementType(); for (int i = 0; i < numOfRestArgs; i++) { Object arg = args[numOfArgs - numOfRestArgs + i]; Type argType = TypeChecker.getType(arg); argTypes.add(argType); if (!TypeChecker.checkIsType(null, arg, argType, elementType)) { errored = true; } initialValues[i] = new ListInitialValueEntry.ExpressionEntry(arg); } if (!errored) { argsList.add(new ArrayValueImpl(restType, -1L, initialValues)); argsList.add(true); } } else if (numOfRestArgs > 0) { errored = true; for (int i = numOfArgs - numOfRestArgs; i < numOfArgs; i++) { argTypes.add(TypeChecker.getType(args[i])); } } return errored; } }
class Call { private static final StrandMetadata METADATA = new StrandMetadata(BALLERINA_BUILTIN_PKG_PREFIX, FUNCTION_LANG_LIB, "1.0.0", "call"); private static boolean checkIsValidPositionalArgs(Object[] args, List<Object> argsList, BFunctionType functionType, List<Type> paramTypes, List<Type> argTypes) { boolean errored = false; Parameter[] parameters = functionType.parameters; int numOfParams = parameters.length; int numOfArgs = args.length; for (int i = 0; i < numOfParams; i++) { Parameter parameter = parameters[i]; Type paramType = parameter.type; paramTypes.add(paramType); if (i < numOfArgs) { Object arg = args[i]; Type argType = TypeChecker.getType(arg); argTypes.add(argType); if (!TypeChecker.checkIsType(null, arg, argType, paramType)) { errored = true; } argsList.add(arg); argsList.add(true); } else if (parameter.isDefault) { argsList.add(0); argsList.add(false); } else { errored = true; } } return errored; } private static boolean checkIsValidRestArgs(Object[] args, List<Object> argsList, BFunctionType functionType, List<Type> paramTypes, List<Type> argTypes) { boolean errored = false; int numOfArgs = args.length; int numOfRestArgs = Math.max(numOfArgs - functionType.parameters.length, 0); BArrayType restType = (BArrayType) functionType.restType; if (restType != null) { ListInitialValueEntry.ExpressionEntry[] initialValues = new ListInitialValueEntry.ExpressionEntry[numOfRestArgs]; Type elementType = restType.getElementType(); for (int i = 0; i < numOfRestArgs; i++) { Object arg = args[numOfArgs - numOfRestArgs + i]; Type argType = TypeChecker.getType(arg); argTypes.add(argType); if (!TypeChecker.checkIsType(null, arg, argType, elementType)) { errored = true; } initialValues[i] = new ListInitialValueEntry.ExpressionEntry(arg); } if (!errored) { argsList.add(new ArrayValueImpl(restType, -1L, initialValues)); argsList.add(true); } } else if (numOfRestArgs > 0) { errored = true; for (int i = numOfArgs - numOfRestArgs; i < numOfArgs; i++) { argTypes.add(TypeChecker.getType(args[i])); } } return errored; } private static String removeBracketsFromStringFormatOfTuple(BTupleType tupleType) { String stringValue = tupleType.toString(); return "(" + stringValue.substring(1, stringValue.length() - 1) + ")"; } }
We want to switch this metadata to use options. However, you are translating proto option names directly to field names, which means there's no guarantee that these other options won't conflict. Maybe prefix all of these options with something to prevent potential conflicts?
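A hedged sketch of the suggested namespacing; the prefix constants and the reworked getOptions signature mirror the revised ProtoSchemaTranslator shown further down, while the loop body here is condensed (the type-validation switch from the full version is elided):

// Namespace every translated proto option under a fixed prefix so a proto
// option's full name cannot collide with an unrelated Beam schema option.
public static final String SCHEMA_OPTION_MESSAGE_PREFIX = "beam:option:proto:message:";
public static final String SCHEMA_OPTION_FIELD_PREFIX = "beam:option:proto:field:";

private static Schema.Options.Builder getOptions(
    String prefix, Map<FieldDescriptor, Object> allFields) {
  Schema.Options.Builder optionsBuilder = Schema.Options.builder();
  for (Map.Entry<FieldDescriptor, Object> entry : allFields.entrySet()) {
    FieldDescriptor fieldDescriptor = entry.getKey();
    FieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor);
    Field field = Field.of("OPTION", fieldType);
    ProtoDynamicMessageSchema schema = ProtoDynamicMessageSchema.forSchema(Schema.of(field));
    // "beam:option:proto:field:" + "my.pkg.my_option" stays unique even if some
    // other schema provider sets an option literally named "my.pkg.my_option".
    optionsBuilder.setOption(
        prefix + fieldDescriptor.getFullName(),
        fieldType,
        schema.createConverter(field).convertFromProtoValue(entry.getValue()));
  }
  return optionsBuilder;
}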
static Schema getSchema(Descriptors.Descriptor descriptor) { Set<Integer> oneOfFields = Sets.newHashSet(); List<Field> fields = Lists.newArrayListWithCapacity(descriptor.getFields().size()); for (OneofDescriptor oneofDescriptor : descriptor.getOneofs()) { List<Field> subFields = Lists.newArrayListWithCapacity(oneofDescriptor.getFieldCount()); Map<String, Integer> enumIds = Maps.newHashMap(); for (FieldDescriptor fieldDescriptor : oneofDescriptor.getFields()) { oneOfFields.add(fieldDescriptor.getNumber()); FieldType fieldType = withMetaData(beamFieldTypeFromProtoField(fieldDescriptor), fieldDescriptor); subFields.add(Field.nullable(fieldDescriptor.getName(), fieldType)); checkArgument( enumIds.putIfAbsent(fieldDescriptor.getName(), fieldDescriptor.getNumber()) == null); } FieldType oneOfType = FieldType.logicalType(OneOfType.create(subFields, enumIds)); fields.add(Field.of(oneofDescriptor.getName(), oneOfType)); } for (Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields()) { if (!oneOfFields.contains(fieldDescriptor.getNumber())) { FieldType fieldType = withMetaData(beamFieldTypeFromProtoField(fieldDescriptor), fieldDescriptor); fields.add( Field.of(fieldDescriptor.getName(), fieldType) .withOptions(getFieldOptions(fieldDescriptor))); } } return Schema.builder().addFields(fields).setOptions(getSchemaOptions(descriptor)).build(); }
return Schema.builder().addFields(fields).setOptions(getSchemaOptions(descriptor)).build();
static Schema getSchema(Descriptors.Descriptor descriptor) { Set<Integer> oneOfFields = Sets.newHashSet(); List<Field> fields = Lists.newArrayListWithCapacity(descriptor.getFields().size()); for (OneofDescriptor oneofDescriptor : descriptor.getOneofs()) { List<Field> subFields = Lists.newArrayListWithCapacity(oneofDescriptor.getFieldCount()); Map<String, Integer> enumIds = Maps.newHashMap(); for (FieldDescriptor fieldDescriptor : oneofDescriptor.getFields()) { oneOfFields.add(fieldDescriptor.getNumber()); FieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor); subFields.add( withFieldNumber( Field.nullable(fieldDescriptor.getName(), fieldType), fieldDescriptor.getNumber())); checkArgument( enumIds.putIfAbsent(fieldDescriptor.getName(), fieldDescriptor.getNumber()) == null); } FieldType oneOfType = FieldType.logicalType(OneOfType.create(subFields, enumIds)); fields.add(Field.of(oneofDescriptor.getName(), oneOfType)); } for (Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields()) { if (!oneOfFields.contains(fieldDescriptor.getNumber())) { FieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor); fields.add( withFieldNumber( Field.of(fieldDescriptor.getName(), fieldType), fieldDescriptor.getNumber()) .withOptions(getFieldOptions(fieldDescriptor))); } } return Schema.builder() .addFields(fields) .setOptions( getSchemaOptions(descriptor) .setOption( SCHEMA_OPTION_META_TYPE_NAME, FieldType.STRING, descriptor.getFullName())) .build(); }
class ProtoSchemaTranslator { /** This METADATA tag is used to store the field number of a proto tag. */ public static final String PROTO_NUMBER_METADATA_TAG = "PROTO_NUMBER"; public static final String PROTO_MESSAGE_NAME_METADATA_TAG = "PROTO_MESSAGE_NAME"; public static final String PROTO_MAP_KEY_MESSAGE_NAME_METADATA_TAG = "PROTO_MAP_KEY_MESSAGE_NAME"; public static final String PROTO_MAP_VALUE_MESSAGE_NAME_METADATA_TAG = "PROTO_MAP_VALUE_MESSAGE_NAME"; /** Attach a proto field number to a type. */ static FieldType withFieldNumber(FieldType fieldType, int index) { return fieldType.withMetadata(PROTO_NUMBER_METADATA_TAG, Long.toString(index)); } /** Return the proto field number for a type. */ static int getFieldNumber(FieldType fieldType) { return Integer.parseInt(fieldType.getMetadataString(PROTO_NUMBER_METADATA_TAG)); } /** Attach the name of the message to a type. */ public static FieldType withMessageName(FieldType fieldType, String messageName) { return fieldType.withMetadata(PROTO_MESSAGE_NAME_METADATA_TAG, messageName); } /** Return the message name for a type. */ public static String getMessageName(FieldType fieldType) { return fieldType.getMetadataString(PROTO_MESSAGE_NAME_METADATA_TAG); } /** Attach the name of the message to a map key. */ public static FieldType withMapKeyMessageName(FieldType fieldType, String messageName) { return fieldType.withMetadata(PROTO_MAP_KEY_MESSAGE_NAME_METADATA_TAG, messageName); } /** Return the message name for a map key. */ public static String getMapKeyMessageName(FieldType fieldType) { return fieldType.getMetadataString(PROTO_MAP_KEY_MESSAGE_NAME_METADATA_TAG); } /** Attach the name of the message to a map value. */ public static FieldType withMapValueMessageName(FieldType fieldType, String messageName) { return fieldType.withMetadata(PROTO_MAP_VALUE_MESSAGE_NAME_METADATA_TAG, messageName); } /** Return the message name for a map value. */ public static String getMapValueMessageName(FieldType fieldType) { return fieldType.getMetadataString(PROTO_MAP_VALUE_MESSAGE_NAME_METADATA_TAG); } /** Return a Beam schema representing a proto class. */ static Schema getSchema(Class<?
extends Message> clazz) { return getSchema(ProtobufUtil.getDescriptorForClass(clazz)); } private static FieldType withMetaData( FieldType inType, Descriptors.FieldDescriptor fieldDescriptor) { FieldType fieldType = withFieldNumber(inType, fieldDescriptor.getNumber()); if (fieldDescriptor.isMapField()) { FieldDescriptor keyFieldDescriptor = fieldDescriptor.getMessageType().findFieldByName("key"); FieldDescriptor valueFieldDescriptor = fieldDescriptor.getMessageType().findFieldByName("value"); if ((keyFieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE)) { fieldType = withMapKeyMessageName(fieldType, keyFieldDescriptor.getMessageType().getFullName()); } if ((valueFieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE)) { fieldType = withMapValueMessageName(fieldType, valueFieldDescriptor.getMessageType().getFullName()); } } else if (fieldDescriptor.getType() == FieldDescriptor.Type.MESSAGE) { return withMessageName(fieldType, fieldDescriptor.getMessageType().getFullName()); } return fieldType; } private static FieldType beamFieldTypeFromProtoField( Descriptors.FieldDescriptor protoFieldDescriptor) { FieldType fieldType = null; if (protoFieldDescriptor.isMapField()) { FieldDescriptor keyFieldDescriptor = protoFieldDescriptor.getMessageType().findFieldByName("key"); FieldDescriptor valueFieldDescriptor = protoFieldDescriptor.getMessageType().findFieldByName("value"); fieldType = FieldType.map( beamFieldTypeFromProtoField(keyFieldDescriptor).withNullable(false), beamFieldTypeFromProtoField(valueFieldDescriptor).withNullable(false)); } else if (protoFieldDescriptor.isRepeated()) { fieldType = FieldType.array( beamFieldTypeFromSingularProtoField(protoFieldDescriptor).withNullable(false)); } else { fieldType = beamFieldTypeFromSingularProtoField(protoFieldDescriptor); } return fieldType; } private static FieldType beamFieldTypeFromSingularProtoField( Descriptors.FieldDescriptor protoFieldDescriptor) { Descriptors.FieldDescriptor.Type fieldDescriptor = protoFieldDescriptor.getType(); FieldType fieldType; switch (fieldDescriptor) { case INT32: fieldType = FieldType.INT32; break; case INT64: fieldType = FieldType.INT64; break; case FLOAT: fieldType = FieldType.FLOAT; break; case DOUBLE: fieldType = FieldType.DOUBLE; break; case BOOL: fieldType = FieldType.BOOLEAN; break; case STRING: fieldType = FieldType.STRING; break; case BYTES: fieldType = FieldType.BYTES; break; case UINT32: fieldType = FieldType.logicalType(new UInt32()); break; case SINT32: fieldType = FieldType.logicalType(new SInt32()); break; case FIXED32: fieldType = FieldType.logicalType(new Fixed32()); break; case SFIXED32: fieldType = FieldType.logicalType(new SFixed32()); break; case UINT64: fieldType = FieldType.logicalType(new UInt64()); break; case SINT64: fieldType = FieldType.logicalType(new SInt64()); break; case FIXED64: fieldType = FieldType.logicalType(new Fixed64()); break; case SFIXED64: fieldType = FieldType.logicalType(new SFixed64()); break; case ENUM: Map<String, Integer> enumValues = Maps.newHashMap(); for (EnumValueDescriptor enumValue : protoFieldDescriptor.getEnumType().getValues()) { if (enumValues.putIfAbsent(enumValue.getName(), enumValue.getNumber()) != null) { throw new RuntimeException("Aliased enumerations not currently supported."); } } fieldType = FieldType.logicalType(EnumerationType.create(enumValues)); break; case MESSAGE: case GROUP: String fullName = protoFieldDescriptor.getMessageType().getFullName(); switch (fullName) { case "google.protobuf.Timestamp": fieldType = FieldType.logicalType(new 
NanosInstant()); break; case "google.protobuf.Int32Value": case "google.protobuf.UInt32Value": case "google.protobuf.Int64Value": case "google.protobuf.UInt64Value": case "google.protobuf.FloatValue": case "google.protobuf.DoubleValue": case "google.protobuf.StringValue": case "google.protobuf.BoolValue": case "google.protobuf.BytesValue": fieldType = beamFieldTypeFromSingularProtoField( protoFieldDescriptor.getMessageType().findFieldByNumber(1)); break; case "google.protobuf.Duration": fieldType = FieldType.logicalType(new NanosDuration()); break; case "google.protobuf.Any": throw new RuntimeException("Any not yet supported"); default: fieldType = FieldType.row(getSchema(protoFieldDescriptor.getMessageType())); } if (protoFieldDescriptor.isOptional()) { fieldType = fieldType.withNullable(true); } break; default: throw new RuntimeException("Field type not matched."); } return fieldType; } private static Schema.Options getFieldOptions(FieldDescriptor fieldDescriptor) { return getOptions(fieldDescriptor.getOptions().getAllFields()); } private static Schema.Options getSchemaOptions(Descriptors.Descriptor descriptor) { return getOptions(descriptor.getOptions().getAllFields()); } private static Schema.Options getOptions(Map<FieldDescriptor, Object> allFields) { Schema.Options.Builder optionsBuilder = Schema.Options.builder(); for (Map.Entry<FieldDescriptor, Object> entry : allFields.entrySet()) { FieldDescriptor fieldDescriptor = entry.getKey(); FieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor); switch (fieldType.getTypeName()) { case BYTE: case BYTES: case INT16: case INT32: case INT64: case DECIMAL: case FLOAT: case DOUBLE: case STRING: case BOOLEAN: case LOGICAL_TYPE: case ROW: case ARRAY: case ITERABLE: Field field = Field.of("OPTION", fieldType); ProtoDynamicMessageSchema schema = ProtoDynamicMessageSchema.forSchema(Schema.of(field)); optionsBuilder.setOption( fieldDescriptor.getFullName(), fieldType, schema.createConverter(field).convertFromProtoValue(entry.getValue())); break; case MAP: case DATETIME: default: throw new IllegalStateException("These datatypes are not possible in extensions."); } } return optionsBuilder.build(); } }
class ProtoSchemaTranslator { public static final String SCHEMA_OPTION_META_NUMBER = "beam:option:proto:meta:number"; public static final String SCHEMA_OPTION_META_TYPE_NAME = "beam:option:proto:meta:type_name"; /** Option prefix for options on messages. */ public static final String SCHEMA_OPTION_MESSAGE_PREFIX = "beam:option:proto:message:"; /** Option prefix for options on fields. */ public static final String SCHEMA_OPTION_FIELD_PREFIX = "beam:option:proto:field:"; /** Attach a proto field number to a type. */ static Field withFieldNumber(Field field, int number) { return field.withOptions( Schema.Options.builder().setOption(SCHEMA_OPTION_META_NUMBER, FieldType.INT32, number)); } /** Return the proto field number for a type. */ static int getFieldNumber(Field field) { return field.getOptions().getValue(SCHEMA_OPTION_META_NUMBER); } /** Return a Beam schema representing a proto class. */ static Schema getSchema(Class<? extends Message> clazz) { return getSchema(ProtobufUtil.getDescriptorForClass(clazz)); } private static FieldType beamFieldTypeFromProtoField( Descriptors.FieldDescriptor protoFieldDescriptor) { FieldType fieldType = null; if (protoFieldDescriptor.isMapField()) { FieldDescriptor keyFieldDescriptor = protoFieldDescriptor.getMessageType().findFieldByName("key"); FieldDescriptor valueFieldDescriptor = protoFieldDescriptor.getMessageType().findFieldByName("value"); fieldType = FieldType.map( beamFieldTypeFromProtoField(keyFieldDescriptor).withNullable(false), beamFieldTypeFromProtoField(valueFieldDescriptor).withNullable(false)); } else if (protoFieldDescriptor.isRepeated()) { fieldType = FieldType.array( beamFieldTypeFromSingularProtoField(protoFieldDescriptor).withNullable(false)); } else { fieldType = beamFieldTypeFromSingularProtoField(protoFieldDescriptor); } return fieldType; } private static FieldType beamFieldTypeFromSingularProtoField( Descriptors.FieldDescriptor protoFieldDescriptor) { Descriptors.FieldDescriptor.Type fieldDescriptor = protoFieldDescriptor.getType(); FieldType fieldType; switch (fieldDescriptor) { case INT32: fieldType = FieldType.INT32; break; case INT64: fieldType = FieldType.INT64; break; case FLOAT: fieldType = FieldType.FLOAT; break; case DOUBLE: fieldType = FieldType.DOUBLE; break; case BOOL: fieldType = FieldType.BOOLEAN; break; case STRING: fieldType = FieldType.STRING; break; case BYTES: fieldType = FieldType.BYTES; break; case UINT32: fieldType = FieldType.logicalType(new UInt32()); break; case SINT32: fieldType = FieldType.logicalType(new SInt32()); break; case FIXED32: fieldType = FieldType.logicalType(new Fixed32()); break; case SFIXED32: fieldType = FieldType.logicalType(new SFixed32()); break; case UINT64: fieldType = FieldType.logicalType(new UInt64()); break; case SINT64: fieldType = FieldType.logicalType(new SInt64()); break; case FIXED64: fieldType = FieldType.logicalType(new Fixed64()); break; case SFIXED64: fieldType = FieldType.logicalType(new SFixed64()); break; case ENUM: Map<String, Integer> enumValues = Maps.newHashMap(); for (EnumValueDescriptor enumValue : protoFieldDescriptor.getEnumType().getValues()) { if (enumValues.putIfAbsent(enumValue.getName(), enumValue.getNumber()) != null) { throw new RuntimeException("Aliased enumerations not currently supported."); } } fieldType = FieldType.logicalType(EnumerationType.create(enumValues)); break; case MESSAGE: case GROUP: String fullName = protoFieldDescriptor.getMessageType().getFullName(); switch (fullName) { case "google.protobuf.Timestamp": fieldType =
FieldType.logicalType(new NanosInstant()); break; case "google.protobuf.Int32Value": case "google.protobuf.UInt32Value": case "google.protobuf.Int64Value": case "google.protobuf.UInt64Value": case "google.protobuf.FloatValue": case "google.protobuf.DoubleValue": case "google.protobuf.StringValue": case "google.protobuf.BoolValue": case "google.protobuf.BytesValue": fieldType = beamFieldTypeFromSingularProtoField( protoFieldDescriptor.getMessageType().findFieldByNumber(1)); break; case "google.protobuf.Duration": fieldType = FieldType.logicalType(new NanosDuration()); break; case "google.protobuf.Any": throw new RuntimeException("Any not yet supported"); default: fieldType = FieldType.row(getSchema(protoFieldDescriptor.getMessageType())); } if (protoFieldDescriptor.isOptional()) { fieldType = fieldType.withNullable(true); } break; default: throw new RuntimeException("Field type not matched."); } return fieldType; } private static Schema.Options.Builder getFieldOptions(FieldDescriptor fieldDescriptor) { return getOptions(SCHEMA_OPTION_FIELD_PREFIX, fieldDescriptor.getOptions().getAllFields()); } private static Schema.Options.Builder getSchemaOptions(Descriptors.Descriptor descriptor) { return getOptions(SCHEMA_OPTION_MESSAGE_PREFIX, descriptor.getOptions().getAllFields()); } private static Schema.Options.Builder getOptions( String prefix, Map<FieldDescriptor, Object> allFields) { Schema.Options.Builder optionsBuilder = Schema.Options.builder(); for (Map.Entry<FieldDescriptor, Object> entry : allFields.entrySet()) { FieldDescriptor fieldDescriptor = entry.getKey(); FieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor); switch (fieldType.getTypeName()) { case BYTE: case BYTES: case INT16: case INT32: case INT64: case DECIMAL: case FLOAT: case DOUBLE: case STRING: case BOOLEAN: case LOGICAL_TYPE: case ROW: case ARRAY: case ITERABLE: Field field = Field.of("OPTION", fieldType); ProtoDynamicMessageSchema schema = ProtoDynamicMessageSchema.forSchema(Schema.of(field)); optionsBuilder.setOption( prefix + fieldDescriptor.getFullName(), fieldType, schema.createConverter(field).convertFromProtoValue(entry.getValue())); break; case MAP: case DATETIME: default: throw new IllegalStateException("These datatypes are not possible in extensions."); } } return optionsBuilder; } }
Yeah, it'll be created in `createInner`, through `ensureDefaultPipDefinition`: https://github.com/Azure/azure-sdk-for-java/blob/654650746b7294af2f81457cde7db9afbbee76e2/sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/ApplicationGatewayImpl.java#L563-L573 Maybe we'll set the pip SKU to `STANDARD` here if it's not a legacy gateway.
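A hedged sketch of what that could look like inside ensureDefaultPipDefinition; the method shape, the cached defaultPip field, the naming scheme, and the isLegacyTier() helper are all assumptions (the real code is at the linked lines), while withSku(PublicIPSkuType.STANDARD) and withStaticIP() are the same fluent calls the tests below use:

// Sketch only: when auto-creating the default PIP, use the Standard SKU with a
// static address unless the gateway is a legacy (V1) tier, since V2 gateways
// require a Standard public IP.
private Creatable<PublicIpAddress> ensureDefaultPipDefinition() {
    if (this.defaultPip == null) { // hypothetical cached field
        PublicIpAddress.DefinitionStages.WithCreate pipDefinition = this.manager()
            .publicIpAddresses()
            .define(this.name() + "-pip") // hypothetical naming scheme
            .withRegion(this.regionName())
            .withExistingResourceGroup(this.resourceGroupName());
        if (!isLegacyTier()) { // hypothetical helper: true only for V1 tiers
            pipDefinition = pipDefinition.withSku(PublicIPSkuType.STANDARD).withStaticIP();
        }
        this.defaultPip = pipDefinition;
    }
    return this.defaultPip;
}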
public void canCreateApplicationGatewayWithDefaultSku() { String appGatewayName = generateRandomResourceName("agw", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .attach() .withExistingPublicIpAddress(pip) .create(); Assertions.assertEquals(ApplicationGatewayTier.BASIC, appGateway.tier()); Assertions.assertNotNull(appGateway.requestRoutingRules().get("rule1").priority()); }
.withSku(PublicIPSkuType.STANDARD)
public void canCreateApplicationGatewayWithDefaultSku() { String appGatewayName = generateRandomResourceName("agw", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .attach() .withExistingPublicIpAddress(pip) .create(); Assertions.assertEquals(ApplicationGatewayTier.BASIC, appGateway.tier()); Assertions.assertNotNull(appGateway.requestRoutingRules().get("rule1").priority()); }
class ApplicationGatewayTests extends NetworkManagementTest { @Test public void canCRUDApplicationGatewayWithWAF() throws Exception { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); Assertions.assertTrue(appGateway != null); Assertions.assertTrue(ApplicationGatewayTier.WAF_V2.equals(appGateway.tier())); Assertions.assertTrue(ApplicationGatewaySkuName.WAF_V2.equals(appGateway.size())); Assertions.assertTrue(appGateway.autoscaleConfiguration().minCapacity() == 2); Assertions.assertTrue(appGateway.autoscaleConfiguration().maxCapacity() == 5); ApplicationGatewayWebApplicationFirewallConfiguration config = appGateway.webApplicationFirewallConfiguration(); config.withFileUploadLimitInMb(200); config .withDisabledRuleGroups( Arrays .asList( new ApplicationGatewayFirewallDisabledRuleGroup() .withRuleGroupName("REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION"))); config.withRequestBodyCheck(true); config.withMaxRequestBodySizeInKb(64); config .withExclusions( Arrays .asList( new ApplicationGatewayFirewallExclusion() .withMatchVariable("RequestHeaderNames") .withSelectorMatchOperator("StartsWith") .withSelector("User-Agent"))); appGateway.update().withWebApplicationFirewall(config).apply(); appGateway.refresh(); Assertions.assertTrue(appGateway.webApplicationFirewallConfiguration().fileUploadLimitInMb() == 200); Assertions.assertTrue(appGateway.webApplicationFirewallConfiguration().requestBodyCheck()); Assertions .assertEquals(appGateway.webApplicationFirewallConfiguration().maxRequestBodySizeInKb(), (Integer) 64); Assertions.assertEquals(appGateway.webApplicationFirewallConfiguration().exclusions().size(), 1); Assertions .assertEquals( appGateway.webApplicationFirewallConfiguration().exclusions().get(0).matchVariable(), "RequestHeaderNames"); Assertions .assertEquals( appGateway.webApplicationFirewallConfiguration().exclusions().get(0).selectorMatchOperator(), "StartsWith"); Assertions .assertEquals( appGateway.webApplicationFirewallConfiguration().exclusions().get(0).selector(), "User-Agent"); Assertions.assertEquals(appGateway.webApplicationFirewallConfiguration().disabledRuleGroups().size(), 1); Assertions .assertEquals( appGateway.webApplicationFirewallConfiguration().disabledRuleGroups().get(0).ruleGroupName(), "REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION"); } @Test public void canSpecifyWildcardListeners() { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); String listener1 = "listener1"; 
String hostname1 = "my.contoso.com"; ApplicationGateway gateway = networkManager.applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule80") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .withCookieBasedAffinity() .attach() .defineListener(listener1) .withPublicFrontend() .withFrontendPort(9000) .withHttp() .withHostname(hostname1) .attach() .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withExistingPublicIpAddress(pip) .create(); Assertions.assertEquals(hostname1, gateway.listeners().get(listener1).hostname()); String hostname2 = "*.contoso.com"; gateway.update() .updateListener(listener1) .withHostname(hostname2) .parent() .apply(); Assertions.assertEquals(hostname2, gateway.listeners().get(listener1).hostname()); List<String> hostnames = new ArrayList<>(); hostnames.add(hostname1); hostnames.add(hostname2); gateway.update() .updateListener(listener1) .withHostnames(hostnames) .parent() .apply(); Assertions.assertEquals(hostnames, gateway.listeners().get(listener1).hostnames()); } @Test @DoNotRecord(skipInPlayback = true) public void canCreateApplicationGatewayWithSecret() throws Exception { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); String identityName = generateRandomResourceName("id", 10); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); Identity identity = msiManager .identities() .define(identityName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .create(); Assertions.assertNotNull(identity.name()); Assertions.assertNotNull(identity.principalId()); Secret secret1 = createKeyVaultSecret(clientIdFromFile(), identity.principalId()); Secret secret2 = createKeyVaultSecret(clientIdFromFile(), identity.principalId()); ManagedServiceIdentity serviceIdentity = createManagedServiceIdentityFromIdentity(identity); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpsPort(443) .withSslCertificate("ssl1") .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withIdentity(serviceIdentity) .defineSslCertificate("ssl1") .withKeyVaultSecretId(secret1.id()) .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); Assertions.assertEquals(secret1.id(), appGateway.sslCertificates().get("ssl1").keyVaultSecretId()); Assertions .assertEquals( secret1.id(), appGateway.requestRoutingRules().get("rule1").sslCertificate().keyVaultSecretId()); appGateway = appGateway.update().defineSslCertificate("ssl2").withKeyVaultSecretId(secret2.id()).attach().apply(); Assertions.assertEquals(secret2.id(), appGateway.sslCertificates().get("ssl2").keyVaultSecretId()); } @Test @DoNotRecord(skipInPlayback = true) public void canCreateApplicationGatewayWithSslCertificate() throws Exception { String appGatewayName = generateRandomResourceName("agwaf", 15); 
String appPublicIp = generateRandomResourceName("pip", 15); String identityName = generateRandomResourceName("id", 10); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); Identity identity = msiManager .identities() .define(identityName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .create(); Assertions.assertNotNull(identity.name()); Assertions.assertNotNull(identity.principalId()); ManagedServiceIdentity serviceIdentity = createManagedServiceIdentityFromIdentity(identity); String secretId = createKeyVaultCertificate(clientIdFromFile(), identity.principalId()); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpsPort(443) .withSslCertificate("ssl1") .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withIdentity(serviceIdentity) .defineSslCertificate("ssl1") .withKeyVaultSecretId(secretId) .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); Assertions.assertEquals(secretId, appGateway.sslCertificates().get("ssl1").keyVaultSecretId()); Assertions.assertEquals(secretId, appGateway.requestRoutingRules().get("rule1").sslCertificate().keyVaultSecretId()); } @Test public void canAutoAssignPriorityForRequestRoutingRulesWithWAF() { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .defineRequestRoutingRule("rule2") .fromPublicFrontend() .fromFrontendHttpPort(81) .toBackendHttpPort(8181) .toBackendIPAddress("11.1.1.3") .attach() .defineRequestRoutingRule("rule3") .fromPublicFrontend() .fromFrontendHttpPort(83) .toBackendHttpPort(8383) .toBackendIPAddress("11.1.1.4") .withPriority(1) .attach() .defineRequestRoutingRule("rule4") .fromPublicFrontend() .fromFrontendHttpPort(84) .toBackendHttpPort(8384) .toBackendIPAddress("11.1.1.5") .withPriority(20000) .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); appGateway.update() .defineRequestRoutingRule("rule5") .fromPublicFrontend() .fromFrontendHttpPort(82) .toBackendHttpPort(8282) .toBackendIPAddress("11.1.1.6") .attach() .apply(); Integer rule1Priority = appGateway.requestRoutingRules().get("rule1").priority(); Integer rule2Priority = appGateway.requestRoutingRules().get("rule2").priority(); Integer rule5Priority = appGateway.requestRoutingRules().get("rule5").priority(); 
Assertions.assertTrue(rule1Priority < rule5Priority && rule2Priority < rule5Priority); Assertions.assertEquals(1, appGateway.requestRoutingRules().get("rule3").priority()); Assertions.assertEquals(20000, appGateway.requestRoutingRules().get("rule4").priority()); appGateway.update() .defineRequestRoutingRule("rule6") .fromPublicFrontend() .fromFrontendHttpPort(85) .toBackendHttpPort(8585) .toBackendIPAddress("11.1.1.7") .attach() .defineRequestRoutingRule("rule7") .fromPublicFrontend() .fromFrontendHttpPort(86) .toBackendHttpPort(8686) .toBackendIPAddress("11.1.1.8") .withPriority(10040) .attach() .apply(); Assertions.assertEquals(10050, appGateway.requestRoutingRules().get("rule6").priority()); appGateway.update() .updateRequestRoutingRule("rule3") .withPriority(2) .parent() .apply(); Assertions.assertEquals(2, appGateway.requestRoutingRules().get("rule3").priority()); } @Test public void testAddRemoveIpAddressFromWafV2WithExclusionsEqualsAny() { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall( new ApplicationGatewayWebApplicationFirewallConfiguration() .withEnabled(true) .withFirewallMode(ApplicationGatewayFirewallMode.PREVENTION) .withRuleSetType("OWASP") .withRuleSetVersion("3.0") .withExclusions(Collections.singletonList( new ApplicationGatewayFirewallExclusion() .withMatchVariable("RequestHeaderNames") .withSelectorMatchOperator(null) .withSelector(null) )) ) .create(); Assertions.assertEquals("RequestHeaderNames", appGateway.webApplicationFirewallConfiguration().exclusions().iterator().next().matchVariable()); Assertions.assertNull(appGateway.webApplicationFirewallConfiguration().exclusions().iterator().next().selectorMatchOperator()); Map<String, ApplicationGatewayBackend> backends = appGateway.backends(); backends.forEach((name, backend) -> backend.addresses().forEach(addr -> appGateway.update() .updateBackend(name) .withoutIPAddress(addr.ipAddress()) .parent() .apply())); } @Test public void canAssociateWafPolicy() { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); String wafPolicyName = generateRandomResourceName("waf", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); WebApplicationFirewallPolicy wafPolicy = networkManager .webApplicationFirewallPolicies() .define(wafPolicyName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .withManagedRuleSet(KnownWebApplicationGatewayManagedRuleSet.OWASP_3_2) .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") 
.fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withExistingWebApplicationFirewallPolicy(wafPolicy) .create(); Assertions.assertNotNull(appGateway.getWebApplicationFirewallPolicy()); Assertions.assertNull(appGateway.webApplicationFirewallConfiguration()); wafPolicy.refresh(); Assertions.assertEquals(appGateway.id(), wafPolicy.getAssociatedApplicationGateways().iterator().next().id()); Assertions.assertEquals(wafPolicy.id(), appGateway.getWebApplicationFirewallPolicy().id()); appGateway.update() .withNewWebApplicationFirewallPolicy(WebApplicationFirewallMode.PREVENTION) .apply(); WebApplicationFirewallPolicy newPolicy = appGateway.getWebApplicationFirewallPolicy(); Assertions.assertNotNull(newPolicy); Assertions.assertTrue(newPolicy.isEnabled()); Assertions.assertEquals(WebApplicationFirewallMode.PREVENTION, newPolicy.mode()); Assertions.assertNotEquals(newPolicy.id(), wafPolicy.id()); Assertions.assertEquals(appGateway.id(), newPolicy.getAssociatedApplicationGateways().iterator().next().id()); Assertions.assertEquals(newPolicy.id(), appGateway.getWebApplicationFirewallPolicy().id()); String invalidPolicyName = "invalid"; Assertions.assertThrows(IllegalStateException.class, () -> { networkManager.applicationGateways() .define("invalid") .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withNewPublicIpAddress() .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withNewWebApplicationFirewallPolicy( networkManager .webApplicationFirewallPolicies() .define(invalidPolicyName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .withManagedRuleSet(KnownWebApplicationGatewayManagedRuleSet.OWASP_3_2)) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); }); Assertions.assertTrue( networkManager .webApplicationFirewallPolicies() .listByResourceGroup(rgName) .stream() .noneMatch(policy -> policy.name().equals(invalidPolicyName))); } @Test public void canSetSslPolicy() { String appGatewayName = generateRandomResourceName("agw", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withPredefinedSslPolicy(ApplicationGatewaySslPolicyName.APP_GW_SSL_POLICY20150501) .create(); ApplicationGatewaySslPolicy sslPolicy = appGateway.sslPolicy(); Assertions.assertNotNull(sslPolicy); Assertions.assertEquals(ApplicationGatewaySslPolicyType.PREDEFINED, sslPolicy.policyType()); Assertions.assertEquals(ApplicationGatewaySslPolicyName.APP_GW_SSL_POLICY20150501, sslPolicy.policyName()); appGateway.update() 
.withCustomV2SslPolicy(ApplicationGatewaySslProtocol.TLSV1_2, Collections.singletonList(ApplicationGatewaySslCipherSuite.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256)) .apply(); sslPolicy = appGateway.sslPolicy(); Assertions.assertNotNull(sslPolicy); Assertions.assertEquals(ApplicationGatewaySslPolicyType.CUSTOM_V2, sslPolicy.policyType()); Assertions.assertNull(sslPolicy.policyName()); Assertions.assertEquals(ApplicationGatewaySslProtocol.TLSV1_2, sslPolicy.minProtocolVersion()); Assertions.assertTrue(sslPolicy.cipherSuites().contains(ApplicationGatewaySslCipherSuite.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256)); Assertions.assertThrows(ManagementException.class, () -> { appGateway.update() .withSslPolicy(new ApplicationGatewaySslPolicy() .withPolicyType(ApplicationGatewaySslPolicyType.PREDEFINED) .withPolicyName(ApplicationGatewaySslPolicyName.APP_GW_SSL_POLICY20150501) .withMinProtocolVersion(ApplicationGatewaySslProtocol.TLSV1_1)) .apply(); }); } private String createKeyVaultCertificate(String servicePrincipal, String identityPrincipal) { String vaultName = generateRandomResourceName("vlt", 10); String secretName = generateRandomResourceName("srt", 10); Vault vault = keyVaultManager .vaults() .define(vaultName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineAccessPolicy() .forServicePrincipal(servicePrincipal) .allowSecretAllPermissions() .allowCertificateAllPermissions() .attach() .defineAccessPolicy() .forObjectId(identityPrincipal) .allowSecretAllPermissions() .attach() .withAccessFromAzureServices() .withDeploymentEnabled() .create(); CertificateClient certificateClient = new CertificateClientBuilder() .vaultUrl(vault.vaultUri()) .pipeline(vault.vaultHttpPipeline()) .buildClient(); KeyVaultCertificateWithPolicy certificate = certificateClient.beginCreateCertificate(secretName, CertificatePolicy.getDefault()).getFinalResult(); return certificate.getSecretId(); } private Secret createKeyVaultSecret(String servicePrincipal, String identityPrincipal) throws Exception { String vaultName = generateRandomResourceName("vlt", 10); String secretName = generateRandomResourceName("srt", 10); BufferedReader buff = new BufferedReader(new FileReader(new File(getClass().getClassLoader() .getResource("test.certificate").getFile()))); String secretValue = buff.readLine(); Vault vault = keyVaultManager .vaults() .define(vaultName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineAccessPolicy() .forServicePrincipal(servicePrincipal) .allowSecretAllPermissions() .attach() .defineAccessPolicy() .forObjectId(identityPrincipal) .allowSecretAllPermissions() .attach() .withAccessFromAzureServices() .withDeploymentEnabled() .create(); return vault.secrets().define(secretName).withValue(secretValue).create(); } private static ManagedServiceIdentity createManagedServiceIdentityFromIdentity(Identity identity) throws Exception { ObjectMapper mapper = new ObjectMapper(); JsonNode userAssignedIdentitiesValueObject = mapper.createObjectNode(); ((ObjectNode) userAssignedIdentitiesValueObject).put("principalId", identity.principalId()); ((ObjectNode) userAssignedIdentitiesValueObject).put("clientId", identity.clientId()); ManagedServiceIdentityUserAssignedIdentities userAssignedIdentitiesValue = new JacksonAdapter() .deserialize( mapper.writerWithDefaultPrettyPrinter().writeValueAsString(userAssignedIdentitiesValueObject), ManagedServiceIdentityUserAssignedIdentities.class, SerializerEncoding.JSON); Map<String, ManagedServiceIdentityUserAssignedIdentities> userAssignedIdentities =
new HashMap<>(); userAssignedIdentities.put(identity.id(), userAssignedIdentitiesValue); ManagedServiceIdentity serviceIdentity = new ManagedServiceIdentity(); serviceIdentity.withType(ResourceIdentityType.USER_ASSIGNED); serviceIdentity.withUserAssignedIdentities(userAssignedIdentities); return serviceIdentity; } }
class ApplicationGatewayTests extends NetworkManagementTest { @Test public void canCRUDApplicationGatewayWithWAF() throws Exception { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); Assertions.assertTrue(appGateway != null); Assertions.assertTrue(ApplicationGatewayTier.WAF_V2.equals(appGateway.tier())); Assertions.assertTrue(ApplicationGatewaySkuName.WAF_V2.equals(appGateway.size())); Assertions.assertTrue(appGateway.autoscaleConfiguration().minCapacity() == 2); Assertions.assertTrue(appGateway.autoscaleConfiguration().maxCapacity() == 5); ApplicationGatewayWebApplicationFirewallConfiguration config = appGateway.webApplicationFirewallConfiguration(); config.withFileUploadLimitInMb(200); config .withDisabledRuleGroups( Arrays .asList( new ApplicationGatewayFirewallDisabledRuleGroup() .withRuleGroupName("REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION"))); config.withRequestBodyCheck(true); config.withMaxRequestBodySizeInKb(64); config .withExclusions( Arrays .asList( new ApplicationGatewayFirewallExclusion() .withMatchVariable("RequestHeaderNames") .withSelectorMatchOperator("StartsWith") .withSelector("User-Agent"))); appGateway.update().withWebApplicationFirewall(config).apply(); appGateway.refresh(); Assertions.assertTrue(appGateway.webApplicationFirewallConfiguration().fileUploadLimitInMb() == 200); Assertions.assertTrue(appGateway.webApplicationFirewallConfiguration().requestBodyCheck()); Assertions .assertEquals(appGateway.webApplicationFirewallConfiguration().maxRequestBodySizeInKb(), (Integer) 64); Assertions.assertEquals(appGateway.webApplicationFirewallConfiguration().exclusions().size(), 1); Assertions .assertEquals( appGateway.webApplicationFirewallConfiguration().exclusions().get(0).matchVariable(), "RequestHeaderNames"); Assertions .assertEquals( appGateway.webApplicationFirewallConfiguration().exclusions().get(0).selectorMatchOperator(), "StartsWith"); Assertions .assertEquals( appGateway.webApplicationFirewallConfiguration().exclusions().get(0).selector(), "User-Agent"); Assertions.assertEquals(appGateway.webApplicationFirewallConfiguration().disabledRuleGroups().size(), 1); Assertions .assertEquals( appGateway.webApplicationFirewallConfiguration().disabledRuleGroups().get(0).ruleGroupName(), "REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION"); } @Test public void canSpecifyWildcardListeners() { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); String listener1 = "listener1"; 
String hostname1 = "my.contoso.com"; ApplicationGateway gateway = networkManager.applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule80") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .withCookieBasedAffinity() .attach() .defineListener(listener1) .withPublicFrontend() .withFrontendPort(9000) .withHttp() .withHostname(hostname1) .attach() .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withExistingPublicIpAddress(pip) .create(); Assertions.assertEquals(hostname1, gateway.listeners().get(listener1).hostname()); String hostname2 = "*.contoso.com"; gateway.update() .updateListener(listener1) .withHostname(hostname2) .parent() .apply(); Assertions.assertEquals(hostname2, gateway.listeners().get(listener1).hostname()); List<String> hostnames = new ArrayList<>(); hostnames.add(hostname1); hostnames.add(hostname2); gateway.update() .updateListener(listener1) .withHostnames(hostnames) .parent() .apply(); Assertions.assertEquals(hostnames, gateway.listeners().get(listener1).hostnames()); } @Test @DoNotRecord(skipInPlayback = true) public void canCreateApplicationGatewayWithSecret() throws Exception { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); String identityName = generateRandomResourceName("id", 10); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); Identity identity = msiManager .identities() .define(identityName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .create(); Assertions.assertNotNull(identity.name()); Assertions.assertNotNull(identity.principalId()); Secret secret1 = createKeyVaultSecret(clientIdFromFile(), identity.principalId()); Secret secret2 = createKeyVaultSecret(clientIdFromFile(), identity.principalId()); ManagedServiceIdentity serviceIdentity = createManagedServiceIdentityFromIdentity(identity); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpsPort(443) .withSslCertificate("ssl1") .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withIdentity(serviceIdentity) .defineSslCertificate("ssl1") .withKeyVaultSecretId(secret1.id()) .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); Assertions.assertEquals(secret1.id(), appGateway.sslCertificates().get("ssl1").keyVaultSecretId()); Assertions .assertEquals( secret1.id(), appGateway.requestRoutingRules().get("rule1").sslCertificate().keyVaultSecretId()); appGateway = appGateway.update().defineSslCertificate("ssl2").withKeyVaultSecretId(secret2.id()).attach().apply(); Assertions.assertEquals(secret2.id(), appGateway.sslCertificates().get("ssl2").keyVaultSecretId()); } @Test @DoNotRecord(skipInPlayback = true) public void canCreateApplicationGatewayWithSslCertificate() throws Exception { String appGatewayName = generateRandomResourceName("agwaf", 15); 
String appPublicIp = generateRandomResourceName("pip", 15); String identityName = generateRandomResourceName("id", 10); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); Identity identity = msiManager .identities() .define(identityName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .create(); Assertions.assertNotNull(identity.name()); Assertions.assertNotNull(identity.principalId()); ManagedServiceIdentity serviceIdentity = createManagedServiceIdentityFromIdentity(identity); String secretId = createKeyVaultCertificate(clientIdFromFile(), identity.principalId()); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpsPort(443) .withSslCertificate("ssl1") .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withIdentity(serviceIdentity) .defineSslCertificate("ssl1") .withKeyVaultSecretId(secretId) .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); Assertions.assertEquals(secretId, appGateway.sslCertificates().get("ssl1").keyVaultSecretId()); Assertions.assertEquals(secretId, appGateway.requestRoutingRules().get("rule1").sslCertificate().keyVaultSecretId()); } @Test public void canAutoAssignPriorityForRequestRoutingRulesWithWAF() { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .defineRequestRoutingRule("rule2") .fromPublicFrontend() .fromFrontendHttpPort(81) .toBackendHttpPort(8181) .toBackendIPAddress("11.1.1.3") .attach() .defineRequestRoutingRule("rule3") .fromPublicFrontend() .fromFrontendHttpPort(83) .toBackendHttpPort(8383) .toBackendIPAddress("11.1.1.4") .withPriority(1) .attach() .defineRequestRoutingRule("rule4") .fromPublicFrontend() .fromFrontendHttpPort(84) .toBackendHttpPort(8384) .toBackendIPAddress("11.1.1.5") .withPriority(20000) .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); appGateway.update() .defineRequestRoutingRule("rule5") .fromPublicFrontend() .fromFrontendHttpPort(82) .toBackendHttpPort(8282) .toBackendIPAddress("11.1.1.6") .attach() .apply(); Integer rule1Priority = appGateway.requestRoutingRules().get("rule1").priority(); Integer rule2Priority = appGateway.requestRoutingRules().get("rule2").priority(); Integer rule5Priority = appGateway.requestRoutingRules().get("rule5").priority(); 
Assertions.assertTrue(rule1Priority < rule5Priority && rule2Priority < rule5Priority); Assertions.assertEquals(1, appGateway.requestRoutingRules().get("rule3").priority()); Assertions.assertEquals(20000, appGateway.requestRoutingRules().get("rule4").priority()); appGateway.update() .defineRequestRoutingRule("rule6") .fromPublicFrontend() .fromFrontendHttpPort(85) .toBackendHttpPort(8585) .toBackendIPAddress("11.1.1.7") .attach() .defineRequestRoutingRule("rule7") .fromPublicFrontend() .fromFrontendHttpPort(86) .toBackendHttpPort(8686) .toBackendIPAddress("11.1.1.8") .withPriority(10040) .attach() .apply(); Assertions.assertEquals(10050, appGateway.requestRoutingRules().get("rule6").priority()); appGateway.update() .updateRequestRoutingRule("rule3") .withPriority(2) .parent() .apply(); Assertions.assertEquals(2, appGateway.requestRoutingRules().get("rule3").priority()); } @Test public void testAddRemoveIpAddressFromWafV2WithExclusionsEqualsAny() { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withAutoScale(2, 5) .withWebApplicationFirewall( new ApplicationGatewayWebApplicationFirewallConfiguration() .withEnabled(true) .withFirewallMode(ApplicationGatewayFirewallMode.PREVENTION) .withRuleSetType("OWASP") .withRuleSetVersion("3.0") .withExclusions(Collections.singletonList( new ApplicationGatewayFirewallExclusion() .withMatchVariable("RequestHeaderNames") .withSelectorMatchOperator(null) .withSelector(null) )) ) .create(); Assertions.assertEquals("RequestHeaderNames", appGateway.webApplicationFirewallConfiguration().exclusions().iterator().next().matchVariable()); Assertions.assertNull(appGateway.webApplicationFirewallConfiguration().exclusions().iterator().next().selectorMatchOperator()); Map<String, ApplicationGatewayBackend> backends = appGateway.backends(); backends.forEach((name, backend) -> backend.addresses().forEach(addr -> appGateway.update() .updateBackend(name) .withoutIPAddress(addr.ipAddress()) .parent() .apply())); } @Test public void canAssociateWafPolicy() { String appGatewayName = generateRandomResourceName("agwaf", 15); String appPublicIp = generateRandomResourceName("pip", 15); String wafPolicyName = generateRandomResourceName("waf", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); WebApplicationFirewallPolicy wafPolicy = networkManager .webApplicationFirewallPolicies() .define(wafPolicyName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .withManagedRuleSet(KnownWebApplicationGatewayManagedRuleSet.OWASP_3_2) .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") 
.fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withExistingWebApplicationFirewallPolicy(wafPolicy) .create(); Assertions.assertNotNull(appGateway.getWebApplicationFirewallPolicy()); Assertions.assertNull(appGateway.webApplicationFirewallConfiguration()); wafPolicy.refresh(); Assertions.assertEquals(appGateway.id(), wafPolicy.getAssociatedApplicationGateways().iterator().next().id()); Assertions.assertEquals(wafPolicy.id(), appGateway.getWebApplicationFirewallPolicy().id()); appGateway.update() .withNewWebApplicationFirewallPolicy(WebApplicationFirewallMode.PREVENTION) .apply(); WebApplicationFirewallPolicy newPolicy = appGateway.getWebApplicationFirewallPolicy(); Assertions.assertNotNull(newPolicy); Assertions.assertTrue(newPolicy.isEnabled()); Assertions.assertEquals(WebApplicationFirewallMode.PREVENTION, newPolicy.mode()); Assertions.assertNotEquals(newPolicy.id(), wafPolicy.id()); Assertions.assertEquals(appGateway.id(), newPolicy.getAssociatedApplicationGateways().iterator().next().id()); Assertions.assertEquals(newPolicy.id(), appGateway.getWebApplicationFirewallPolicy().id()); String invalidPolicyName = "invalid"; Assertions.assertThrows(IllegalStateException.class, () -> { networkManager.applicationGateways() .define("invalid") .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .toBackendIPAddress("11.1.1.2") .attach() .withNewPublicIpAddress() .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withNewWebApplicationFirewallPolicy( networkManager .webApplicationFirewallPolicies() .define(invalidPolicyName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .withManagedRuleSet(KnownWebApplicationGatewayManagedRuleSet.OWASP_3_2)) .withWebApplicationFirewall(true, ApplicationGatewayFirewallMode.PREVENTION) .create(); }); Assertions.assertTrue( networkManager .webApplicationFirewallPolicies() .listByResourceGroup(rgName) .stream() .noneMatch(policy -> policy.name().equals(invalidPolicyName))); } @Test public void canSetSslPolicy() { String appGatewayName = generateRandomResourceName("agw", 15); String appPublicIp = generateRandomResourceName("pip", 15); PublicIpAddress pip = networkManager .publicIpAddresses() .define(appPublicIp) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withSku(PublicIPSkuType.STANDARD) .withStaticIP() .create(); ApplicationGateway appGateway = networkManager .applicationGateways() .define(appGatewayName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineRequestRoutingRule("rule1") .fromPublicFrontend() .fromFrontendHttpPort(80) .toBackendHttpPort(8080) .toBackendIPAddress("11.1.1.1") .attach() .withExistingPublicIpAddress(pip) .withTier(ApplicationGatewayTier.WAF_V2) .withSize(ApplicationGatewaySkuName.WAF_V2) .withPredefinedSslPolicy(ApplicationGatewaySslPolicyName.APP_GW_SSL_POLICY20150501) .create(); ApplicationGatewaySslPolicy sslPolicy = appGateway.sslPolicy(); Assertions.assertNotNull(sslPolicy); Assertions.assertEquals(ApplicationGatewaySslPolicyType.PREDEFINED, sslPolicy.policyType()); Assertions.assertEquals(ApplicationGatewaySslPolicyName.APP_GW_SSL_POLICY20150501, sslPolicy.policyName()); appGateway.update() 
.withCustomV2SslPolicy(ApplicationGatewaySslProtocol.TLSV1_2, Collections.singletonList(ApplicationGatewaySslCipherSuite.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256)) .apply(); sslPolicy = appGateway.sslPolicy(); Assertions.assertNotNull(sslPolicy); Assertions.assertEquals(ApplicationGatewaySslPolicyType.CUSTOM_V2, sslPolicy.policyType()); Assertions.assertNull(sslPolicy.policyName()); Assertions.assertEquals(ApplicationGatewaySslProtocol.TLSV1_2, sslPolicy.minProtocolVersion()); Assertions.assertTrue(sslPolicy.cipherSuites().contains(ApplicationGatewaySslCipherSuite.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256)); Assertions.assertThrows(ManagementException.class, () -> { appGateway.update() .withSslPolicy(new ApplicationGatewaySslPolicy() .withPolicyType(ApplicationGatewaySslPolicyType.PREDEFINED) .withPolicyName(ApplicationGatewaySslPolicyName.APP_GW_SSL_POLICY20150501) .withMinProtocolVersion(ApplicationGatewaySslProtocol.TLSV1_1)) .apply(); }); } private String createKeyVaultCertificate(String servicePrincipal, String identityPrincipal) { String vaultName = generateRandomResourceName("vlt", 10); String secretName = generateRandomResourceName("srt", 10); Vault vault = keyVaultManager .vaults() .define(vaultName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineAccessPolicy() .forServicePrincipal(servicePrincipal) .allowSecretAllPermissions() .allowCertificateAllPermissions() .attach() .defineAccessPolicy() .forObjectId(identityPrincipal) .allowSecretAllPermissions() .attach() .withAccessFromAzureServices() .withDeploymentEnabled() .create(); CertificateClient certificateClient = new CertificateClientBuilder() .vaultUrl(vault.vaultUri()) .pipeline(vault.vaultHttpPipeline()) .buildClient(); KeyVaultCertificateWithPolicy certificate = certificateClient.beginCreateCertificate(secretName, CertificatePolicy.getDefault()).getFinalResult(); return certificate.getSecretId(); } private Secret createKeyVaultSecret(String servicePrincipal, String identityPrincipal) throws Exception { String vaultName = generateRandomResourceName("vlt", 10); String secretName = generateRandomResourceName("srt", 10); BufferedReader buff = new BufferedReader(new FileReader(new File(getClass().getClassLoader() .getResource("test.certificate").getFile()))); String secretValue = buff.readLine(); Vault vault = keyVaultManager .vaults() .define(vaultName) .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .defineAccessPolicy() .forServicePrincipal(servicePrincipal) .allowSecretAllPermissions() .attach() .defineAccessPolicy() .forObjectId(identityPrincipal) .allowSecretAllPermissions() .attach() .withAccessFromAzureServices() .withDeploymentEnabled() .create(); return vault.secrets().define(secretName).withValue(secretValue).create(); } private static ManagedServiceIdentity createManagedServiceIdentityFromIdentity(Identity identity) throws Exception { ObjectMapper mapper = new ObjectMapper(); JsonNode userAssignedIdentitiesValueObject = mapper.createObjectNode(); ((ObjectNode) userAssignedIdentitiesValueObject).put("principalId", identity.principalId()); ((ObjectNode) userAssignedIdentitiesValueObject).put("clientId", identity.clientId()); ManagedServiceIdentityUserAssignedIdentities userAssignedIdentitiesValue = new JacksonAdapter() .deserialize( mapper.writerWithDefaultPrettyPrinter().writeValueAsString(userAssignedIdentitiesValueObject), ManagedServiceIdentityUserAssignedIdentities.class, SerializerEncoding.JSON); Map<String, ManagedServiceIdentityUserAssignedIdentities> userAssignedIdentities =
new HashMap<>(); userAssignedIdentities.put(identity.id(), userAssignedIdentitiesValue); ManagedServiceIdentity serviceIdentity = new ManagedServiceIdentity(); serviceIdentity.withType(ResourceIdentityType.USER_ASSIGNED); serviceIdentity.withUserAssignedIdentities(userAssignedIdentities); return serviceIdentity; } }
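Note on the helpers above: createKeyVaultSecret opens a BufferedReader over the bundled test.certificate resource but never closes it, so the file handle leaks if readLine() throws. A minimal sketch of the same read using try-with-resources, assuming only the standard java.io classes the test already imports:

// Hypothetical variant of the read in createKeyVaultSecret: the reader is
// closed automatically whether readLine() succeeds or throws.
String secretValue;
try (BufferedReader buff = new BufferedReader(new FileReader(new File(
        getClass().getClassLoader().getResource("test.certificate").getFile())))) {
    secretValue = buff.readLine();
}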
Please refactor this instruction's code into a new method.
private void exec() { int i; int j; int cpIndex; FunctionCallCPEntry funcCallCPEntry; FunctionRefCPEntry funcRefCPEntry; TypeRefCPEntry typeRefCPEntry; FunctionInfo functionInfo; InstructionCALL callIns; boolean debugEnabled = programFile.getDebugger().isDebugEnabled(); StackFrame currentSF, callersSF; int callersRetRegIndex; while (ip >= 0 && ip < code.length && controlStack.currentFrame != null) { if (debugEnabled) { debug(); } Instruction instruction = code[ip]; int opcode = instruction.getOpcode(); int[] operands = instruction.getOperands(); ip++; StackFrame sf = controlStack.currentFrame; switch (opcode) { case InstructionCodes.ICONST: cpIndex = operands[0]; i = operands[1]; sf.longRegs[i] = ((IntegerCPEntry) constPool[cpIndex]).getValue(); break; case InstructionCodes.FCONST: cpIndex = operands[0]; i = operands[1]; sf.doubleRegs[i] = ((FloatCPEntry) constPool[cpIndex]).getValue(); break; case InstructionCodes.SCONST: cpIndex = operands[0]; i = operands[1]; sf.stringRegs[i] = ((StringCPEntry) constPool[cpIndex]).getValue(); break; case InstructionCodes.ICONST_0: i = operands[0]; sf.longRegs[i] = 0; break; case InstructionCodes.ICONST_1: i = operands[0]; sf.longRegs[i] = 1; break; case InstructionCodes.ICONST_2: i = operands[0]; sf.longRegs[i] = 2; break; case InstructionCodes.ICONST_3: i = operands[0]; sf.longRegs[i] = 3; break; case InstructionCodes.ICONST_4: i = operands[0]; sf.longRegs[i] = 4; break; case InstructionCodes.ICONST_5: i = operands[0]; sf.longRegs[i] = 5; break; case InstructionCodes.FCONST_0: i = operands[0]; sf.doubleRegs[i] = 0; break; case InstructionCodes.FCONST_1: i = operands[0]; sf.doubleRegs[i] = 1; break; case InstructionCodes.FCONST_2: i = operands[0]; sf.doubleRegs[i] = 2; break; case InstructionCodes.FCONST_3: i = operands[0]; sf.doubleRegs[i] = 3; break; case InstructionCodes.FCONST_4: i = operands[0]; sf.doubleRegs[i] = 4; break; case InstructionCodes.FCONST_5: i = operands[0]; sf.doubleRegs[i] = 5; break; case InstructionCodes.BCONST_0: i = operands[0]; sf.intRegs[i] = 0; break; case InstructionCodes.BCONST_1: i = operands[0]; sf.intRegs[i] = 1; break; case InstructionCodes.RCONST_NULL: i = operands[0]; sf.refRegs[i] = null; break; case InstructionCodes.IMOVE: case InstructionCodes.FMOVE: case InstructionCodes.SMOVE: case InstructionCodes.BMOVE: case InstructionCodes.LMOVE: case InstructionCodes.RMOVE: case InstructionCodes.IALOAD: case InstructionCodes.FALOAD: case InstructionCodes.SALOAD: case InstructionCodes.BALOAD: case InstructionCodes.LALOAD: case InstructionCodes.RALOAD: case InstructionCodes.JSONALOAD: case InstructionCodes.IGLOAD: case InstructionCodes.FGLOAD: case InstructionCodes.SGLOAD: case InstructionCodes.BGLOAD: case InstructionCodes.LGLOAD: case InstructionCodes.RGLOAD: case InstructionCodes.IFIELDLOAD: case InstructionCodes.FFIELDLOAD: case InstructionCodes.SFIELDLOAD: case InstructionCodes.BFIELDLOAD: case InstructionCodes.LFIELDLOAD: case InstructionCodes.RFIELDLOAD: case InstructionCodes.MAPLOAD: case InstructionCodes.JSONLOAD: case InstructionCodes.ENUMERATORLOAD: execLoadOpcodes(sf, opcode, operands); break; case InstructionCodes.ISTORE: case InstructionCodes.FSTORE: case InstructionCodes.SSTORE: case InstructionCodes.BSTORE: case InstructionCodes.LSTORE: case InstructionCodes.RSTORE: case InstructionCodes.IASTORE: case InstructionCodes.FASTORE: case InstructionCodes.SASTORE: case InstructionCodes.BASTORE: case InstructionCodes.LASTORE: case InstructionCodes.RASTORE: case InstructionCodes.JSONASTORE: case 
InstructionCodes.IGSTORE: case InstructionCodes.FGSTORE: case InstructionCodes.SGSTORE: case InstructionCodes.BGSTORE: case InstructionCodes.LGSTORE: case InstructionCodes.RGSTORE: case InstructionCodes.IFIELDSTORE: case InstructionCodes.FFIELDSTORE: case InstructionCodes.SFIELDSTORE: case InstructionCodes.BFIELDSTORE: case InstructionCodes.LFIELDSTORE: case InstructionCodes.RFIELDSTORE: case InstructionCodes.MAPSTORE: case InstructionCodes.JSONSTORE: execStoreOpcodes(sf, opcode, operands); break; case InstructionCodes.IADD: case InstructionCodes.FADD: case InstructionCodes.SADD: case InstructionCodes.XMLADD: case InstructionCodes.ISUB: case InstructionCodes.FSUB: case InstructionCodes.IMUL: case InstructionCodes.FMUL: case InstructionCodes.IDIV: case InstructionCodes.FDIV: case InstructionCodes.IMOD: case InstructionCodes.FMOD: case InstructionCodes.INEG: case InstructionCodes.FNEG: case InstructionCodes.BNOT: case InstructionCodes.IEQ: case InstructionCodes.FEQ: case InstructionCodes.SEQ: case InstructionCodes.BEQ: case InstructionCodes.REQ: case InstructionCodes.TEQ: case InstructionCodes.INE: case InstructionCodes.FNE: case InstructionCodes.SNE: case InstructionCodes.BNE: case InstructionCodes.RNE: case InstructionCodes.TNE: execBinaryOpCodes(sf, opcode, operands); break; case InstructionCodes.LENGTHOF: i = operands[0]; cpIndex = operands[1]; j = operands[2]; typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; int typeTag = typeRefCPEntry.getType().getTag(); if (typeTag == TypeTags.STRING_TAG) { String value = sf.stringRegs[i]; if (value == null) { handleNullRefError(); } else { sf.longRegs[j] = value.length(); } break; } else if (typeTag == TypeTags.BLOB_TAG) { sf.longRegs[j] = sf.byteRegs[i].length; break; } BValue entity = sf.refRegs[i]; if (entity == null) { handleNullRefError(); break; } if (typeTag == TypeTags.XML_TAG) { sf.longRegs[j] = ((BXML) entity).length(); break; } else if (entity instanceof BJSON) { if (JSONUtils.isJSONArray((BJSON) entity)) { sf.longRegs[j] = JSONUtils.getJSONArrayLength((BJSON) sf.refRegs[i]); } else { sf.longRegs[j] = -1; } break; } else if (typeTag == TypeTags.MAP_TAG) { sf.longRegs[j] = ((BMap) entity).size(); break; } BNewArray newArray = (BNewArray) entity; sf.longRegs[j] = newArray.size(); break; case InstructionCodes.TYPELOAD: cpIndex = operands[0]; j = operands[1]; TypeRefCPEntry typeEntry = (TypeRefCPEntry) constPool[cpIndex]; sf.refRegs[j] = new BTypeValue(typeEntry.getType()); break; case InstructionCodes.TYPEOF: i = operands[0]; j = operands[1]; if (sf.refRegs[i] == null) { handleNullRefError(); break; } sf.refRegs[j] = new BTypeValue(sf.refRegs[i].getType()); break; case InstructionCodes.IGT: case InstructionCodes.FGT: case InstructionCodes.IGE: case InstructionCodes.FGE: case InstructionCodes.ILT: case InstructionCodes.FLT: case InstructionCodes.ILE: case InstructionCodes.FLE: case InstructionCodes.REQ_NULL: case InstructionCodes.RNE_NULL: case InstructionCodes.BR_TRUE: case InstructionCodes.BR_FALSE: case InstructionCodes.GOTO: case InstructionCodes.HALT: case InstructionCodes.SEQ_NULL: case InstructionCodes.SNE_NULL: execCmpAndBranchOpcodes(sf, opcode, operands); break; case InstructionCodes.TR_RETRY: i = operands[0]; j = operands[1]; retryTransaction(i, j); break; case InstructionCodes.CALL: callIns = (InstructionCALL) instruction; invokeCallableUnit(callIns.functionInfo, callIns.argRegs, callIns.retRegs); break; case InstructionCodes.NCALL: callIns = (InstructionCALL) instruction; invokeNativeFunction(callIns.functionInfo, 
callIns.argRegs, callIns.retRegs); break; case InstructionCodes.ACALL: InstructionACALL acallIns = (InstructionACALL) instruction; invokeAction(acallIns.actionName, acallIns.argRegs, acallIns.retRegs); break; case InstructionCodes.TCALL: InstructionTCALL tcallIns = (InstructionTCALL) instruction; invokeCallableUnit(tcallIns.transformerInfo, tcallIns.argRegs, tcallIns.retRegs); break; case InstructionCodes.TR_BEGIN: i = operands[0]; j = operands[1]; beginTransaction(i, j); break; case InstructionCodes.TR_END: i = operands[0]; endTransaction(i); break; case InstructionCodes.WRKSEND: InstructionWRKSendReceive wrkSendIns = (InstructionWRKSendReceive) instruction; handleWorkerSend(wrkSendIns.dataChannelInfo, wrkSendIns.types, wrkSendIns.regs); break; case InstructionCodes.WRKRECEIVE: InstructionWRKSendReceive wrkReceiveIns = (InstructionWRKSendReceive) instruction; handleWorkerReceive(wrkReceiveIns.dataChannelInfo, wrkReceiveIns.types, wrkReceiveIns.regs); break; case InstructionCodes.FORKJOIN: InstructionFORKJOIN forkJoinIns = (InstructionFORKJOIN) instruction; invokeForkJoin(forkJoinIns); break; case InstructionCodes.WRKSTART: startWorkers(); break; case InstructionCodes.WRKRETURN: handleWorkerReturn(); break; case InstructionCodes.THROW: i = operands[0]; if (i >= 0) { BStruct error = (BStruct) sf.refRegs[i]; if (error == null) { handleNullRefError(); break; } BLangVMErrors.setStackTrace(context, ip, error); context.setError(error); } handleError(); break; case InstructionCodes.ERRSTORE: i = operands[0]; sf.refRegs[i] = context.getError(); context.setError(null); break; case InstructionCodes.FPCALL: i = operands[0]; if (sf.refRegs[i] == null) { handleNullRefError(); break; } cpIndex = operands[1]; funcCallCPEntry = (FunctionCallCPEntry) constPool[cpIndex]; funcRefCPEntry = ((BFunctionPointer) sf.refRegs[i]).value(); functionInfo = funcRefCPEntry.getFunctionInfo(); if (functionInfo.isNative()) { invokeNativeFunction(functionInfo, funcCallCPEntry.getArgRegs(), funcCallCPEntry.getRetRegs()); } else { invokeCallableUnit(functionInfo, funcCallCPEntry.getArgRegs(), funcCallCPEntry.getRetRegs()); } break; case InstructionCodes.FPLOAD: i = operands[0]; j = operands[1]; funcRefCPEntry = (FunctionRefCPEntry) constPool[i]; sf.refRegs[j] = new BFunctionPointer(funcRefCPEntry); break; case InstructionCodes.I2ANY: case InstructionCodes.F2ANY: case InstructionCodes.S2ANY: case InstructionCodes.B2ANY: case InstructionCodes.L2ANY: case InstructionCodes.ANY2I: case InstructionCodes.ANY2F: case InstructionCodes.ANY2S: case InstructionCodes.ANY2B: case InstructionCodes.ANY2L: case InstructionCodes.ANY2JSON: case InstructionCodes.ANY2XML: case InstructionCodes.ANY2MAP: case InstructionCodes.ANY2TYPE: case InstructionCodes.ANY2E: case InstructionCodes.ANY2T: case InstructionCodes.ANY2C: case InstructionCodes.NULL2JSON: case InstructionCodes.CHECKCAST: case InstructionCodes.B2JSON: case InstructionCodes.JSON2I: case InstructionCodes.JSON2F: case InstructionCodes.JSON2S: case InstructionCodes.JSON2B: case InstructionCodes.NULL2S: execTypeCastOpcodes(sf, opcode, operands); break; case InstructionCodes.I2F: case InstructionCodes.I2S: case InstructionCodes.I2B: case InstructionCodes.I2JSON: case InstructionCodes.F2I: case InstructionCodes.F2S: case InstructionCodes.F2B: case InstructionCodes.F2JSON: case InstructionCodes.S2I: case InstructionCodes.S2F: case InstructionCodes.S2B: case InstructionCodes.S2JSON: case InstructionCodes.B2I: case InstructionCodes.B2F: case InstructionCodes.B2S: case InstructionCodes.DT2XML: 
case InstructionCodes.DT2JSON: case InstructionCodes.T2MAP: case InstructionCodes.T2JSON: case InstructionCodes.MAP2T: case InstructionCodes.JSON2T: case InstructionCodes.XMLATTRS2MAP: case InstructionCodes.S2XML: case InstructionCodes.S2JSONX: case InstructionCodes.XML2S: execTypeConversionOpcodes(sf, opcode, operands); break; case InstructionCodes.INEWARRAY: i = operands[0]; sf.refRegs[i] = new BIntArray(); break; case InstructionCodes.ARRAYLEN: i = operands[0]; j = operands[1]; BValue value = sf.refRegs[i]; if (value == null) { handleNullRefError(); break; } if (value.getType().getTag() == TypeTags.JSON_TAG) { sf.longRegs[j] = ((BJSON) value).value().size(); break; } sf.longRegs[j] = ((BNewArray) value).size(); break; case InstructionCodes.FNEWARRAY: i = operands[0]; sf.refRegs[i] = new BFloatArray(); break; case InstructionCodes.SNEWARRAY: i = operands[0]; sf.refRegs[i] = new BStringArray(); break; case InstructionCodes.BNEWARRAY: i = operands[0]; sf.refRegs[i] = new BBooleanArray(); break; case InstructionCodes.LNEWARRAY: i = operands[0]; sf.refRegs[i] = new BBlobArray(); break; case InstructionCodes.RNEWARRAY: i = operands[0]; cpIndex = operands[1]; typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; sf.refRegs[i] = new BRefValueArray(typeRefCPEntry.getType()); break; case InstructionCodes.JSONNEWARRAY: i = operands[0]; j = operands[1]; StringJoiner stringJoiner = new StringJoiner(",", "[", "]"); for (int index = 0; index < sf.longRegs[j]; index++) { stringJoiner.add(null); } sf.refRegs[i] = new BJSON(stringJoiner.toString()); break; case InstructionCodes.NEWSTRUCT: createNewStruct(operands, sf); break; case InstructionCodes.NEWCONNECTOR: createNewConnector(operands, sf); break; case InstructionCodes.NEWMAP: i = operands[0]; sf.refRegs[i] = new BMap<String, BRefType>(); break; case InstructionCodes.NEWJSON: i = operands[0]; cpIndex = operands[1]; typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; sf.refRegs[i] = new BJSON("{}", typeRefCPEntry.getType()); break; case InstructionCodes.NEWDATATABLE: i = operands[0]; sf.refRegs[i] = new BDataTable(null); break; case InstructionCodes.NEW_INT_RANGE: createNewIntRange(operands, sf); break; case InstructionCodes.IRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.longRegs[callersRetRegIndex] = currentSF.longRegs[j]; break; case InstructionCodes.FRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.doubleRegs[callersRetRegIndex] = currentSF.doubleRegs[j]; break; case InstructionCodes.SRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.stringRegs[callersRetRegIndex] = currentSF.stringRegs[j]; break; case InstructionCodes.BRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.intRegs[callersRetRegIndex] = currentSF.intRegs[j]; break; case InstructionCodes.LRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.byteRegs[callersRetRegIndex] = 
currentSF.byteRegs[j]; break; case InstructionCodes.RRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.refRegs[callersRetRegIndex] = currentSF.refRegs[j]; break; case InstructionCodes.RET: handleReturn(); break; case InstructionCodes.XMLATTRSTORE: case InstructionCodes.XMLATTRLOAD: case InstructionCodes.XML2XMLATTRS: case InstructionCodes.S2QNAME: case InstructionCodes.NEWQNAME: case InstructionCodes.NEWXMLELEMENT: case InstructionCodes.NEWXMLCOMMENT: case InstructionCodes.NEWXMLTEXT: case InstructionCodes.NEWXMLPI: case InstructionCodes.XMLSTORE: case InstructionCodes.XMLLOAD: execXMLOpcodes(sf, opcode, operands); break; case InstructionCodes.ITR_NEW: case InstructionCodes.ITR_NEXT: case InstructionCodes.ITR_HAS_NEXT: execIteratorOperation(sf, instruction); break; default: throw new UnsupportedOperationException(); } } }
cpIndex = operands[1];
private void exec() { int i; int j; int cpIndex; FunctionCallCPEntry funcCallCPEntry; FunctionRefCPEntry funcRefCPEntry; TypeRefCPEntry typeRefCPEntry; FunctionInfo functionInfo; InstructionCALL callIns; boolean debugEnabled = programFile.getDebugger().isDebugEnabled(); StackFrame currentSF, callersSF; int callersRetRegIndex; while (ip >= 0 && ip < code.length && controlStack.currentFrame != null) { if (debugEnabled) { debug(); } Instruction instruction = code[ip]; int opcode = instruction.getOpcode(); int[] operands = instruction.getOperands(); ip++; StackFrame sf = controlStack.currentFrame; switch (opcode) { case InstructionCodes.ICONST: cpIndex = operands[0]; i = operands[1]; sf.longRegs[i] = ((IntegerCPEntry) constPool[cpIndex]).getValue(); break; case InstructionCodes.FCONST: cpIndex = operands[0]; i = operands[1]; sf.doubleRegs[i] = ((FloatCPEntry) constPool[cpIndex]).getValue(); break; case InstructionCodes.SCONST: cpIndex = operands[0]; i = operands[1]; sf.stringRegs[i] = ((StringCPEntry) constPool[cpIndex]).getValue(); break; case InstructionCodes.ICONST_0: i = operands[0]; sf.longRegs[i] = 0; break; case InstructionCodes.ICONST_1: i = operands[0]; sf.longRegs[i] = 1; break; case InstructionCodes.ICONST_2: i = operands[0]; sf.longRegs[i] = 2; break; case InstructionCodes.ICONST_3: i = operands[0]; sf.longRegs[i] = 3; break; case InstructionCodes.ICONST_4: i = operands[0]; sf.longRegs[i] = 4; break; case InstructionCodes.ICONST_5: i = operands[0]; sf.longRegs[i] = 5; break; case InstructionCodes.FCONST_0: i = operands[0]; sf.doubleRegs[i] = 0; break; case InstructionCodes.FCONST_1: i = operands[0]; sf.doubleRegs[i] = 1; break; case InstructionCodes.FCONST_2: i = operands[0]; sf.doubleRegs[i] = 2; break; case InstructionCodes.FCONST_3: i = operands[0]; sf.doubleRegs[i] = 3; break; case InstructionCodes.FCONST_4: i = operands[0]; sf.doubleRegs[i] = 4; break; case InstructionCodes.FCONST_5: i = operands[0]; sf.doubleRegs[i] = 5; break; case InstructionCodes.BCONST_0: i = operands[0]; sf.intRegs[i] = 0; break; case InstructionCodes.BCONST_1: i = operands[0]; sf.intRegs[i] = 1; break; case InstructionCodes.RCONST_NULL: i = operands[0]; sf.refRegs[i] = null; break; case InstructionCodes.IMOVE: case InstructionCodes.FMOVE: case InstructionCodes.SMOVE: case InstructionCodes.BMOVE: case InstructionCodes.LMOVE: case InstructionCodes.RMOVE: case InstructionCodes.IALOAD: case InstructionCodes.FALOAD: case InstructionCodes.SALOAD: case InstructionCodes.BALOAD: case InstructionCodes.LALOAD: case InstructionCodes.RALOAD: case InstructionCodes.JSONALOAD: case InstructionCodes.IGLOAD: case InstructionCodes.FGLOAD: case InstructionCodes.SGLOAD: case InstructionCodes.BGLOAD: case InstructionCodes.LGLOAD: case InstructionCodes.RGLOAD: case InstructionCodes.IFIELDLOAD: case InstructionCodes.FFIELDLOAD: case InstructionCodes.SFIELDLOAD: case InstructionCodes.BFIELDLOAD: case InstructionCodes.LFIELDLOAD: case InstructionCodes.RFIELDLOAD: case InstructionCodes.MAPLOAD: case InstructionCodes.JSONLOAD: case InstructionCodes.ENUMERATORLOAD: execLoadOpcodes(sf, opcode, operands); break; case InstructionCodes.ISTORE: case InstructionCodes.FSTORE: case InstructionCodes.SSTORE: case InstructionCodes.BSTORE: case InstructionCodes.LSTORE: case InstructionCodes.RSTORE: case InstructionCodes.IASTORE: case InstructionCodes.FASTORE: case InstructionCodes.SASTORE: case InstructionCodes.BASTORE: case InstructionCodes.LASTORE: case InstructionCodes.RASTORE: case InstructionCodes.JSONASTORE: case 
InstructionCodes.IGSTORE: case InstructionCodes.FGSTORE: case InstructionCodes.SGSTORE: case InstructionCodes.BGSTORE: case InstructionCodes.LGSTORE: case InstructionCodes.RGSTORE: case InstructionCodes.IFIELDSTORE: case InstructionCodes.FFIELDSTORE: case InstructionCodes.SFIELDSTORE: case InstructionCodes.BFIELDSTORE: case InstructionCodes.LFIELDSTORE: case InstructionCodes.RFIELDSTORE: case InstructionCodes.MAPSTORE: case InstructionCodes.JSONSTORE: execStoreOpcodes(sf, opcode, operands); break; case InstructionCodes.IADD: case InstructionCodes.FADD: case InstructionCodes.SADD: case InstructionCodes.XMLADD: case InstructionCodes.ISUB: case InstructionCodes.FSUB: case InstructionCodes.IMUL: case InstructionCodes.FMUL: case InstructionCodes.IDIV: case InstructionCodes.FDIV: case InstructionCodes.IMOD: case InstructionCodes.FMOD: case InstructionCodes.INEG: case InstructionCodes.FNEG: case InstructionCodes.BNOT: case InstructionCodes.IEQ: case InstructionCodes.FEQ: case InstructionCodes.SEQ: case InstructionCodes.BEQ: case InstructionCodes.REQ: case InstructionCodes.TEQ: case InstructionCodes.INE: case InstructionCodes.FNE: case InstructionCodes.SNE: case InstructionCodes.BNE: case InstructionCodes.RNE: case InstructionCodes.TNE: execBinaryOpCodes(sf, opcode, operands); break; case InstructionCodes.LENGTHOF: calculateLength(operands, sf); break; case InstructionCodes.TYPELOAD: cpIndex = operands[0]; j = operands[1]; TypeRefCPEntry typeEntry = (TypeRefCPEntry) constPool[cpIndex]; sf.refRegs[j] = new BTypeValue(typeEntry.getType()); break; case InstructionCodes.TYPEOF: i = operands[0]; j = operands[1]; if (sf.refRegs[i] == null) { handleNullRefError(); break; } sf.refRegs[j] = new BTypeValue(sf.refRegs[i].getType()); break; case InstructionCodes.IGT: case InstructionCodes.FGT: case InstructionCodes.IGE: case InstructionCodes.FGE: case InstructionCodes.ILT: case InstructionCodes.FLT: case InstructionCodes.ILE: case InstructionCodes.FLE: case InstructionCodes.REQ_NULL: case InstructionCodes.RNE_NULL: case InstructionCodes.BR_TRUE: case InstructionCodes.BR_FALSE: case InstructionCodes.GOTO: case InstructionCodes.HALT: case InstructionCodes.SEQ_NULL: case InstructionCodes.SNE_NULL: execCmpAndBranchOpcodes(sf, opcode, operands); break; case InstructionCodes.TR_RETRY: i = operands[0]; j = operands[1]; retryTransaction(i, j); break; case InstructionCodes.CALL: callIns = (InstructionCALL) instruction; invokeCallableUnit(callIns.functionInfo, callIns.argRegs, callIns.retRegs); break; case InstructionCodes.NCALL: callIns = (InstructionCALL) instruction; invokeNativeFunction(callIns.functionInfo, callIns.argRegs, callIns.retRegs); break; case InstructionCodes.ACALL: InstructionACALL acallIns = (InstructionACALL) instruction; invokeAction(acallIns.actionName, acallIns.argRegs, acallIns.retRegs); break; case InstructionCodes.TCALL: InstructionTCALL tcallIns = (InstructionTCALL) instruction; invokeCallableUnit(tcallIns.transformerInfo, tcallIns.argRegs, tcallIns.retRegs); break; case InstructionCodes.TR_BEGIN: i = operands[0]; j = operands[1]; beginTransaction(i, j); break; case InstructionCodes.TR_END: i = operands[0]; endTransaction(i); break; case InstructionCodes.WRKSEND: InstructionWRKSendReceive wrkSendIns = (InstructionWRKSendReceive) instruction; handleWorkerSend(wrkSendIns.dataChannelInfo, wrkSendIns.types, wrkSendIns.regs); break; case InstructionCodes.WRKRECEIVE: InstructionWRKSendReceive wrkReceiveIns = (InstructionWRKSendReceive) instruction; handleWorkerReceive(wrkReceiveIns.dataChannelInfo, 
wrkReceiveIns.types, wrkReceiveIns.regs); break; case InstructionCodes.FORKJOIN: InstructionFORKJOIN forkJoinIns = (InstructionFORKJOIN) instruction; invokeForkJoin(forkJoinIns); break; case InstructionCodes.WRKSTART: startWorkers(); break; case InstructionCodes.WRKRETURN: handleWorkerReturn(); break; case InstructionCodes.THROW: i = operands[0]; if (i >= 0) { BStruct error = (BStruct) sf.refRegs[i]; if (error == null) { handleNullRefError(); break; } BLangVMErrors.setStackTrace(context, ip, error); context.setError(error); } handleError(); break; case InstructionCodes.ERRSTORE: i = operands[0]; sf.refRegs[i] = context.getError(); context.setError(null); break; case InstructionCodes.FPCALL: i = operands[0]; if (sf.refRegs[i] == null) { handleNullRefError(); break; } cpIndex = operands[1]; funcCallCPEntry = (FunctionCallCPEntry) constPool[cpIndex]; funcRefCPEntry = ((BFunctionPointer) sf.refRegs[i]).value(); functionInfo = funcRefCPEntry.getFunctionInfo(); if (functionInfo.isNative()) { invokeNativeFunction(functionInfo, funcCallCPEntry.getArgRegs(), funcCallCPEntry.getRetRegs()); } else { invokeCallableUnit(functionInfo, funcCallCPEntry.getArgRegs(), funcCallCPEntry.getRetRegs()); } break; case InstructionCodes.FPLOAD: i = operands[0]; j = operands[1]; funcRefCPEntry = (FunctionRefCPEntry) constPool[i]; sf.refRegs[j] = new BFunctionPointer(funcRefCPEntry); break; case InstructionCodes.I2ANY: case InstructionCodes.F2ANY: case InstructionCodes.S2ANY: case InstructionCodes.B2ANY: case InstructionCodes.L2ANY: case InstructionCodes.ANY2I: case InstructionCodes.ANY2F: case InstructionCodes.ANY2S: case InstructionCodes.ANY2B: case InstructionCodes.ANY2L: case InstructionCodes.ANY2JSON: case InstructionCodes.ANY2XML: case InstructionCodes.ANY2MAP: case InstructionCodes.ANY2TYPE: case InstructionCodes.ANY2E: case InstructionCodes.ANY2T: case InstructionCodes.ANY2C: case InstructionCodes.NULL2JSON: case InstructionCodes.CHECKCAST: case InstructionCodes.B2JSON: case InstructionCodes.JSON2I: case InstructionCodes.JSON2F: case InstructionCodes.JSON2S: case InstructionCodes.JSON2B: case InstructionCodes.NULL2S: execTypeCastOpcodes(sf, opcode, operands); break; case InstructionCodes.I2F: case InstructionCodes.I2S: case InstructionCodes.I2B: case InstructionCodes.I2JSON: case InstructionCodes.F2I: case InstructionCodes.F2S: case InstructionCodes.F2B: case InstructionCodes.F2JSON: case InstructionCodes.S2I: case InstructionCodes.S2F: case InstructionCodes.S2B: case InstructionCodes.S2JSON: case InstructionCodes.B2I: case InstructionCodes.B2F: case InstructionCodes.B2S: case InstructionCodes.DT2XML: case InstructionCodes.DT2JSON: case InstructionCodes.T2MAP: case InstructionCodes.T2JSON: case InstructionCodes.MAP2T: case InstructionCodes.JSON2T: case InstructionCodes.XMLATTRS2MAP: case InstructionCodes.S2XML: case InstructionCodes.S2JSONX: case InstructionCodes.XML2S: execTypeConversionOpcodes(sf, opcode, operands); break; case InstructionCodes.INEWARRAY: i = operands[0]; sf.refRegs[i] = new BIntArray(); break; case InstructionCodes.ARRAYLEN: i = operands[0]; j = operands[1]; BValue value = sf.refRegs[i]; if (value == null) { handleNullRefError(); break; } if (value.getType().getTag() == TypeTags.JSON_TAG) { sf.longRegs[j] = ((BJSON) value).value().size(); break; } sf.longRegs[j] = ((BNewArray) value).size(); break; case InstructionCodes.FNEWARRAY: i = operands[0]; sf.refRegs[i] = new BFloatArray(); break; case InstructionCodes.SNEWARRAY: i = operands[0]; sf.refRegs[i] = new BStringArray(); break; case 
InstructionCodes.BNEWARRAY: i = operands[0]; sf.refRegs[i] = new BBooleanArray(); break; case InstructionCodes.LNEWARRAY: i = operands[0]; sf.refRegs[i] = new BBlobArray(); break; case InstructionCodes.RNEWARRAY: i = operands[0]; cpIndex = operands[1]; typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; sf.refRegs[i] = new BRefValueArray(typeRefCPEntry.getType()); break; case InstructionCodes.JSONNEWARRAY: i = operands[0]; j = operands[1]; StringJoiner stringJoiner = new StringJoiner(",", "[", "]"); for (int index = 0; index < sf.longRegs[j]; index++) { stringJoiner.add(null); } sf.refRegs[i] = new BJSON(stringJoiner.toString()); break; case InstructionCodes.NEWSTRUCT: createNewStruct(operands, sf); break; case InstructionCodes.NEWCONNECTOR: createNewConnector(operands, sf); break; case InstructionCodes.NEWMAP: i = operands[0]; sf.refRegs[i] = new BMap<String, BRefType>(); break; case InstructionCodes.NEWJSON: i = operands[0]; cpIndex = operands[1]; typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; sf.refRegs[i] = new BJSON("{}", typeRefCPEntry.getType()); break; case InstructionCodes.NEWDATATABLE: i = operands[0]; sf.refRegs[i] = new BDataTable(null); break; case InstructionCodes.NEW_INT_RANGE: createNewIntRange(operands, sf); break; case InstructionCodes.IRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.longRegs[callersRetRegIndex] = currentSF.longRegs[j]; break; case InstructionCodes.FRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.doubleRegs[callersRetRegIndex] = currentSF.doubleRegs[j]; break; case InstructionCodes.SRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.stringRegs[callersRetRegIndex] = currentSF.stringRegs[j]; break; case InstructionCodes.BRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.intRegs[callersRetRegIndex] = currentSF.intRegs[j]; break; case InstructionCodes.LRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.byteRegs[callersRetRegIndex] = currentSF.byteRegs[j]; break; case InstructionCodes.RRET: i = operands[0]; j = operands[1]; currentSF = controlStack.currentFrame; callersSF = controlStack.currentFrame.prevStackFrame; callersRetRegIndex = currentSF.retRegIndexes[i]; callersSF.refRegs[callersRetRegIndex] = currentSF.refRegs[j]; break; case InstructionCodes.RET: handleReturn(); break; case InstructionCodes.XMLATTRSTORE: case InstructionCodes.XMLATTRLOAD: case InstructionCodes.XML2XMLATTRS: case InstructionCodes.S2QNAME: case InstructionCodes.NEWQNAME: case InstructionCodes.NEWXMLELEMENT: case InstructionCodes.NEWXMLCOMMENT: case InstructionCodes.NEWXMLTEXT: case InstructionCodes.NEWXMLPI: case InstructionCodes.XMLSTORE: case InstructionCodes.XMLLOAD: execXMLOpcodes(sf, opcode, operands); break; case InstructionCodes.ITR_NEW: case InstructionCodes.ITR_NEXT: case InstructionCodes.ITR_HAS_NEXT: execIteratorOperation(sf, instruction); break; default: throw new 
UnsupportedOperationException(); } } }
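The dispatch loop above is a textbook register-machine interpreter: every instruction carries an opcode plus operand indexes into per-type register arrays, and one switch fans out to the handlers. Below is a minimal, runnable sketch of that shape; the class, opcode constants, and instruction layout are illustrative stand-ins, not the Ballerina runtime API.

// MiniRegisterVM: a toy register-based dispatch loop in the style of exec() above.
// All names here are hypothetical; only the fetch/advance/switch shape is the point.
final class MiniRegisterVM {
    static final int ICONST = 0, IADD = 1, HALT = 2;

    private final long[] longRegs = new long[16]; // one typed register file, like sf.longRegs
    private int ip;                               // instruction pointer, like BLangVM.ip

    long run(int[][] code, int resultReg) {
        ip = 0;
        while (ip >= 0) {                         // HALT sets ip to -1, ending the loop
            int[] ins = code[ip++];               // fetch the instruction, then advance ip
            switch (ins[0]) {
                case ICONST:                      // layout: {ICONST, immediate, dstReg}
                    longRegs[ins[2]] = ins[1];
                    break;
                case IADD:                        // layout: {IADD, srcA, srcB, dstReg}
                    longRegs[ins[3]] = longRegs[ins[1]] + longRegs[ins[2]];
                    break;
                case HALT:
                    ip = -1;
                    break;
                default:
                    throw new UnsupportedOperationException("opcode " + ins[0]);
            }
        }
        return longRegs[resultReg];
    }

    public static void main(String[] args) {
        int[][] code = {
            {ICONST, 40, 0},   // r0 = 40
            {ICONST, 2, 1},    // r1 = 2
            {IADD, 0, 1, 2},   // r2 = r0 + r1
            {HALT},
        };
        System.out.println(new MiniRegisterVM().run(code, 2)); // prints 42
    }
}

Branching in this model is just an assignment to ip before the next fetch, which is exactly how BR_TRUE, GOTO, and HALT are implemented in execCmpAndBranchOpcodes below.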
class BLangVM { private static final String JOIN_TYPE_SOME = "some"; private static final Logger logger = LoggerFactory.getLogger(BLangVM.class); private Context context; private ControlStack controlStack; private ProgramFile programFile; private ConstantPoolEntry[] constPool; private int ip = 0; private Instruction[] code; private StructureType globalMemBlock; public BLangVM(ProgramFile programFile) { this.programFile = programFile; this.globalMemBlock = programFile.getGlobalMemoryBlock(); } private void traceCode(PackageInfo packageInfo) { PrintStream printStream = System.out; for (int i = 0; i < code.length; i++) { printStream.println(i + ": " + code[i].toString()); } } public void run(Context ctx) { StackFrame currentFrame = ctx.getControlStack().getCurrentFrame(); this.constPool = currentFrame.packageInfo.getConstPoolEntries(); this.code = currentFrame.packageInfo.getInstructions(); this.context = ctx; this.controlStack = context.getControlStack(); this.ip = context.getStartIP(); if (context.getError() != null) { handleError(); } else if (isWaitingOnNonBlockingAction()) { BType[] retTypes = context.nonBlockingContext.actionInfo.getRetParamTypes(); StackFrame calleeSF = controlStack.popFrame(); this.constPool = controlStack.currentFrame.packageInfo.getConstPoolEntries(); this.code = controlStack.currentFrame.packageInfo.getInstructions(); handleReturnFromNativeCallableUnit(controlStack.currentFrame, context.nonBlockingContext.retRegs, calleeSF.returnValues, retTypes); context.nonBlockingContext = null; } try { exec(); } catch (Throwable e) { String message; if (e.getMessage() == null) { message = "unknown error occurred"; } else { message = e.getMessage(); } context.setError(BLangVMErrors.createError(context, ip, message)); handleError(); } finally { Debugger debugger = programFile.getDebugger(); if (debugger.isDebugEnabled() && debugger.isClientSessionActive() && context.getDebugContext().isAtive()) { context.getDebugContext().setActive(false); debugger.releaseDebugSessionLock(); } if (!isWaitingOnNonBlockingAction() || context.getError() != null) { ctx.endTrackWorker(); } } } public void execWorker(Context context, int startIP) { context.setStartIP(startIP); Debugger debugger = programFile.getDebugger(); if (debugger.isDebugEnabled() && debugger.isClientSessionActive()) { DebuggerUtil.initDebugContext(context, debugger); } run(context); } /** * Act as a virtual CPU. */ private void execCmpAndBranchOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; switch (opcode) { case InstructionCodes.IGT: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] > sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FGT: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] > sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.IGE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] >= sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FGE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] >= sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.ILT: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] < sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FLT: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] < sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.ILE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] <= sf.longRegs[j] ? 
1 : 0; break; case InstructionCodes.FLE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] <= sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.REQ_NULL: i = operands[0]; j = operands[1]; if (sf.refRegs[i] == null) { sf.intRegs[j] = 1; } else { sf.intRegs[j] = 0; } break; case InstructionCodes.RNE_NULL: i = operands[0]; j = operands[1]; if (sf.refRegs[i] != null) { sf.intRegs[j] = 1; } else { sf.intRegs[j] = 0; } break; case InstructionCodes.SEQ_NULL: i = operands[0]; j = operands[1]; if (sf.stringRegs[i] == null) { sf.intRegs[j] = 1; } else { sf.intRegs[j] = 0; } break; case InstructionCodes.SNE_NULL: i = operands[0]; j = operands[1]; if (sf.stringRegs[i] != null) { sf.intRegs[j] = 1; } else { sf.intRegs[j] = 0; } break; case InstructionCodes.BR_TRUE: i = operands[0]; j = operands[1]; if (sf.intRegs[i] == 1) { ip = j; } break; case InstructionCodes.BR_FALSE: i = operands[0]; j = operands[1]; if (sf.intRegs[i] == 0) { ip = j; } break; case InstructionCodes.GOTO: i = operands[0]; ip = i; break; case InstructionCodes.HALT: ip = -1; break; default: throw new UnsupportedOperationException(); } } private void execLoadOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int lvIndex; int fieldIndex; BIntArray bIntArray; BFloatArray bFloatArray; BStringArray bStringArray; BBooleanArray bBooleanArray; BBlobArray bBlobArray; BRefValueArray bArray; StructureType structureType; BMap<String, BRefType> bMap; BJSON jsonVal; switch (opcode) { case InstructionCodes.IMOVE: lvIndex = operands[0]; i = operands[1]; sf.longRegs[i] = sf.longRegs[lvIndex]; break; case InstructionCodes.FMOVE: lvIndex = operands[0]; i = operands[1]; sf.doubleRegs[i] = sf.doubleRegs[lvIndex]; break; case InstructionCodes.SMOVE: lvIndex = operands[0]; i = operands[1]; sf.stringRegs[i] = sf.stringRegs[lvIndex]; break; case InstructionCodes.BMOVE: lvIndex = operands[0]; i = operands[1]; sf.intRegs[i] = sf.intRegs[lvIndex]; break; case InstructionCodes.LMOVE: lvIndex = operands[0]; i = operands[1]; sf.byteRegs[i] = sf.byteRegs[lvIndex]; break; case InstructionCodes.RMOVE: lvIndex = operands[0]; i = operands[1]; sf.refRegs[i] = sf.refRegs[lvIndex]; break; case InstructionCodes.IALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bIntArray = (BIntArray) sf.refRegs[i]; if (bIntArray == null) { handleNullRefError(); break; } try { sf.longRegs[k] = bIntArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.FALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bFloatArray = (BFloatArray) sf.refRegs[i]; if (bFloatArray == null) { handleNullRefError(); break; } try { sf.doubleRegs[k] = bFloatArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.SALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bStringArray = (BStringArray) sf.refRegs[i]; if (bStringArray == null) { handleNullRefError(); break; } try { sf.stringRegs[k] = bStringArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.BALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bBooleanArray = (BBooleanArray) sf.refRegs[i]; if (bBooleanArray == null) { handleNullRefError(); break; } try { sf.intRegs[k] = bBooleanArray.get(sf.longRegs[j]); } catch (Exception e) { 
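// Out-of-range indexes on the boolean-array load are converted into Ballerina error structs and dispatched through handleError(), mirroring the other *ALOAD cases.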
context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.LALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bBlobArray = (BBlobArray) sf.refRegs[i]; if (bBlobArray == null) { handleNullRefError(); break; } try { sf.byteRegs[k] = bBlobArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.RALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bArray = (BRefValueArray) sf.refRegs[i]; if (bArray == null) { handleNullRefError(); break; } try { sf.refRegs[k] = bArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.JSONALOAD: i = operands[0]; j = operands[1]; k = operands[2]; jsonVal = (BJSON) sf.refRegs[i]; if (jsonVal == null) { handleNullRefError(); break; } try { sf.refRegs[k] = JSONUtils.getArrayElement(jsonVal, sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.IGLOAD: i = operands[0]; j = operands[1]; sf.longRegs[j] = globalMemBlock.getIntField(i); break; case InstructionCodes.FGLOAD: i = operands[0]; j = operands[1]; sf.doubleRegs[j] = globalMemBlock.getFloatField(i); break; case InstructionCodes.SGLOAD: i = operands[0]; j = operands[1]; sf.stringRegs[j] = globalMemBlock.getStringField(i); break; case InstructionCodes.BGLOAD: i = operands[0]; j = operands[1]; sf.intRegs[j] = globalMemBlock.getBooleanField(i); break; case InstructionCodes.LGLOAD: i = operands[0]; j = operands[1]; sf.byteRegs[j] = globalMemBlock.getBlobField(i); break; case InstructionCodes.RGLOAD: i = operands[0]; j = operands[1]; sf.refRegs[j] = globalMemBlock.getRefField(i); break; case InstructionCodes.IFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.longRegs[j] = structureType.getIntField(fieldIndex); break; case InstructionCodes.FFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.doubleRegs[j] = structureType.getFloatField(fieldIndex); break; case InstructionCodes.SFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.stringRegs[j] = structureType.getStringField(fieldIndex); break; case InstructionCodes.BFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.intRegs[j] = structureType.getBooleanField(fieldIndex); break; case InstructionCodes.LFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.byteRegs[j] = structureType.getBlobField(fieldIndex); break; case InstructionCodes.RFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.refRegs[j] = structureType.getRefField(fieldIndex); break; case InstructionCodes.MAPLOAD: i = operands[0]; j = operands[1]; 
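// MAPLOAD: refRegs[i] holds the map and stringRegs[j] the key; the looked-up value lands in refRegs[k]. A null map is routed through handleNullRefError(), the same convention as every *LOAD opcode above.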
k = operands[2]; bMap = (BMap<String, BRefType>) sf.refRegs[i]; if (bMap == null) { handleNullRefError(); break; } sf.refRegs[k] = bMap.get(sf.stringRegs[j]); break; case InstructionCodes.JSONLOAD: i = operands[0]; j = operands[1]; k = operands[2]; jsonVal = (BJSON) sf.refRegs[i]; if (jsonVal == null) { handleNullRefError(); break; } sf.refRegs[k] = JSONUtils.getElement(jsonVal, sf.stringRegs[j]); break; case InstructionCodes.ENUMERATORLOAD: i = operands[0]; j = operands[1]; k = operands[2]; TypeRefCPEntry typeRefCPEntry = (TypeRefCPEntry) constPool[i]; BEnumType enumType = (BEnumType) typeRefCPEntry.getType(); sf.refRegs[k] = enumType.getEnumerator(j); break; default: throw new UnsupportedOperationException(); } } private void execStoreOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int lvIndex; int fieldIndex; BIntArray bIntArray; BFloatArray bFloatArray; BStringArray bStringArray; BBooleanArray bBooleanArray; BBlobArray bBlobArray; BRefValueArray bArray; StructureType structureType; BMap<String, BRefType> bMap; BJSON jsonVal; switch (opcode) { case InstructionCodes.ISTORE: i = operands[0]; lvIndex = operands[1]; sf.longRegs[lvIndex] = sf.longRegs[i]; break; case InstructionCodes.FSTORE: i = operands[0]; lvIndex = operands[1]; sf.doubleRegs[lvIndex] = sf.doubleRegs[i]; break; case InstructionCodes.SSTORE: i = operands[0]; lvIndex = operands[1]; sf.stringRegs[lvIndex] = sf.stringRegs[i]; break; case InstructionCodes.BSTORE: i = operands[0]; lvIndex = operands[1]; sf.intRegs[lvIndex] = sf.intRegs[i]; break; case InstructionCodes.LSTORE: i = operands[0]; lvIndex = operands[1]; sf.byteRegs[lvIndex] = sf.byteRegs[i]; break; case InstructionCodes.RSTORE: i = operands[0]; lvIndex = operands[1]; sf.refRegs[lvIndex] = sf.refRegs[i]; break; case InstructionCodes.IASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bIntArray = (BIntArray) sf.refRegs[i]; if (bIntArray == null) { handleNullRefError(); break; } try { bIntArray.add(sf.longRegs[j], sf.longRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.FASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bFloatArray = (BFloatArray) sf.refRegs[i]; if (bFloatArray == null) { handleNullRefError(); break; } try { bFloatArray.add(sf.longRegs[j], sf.doubleRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.SASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bStringArray = (BStringArray) sf.refRegs[i]; if (bStringArray == null) { handleNullRefError(); break; } try { bStringArray.add(sf.longRegs[j], sf.stringRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.BASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bBooleanArray = (BBooleanArray) sf.refRegs[i]; if (bBooleanArray == null) { handleNullRefError(); break; } try { bBooleanArray.add(sf.longRegs[j], sf.intRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.LASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bBlobArray = (BBlobArray) sf.refRegs[i]; if (bBlobArray == null) { handleNullRefError(); break; } try { bBlobArray.add(sf.longRegs[j], sf.byteRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, 
e.getMessage())); handleError(); } break; case InstructionCodes.RASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bArray = (BRefValueArray) sf.refRegs[i]; if (bArray == null) { handleNullRefError(); break; } try { bArray.add(sf.longRegs[j], sf.refRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.JSONASTORE: i = operands[0]; j = operands[1]; k = operands[2]; jsonVal = (BJSON) sf.refRegs[i]; if (jsonVal == null) { handleNullRefError(); break; } try { JSONUtils.setArrayElement(jsonVal, sf.longRegs[j], (BJSON) sf.refRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.IGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setIntField(j, sf.longRegs[i]); break; case InstructionCodes.FGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setFloatField(j, sf.doubleRegs[i]); break; case InstructionCodes.SGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setStringField(j, sf.stringRegs[i]); break; case InstructionCodes.BGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setBooleanField(j, sf.intRegs[i]); break; case InstructionCodes.LGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setBlobField(j, sf.byteRegs[i]); break; case InstructionCodes.RGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setRefField(j, sf.refRegs[i]); break; case InstructionCodes.IFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setIntField(fieldIndex, sf.longRegs[j]); break; case InstructionCodes.FFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setFloatField(fieldIndex, sf.doubleRegs[j]); break; case InstructionCodes.SFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setStringField(fieldIndex, sf.stringRegs[j]); break; case InstructionCodes.BFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setBooleanField(fieldIndex, sf.intRegs[j]); break; case InstructionCodes.LFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setBlobField(fieldIndex, sf.byteRegs[j]); break; case InstructionCodes.RFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setRefField(fieldIndex, sf.refRegs[j]); break; case InstructionCodes.MAPSTORE: i = operands[0]; j = operands[1]; k = operands[2]; bMap = (BMap<String, BRefType>) sf.refRegs[i]; if (bMap == null) { handleNullRefError(); break; } bMap.put(sf.stringRegs[j], sf.refRegs[k]); break; case InstructionCodes.JSONSTORE: i = operands[0]; j = operands[1]; k = operands[2]; jsonVal = (BJSON) sf.refRegs[i]; if (jsonVal == null) { handleNullRefError(); break; } JSONUtils.setElement(jsonVal, sf.stringRegs[j], (BJSON) 
sf.refRegs[k]); break; default: throw new UnsupportedOperationException(); } } private void execBinaryOpCodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; switch (opcode) { case InstructionCodes.IADD: i = operands[0]; j = operands[1]; k = operands[2]; sf.longRegs[k] = sf.longRegs[i] + sf.longRegs[j]; break; case InstructionCodes.FADD: i = operands[0]; j = operands[1]; k = operands[2]; sf.doubleRegs[k] = sf.doubleRegs[i] + sf.doubleRegs[j]; break; case InstructionCodes.SADD: i = operands[0]; j = operands[1]; k = operands[2]; sf.stringRegs[k] = sf.stringRegs[i] + sf.stringRegs[j]; break; case InstructionCodes.XMLADD: i = operands[0]; j = operands[1]; k = operands[2]; BXML lhsXMLVal = (BXML) sf.refRegs[i]; BXML rhsXMLVal = (BXML) sf.refRegs[j]; if (lhsXMLVal == null || rhsXMLVal == null) { handleNullRefError(); break; } sf.refRegs[k] = XMLUtils.concatenate(lhsXMLVal, rhsXMLVal); break; case InstructionCodes.ISUB: i = operands[0]; j = operands[1]; k = operands[2]; sf.longRegs[k] = sf.longRegs[i] - sf.longRegs[j]; break; case InstructionCodes.FSUB: i = operands[0]; j = operands[1]; k = operands[2]; sf.doubleRegs[k] = sf.doubleRegs[i] - sf.doubleRegs[j]; break; case InstructionCodes.IMUL: i = operands[0]; j = operands[1]; k = operands[2]; sf.longRegs[k] = sf.longRegs[i] * sf.longRegs[j]; break; case InstructionCodes.FMUL: i = operands[0]; j = operands[1]; k = operands[2]; sf.doubleRegs[k] = sf.doubleRegs[i] * sf.doubleRegs[j]; break; case InstructionCodes.IDIV: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.longRegs[j] == 0) { context.setError(BLangVMErrors.createError(context, ip, " / by zero")); handleError(); break; } sf.longRegs[k] = sf.longRegs[i] / sf.longRegs[j]; break; case InstructionCodes.FDIV: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.doubleRegs[j] == 0) { context.setError(BLangVMErrors.createError(context, ip, " / by zero")); handleError(); break; } sf.doubleRegs[k] = sf.doubleRegs[i] / sf.doubleRegs[j]; break; case InstructionCodes.IMOD: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.longRegs[j] == 0) { context.setError(BLangVMErrors.createError(context, ip, " / by zero")); handleError(); break; } sf.longRegs[k] = sf.longRegs[i] % sf.longRegs[j]; break; case InstructionCodes.FMOD: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.doubleRegs[j] == 0) { context.setError(BLangVMErrors.createError(context, ip, " / by zero")); handleError(); break; } sf.doubleRegs[k] = sf.doubleRegs[i] % sf.doubleRegs[j]; break; case InstructionCodes.INEG: i = operands[0]; j = operands[1]; sf.longRegs[j] = -sf.longRegs[i]; break; case InstructionCodes.FNEG: i = operands[0]; j = operands[1]; sf.doubleRegs[j] = -sf.doubleRegs[i]; break; case InstructionCodes.BNOT: i = operands[0]; j = operands[1]; sf.intRegs[j] = sf.intRegs[i] == 0 ? 1 : 0; break; case InstructionCodes.IEQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] == sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FEQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] == sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.SEQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = StringUtils.isEqual(sf.stringRegs[i], sf.stringRegs[j]) ? 1 : 0; break; case InstructionCodes.BEQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.intRegs[i] == sf.intRegs[j] ? 
1 : 0; break; case InstructionCodes.REQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.refRegs[i] == sf.refRegs[j] ? 1 : 0; break; case InstructionCodes.TEQ: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.refRegs[i] == null || sf.refRegs[j] == null) { handleNullRefError(); break; } sf.intRegs[k] = sf.refRegs[i].equals(sf.refRegs[j]) ? 1 : 0; break; case InstructionCodes.INE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] != sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FNE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] != sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.SNE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = !StringUtils.isEqual(sf.stringRegs[i], sf.stringRegs[j]) ? 1 : 0; break; case InstructionCodes.BNE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.intRegs[i] != sf.intRegs[j] ? 1 : 0; break; case InstructionCodes.RNE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.refRegs[i] != sf.refRegs[j] ? 1 : 0; break; case InstructionCodes.TNE: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.refRegs[i] == null || sf.refRegs[j] == null) { handleNullRefError(); break; } sf.intRegs[k] = (!sf.refRegs[i].equals(sf.refRegs[j])) ? 1 : 0; break; default: throw new UnsupportedOperationException(); } } private void execXMLOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int localNameIndex; int uriIndex; int prefixIndex; BXML<?> xmlVal; BXMLQName xmlQName; switch (opcode) { case InstructionCodes.XMLATTRSTORE: i = operands[0]; j = operands[1]; k = operands[2]; xmlVal = (BXML) sf.refRegs[i]; if (xmlVal == null) { handleNullRefError(); break; } xmlQName = (BXMLQName) sf.refRegs[j]; if (xmlQName == null) { handleNullRefError(); break; } xmlVal.setAttribute(xmlQName.getLocalName(), xmlQName.getUri(), xmlQName.getPrefix(), sf.stringRegs[k]); break; case InstructionCodes.XMLATTRLOAD: i = operands[0]; j = operands[1]; k = operands[2]; xmlVal = (BXML) sf.refRegs[i]; if (xmlVal == null) { handleNullRefError(); break; } xmlQName = (BXMLQName) sf.refRegs[j]; if (xmlQName == null) { handleNullRefError(); break; } sf.stringRegs[k] = xmlVal.getAttribute(xmlQName.getLocalName(), xmlQName.getUri(), xmlQName.getPrefix()); break; case InstructionCodes.XML2XMLATTRS: i = operands[0]; j = operands[1]; xmlVal = (BXML) sf.refRegs[i]; if (xmlVal == null) { sf.refRegs[j] = null; break; } sf.refRegs[j] = new BXMLAttributes(xmlVal); break; case InstructionCodes.S2QNAME: i = operands[0]; j = operands[1]; k = operands[2]; String qNameStr = sf.stringRegs[i]; int parenEndIndex = qNameStr.indexOf('}'); if (qNameStr.startsWith("{") && parenEndIndex > 0) { sf.stringRegs[j] = qNameStr.substring(parenEndIndex + 1, qNameStr.length()); sf.stringRegs[k] = qNameStr.substring(1, parenEndIndex); } else { sf.stringRegs[j] = qNameStr; sf.stringRegs[k] = STRING_NULL_VALUE; } break; case InstructionCodes.NEWQNAME: localNameIndex = operands[0]; uriIndex = operands[1]; prefixIndex = operands[2]; i = operands[3]; String localname = sf.stringRegs[localNameIndex]; localname = StringEscapeUtils.escapeXml11(localname); String prefix = sf.stringRegs[prefixIndex]; prefix = StringEscapeUtils.escapeXml11(prefix); sf.refRegs[i] = new BXMLQName(localname, sf.stringRegs[uriIndex], prefix); break; case InstructionCodes.XMLLOAD: i = operands[0]; j = operands[1]; k = operands[2]; xmlVal = (BXML) sf.refRegs[i]; if (xmlVal == null) { handleNullRefError();
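// handleNullRefError() creates the null-reference error and hands control to handleError(), which rewrites ip; the break below merely exits the dispatch switch so the main loop resumes at the new address.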
break; } long index = sf.longRegs[j]; sf.refRegs[k] = xmlVal.getItem(index); break; case InstructionCodes.NEWXMLELEMENT: case InstructionCodes.NEWXMLCOMMENT: case InstructionCodes.NEWXMLTEXT: case InstructionCodes.NEWXMLPI: case InstructionCodes.XMLSTORE: execXMLCreationOpcodes(sf, opcode, operands); break; default: throw new UnsupportedOperationException(); } } private void execTypeCastOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int cpIndex; BRefType bRefType; TypeRefCPEntry typeRefCPEntry; switch (opcode) { case InstructionCodes.I2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BInteger(sf.longRegs[i]); break; case InstructionCodes.F2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BFloat(sf.doubleRegs[i]); break; case InstructionCodes.S2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BString(sf.stringRegs[i]); break; case InstructionCodes.B2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BBoolean(sf.intRegs[i] == 1); break; case InstructionCodes.L2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BBlob(sf.byteRegs[i]); break; case InstructionCodes.ANY2I: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.longRegs[j] = 0; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeInt); } else if (bRefType.getType() == BTypes.typeInt) { sf.refRegs[k] = null; sf.longRegs[j] = ((BInteger) bRefType).intValue(); } else { sf.longRegs[j] = 0; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeInt); } break; case InstructionCodes.ANY2F: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.doubleRegs[j] = 0; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeFloat); } else if (bRefType.getType() == BTypes.typeFloat) { sf.refRegs[k] = null; sf.doubleRegs[j] = ((BFloat) bRefType).floatValue(); } else { sf.doubleRegs[j] = 0; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeFloat); } break; case InstructionCodes.ANY2S: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.stringRegs[j] = STRING_NULL_VALUE; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeString); } else if (bRefType.getType() == BTypes.typeString) { sf.refRegs[k] = null; sf.stringRegs[j] = bRefType.stringValue(); } else { sf.stringRegs[j] = STRING_NULL_VALUE; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeString); } break; case InstructionCodes.ANY2B: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.intRegs[j] = 0; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeBoolean); } else if (bRefType.getType() == BTypes.typeBoolean) { sf.refRegs[k] = null; sf.intRegs[j] = ((BBoolean) bRefType).booleanValue() ? 
1 : 0; } else { sf.intRegs[j] = 0; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeBoolean); } break; case InstructionCodes.ANY2L: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.byteRegs[j] = new byte[0]; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeBlob); } else if (bRefType.getType() == BTypes.typeBlob) { sf.refRegs[k] = null; sf.byteRegs[j] = ((BBlob) bRefType).blobValue(); } else { sf.byteRegs[j] = new byte[0]; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeBlob); } break; case InstructionCodes.ANY2JSON: handleAnyToRefTypeCast(sf, operands, BTypes.typeJSON); break; case InstructionCodes.ANY2XML: handleAnyToRefTypeCast(sf, operands, BTypes.typeXML); break; case InstructionCodes.ANY2MAP: handleAnyToRefTypeCast(sf, operands, BTypes.typeMap); break; case InstructionCodes.ANY2TYPE: handleAnyToRefTypeCast(sf, operands, BTypes.typeType); break; case InstructionCodes.ANY2DT: handleAnyToRefTypeCast(sf, operands, BTypes.typeDatatable); break; case InstructionCodes.ANY2E: case InstructionCodes.ANY2T: case InstructionCodes.ANY2C: case InstructionCodes.CHECKCAST: i = operands[0]; cpIndex = operands[1]; j = operands[2]; k = operands[3]; typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.refRegs[j] = null; sf.refRegs[k] = null; } else if (checkCast(bRefType, typeRefCPEntry.getType())) { sf.refRegs[j] = sf.refRegs[i]; sf.refRegs[k] = null; } else { sf.refRegs[j] = null; handleTypeCastError(sf, k, bRefType.getType(), typeRefCPEntry.getType()); } break; case InstructionCodes.NULL2JSON: j = operands[1]; sf.refRegs[j] = new BJSON("null"); break; case InstructionCodes.B2JSON: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BJSON(sf.intRegs[i] == 1 ? "true" : "false"); break; case InstructionCodes.JSON2I: castJSONToInt(operands, sf); break; case InstructionCodes.JSON2F: castJSONToFloat(operands, sf); break; case InstructionCodes.JSON2S: castJSONToString(operands, sf); break; case InstructionCodes.JSON2B: castJSONToBoolean(operands, sf); break; case InstructionCodes.NULL2S: j = operands[1]; sf.stringRegs[j] = null; break; default: throw new UnsupportedOperationException(); } } private void execTypeConversionOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; BRefType bRefType; String str; switch (opcode) { case InstructionCodes.I2F: i = operands[0]; j = operands[1]; sf.doubleRegs[j] = (double) sf.longRegs[i]; break; case InstructionCodes.I2S: i = operands[0]; j = operands[1]; sf.stringRegs[j] = Long.toString(sf.longRegs[i]); break; case InstructionCodes.I2B: i = operands[0]; j = operands[1]; sf.intRegs[j] = sf.longRegs[i] != 0 ? 1 : 0; break; case InstructionCodes.I2JSON: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BJSON(Long.toString(sf.longRegs[i])); break; case InstructionCodes.F2I: i = operands[0]; j = operands[1]; sf.longRegs[j] = (long) sf.doubleRegs[i]; break; case InstructionCodes.F2S: i = operands[0]; j = operands[1]; sf.stringRegs[j] = Double.toString(sf.doubleRegs[i]); break; case InstructionCodes.F2B: i = operands[0]; j = operands[1]; sf.intRegs[j] = sf.doubleRegs[i] != 0.0 ? 
1 : 0; break; case InstructionCodes.F2JSON: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BJSON(Double.toString(sf.doubleRegs[i])); break; case InstructionCodes.S2I: i = operands[0]; j = operands[1]; k = operands[2]; str = sf.stringRegs[i]; if (str == null) { sf.longRegs[j] = 0; handleTypeConversionError(sf, k, null, TypeConstants.INT_TNAME); break; } try { sf.longRegs[j] = Long.parseLong(str); sf.refRegs[k] = null; } catch (NumberFormatException e) { sf.longRegs[j] = 0; handleTypeConversionError(sf, k, TypeConstants.STRING_TNAME, TypeConstants.INT_TNAME); } break; case InstructionCodes.S2F: i = operands[0]; j = operands[1]; k = operands[2]; str = sf.stringRegs[i]; if (str == null) { sf.doubleRegs[j] = 0; handleTypeConversionError(sf, k, null, TypeConstants.FLOAT_TNAME); break; } try { sf.doubleRegs[j] = Double.parseDouble(str); sf.refRegs[k] = null; } catch (NumberFormatException e) { sf.doubleRegs[j] = 0; handleTypeConversionError(sf, k, TypeConstants.STRING_TNAME, TypeConstants.FLOAT_TNAME); } break; case InstructionCodes.S2B: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[j] = Boolean.parseBoolean(sf.stringRegs[i]) ? 1 : 0; sf.refRegs[k] = null; break; case InstructionCodes.S2JSON: i = operands[0]; j = operands[1]; str = StringEscapeUtils.escapeJson(sf.stringRegs[i]); sf.refRegs[j] = str == null ? null : new BJSON("\"" + str + "\""); break; case InstructionCodes.B2I: i = operands[0]; j = operands[1]; sf.longRegs[j] = sf.intRegs[i]; break; case InstructionCodes.B2F: i = operands[0]; j = operands[1]; sf.doubleRegs[j] = sf.intRegs[i]; break; case InstructionCodes.B2S: i = operands[0]; j = operands[1]; sf.stringRegs[j] = sf.intRegs[i] == 1 ? "true" : "false"; break; case InstructionCodes.DT2XML: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { handleNullRefError(); break; } try { sf.refRegs[j] = XMLUtils.datatableToXML((BDataTable) bRefType, context.isInTransaction()); sf.refRegs[k] = null; } catch (Exception e) { sf.refRegs[j] = null; handleTypeConversionError(sf, k, TypeConstants.DATATABLE_TNAME, TypeConstants.XML_TNAME); } break; case InstructionCodes.DT2JSON: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { handleNullRefError(); break; } try { sf.refRegs[j] = JSONUtils.toJSON((BDataTable) bRefType, context.isInTransaction()); sf.refRegs[k] = null; } catch (Exception e) { sf.refRegs[j] = null; handleTypeConversionError(sf, k, TypeConstants.DATATABLE_TNAME, TypeConstants.JSON_TNAME); } break; case InstructionCodes.T2MAP: convertStructToMap(operands, sf); break; case InstructionCodes.T2JSON: convertStructToJSON(operands, sf); break; case InstructionCodes.MAP2T: convertMapToStruct(operands, sf); break; case InstructionCodes.JSON2T: convertJSONToStruct(operands, sf); break; case InstructionCodes.XMLATTRS2MAP: i = operands[0]; j = operands[1]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.refRegs[j] = null; break; } sf.refRegs[j] = ((BXMLAttributes) sf.refRegs[i]).value(); break; case InstructionCodes.S2XML: i = operands[0]; j = operands[1]; k = operands[2]; str = sf.stringRegs[i]; if (str == null) { sf.refRegs[j] = null; sf.refRegs[k] = null; break; } try { sf.refRegs[j] = XMLUtils.parse(str); sf.refRegs[k] = null; } catch (BallerinaException e) { sf.refRegs[j] = null; handleTypeConversionError(sf, k, e.getMessage(), TypeConstants.STRING_TNAME, TypeConstants.XML_TNAME); } break; case InstructionCodes.S2JSONX: i = operands[0]; j = operands[1]; k = operands[2];
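// The string-to-JSON parse below follows the conversion convention used throughout this method: on success the error register refRegs[k] is cleared, on failure the target register is reset and refRegs[k] receives a TypeConversionError struct.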
str = sf.stringRegs[i]; try { sf.refRegs[j] = str == null ? null : new BJSON(str); sf.refRegs[k] = null; } catch (BallerinaException e) { sf.refRegs[j] = null; handleTypeConversionError(sf, k, e.getMessage(), TypeConstants.STRING_TNAME, TypeConstants.JSON_TNAME); } break; case InstructionCodes.XML2S: i = operands[0]; j = operands[1]; sf.stringRegs[j] = sf.refRegs[i].stringValue(); break; default: throw new UnsupportedOperationException(); } } private void execIteratorOperation(StackFrame sf, Instruction instruction) { int i, j; BCollection collection; BIterator iterator; InstructionIteratorNext nextInstruction; switch (instruction.getOpcode()) { case InstructionCodes.ITR_NEW: i = instruction.getOperands()[0]; j = instruction.getOperands()[1]; collection = (BCollection) sf.refRegs[i]; if (collection == null) { handleNullRefError(); return; } sf.refRegs[j] = collection.newIterator(); break; case InstructionCodes.ITR_HAS_NEXT: i = instruction.getOperands()[0]; j = instruction.getOperands()[1]; iterator = (BIterator) sf.refRegs[i]; if (iterator == null) { sf.intRegs[j] = 0; return; } sf.intRegs[j] = iterator.hasNext() ? 1 : 0; break; case InstructionCodes.ITR_NEXT: nextInstruction = (InstructionIteratorNext) instruction; iterator = (BIterator) sf.refRegs[nextInstruction.iteratorIndex]; if (iterator == null) { return; } BValue[] values = iterator.getNext(nextInstruction.arity); copyValuesToRegistries(nextInstruction.typeTags, nextInstruction.retRegs, values, sf); break; } } private void copyValuesToRegistries(int[] typeTags, int[] targetReg, BValue[] values, StackFrame sf) { for (int i = 0; i < typeTags.length; i++) { BValue source = values[i]; int target = targetReg[i]; switch (typeTags[i]) { case TypeTags.INT_TAG: sf.longRegs[target] = ((BInteger) source).intValue(); break; case TypeTags.FLOAT_TAG: sf.doubleRegs[target] = ((BFloat) source).floatValue(); break; case TypeTags.STRING_TAG: sf.stringRegs[target] = source.stringValue(); break; case TypeTags.BOOLEAN_TAG: sf.intRegs[target] = ((BBoolean) source).booleanValue() ? 
1 : 0; break; case TypeTags.BLOB_TAG: sf.byteRegs[target] = ((BBlob) source).blobValue(); break; default: sf.refRegs[target] = (BRefType) source; } } } private void execXMLCreationOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int l; BXML<?> xmlVal; switch (opcode) { case InstructionCodes.NEWXMLELEMENT: i = operands[0]; j = operands[1]; k = operands[2]; l = operands[3]; BXMLQName startTagName = (BXMLQName) sf.refRegs[j]; BXMLQName endTagName = (BXMLQName) sf.refRegs[k]; try { sf.refRegs[i] = XMLUtils.createXMLElement(startTagName, endTagName, sf.stringRegs[l]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.NEWXMLCOMMENT: i = operands[0]; j = operands[1]; try { sf.refRegs[i] = XMLUtils.createXMLComment(sf.stringRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.NEWXMLTEXT: i = operands[0]; j = operands[1]; try { sf.refRegs[i] = XMLUtils.createXMLText(sf.stringRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.NEWXMLPI: i = operands[0]; j = operands[1]; k = operands[2]; try { sf.refRegs[i] = XMLUtils.createXMLProcessingInstruction(sf.stringRegs[j], sf.stringRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.XMLSTORE: i = operands[0]; j = operands[1]; xmlVal = (BXML<?>) sf.refRegs[i]; BXML<?> child = (BXML<?>) sf.refRegs[j]; xmlVal.addChildren(child); break; } } /** * Method to calculate and detect debug points when the instruction point is given. */ private void debug() { Debugger debugger = programFile.getDebugger(); if (!debugger.isClientSessionActive()) { return; } DebugContext debugContext = context.getDebugContext(); LineNumberInfo currentExecLine = debugger .getLineNumber(controlStack.currentFrame.packageInfo.getPkgPath(), ip); /* Below if check stops hitting the same debug line again and again in case that single line has multiple instructions. */ if (currentExecLine.equals(debugContext.getLastLine()) || debugPointCheck(currentExecLine, debugger, debugContext)) { return; } switch (debugContext.getCurrentCommand()) { case RESUME: /* In case of a for loop, need to clear the last hit line, so that, same line can get hit again. */ debugContext.clearLastDebugLine(); break; case STEP_IN: debugHit(currentExecLine, debugger, debugContext); break; case STEP_OVER: if (controlStack.currentFrame == debugContext.getStackFrame()) { debugHit(currentExecLine, debugger, debugContext); return; } /* This is either, 1) function call (instruction of the next function) 2) returning to the previous function below if condition checks the 2nd possibility, and if that's the case, then it's a debug hit. To check that, it needs to check whether last line contains return instruction or not. (return line may have multiple instructions, ex - return v1 + v2 * v3 + v4; */ if (debugContext.getLastLine().checkIpRangeForInstructionCode(code, InstructionCodes.RET) && controlStack.currentFrame == debugContext.getStackFrame().prevStackFrame) { debugHit(currentExecLine, debugger, debugContext); return; } /* This means it's a function call. So using intermediate step to wait until returning from that function call. 
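In short, a STEP_OVER that crosses a call decays into STEP_OVER_INTMDT: execution continues silently until the control stack is back at the remembered stack frame, and only then is the debug hit reported.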
*/ debugContext.setCurrentCommand(DebugCommand.STEP_OVER_INTMDT); break; case STEP_OVER_INTMDT: /* Here it checks whether it has returned to the previous stack frame (that is previous function) if so, then debug hit. */ interMediateDebugCheck(currentExecLine, debugger, debugContext); break; case STEP_OUT: /* This is the first instruction of immediate next line of the last debug hit point. So next debug hit point should be when it comes to the "previousStackFrame" of the "stackFrame" relevant to the last debug hit point. So here that stack frame is saved and using intermediate step to wait until a instruction for that stack frame. */ debugContext.setCurrentCommand(DebugCommand.STEP_OUT_INTMDT); debugContext.setStackFrame(debugContext.getStackFrame().prevStackFrame); interMediateDebugCheck(currentExecLine, debugger, debugContext); break; case STEP_OUT_INTMDT: interMediateDebugCheck(currentExecLine, debugger, debugContext); break; default: logger.warn("invalid debug command, exiting from debugging"); debugger.notifyExit(); debugger.stopDebugging(); } } /** * Inter mediate debug check to avoid switch case falling through. * * @param currentExecLine Current execution line. * @param debugger Debugger object. * @param debugContext Current debug context. */ private void interMediateDebugCheck(LineNumberInfo currentExecLine, Debugger debugger, DebugContext debugContext) { if (controlStack.currentFrame != debugContext.getStackFrame()) { return; } debugHit(currentExecLine, debugger, debugContext); } /** * Helper method to check whether given point is a debug point or not. * If it's a debug point, then notify the debugger. * * @param currentExecLine Current execution line. * @param debugger Debugger object. * @param debugContext Current debug context. * @return Boolean true if it's a debug point, false otherwise. */ private boolean debugPointCheck(LineNumberInfo currentExecLine, Debugger debugger, DebugContext debugContext) { if (!currentExecLine.isDebugPoint()) { return false; } debugHit(currentExecLine, debugger, debugContext); return true; } /** * Helper method to set required details when a debug point hits. * And also to notify the debugger. * * @param currentExecLine Current execution line. * @param debugger Debugger object. * @param debugContext Current debug context. 
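Note that this method blocks the executing worker in waitTillDebuggeeResponds() until the debug client sends its next command.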
*/ private void debugHit(LineNumberInfo currentExecLine, Debugger debugger, DebugContext debugContext) { if (!debugContext.isAtive() && !debugger.tryAcquireDebugSessionLock()) { return; } debugContext.setActive(true); debugContext.setLastLine(currentExecLine); debugContext.setStackFrame(controlStack.currentFrame); debugger.notifyDebugHit(controlStack.currentFrame, currentExecLine, debugContext.getThreadId()); debugger.waitTillDebuggeeResponds(); if (debugContext.getCurrentCommand() == DebugCommand.RESUME && debugContext.isAtive()) { debugContext.setActive(false); debugger.releaseDebugSessionLock(); } } private void handleAnyToRefTypeCast(StackFrame sf, int[] operands, BType targetType) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BRefType bRefType = sf.refRegs[i]; if (bRefType == null) { sf.refRegs[j] = null; sf.refRegs[k] = null; } else if (bRefType.getType() == targetType) { sf.refRegs[j] = bRefType; sf.refRegs[k] = null; } else { sf.refRegs[j] = null; handleTypeCastError(sf, k, bRefType.getType(), targetType); } } private void handleTypeCastError(StackFrame sf, int errorRegIndex, BType sourceType, BType targetType) { handleTypeCastError(sf, errorRegIndex, sourceType.toString(), targetType.toString()); } private void handleTypeCastError(StackFrame sf, int errorRegIndex, String sourceType, String targetType) { BStruct errorVal; errorVal = BLangVMErrors.createTypeCastError(context, ip, sourceType.toString(), targetType.toString()); if (errorRegIndex == -1) { context.setError(errorVal); handleError(); return; } sf.refRegs[errorRegIndex] = errorVal; } private void handleTypeConversionError(StackFrame sf, int errorRegIndex, String sourceTypeName, String targetTypeName) { String errorMsg = "'" + sourceTypeName + "' cannot be converted to '" + targetTypeName + "'"; handleTypeConversionError(sf, errorRegIndex, errorMsg, sourceTypeName, targetTypeName); } private void handleTypeConversionError(StackFrame sf, int errorRegIndex, String errorMessage, String sourceTypeName, String targetTypeName) { BStruct errorVal; errorVal = BLangVMErrors.createTypeConversionError(context, ip, errorMessage, sourceTypeName, targetTypeName); if (errorRegIndex == -1) { context.setError(errorVal); handleError(); return; } sf.refRegs[errorRegIndex] = errorVal; } private void createNewIntRange(int[] operands, StackFrame sf) { long startValue = sf.longRegs[operands[0]]; long endValue = sf.longRegs[operands[1]]; sf.refRegs[operands[2]] = new BIntRange(startValue, endValue); } private void createNewConnector(int[] operands, StackFrame sf) { int cpIndex = operands[0]; int i = operands[1]; StructureRefCPEntry structureRefCPEntry = (StructureRefCPEntry) constPool[cpIndex]; ConnectorInfo connectorInfo = (ConnectorInfo) structureRefCPEntry.getStructureTypeInfo(); BConnector bConnector = new BConnector(connectorInfo.getType()); sf.refRegs[i] = bConnector; } private void createNewStruct(int[] operands, StackFrame sf) { int cpIndex = operands[0]; int i = operands[1]; StructureRefCPEntry structureRefCPEntry = (StructureRefCPEntry) constPool[cpIndex]; StructInfo structInfo = (StructInfo) structureRefCPEntry.getStructureTypeInfo(); BStruct bStruct = new BStruct(structInfo.getType()); int longRegIndex = -1; int doubleRegIndex = -1; int stringRegIndex = -1; int booleanRegIndex = -1; for (StructFieldInfo fieldInfo : structInfo.getFieldInfoEntries()) { DefaultValueAttributeInfo defaultValueInfo = (DefaultValueAttributeInfo) fieldInfo.getAttributeInfo(AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE); switch 
(fieldInfo.getFieldType().getTag()) { case TypeTags.INT_TAG: longRegIndex++; if (defaultValueInfo != null) { bStruct.setIntField(longRegIndex, defaultValueInfo.getDefaultValue().getIntValue()); } break; case TypeTags.FLOAT_TAG: doubleRegIndex++; if (defaultValueInfo != null) { bStruct.setFloatField(doubleRegIndex, defaultValueInfo.getDefaultValue().getFloatValue()); } break; case TypeTags.STRING_TAG: stringRegIndex++; if (defaultValueInfo != null) { bStruct.setStringField(stringRegIndex, defaultValueInfo.getDefaultValue().getStringValue()); } break; case TypeTags.BOOLEAN_TAG: booleanRegIndex++; if (defaultValueInfo != null) { bStruct.setBooleanField(booleanRegIndex, defaultValueInfo.getDefaultValue().getBooleanValue() ? 1 : 0); } break; } } sf.refRegs[i] = bStruct; } private void endTransaction(int status) { BallerinaTransactionManager ballerinaTransactionManager = context.getBallerinaTransactionManager(); if (ballerinaTransactionManager != null) { try { if (status == TransactionStatus.SUCCESS.value()) { ballerinaTransactionManager.commitTransactionBlock(); } else if (status == TransactionStatus.FAILED.value()) { ballerinaTransactionManager.rollbackTransactionBlock(); } else { ballerinaTransactionManager.endTransactionBlock(); if (ballerinaTransactionManager.isOuterTransaction()) { context.setBallerinaTransactionManager(null); } } } catch (Throwable e) { context.setError(BLangVMErrors.createError(this.context, ip, e.getMessage())); handleError(); return; } } } private void beginTransaction(int transactionId, int retryCountRegIndex) { int retryCount = 3; if (retryCountRegIndex != -1) { retryCount = (int) controlStack.currentFrame.getLongRegs()[retryCountRegIndex]; if (retryCount < 0) { context.setError(BLangVMErrors.createError(this.context, ip, BLangExceptionHelper.getErrorMessage(RuntimeErrors.INVALID_RETRY_COUNT))); handleError(); return; } } BallerinaTransactionManager ballerinaTransactionManager = context.getBallerinaTransactionManager(); if (ballerinaTransactionManager == null) { ballerinaTransactionManager = new BallerinaTransactionManager(); context.setBallerinaTransactionManager(ballerinaTransactionManager); } ballerinaTransactionManager.beginTransactionBlock(transactionId, retryCount); } private void retryTransaction(int transactionId, int startOfAbortIP) { BallerinaTransactionManager ballerinaTransactionManager = context.getBallerinaTransactionManager(); int allowedRetryCount = ballerinaTransactionManager.getAllowedRetryCount(transactionId); int currentRetryCount = ballerinaTransactionManager.getCurrentRetryCount(transactionId); if (currentRetryCount >= allowedRetryCount) { if (currentRetryCount != 0) { ip = startOfAbortIP; } } ballerinaTransactionManager.incrementCurrentRetryCount(transactionId); } public void invokeCallableUnit(CallableUnitInfo callableUnitInfo, int[] argRegs, int[] retRegs) { BType[] paramTypes = callableUnitInfo.getParamTypes(); StackFrame callerSF = controlStack.currentFrame; WorkerInfo defaultWorkerInfo = callableUnitInfo.getDefaultWorkerInfo(); StackFrame calleeSF = new StackFrame(callableUnitInfo, defaultWorkerInfo, ip, retRegs); controlStack.pushFrame(calleeSF); copyArgValues(callerSF, calleeSF, argRegs, paramTypes); this.constPool = calleeSF.packageInfo.getConstPoolEntries(); this.code = calleeSF.packageInfo.getInstructions(); ip = defaultWorkerInfo.getCodeAttributeInfo().getCodeAddrs(); } public void invokeAction(String actionName, int[] argRegs, int[] retRegs) { StackFrame callerSF = controlStack.currentFrame; if (callerSF.refRegs[argRegs[0]] == null) 
{ context.setError(BLangVMErrors.createNullRefError(this.context, ip)); handleError(); return; } BConnectorType actualCon = (BConnectorType) ((BConnector) callerSF.refRegs[argRegs[0]]).getConnectorType(); ActionInfo newActionInfo = programFile.getPackageInfo(actualCon.getPackagePath()) .getConnectorInfo(actualCon.getName()).getActionInfo(actionName); if (newActionInfo.isNative()) { invokeNativeAction(newActionInfo, argRegs, retRegs); } else { invokeCallableUnit(newActionInfo, argRegs, retRegs); } } public void handleWorkerSend(WorkerDataChannelInfo workerDataChannel, BType[] types, int[] regs) { StackFrame currentFrame = controlStack.currentFrame; BValue[] arguments = new BValue[types.length]; copyArgValuesForWorkerSend(currentFrame, regs, types, arguments); workerDataChannel.setTypes(types); workerDataChannel.putData(arguments); } public void invokeForkJoin(InstructionFORKJOIN forkJoinIns) { ForkjoinInfo forkjoinInfo = forkJoinIns.forkJoinCPEntry.getForkjoinInfo(); List<BLangVMWorkers.WorkerExecutor> workerRunnerList = new ArrayList<>(); long timeout = Long.MAX_VALUE; if (forkjoinInfo.isTimeoutAvailable()) { timeout = this.controlStack.currentFrame.getLongRegs()[forkJoinIns.timeoutRegIndex]; } Queue<WorkerResult> resultMsgs = new ConcurrentLinkedQueue<>(); Map<String, BLangVMWorkers.WorkerExecutor> workers = new HashMap<>(); for (WorkerInfo workerInfo : forkjoinInfo.getWorkerInfoMap().values()) { Context workerContext = new WorkerContext(this.programFile, context); workerContext.blockingInvocation = true; StackFrame callerSF = this.controlStack.currentFrame; int[] argRegs = forkjoinInfo.getArgRegs(); ControlStack workerControlStack = workerContext.getControlStack(); StackFrame calleeSF = new StackFrame(this.controlStack.currentFrame.getCallableUnitInfo(), workerInfo, -1, new int[1]); workerControlStack.pushFrame(calleeSF); BLangVM.copyValuesForForkJoin(callerSF, calleeSF, argRegs); BLangVM bLangVM = new BLangVM(this.programFile); BLangVMWorkers.WorkerExecutor workerRunner = new BLangVMWorkers.WorkerExecutor(bLangVM, workerContext, workerInfo, resultMsgs); workerRunnerList.add(workerRunner); workerContext.startTrackWorker(); workers.put(workerInfo.getWorkerName(), workerRunner); } Set<String> joinWorkerNames = new LinkedHashSet<>(Lists.of(forkjoinInfo.getJoinWorkerNames())); if (joinWorkerNames.isEmpty()) { /* if no join workers are specified, that means, all should be considered */ joinWorkerNames.addAll(workers.keySet()); } int workerCount; if (forkjoinInfo.getJoinType().equalsIgnoreCase(JOIN_TYPE_SOME)) { workerCount = forkjoinInfo.getWorkerCount(); } else { workerCount = joinWorkerNames.size(); } boolean success = this.invokeJoinWorkers(workers, joinWorkerNames, workerCount, timeout); if (success) { this.ip = forkJoinIns.joinBlockAddr; /* assign values to join block message arrays */ int offsetJoin = forkJoinIns.joinVarRegIndex; BMap<String, BRefValueArray> mbMap = new BMap<>(); for (WorkerResult workerResult : resultMsgs) { mbMap.put(workerResult.getWorkerName(), workerResult.getResult()); } this.controlStack.currentFrame.getRefRegs()[offsetJoin] = mbMap; } else { /* timed out */ this.ip = forkJoinIns.timeoutBlockAddr; /* execute the timeout block */ int offsetTimeout = forkJoinIns.timeoutVarRegIndex; BMap<String, BRefValueArray> mbMap = new BMap<>(); for (WorkerResult workerResult : resultMsgs) { mbMap.put(workerResult.getWorkerName(), workerResult.getResult()); } this.controlStack.currentFrame.getRefRegs()[offsetTimeout] = mbMap; } } private boolean invokeJoinWorkers(Map<String, 
BLangVMWorkers.WorkerExecutor> workers, Set<String> joinWorkerNames, int joinCount, long timeout) { ExecutorService exec = ThreadPoolFactory.getInstance().getWorkerExecutor(); Semaphore resultCounter = new Semaphore(-joinCount + 1); workers.forEach((k, v) -> { if (joinWorkerNames.contains(k)) { v.setResultCounterSemaphore(resultCounter); } exec.submit(v); }); try { return resultCounter.tryAcquire(timeout, TimeUnit.SECONDS); } catch (InterruptedException ignore) { return false; } } private void startWorkers() { CallableUnitInfo callableUnitInfo = this.controlStack.currentFrame.callableUnitInfo; BLangVMWorkers.invoke(programFile, callableUnitInfo, this.context); } private void handleWorkerReturn() { WorkerContext workerContext = (WorkerContext) this.context; if (workerContext.parentSF.tryReturn()) { StackFrame workerCallerSF = workerContext.getControlStack().currentFrame; workerContext.parentSF.returnedWorker = workerCallerSF.workerInfo.getWorkerName(); StackFrame parentSF = workerContext.parentSF; copyWorkersReturnValues(workerCallerSF, parentSF); this.context = workerContext.parent; this.controlStack = this.context.getControlStack(); controlStack.popFrame(); this.constPool = this.controlStack.currentFrame.packageInfo.getConstPoolEntries(); this.code = this.controlStack.currentFrame.packageInfo.getInstructions(); ip = parentSF.retAddrs; } else { String msg = workerContext.parentSF.returnedWorker + " already returned."; context.setError(BLangVMErrors.createIllegalStateException(context, ip, msg)); handleError(); } } public void handleWorkerReceive(WorkerDataChannelInfo workerDataChannel, BType[] types, int[] regs) { BValue[] passedInValues = (BValue[]) workerDataChannel.takeData(); StackFrame currentFrame = controlStack.currentFrame; copyArgValuesForWorkerReceive(currentFrame, regs, types, passedInValues); } public static void copyArgValuesForWorkerSend(StackFrame callerSF, int[] argRegs, BType[] paramTypes, BValue[] arguments) { for (int i = 0; i < argRegs.length; i++) { BType paramType = paramTypes[i]; int argReg = argRegs[i]; switch (paramType.getTag()) { case TypeTags.INT_TAG: arguments[i] = new BInteger(callerSF.longRegs[argReg]); break; case TypeTags.FLOAT_TAG: arguments[i] = new BFloat(callerSF.doubleRegs[argReg]); break; case TypeTags.STRING_TAG: arguments[i] = new BString(callerSF.stringRegs[argReg]); break; case TypeTags.BOOLEAN_TAG: arguments[i] = new BBoolean(callerSF.intRegs[argReg] > 0); break; case TypeTags.BLOB_TAG: arguments[i] = new BBlob(callerSF.byteRegs[argReg]); break; default: arguments[i] = callerSF.refRegs[argReg]; } } } public static void copyArgValuesForWorkerReceive(StackFrame currentSF, int[] argRegs, BType[] paramTypes, BValue[] passedInValues) { for (int i = 0; i < argRegs.length; i++) { int regIndex = argRegs[i]; BType paramType = paramTypes[i]; switch (paramType.getTag()) { case TypeTags.INT_TAG: currentSF.getLongRegs()[regIndex] = ((BInteger) passedInValues[i]).intValue(); break; case TypeTags.FLOAT_TAG: currentSF.getDoubleRegs()[regIndex] = ((BFloat) passedInValues[i]).floatValue(); break; case TypeTags.STRING_TAG: currentSF.getStringRegs()[regIndex] = (passedInValues[i]).stringValue(); break; case TypeTags.BOOLEAN_TAG: currentSF.getIntRegs()[regIndex] = (((BBoolean) passedInValues[i]).booleanValue()) ? 
1 : 0; break; case TypeTags.BLOB_TAG: currentSF.getByteRegs()[regIndex] = ((BBlob) passedInValues[i]).blobValue(); break; default: currentSF.getRefRegs()[regIndex] = (BRefType) passedInValues[i]; } } } public static void copyValuesForForkJoin(StackFrame callerSF, StackFrame calleeSF, int[] argRegs) { int longLocalVals = argRegs[0]; int doubleLocalVals = argRegs[1]; int stringLocalVals = argRegs[2]; int booleanLocalVals = argRegs[3]; int blobLocalVals = argRegs[4]; int refLocalVals = argRegs[5]; for (int i = 0; i <= longLocalVals; i++) { calleeSF.getLongRegs()[i] = callerSF.getLongRegs()[i]; } for (int i = 0; i <= doubleLocalVals; i++) { calleeSF.getDoubleRegs()[i] = callerSF.getDoubleRegs()[i]; } for (int i = 0; i <= stringLocalVals; i++) { calleeSF.getStringRegs()[i] = callerSF.getStringRegs()[i]; } for (int i = 0; i <= booleanLocalVals; i++) { calleeSF.getIntRegs()[i] = callerSF.getIntRegs()[i]; } for (int i = 0; i <= refLocalVals; i++) { calleeSF.getRefRegs()[i] = callerSF.getRefRegs()[i]; } for (int i = 0; i <= blobLocalVals; i++) { calleeSF.getByteRegs()[i] = callerSF.getByteRegs()[i]; } } public static void copyValues(StackFrame parent, StackFrame workerSF) { CodeAttributeInfo codeInfo = parent.callableUnitInfo.getDefaultWorkerInfo().getCodeAttributeInfo(); System.arraycopy(parent.longRegs, 0, workerSF.longRegs, 0, codeInfo.getMaxLongLocalVars()); System.arraycopy(parent.doubleRegs, 0, workerSF.doubleRegs, 0, codeInfo.getMaxDoubleLocalVars()); System.arraycopy(parent.intRegs, 0, workerSF.intRegs, 0, codeInfo.getMaxIntLocalVars()); System.arraycopy(parent.stringRegs, 0, workerSF.stringRegs, 0, codeInfo.getMaxStringLocalVars()); System.arraycopy(parent.byteRegs, 0, workerSF.byteRegs, 0, codeInfo.getMaxByteLocalVars()); System.arraycopy(parent.refRegs, 0, workerSF.refRegs, 0, codeInfo.getMaxRefLocalVars()); } public static void copyArgValues(StackFrame callerSF, StackFrame calleeSF, int[] argRegs, BType[] paramTypes) { int longRegIndex = -1; int doubleRegIndex = -1; int stringRegIndex = -1; int booleanRegIndex = -1; int refRegIndex = -1; int blobRegIndex = -1; for (int i = 0; i < argRegs.length; i++) { BType paramType = paramTypes[i]; int argReg = argRegs[i]; switch (paramType.getTag()) { case TypeTags.INT_TAG: calleeSF.longRegs[++longRegIndex] = callerSF.longRegs[argReg]; break; case TypeTags.FLOAT_TAG: calleeSF.doubleRegs[++doubleRegIndex] = callerSF.doubleRegs[argReg]; break; case TypeTags.STRING_TAG: calleeSF.stringRegs[++stringRegIndex] = callerSF.stringRegs[argReg]; break; case TypeTags.BOOLEAN_TAG: calleeSF.intRegs[++booleanRegIndex] = callerSF.intRegs[argReg]; break; case TypeTags.BLOB_TAG: calleeSF.byteRegs[++blobRegIndex] = callerSF.byteRegs[argReg]; break; default: calleeSF.refRegs[++refRegIndex] = callerSF.refRegs[argReg]; } } } private void handleReturn() { StackFrame currentSF = controlStack.popFrame(); if (controlStack.currentFrame != null) { StackFrame callersSF = controlStack.currentFrame; this.constPool = callersSF.packageInfo.getConstPoolEntries(); this.code = callersSF.packageInfo.getInstructions(); } ip = currentSF.retAddrs; } private void copyWorkersReturnValues(StackFrame workerSF, StackFrame parentsSF) { int callersRetRegIndex; int longRegCount = 0; int doubleRegCount = 0; int stringRegCount = 0; int intRegCount = 0; int refRegCount = 0; int byteRegCount = 0; StackFrame workerCallerSF = workerSF.prevStackFrame; StackFrame parentCallersSF = parentsSF.prevStackFrame; BType[] retTypes = parentsSF.getCallableUnitInfo().getRetParamTypes(); for (int i = 0; i < 
retTypes.length; i++) { BType retType = retTypes[i]; callersRetRegIndex = parentsSF.retRegIndexes[i]; switch (retType.getTag()) { case TypeTags.INT_TAG: parentCallersSF.longRegs[callersRetRegIndex] = workerCallerSF.longRegs[longRegCount++]; break; case TypeTags.FLOAT_TAG: parentCallersSF.doubleRegs[callersRetRegIndex] = workerCallerSF.doubleRegs[doubleRegCount++]; break; case TypeTags.STRING_TAG: parentCallersSF.stringRegs[callersRetRegIndex] = workerCallerSF.stringRegs[stringRegCount++]; break; case TypeTags.BOOLEAN_TAG: parentCallersSF.intRegs[callersRetRegIndex] = workerCallerSF.intRegs[intRegCount++]; break; case TypeTags.BLOB_TAG: parentCallersSF.byteRegs[callersRetRegIndex] = workerCallerSF.byteRegs[byteRegCount++]; break; default: parentCallersSF.refRegs[callersRetRegIndex] = workerCallerSF.refRegs[refRegCount++]; break; } } } private String getOperandsLine(int[] operands) { if (operands.length == 0) { return ""; } if (operands.length == 1) { return "" + operands[0]; } StringBuilder sb = new StringBuilder(); sb.append(operands[0]); for (int i = 1; i < operands.length; i++) { sb.append(" "); sb.append(operands[i]); } return sb.toString(); } private void invokeNativeFunction(FunctionInfo functionInfo, int[] argRegs, int[] retRegs) { StackFrame callerSF = controlStack.currentFrame; BType[] retTypes = functionInfo.getRetParamTypes(); BValue[] returnValues = new BValue[retTypes.length]; StackFrame caleeSF = new StackFrame(functionInfo, functionInfo.getDefaultWorkerInfo(), ip, null, returnValues); copyArgValues(callerSF, caleeSF, argRegs, functionInfo.getParamTypes()); controlStack.pushFrame(caleeSF); AbstractNativeFunction nativeFunction = functionInfo.getNativeFunction(); try { nativeFunction.executeNative(context); } catch (BLangNullReferenceException e) { context.setError(BLangVMErrors.createNullRefError(context, ip)); handleError(); return; } catch (Throwable e) { context.setError(BLangVMErrors.createError(this.context, ip, e.getMessage())); handleError(); return; } controlStack.popFrame(); handleReturnFromNativeCallableUnit(callerSF, retRegs, returnValues, retTypes); } private void invokeNativeAction(ActionInfo actionInfo, int[] argRegs, int[] retRegs) { StackFrame callerSF = controlStack.currentFrame; WorkerInfo defaultWorkerInfo = actionInfo.getDefaultWorkerInfo(); AbstractNativeAction nativeAction = actionInfo.getNativeAction(); if (nativeAction == null) { return; } BType[] retTypes = actionInfo.getRetParamTypes(); BValue[] returnValues = new BValue[retTypes.length]; StackFrame caleeSF = new StackFrame(actionInfo, defaultWorkerInfo, ip, null, returnValues); copyArgValues(callerSF, caleeSF, argRegs, actionInfo.getParamTypes()); controlStack.pushFrame(caleeSF); try { boolean nonBlocking = !context.isInTransaction() && nativeAction.isNonBlockingAction() && !context.blockingInvocation; BClientConnectorFutureListener listener = new BClientConnectorFutureListener(context, nonBlocking); if (nonBlocking) { context.setStartIP(ip); if (caleeSF.packageInfo == null) { caleeSF.packageInfo = actionInfo.getPackageInfo(); } context.nonBlockingContext = new Context.NonBlockingContext(actionInfo, retRegs); ConnectorFuture future = nativeAction.execute(context); if (future == null) { throw new BallerinaException("Native action doesn't provide a future object to sync"); } future.setConnectorFutureListener(listener); ip = -1; } else { ConnectorFuture future = nativeAction.execute(context); if (future == null) { throw new BallerinaException("Native action doesn't provide a future object to sync"); } 
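/* Blocking native action path: the listener is attached to the connector future and listener.sync(timeout) parks the current thread until the action signals completion or the 300000 ms default timeout elapses; on success the callee frame is popped and the return values are copied back to the caller's registers. */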
future.setConnectorFutureListener(listener); long timeout = 300000; boolean res = listener.sync(timeout); if (!res) { throw new BallerinaException("Action execution timed out, timeout period - " + timeout + ", Action - " + nativeAction.getPackagePath() + ":" + nativeAction.getName()); } if (context.getError() != null) { handleError(); } controlStack.popFrame(); handleReturnFromNativeCallableUnit(callerSF, retRegs, returnValues, retTypes); } } catch (Throwable e) { context.setError(BLangVMErrors.createError(this.context, ip, e.getMessage())); handleError(); } } public static void handleReturnFromNativeCallableUnit(StackFrame callerSF, int[] returnRegIndexes, BValue[] returnValues, BType[] retTypes) { for (int i = 0; i < returnValues.length; i++) { int callersRetRegIndex = returnRegIndexes[i]; BType retType = retTypes[i]; switch (retType.getTag()) { case TypeTags.INT_TAG: if (returnValues[i] == null) { callerSF.longRegs[callersRetRegIndex] = 0; break; } callerSF.longRegs[callersRetRegIndex] = ((BInteger) returnValues[i]).intValue(); break; case TypeTags.FLOAT_TAG: if (returnValues[i] == null) { callerSF.doubleRegs[callersRetRegIndex] = 0; break; } callerSF.doubleRegs[callersRetRegIndex] = ((BFloat) returnValues[i]).floatValue(); break; case TypeTags.STRING_TAG: if (returnValues[i] == null) { callerSF.stringRegs[callersRetRegIndex] = STRING_NULL_VALUE; break; } callerSF.stringRegs[callersRetRegIndex] = returnValues[i].stringValue(); break; case TypeTags.BOOLEAN_TAG: if (returnValues[i] == null) { callerSF.intRegs[callersRetRegIndex] = 0; break; } callerSF.intRegs[callersRetRegIndex] = ((BBoolean) returnValues[i]).booleanValue() ? 1 : 0; break; case TypeTags.BLOB_TAG: if (returnValues[i] == null) { callerSF.byteRegs[callersRetRegIndex] = new byte[0]; break; } callerSF.byteRegs[callersRetRegIndex] = ((BBlob) returnValues[i]).blobValue(); break; default: callerSF.refRegs[callersRetRegIndex] = (BRefType) returnValues[i]; } } } private boolean checkCast(BValue sourceValue, BType targetType) { BType sourceType = sourceValue.getType(); if (sourceType.equals(targetType)) { return true; } if (sourceType.getTag() == TypeTags.STRUCT_TAG && targetType.getTag() == TypeTags.STRUCT_TAG) { return checkStructEquivalency((BStructType) sourceType, (BStructType) targetType); } if (targetType.getTag() == TypeTags.ANY_TAG) { return true; } if (getElementType(sourceType).getTag() == TypeTags.JSON_TAG) { return checkJSONCast(((BJSON) sourceValue).value(), sourceType, targetType); } if (targetType.getTag() == TypeTags.ARRAY_TAG || sourceType.getTag() == TypeTags.ARRAY_TAG) { return checkArrayCast(sourceType, targetType); } return false; } private boolean checkArrayCast(BType sourceType, BType targetType) { if (targetType.getTag() == TypeTags.ARRAY_TAG && sourceType.getTag() == TypeTags.ARRAY_TAG) { BArrayType sourceArrayType = (BArrayType) sourceType; BArrayType targetArrayType = (BArrayType) targetType; if (targetArrayType.getDimensions() > sourceArrayType.getDimensions()) { return false; } return checkArrayCast(sourceArrayType.getElementType(), targetArrayType.getElementType()); } else if (sourceType.getTag() == TypeTags.ARRAY_TAG) { return targetType.getTag() == TypeTags.ANY_TAG; } return sourceType.equals(targetType); } private BType getElementType(BType type) { if (type.getTag() != TypeTags.ARRAY_TAG) { return type; } return getElementType(((BArrayType) type).getElementType()); } public static boolean checkStructEquivalency(BStructType sourceType, BStructType targetType) { BStructType.StructField[] sFields = 
sourceType.getStructFields(); BStructType.StructField[] tFields = targetType.getStructFields(); if (tFields.length > sFields.length) { return false; } for (int i = 0; i < tFields.length; i++) { if (isAssignable(tFields[i].getFieldType(), sFields[i].getFieldType()) && tFields[i].getFieldName().equals(sFields[i].getFieldName())) { continue; } return false; } return true; } private static boolean isAssignable(BType actualType, BType expType) { if (actualType == expType) { return true; } if (actualType.getTag() == expType.getTag() && isValueType(actualType)) { return true; } else if (actualType.getTag() == expType.getTag() && !isUserDefinedType(actualType) && !isConstrainedType(actualType)) { return true; } else if (actualType.getTag() == expType.getTag() && actualType.getTag() == TypeTags.ARRAY_TAG) { return checkArrayEquivalent(actualType, expType); } else if (actualType.getTag() == expType.getTag() && actualType.getTag() == TypeTags.STRUCT_TAG && checkStructEquivalency((BStructType) actualType, (BStructType) expType)) { return true; } return false; } private static boolean isValueType(BType type) { return type.getTag() <= TypeTags.BLOB_TAG; } private static boolean isUserDefinedType(BType type) { return type.getTag() == TypeTags.STRUCT_TAG || type.getTag() == TypeTags.CONNECTOR_TAG || type.getTag() == TypeTags.ENUM_TAG || type.getTag() == TypeTags.ARRAY_TAG; } private static boolean isConstrainedType(BType type) { return type.getTag() == TypeTags.JSON_TAG; } private static boolean checkArrayEquivalent(BType actualType, BType expType) { if (expType.getTag() == TypeTags.ARRAY_TAG && actualType.getTag() == TypeTags.ARRAY_TAG) { BArrayType lhrArrayType = (BArrayType) expType; BArrayType rhsArrayType = (BArrayType) actualType; return checkArrayEquivalent(lhrArrayType.getElementType(), rhsArrayType.getElementType()); } if (expType == actualType) { return true; } return false; } private void castJSONToInt(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BJSON jsonValue = (BJSON) sf.refRegs[i]; if (jsonValue == null) { handleNullRefError(); return; } JsonNode jsonNode; try { jsonNode = jsonValue.value(); } catch (BallerinaException e) { String errorMsg = BLangExceptionHelper.getErrorMessage(RuntimeErrors.CASTING_FAILED_WITH_CAUSE, BTypes.typeJSON, BTypes.typeInt, e.getMessage()); context.setError(BLangVMErrors.createError(context, ip, errorMsg)); handleError(); return; } if (jsonNode.isLong()) { sf.longRegs[j] = jsonNode.longValue(); sf.refRegs[k] = null; return; } sf.longRegs[j] = 0; handleTypeCastError(sf, k, JSONUtils.getTypeName(jsonNode), TypeConstants.INT_TNAME); } private void castJSONToFloat(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BJSON jsonValue = (BJSON) sf.refRegs[i]; if (jsonValue == null) { handleNullRefError(); return; } JsonNode jsonNode; try { jsonNode = jsonValue.value(); } catch (BallerinaException e) { String errorMsg = BLangExceptionHelper.getErrorMessage(RuntimeErrors.CASTING_FAILED_WITH_CAUSE, BTypes.typeJSON, BTypes.typeFloat, e.getMessage()); context.setError(BLangVMErrors.createError(context, ip, errorMsg)); handleError(); return; } if (jsonNode.isDouble()) { sf.doubleRegs[j] = jsonNode.doubleValue(); sf.refRegs[k] = null; return; } sf.doubleRegs[j] = 0; handleTypeCastError(sf, k, JSONUtils.getTypeName(jsonNode), TypeConstants.FLOAT_TNAME); } private void castJSONToString(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BJSON 
jsonValue = (BJSON) sf.refRegs[i]; if (jsonValue == null) { handleNullRefError(); return; } JsonNode jsonNode; try { jsonNode = jsonValue.value(); } catch (BallerinaException e) { sf.stringRegs[j] = ""; String errorMsg = BLangExceptionHelper.getErrorMessage(RuntimeErrors.CASTING_FAILED_WITH_CAUSE, BTypes.typeJSON, BTypes.typeString, e.getMessage()); context.setError(BLangVMErrors.createError(context, ip, errorMsg)); handleError(); return; } if (jsonNode.isString()) { sf.stringRegs[j] = jsonNode.stringValue(); sf.refRegs[k] = null; return; } sf.stringRegs[j] = STRING_NULL_VALUE; handleTypeCastError(sf, k, JSONUtils.getTypeName(jsonNode), TypeConstants.STRING_TNAME); } private void castJSONToBoolean(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BJSON jsonValue = (BJSON) sf.refRegs[i]; if (jsonValue == null) { handleNullRefError(); return; } JsonNode jsonNode; try { jsonNode = jsonValue.value(); } catch (BallerinaException e) { String errorMsg = BLangExceptionHelper.getErrorMessage(RuntimeErrors.CASTING_FAILED_WITH_CAUSE, BTypes.typeJSON, BTypes.typeBoolean, e.getMessage()); context.setError(BLangVMErrors.createError(context, ip, errorMsg)); handleError(); return; } if (jsonNode.isBoolean()) { sf.intRegs[j] = jsonNode.booleanValue() ? 1 : 0; sf.refRegs[k] = null; return; } sf.intRegs[j] = 0; handleTypeCastError(sf, k, JSONUtils.getTypeName(jsonNode), TypeConstants.BOOLEAN_TNAME); } private boolean checkJSONEquivalency(JsonNode json, BJSONType sourceType, BJSONType targetType) { BStructType sourceConstrainedType = (BStructType) sourceType.getConstrainedType(); BStructType targetConstrainedType = (BStructType) targetType.getConstrainedType(); if (targetConstrainedType == null) { return true; } if (sourceConstrainedType != null) { if (sourceConstrainedType.equals(targetConstrainedType)) { return true; } return checkStructEquivalency(sourceConstrainedType, targetConstrainedType); } BStructType.StructField[] tFields = targetConstrainedType.getStructFields(); for (int i = 0; i < tFields.length; i++) { String fieldName = tFields[i].getFieldName(); if (!json.has(fieldName)) { return false; } if (!checkJSONCast(json.get(fieldName), sourceType, tFields[i].getFieldType())) { return false; } } return true; } /** * Check the compatibility of casting a JSON to a target type. * * @param json JSON to cast * @param sourceType Type of the source JSON * @param targetType Target type * @return Runtime compatibility for casting */ private boolean checkJSONCast(JsonNode json, BType sourceType, BType targetType) { switch (targetType.getTag()) { case TypeTags.STRING_TAG: return json.isString(); case TypeTags.INT_TAG: return json.isLong(); case TypeTags.FLOAT_TAG: return json.isDouble(); case TypeTags.ARRAY_TAG: if (!json.isArray()) { return false; } BArrayType arrayType = (BArrayType) targetType; for (int i = 0; i < json.size(); i++) { BType sourceElementType = sourceType.getTag() == TypeTags.ARRAY_TAG ? 
((BArrayType) sourceType).getElementType() : sourceType; if (!checkJSONCast(json.get(i), sourceElementType, arrayType.getElementType())) { return false; } } return true; case TypeTags.JSON_TAG: if (sourceType.getTag() != TypeTags.JSON_TAG) { return false; } return checkJSONEquivalency(json, (BJSONType) sourceType, (BJSONType) targetType); default: return false; } } private void convertStructToMap(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; BStruct bStruct = (BStruct) sf.refRegs[i]; if (bStruct == null) { sf.refRegs[j] = null; return; } int longRegIndex = -1; int doubleRegIndex = -1; int stringRegIndex = -1; int booleanRegIndex = -1; int blobRegIndex = -1; int refRegIndex = -1; BStructType.StructField[] structFields = ((BStructType) bStruct.getType()).getStructFields(); BMap<String, BValue> map = BTypes.typeMap.getEmptyValue(); for (BStructType.StructField structField : structFields) { String key = structField.getFieldName(); BType fieldType = structField.getFieldType(); switch (fieldType.getTag()) { case TypeTags.INT_TAG: map.put(key, new BInteger(bStruct.getIntField(++longRegIndex))); break; case TypeTags.FLOAT_TAG: map.put(key, new BFloat(bStruct.getFloatField(++doubleRegIndex))); break; case TypeTags.STRING_TAG: map.put(key, new BString(bStruct.getStringField(++stringRegIndex))); break; case TypeTags.BOOLEAN_TAG: map.put(key, new BBoolean(bStruct.getBooleanField(++booleanRegIndex) == 1)); break; case TypeTags.BLOB_TAG: map.put(key, new BBlob(bStruct.getBlobField(++blobRegIndex))); break; default: BValue value = bStruct.getRefField(++refRegIndex); map.put(key, value == null ? null : value.copy()); } } sf.refRegs[j] = map; } private void convertStructToJSON(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BStruct bStruct = (BStruct) sf.refRegs[i]; if (bStruct == null) { sf.refRegs[j] = null; return; } try { sf.refRegs[j] = JSONUtils.convertStructToJSON(bStruct); } catch (Exception e) { sf.refRegs[j] = null; String errorMsg = "cannot convert '" + bStruct.getType() + "' to type '" + BTypes.typeJSON + "': " + e.getMessage(); handleTypeConversionError(sf, k, errorMsg, bStruct.getType().toString(), TypeConstants.JSON_TNAME); } } private void convertMapToStruct(int[] operands, StackFrame sf) { int i = operands[0]; int cpIndex = operands[1]; int j = operands[2]; int k = operands[3]; TypeRefCPEntry typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; BMap<String, BValue> bMap = (BMap<String, BValue>) sf.refRegs[i]; if (bMap == null) { sf.refRegs[j] = null; return; } int longRegIndex = -1; int doubleRegIndex = -1; int stringRegIndex = -1; int booleanRegIndex = -1; int blobRegIndex = -1; int refRegIndex = -1; BStructType structType = (BStructType) typeRefCPEntry.getType(); BStruct bStruct = new BStruct(structType); StructInfo structInfo = sf.packageInfo.getStructInfo(structType.getName()); Set<String> keys = bMap.keySet(); for (StructFieldInfo fieldInfo : structInfo.getFieldInfoEntries()) { String key = fieldInfo.getName(); BType fieldType = fieldInfo.getFieldType(); BValue mapVal = null; try { boolean containsField = keys.contains(key); DefaultValueAttributeInfo defaultValAttrInfo = null; if (containsField) { mapVal = bMap.get(key); if (mapVal == null && BTypes.isValueType(fieldType)) { throw BLangExceptionHelper.getRuntimeException( RuntimeErrors.INCOMPATIBLE_FIELD_TYPE_FOR_CASTING, key, fieldType, null); } if (mapVal != null && !checkCast(mapVal, fieldType)) { throw BLangExceptionHelper.getRuntimeException( 
RuntimeErrors.INCOMPATIBLE_FIELD_TYPE_FOR_CASTING, key, fieldType, mapVal.getType()); } } else { defaultValAttrInfo = (DefaultValueAttributeInfo) getAttributeInfo(fieldInfo, AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE); } switch (fieldType.getTag()) { case TypeTags.INT_TAG: longRegIndex++; if (containsField) { bStruct.setIntField(longRegIndex, ((BInteger) mapVal).intValue()); } else if (defaultValAttrInfo != null) { bStruct.setIntField(longRegIndex, defaultValAttrInfo.getDefaultValue().getIntValue()); } break; case TypeTags.FLOAT_TAG: doubleRegIndex++; if (containsField) { bStruct.setFloatField(doubleRegIndex, ((BFloat) mapVal).floatValue()); } else if (defaultValAttrInfo != null) { bStruct.setFloatField(doubleRegIndex, defaultValAttrInfo.getDefaultValue().getFloatValue()); } break; case TypeTags.STRING_TAG: stringRegIndex++; if (containsField) { bStruct.setStringField(stringRegIndex, ((BString) mapVal).stringValue()); } else if (defaultValAttrInfo != null) { bStruct.setStringField(stringRegIndex, defaultValAttrInfo.getDefaultValue().getStringValue()); } break; case TypeTags.BOOLEAN_TAG: booleanRegIndex++; if (containsField) { bStruct.setBooleanField(booleanRegIndex, ((BBoolean) mapVal).booleanValue() ? 1 : 0); } else if (defaultValAttrInfo != null) { bStruct.setBooleanField(booleanRegIndex, defaultValAttrInfo.getDefaultValue().getBooleanValue() ? 1 : 0); } break; case TypeTags.BLOB_TAG: blobRegIndex++; if (containsField && mapVal != null) { bStruct.setBlobField(blobRegIndex, ((BBlob) mapVal).blobValue()); } break; default: bStruct.setRefField(++refRegIndex, (BRefType) mapVal); } } catch (BallerinaException e) { sf.refRegs[j] = null; String errorMsg = "cannot convert '" + bMap.getType() + "' to type '" + structType + "': " + e.getMessage(); handleTypeConversionError(sf, k, errorMsg, TypeConstants.MAP_TNAME, structType.toString()); return; } } sf.refRegs[j] = bStruct; sf.refRegs[k] = null; } private void convertJSONToStruct(int[] operands, StackFrame sf) { int i = operands[0]; int cpIndex = operands[1]; int j = operands[2]; int k = operands[3]; TypeRefCPEntry typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; BJSON bjson = (BJSON) sf.refRegs[i]; if (bjson == null) { sf.refRegs[j] = null; return; } try { sf.refRegs[j] = JSONUtils.convertJSONToStruct(bjson, (BStructType) typeRefCPEntry.getType(), sf.packageInfo); sf.refRegs[k] = null; } catch (Exception e) { sf.refRegs[j] = null; String errorMsg = "cannot convert '" + TypeConstants.JSON_TNAME + "' to type '" + typeRefCPEntry.getType() + "': " + e.getMessage(); handleTypeConversionError(sf, k, errorMsg, TypeConstants.JSON_TNAME, typeRefCPEntry.getType().toString()); } } private void handleNullRefError() { context.setError(BLangVMErrors.createNullRefError(context, ip)); handleError(); } private void handleError() { int currentIP = ip - 1; StackFrame currentFrame = controlStack.currentFrame; ErrorTableEntry match = null; while (controlStack.currentFrame != null) { match = ErrorTableEntry.getMatch(currentFrame.packageInfo, currentIP, context.getError()); if (match != null) { break; } controlStack.popFrame(); context.setError(currentFrame.errorThrown); if (controlStack.currentFrame == null) { break; } currentIP = currentFrame.retAddrs - 1; currentFrame = controlStack.currentFrame; } if (controlStack.currentFrame == null) { ip = -1; if (context.getServiceInfo() == null) { return; } BServerConnectorFuture connectorFuture = context.getConnectorFuture(); try { connectorFuture.notifyFailure(new BallerinaException(BLangVMErrors 
.getPrintableStackTrace(context.getError()))); } catch (Exception e) { logger.error("cannot handle error using the error handler: " + e.getMessage(), e); } return; } if (match != null) { PackageInfo packageInfo = currentFrame.packageInfo; this.constPool = packageInfo.getConstPoolEntries(); this.code = packageInfo.getInstructions(); ip = match.getIpTarget(); return; } ip = -1; logger.error("fatal error. incorrect error table entry."); } private AttributeInfo getAttributeInfo(AttributeInfoPool attrInfoPool, AttributeInfo.Kind attrInfoKind) { for (AttributeInfo attributeInfo : attrInfoPool.getAttributeInfoEntries()) { if (attributeInfo.getKind() == attrInfoKind) { return attributeInfo; } } return null; } private boolean isWaitingOnNonBlockingAction() { return context.nonBlockingContext != null; } }
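The join in invokeJoinWorkers above is implemented with a counting trick: the Semaphore is seeded with (1 - joinCount) permits (a negative number whenever joinCount > 1), every joining worker releases one permit as it finishes, and tryAcquire can only succeed after joinCount releases, or it returns false when the timeout elapses. Below is a minimal, self-contained sketch of that idiom; the class name JoinCountDemo, the pool size, worker count, and timeout are illustrative assumptions, not part of the VM code.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

public class JoinCountDemo {
    public static void main(String[] args) throws InterruptedException {
        int joinCount = 3; // how many workers must finish before the join fires
        // Seeded with a negative permit count: joinCount releases must happen
        // before a single acquire can succeed (java.util.concurrent.Semaphore
        // explicitly permits a negative initial value for this purpose).
        Semaphore resultCounter = new Semaphore(-joinCount + 1);
        ExecutorService exec = Executors.newFixedThreadPool(5);
        for (int w = 0; w < 5; w++) {
            exec.submit(() -> {
                // ... worker body would run here ...
                resultCounter.release(); // signal this worker's completion
            });
        }
        // Blocks until joinCount workers have released, or 10 seconds pass.
        boolean joined = resultCounter.tryAcquire(10, TimeUnit.SECONDS);
        System.out.println(joined ? "join condition met" : "timed out");
        exec.shutdown();
    }
}

In invokeJoinWorkers the semaphore is handed only to the workers named in the join clause, so completions of other workers never count toward the join condition; this sketch gives it to every worker purely for brevity.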
class BLangVM { private static final String JOIN_TYPE_SOME = "some"; private static final Logger logger = LoggerFactory.getLogger(BLangVM.class); private Context context; private ControlStack controlStack; private ProgramFile programFile; private ConstantPoolEntry[] constPool; private int ip = 0; private Instruction[] code; private StructureType globalMemBlock; public BLangVM(ProgramFile programFile) { this.programFile = programFile; this.globalMemBlock = programFile.getGlobalMemoryBlock(); } private void traceCode(PackageInfo packageInfo) { PrintStream printStream = System.out; for (int i = 0; i < code.length; i++) { printStream.println(i + ": " + code[i].toString()); } } public void run(Context ctx) { StackFrame currentFrame = ctx.getControlStack().getCurrentFrame(); this.constPool = currentFrame.packageInfo.getConstPoolEntries(); this.code = currentFrame.packageInfo.getInstructions(); this.context = ctx; this.controlStack = context.getControlStack(); this.ip = context.getStartIP(); if (context.getError() != null) { handleError(); } else if (isWaitingOnNonBlockingAction()) { BType[] retTypes = context.nonBlockingContext.actionInfo.getRetParamTypes(); StackFrame calleeSF = controlStack.popFrame(); this.constPool = controlStack.currentFrame.packageInfo.getConstPoolEntries(); this.code = controlStack.currentFrame.packageInfo.getInstructions(); handleReturnFromNativeCallableUnit(controlStack.currentFrame, context.nonBlockingContext.retRegs, calleeSF.returnValues, retTypes); context.nonBlockingContext = null; } try { exec(); } catch (Throwable e) { String message; if (e.getMessage() == null) { message = "unknown error occurred"; } else { message = e.getMessage(); } context.setError(BLangVMErrors.createError(context, ip, message)); handleError(); } finally { Debugger debugger = programFile.getDebugger(); if (debugger.isDebugEnabled() && debugger.isClientSessionActive() && context.getDebugContext().isAtive()) { context.getDebugContext().setActive(false); debugger.releaseDebugSessionLock(); } if (!isWaitingOnNonBlockingAction() || context.getError() != null) { ctx.endTrackWorker(); } } } public void execWorker(Context context, int startIP) { context.setStartIP(startIP); Debugger debugger = programFile.getDebugger(); if (debugger.isDebugEnabled() && debugger.isClientSessionActive()) { DebuggerUtil.initDebugContext(context, debugger); } run(context); } /** * Act as a virtual CPU. */ private void execCmpAndBranchOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; switch (opcode) { case InstructionCodes.IGT: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] > sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FGT: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] > sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.IGE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] >= sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FGE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] >= sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.ILT: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] < sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FLT: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] < sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.ILE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] <= sf.longRegs[j] ? 
1 : 0; break; case InstructionCodes.FLE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] <= sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.REQ_NULL: i = operands[0]; j = operands[1]; if (sf.refRegs[i] == null) { sf.intRegs[j] = 1; } else { sf.intRegs[j] = 0; } break; case InstructionCodes.RNE_NULL: i = operands[0]; j = operands[1]; if (sf.refRegs[i] != null) { sf.intRegs[j] = 1; } else { sf.intRegs[j] = 0; } break; case InstructionCodes.SEQ_NULL: i = operands[0]; j = operands[1]; if (sf.stringRegs[i] == null) { sf.intRegs[j] = 1; } else { sf.intRegs[j] = 0; } break; case InstructionCodes.SNE_NULL: i = operands[0]; j = operands[1]; if (sf.stringRegs[i] != null) { sf.intRegs[j] = 1; } else { sf.intRegs[j] = 0; } break; case InstructionCodes.BR_TRUE: i = operands[0]; j = operands[1]; if (sf.intRegs[i] == 1) { ip = j; } break; case InstructionCodes.BR_FALSE: i = operands[0]; j = operands[1]; if (sf.intRegs[i] == 0) { ip = j; } break; case InstructionCodes.GOTO: i = operands[0]; ip = i; break; case InstructionCodes.HALT: ip = -1; break; default: throw new UnsupportedOperationException(); } } private void execLoadOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int lvIndex; int fieldIndex; BIntArray bIntArray; BFloatArray bFloatArray; BStringArray bStringArray; BBooleanArray bBooleanArray; BBlobArray bBlobArray; BRefValueArray bArray; StructureType structureType; BMap<String, BRefType> bMap; BJSON jsonVal; switch (opcode) { case InstructionCodes.IMOVE: lvIndex = operands[0]; i = operands[1]; sf.longRegs[i] = sf.longRegs[lvIndex]; break; case InstructionCodes.FMOVE: lvIndex = operands[0]; i = operands[1]; sf.doubleRegs[i] = sf.doubleRegs[lvIndex]; break; case InstructionCodes.SMOVE: lvIndex = operands[0]; i = operands[1]; sf.stringRegs[i] = sf.stringRegs[lvIndex]; break; case InstructionCodes.BMOVE: lvIndex = operands[0]; i = operands[1]; sf.intRegs[i] = sf.intRegs[lvIndex]; break; case InstructionCodes.LMOVE: lvIndex = operands[0]; i = operands[1]; sf.byteRegs[i] = sf.byteRegs[lvIndex]; break; case InstructionCodes.RMOVE: lvIndex = operands[0]; i = operands[1]; sf.refRegs[i] = sf.refRegs[lvIndex]; break; case InstructionCodes.IALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bIntArray = (BIntArray) sf.refRegs[i]; if (bIntArray == null) { handleNullRefError(); break; } try { sf.longRegs[k] = bIntArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.FALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bFloatArray = (BFloatArray) sf.refRegs[i]; if (bFloatArray == null) { handleNullRefError(); break; } try { sf.doubleRegs[k] = bFloatArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.SALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bStringArray = (BStringArray) sf.refRegs[i]; if (bStringArray == null) { handleNullRefError(); break; } try { sf.stringRegs[k] = bStringArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.BALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bBooleanArray = (BBooleanArray) sf.refRegs[i]; if (bBooleanArray == null) { handleNullRefError(); break; } try { sf.intRegs[k] = bBooleanArray.get(sf.longRegs[j]); } catch (Exception e) { 
context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.LALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bBlobArray = (BBlobArray) sf.refRegs[i]; if (bBlobArray == null) { handleNullRefError(); break; } try { sf.byteRegs[k] = bBlobArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.RALOAD: i = operands[0]; j = operands[1]; k = operands[2]; bArray = (BRefValueArray) sf.refRegs[i]; if (bArray == null) { handleNullRefError(); break; } try { sf.refRegs[k] = bArray.get(sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.JSONALOAD: i = operands[0]; j = operands[1]; k = operands[2]; jsonVal = (BJSON) sf.refRegs[i]; if (jsonVal == null) { handleNullRefError(); break; } try { sf.refRegs[k] = JSONUtils.getArrayElement(jsonVal, sf.longRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.IGLOAD: i = operands[0]; j = operands[1]; sf.longRegs[j] = globalMemBlock.getIntField(i); break; case InstructionCodes.FGLOAD: i = operands[0]; j = operands[1]; sf.doubleRegs[j] = globalMemBlock.getFloatField(i); break; case InstructionCodes.SGLOAD: i = operands[0]; j = operands[1]; sf.stringRegs[j] = globalMemBlock.getStringField(i); break; case InstructionCodes.BGLOAD: i = operands[0]; j = operands[1]; sf.intRegs[j] = globalMemBlock.getBooleanField(i); break; case InstructionCodes.LGLOAD: i = operands[0]; j = operands[1]; sf.byteRegs[j] = globalMemBlock.getBlobField(i); break; case InstructionCodes.RGLOAD: i = operands[0]; j = operands[1]; sf.refRegs[j] = globalMemBlock.getRefField(i); break; case InstructionCodes.IFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.longRegs[j] = structureType.getIntField(fieldIndex); break; case InstructionCodes.FFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.doubleRegs[j] = structureType.getFloatField(fieldIndex); break; case InstructionCodes.SFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.stringRegs[j] = structureType.getStringField(fieldIndex); break; case InstructionCodes.BFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.intRegs[j] = structureType.getBooleanField(fieldIndex); break; case InstructionCodes.LFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.byteRegs[j] = structureType.getBlobField(fieldIndex); break; case InstructionCodes.RFIELDLOAD: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } sf.refRegs[j] = structureType.getRefField(fieldIndex); break; case InstructionCodes.MAPLOAD: i = operands[0]; j = operands[1]; 
k = operands[2]; bMap = (BMap<String, BRefType>) sf.refRegs[i]; if (bMap == null) { handleNullRefError(); break; } sf.refRegs[k] = bMap.get(sf.stringRegs[j]); break; case InstructionCodes.JSONLOAD: i = operands[0]; j = operands[1]; k = operands[2]; jsonVal = (BJSON) sf.refRegs[i]; if (jsonVal == null) { handleNullRefError(); break; } sf.refRegs[k] = JSONUtils.getElement(jsonVal, sf.stringRegs[j]); break; case InstructionCodes.ENUMERATORLOAD: i = operands[0]; j = operands[1]; k = operands[2]; TypeRefCPEntry typeRefCPEntry = (TypeRefCPEntry) constPool[i]; BEnumType enumType = (BEnumType) typeRefCPEntry.getType(); sf.refRegs[k] = enumType.getEnumerator(j); break; default: throw new UnsupportedOperationException(); } } private void execStoreOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int lvIndex; int fieldIndex; BIntArray bIntArray; BFloatArray bFloatArray; BStringArray bStringArray; BBooleanArray bBooleanArray; BBlobArray bBlobArray; BRefValueArray bArray; StructureType structureType; BMap<String, BRefType> bMap; BJSON jsonVal; switch (opcode) { case InstructionCodes.ISTORE: i = operands[0]; lvIndex = operands[1]; sf.longRegs[lvIndex] = sf.longRegs[i]; break; case InstructionCodes.FSTORE: i = operands[0]; lvIndex = operands[1]; sf.doubleRegs[lvIndex] = sf.doubleRegs[i]; break; case InstructionCodes.SSTORE: i = operands[0]; lvIndex = operands[1]; sf.stringRegs[lvIndex] = sf.stringRegs[i]; break; case InstructionCodes.BSTORE: i = operands[0]; lvIndex = operands[1]; sf.intRegs[lvIndex] = sf.intRegs[i]; break; case InstructionCodes.LSTORE: i = operands[0]; lvIndex = operands[1]; sf.byteRegs[lvIndex] = sf.byteRegs[i]; break; case InstructionCodes.RSTORE: i = operands[0]; lvIndex = operands[1]; sf.refRegs[lvIndex] = sf.refRegs[i]; break; case InstructionCodes.IASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bIntArray = (BIntArray) sf.refRegs[i]; if (bIntArray == null) { handleNullRefError(); break; } try { bIntArray.add(sf.longRegs[j], sf.longRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.FASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bFloatArray = (BFloatArray) sf.refRegs[i]; if (bFloatArray == null) { handleNullRefError(); break; } try { bFloatArray.add(sf.longRegs[j], sf.doubleRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.SASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bStringArray = (BStringArray) sf.refRegs[i]; if (bStringArray == null) { handleNullRefError(); break; } try { bStringArray.add(sf.longRegs[j], sf.stringRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.BASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bBooleanArray = (BBooleanArray) sf.refRegs[i]; if (bBooleanArray == null) { handleNullRefError(); break; } try { bBooleanArray.add(sf.longRegs[j], sf.intRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.LASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bBlobArray = (BBlobArray) sf.refRegs[i]; if (bBlobArray == null) { handleNullRefError(); break; } try { bBlobArray.add(sf.longRegs[j], sf.byteRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, 
e.getMessage())); handleError(); } break; case InstructionCodes.RASTORE: i = operands[0]; j = operands[1]; k = operands[2]; bArray = (BRefValueArray) sf.refRegs[i]; if (bArray == null) { handleNullRefError(); break; } try { bArray.add(sf.longRegs[j], sf.refRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.JSONASTORE: i = operands[0]; j = operands[1]; k = operands[2]; jsonVal = (BJSON) sf.refRegs[i]; if (jsonVal == null) { handleNullRefError(); break; } try { JSONUtils.setArrayElement(jsonVal, sf.longRegs[j], (BJSON) sf.refRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.IGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setIntField(j, sf.longRegs[i]); break; case InstructionCodes.FGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setFloatField(j, sf.doubleRegs[i]); break; case InstructionCodes.SGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setStringField(j, sf.stringRegs[i]); break; case InstructionCodes.BGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setBooleanField(j, sf.intRegs[i]); break; case InstructionCodes.LGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setBlobField(j, sf.byteRegs[i]); break; case InstructionCodes.RGSTORE: i = operands[0]; j = operands[1]; globalMemBlock.setRefField(j, sf.refRegs[i]); break; case InstructionCodes.IFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setIntField(fieldIndex, sf.longRegs[j]); break; case InstructionCodes.FFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setFloatField(fieldIndex, sf.doubleRegs[j]); break; case InstructionCodes.SFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setStringField(fieldIndex, sf.stringRegs[j]); break; case InstructionCodes.BFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setBooleanField(fieldIndex, sf.intRegs[j]); break; case InstructionCodes.LFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setBlobField(fieldIndex, sf.byteRegs[j]); break; case InstructionCodes.RFIELDSTORE: i = operands[0]; fieldIndex = operands[1]; j = operands[2]; structureType = (StructureType) sf.refRegs[i]; if (structureType == null) { handleNullRefError(); break; } structureType.setRefField(fieldIndex, sf.refRegs[j]); break; case InstructionCodes.MAPSTORE: i = operands[0]; j = operands[1]; k = operands[2]; bMap = (BMap<String, BRefType>) sf.refRegs[i]; if (bMap == null) { handleNullRefError(); break; } bMap.put(sf.stringRegs[j], sf.refRegs[k]); break; case InstructionCodes.JSONSTORE: i = operands[0]; j = operands[1]; k = operands[2]; jsonVal = (BJSON) sf.refRegs[i]; if (jsonVal == null) { handleNullRefError(); break; } JSONUtils.setElement(jsonVal, sf.stringRegs[j], (BJSON) 
sf.refRegs[k]); break; default: throw new UnsupportedOperationException(); } } private void execBinaryOpCodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; switch (opcode) { case InstructionCodes.IADD: i = operands[0]; j = operands[1]; k = operands[2]; sf.longRegs[k] = sf.longRegs[i] + sf.longRegs[j]; break; case InstructionCodes.FADD: i = operands[0]; j = operands[1]; k = operands[2]; sf.doubleRegs[k] = sf.doubleRegs[i] + sf.doubleRegs[j]; break; case InstructionCodes.SADD: i = operands[0]; j = operands[1]; k = operands[2]; sf.stringRegs[k] = sf.stringRegs[i] + sf.stringRegs[j]; break; case InstructionCodes.XMLADD: i = operands[0]; j = operands[1]; k = operands[2]; BXML lhsXMLVal = (BXML) sf.refRegs[i]; BXML rhsXMLVal = (BXML) sf.refRegs[j]; if (lhsXMLVal == null || rhsXMLVal == null) { handleNullRefError(); break; } sf.refRegs[k] = XMLUtils.concatenate(lhsXMLVal, rhsXMLVal); break; case InstructionCodes.ISUB: i = operands[0]; j = operands[1]; k = operands[2]; sf.longRegs[k] = sf.longRegs[i] - sf.longRegs[j]; break; case InstructionCodes.FSUB: i = operands[0]; j = operands[1]; k = operands[2]; sf.doubleRegs[k] = sf.doubleRegs[i] - sf.doubleRegs[j]; break; case InstructionCodes.IMUL: i = operands[0]; j = operands[1]; k = operands[2]; sf.longRegs[k] = sf.longRegs[i] * sf.longRegs[j]; break; case InstructionCodes.FMUL: i = operands[0]; j = operands[1]; k = operands[2]; sf.doubleRegs[k] = sf.doubleRegs[i] * sf.doubleRegs[j]; break; case InstructionCodes.IDIV: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.longRegs[j] == 0) { context.setError(BLangVMErrors.createError(context, ip, " / by zero")); handleError(); break; } sf.longRegs[k] = sf.longRegs[i] / sf.longRegs[j]; break; case InstructionCodes.FDIV: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.doubleRegs[j] == 0) { context.setError(BLangVMErrors.createError(context, ip, " / by zero")); handleError(); break; } sf.doubleRegs[k] = sf.doubleRegs[i] / sf.doubleRegs[j]; break; case InstructionCodes.IMOD: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.longRegs[j] == 0) { context.setError(BLangVMErrors.createError(context, ip, " / by zero")); handleError(); break; } sf.longRegs[k] = sf.longRegs[i] % sf.longRegs[j]; break; case InstructionCodes.FMOD: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.doubleRegs[j] == 0) { context.setError(BLangVMErrors.createError(context, ip, " / by zero")); handleError(); break; } sf.doubleRegs[k] = sf.doubleRegs[i] % sf.doubleRegs[j]; break; case InstructionCodes.INEG: i = operands[0]; j = operands[1]; sf.longRegs[j] = -sf.longRegs[i]; break; case InstructionCodes.FNEG: i = operands[0]; j = operands[1]; sf.doubleRegs[j] = -sf.doubleRegs[i]; break; case InstructionCodes.BNOT: i = operands[0]; j = operands[1]; sf.intRegs[j] = sf.intRegs[i] == 0 ? 1 : 0; break; case InstructionCodes.IEQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] == sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FEQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] == sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.SEQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = StringUtils.isEqual(sf.stringRegs[i], sf.stringRegs[j]) ? 1 : 0; break; case InstructionCodes.BEQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.intRegs[i] == sf.intRegs[j] ? 
1 : 0; break; case InstructionCodes.REQ: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.refRegs[i] == sf.refRegs[j] ? 1 : 0; break; case InstructionCodes.TEQ: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.refRegs[i] == null || sf.refRegs[j] == null) { handleNullRefError(); break; } sf.intRegs[k] = sf.refRegs[i].equals(sf.refRegs[j]) ? 1 : 0; break; case InstructionCodes.INE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.longRegs[i] != sf.longRegs[j] ? 1 : 0; break; case InstructionCodes.FNE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.doubleRegs[i] != sf.doubleRegs[j] ? 1 : 0; break; case InstructionCodes.SNE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = !StringUtils.isEqual(sf.stringRegs[i], sf.stringRegs[j]) ? 1 : 0; break; case InstructionCodes.BNE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.intRegs[i] != sf.intRegs[j] ? 1 : 0; break; case InstructionCodes.RNE: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[k] = sf.refRegs[i] != sf.refRegs[j] ? 1 : 0; break; case InstructionCodes.TNE: i = operands[0]; j = operands[1]; k = operands[2]; if (sf.refRegs[i] == null || sf.refRegs[j] == null) { handleNullRefError(); break; } sf.intRegs[k] = (!sf.refRegs[i].equals(sf.refRegs[j])) ? 1 : 0; break; default: throw new UnsupportedOperationException(); } } private void execXMLOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int localNameIndex; int uriIndex; int prefixIndex; BXML<?> xmlVal; BXMLQName xmlQName; switch (opcode) { case InstructionCodes.XMLATTRSTORE: i = operands[0]; j = operands[1]; k = operands[2]; xmlVal = (BXML) sf.refRegs[i]; if (xmlVal == null) { handleNullRefError(); break; } xmlQName = (BXMLQName) sf.refRegs[j]; if (xmlQName == null) { handleNullRefError(); break; } xmlVal.setAttribute(xmlQName.getLocalName(), xmlQName.getUri(), xmlQName.getPrefix(), sf.stringRegs[k]); break; case InstructionCodes.XMLATTRLOAD: i = operands[0]; j = operands[1]; k = operands[2]; xmlVal = (BXML) sf.refRegs[i]; if (xmlVal == null) { handleNullRefError(); break; } xmlQName = (BXMLQName) sf.refRegs[j]; if (xmlQName == null) { handleNullRefError(); break; } sf.stringRegs[k] = xmlVal.getAttribute(xmlQName.getLocalName(), xmlQName.getUri(), xmlQName.getPrefix()); break; case InstructionCodes.XML2XMLATTRS: i = operands[0]; j = operands[1]; xmlVal = (BXML) sf.refRegs[i]; if (xmlVal == null) { sf.refRegs[j] = null; break; } sf.refRegs[j] = new BXMLAttributes(xmlVal); break; case InstructionCodes.S2QNAME: i = operands[0]; j = operands[1]; k = operands[2]; String qNameStr = sf.stringRegs[i]; int parenEndIndex = qNameStr.indexOf('}'); if (qNameStr.startsWith("{") && parenEndIndex > 0) { sf.stringRegs[j] = qNameStr.substring(parenEndIndex + 1, qNameStr.length()); sf.stringRegs[k] = qNameStr.substring(1, parenEndIndex); } else { sf.stringRegs[j] = qNameStr; sf.stringRegs[k] = STRING_NULL_VALUE; } break; case InstructionCodes.NEWQNAME: localNameIndex = operands[0]; uriIndex = operands[1]; prefixIndex = operands[2]; i = operands[3]; String localname = sf.stringRegs[localNameIndex]; localname = StringEscapeUtils.escapeXml11(localname); String prefix = sf.stringRegs[prefixIndex]; prefix = StringEscapeUtils.escapeXml11(prefix); sf.refRegs[i] = new BXMLQName(localname, sf.stringRegs[uriIndex], prefix); break; case InstructionCodes.XMLLOAD: i = operands[0]; j = operands[1]; k = operands[2]; xmlVal = (BXML) sf.refRegs[i]; if (xmlVal == null) { handleNullRefError(); 
break; } long index = sf.longRegs[j]; sf.refRegs[k] = xmlVal.getItem(index); break; case InstructionCodes.NEWXMLELEMENT: case InstructionCodes.NEWXMLCOMMENT: case InstructionCodes.NEWXMLTEXT: case InstructionCodes.NEWXMLPI: case InstructionCodes.XMLSTORE: execXMLCreationOpcodes(sf, opcode, operands); break; default: throw new UnsupportedOperationException(); } } private void execTypeCastOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int cpIndex; BRefType bRefType; TypeRefCPEntry typeRefCPEntry; switch (opcode) { case InstructionCodes.I2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BInteger(sf.longRegs[i]); break; case InstructionCodes.F2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BFloat(sf.doubleRegs[i]); break; case InstructionCodes.S2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BString(sf.stringRegs[i]); break; case InstructionCodes.B2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BBoolean(sf.intRegs[i] == 1); break; case InstructionCodes.L2ANY: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BBlob(sf.byteRegs[i]); break; case InstructionCodes.ANY2I: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.longRegs[j] = 0; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeInt); } else if (bRefType.getType() == BTypes.typeInt) { sf.refRegs[k] = null; sf.longRegs[j] = ((BInteger) bRefType).intValue(); } else { sf.longRegs[j] = 0; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeInt); } break; case InstructionCodes.ANY2F: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.doubleRegs[j] = 0; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeFloat); } else if (bRefType.getType() == BTypes.typeFloat) { sf.refRegs[k] = null; sf.doubleRegs[j] = ((BFloat) bRefType).floatValue(); } else { sf.doubleRegs[j] = 0; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeFloat); } break; case InstructionCodes.ANY2S: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.stringRegs[j] = STRING_NULL_VALUE; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeString); } else if (bRefType.getType() == BTypes.typeString) { sf.refRegs[k] = null; sf.stringRegs[j] = bRefType.stringValue(); } else { sf.stringRegs[j] = STRING_NULL_VALUE; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeString); } break; case InstructionCodes.ANY2B: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.intRegs[j] = 0; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeBoolean); } else if (bRefType.getType() == BTypes.typeBoolean) { sf.refRegs[k] = null; sf.intRegs[j] = ((BBoolean) bRefType).booleanValue() ? 
1 : 0; } else { sf.intRegs[j] = 0; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeBoolean); } break; case InstructionCodes.ANY2L: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.byteRegs[j] = new byte[0]; handleTypeCastError(sf, k, BTypes.typeNull, BTypes.typeBlob); } else if (bRefType.getType() == BTypes.typeBlob) { sf.refRegs[k] = null; sf.byteRegs[j] = ((BBlob) bRefType).blobValue(); } else { sf.byteRegs[j] = new byte[0]; handleTypeCastError(sf, k, bRefType.getType(), BTypes.typeBlob); } break; case InstructionCodes.ANY2JSON: handleAnyToRefTypeCast(sf, operands, BTypes.typeJSON); break; case InstructionCodes.ANY2XML: handleAnyToRefTypeCast(sf, operands, BTypes.typeXML); break; case InstructionCodes.ANY2MAP: handleAnyToRefTypeCast(sf, operands, BTypes.typeMap); break; case InstructionCodes.ANY2TYPE: handleAnyToRefTypeCast(sf, operands, BTypes.typeType); break; case InstructionCodes.ANY2DT: handleAnyToRefTypeCast(sf, operands, BTypes.typeDatatable); break; case InstructionCodes.ANY2E: case InstructionCodes.ANY2T: case InstructionCodes.ANY2C: case InstructionCodes.CHECKCAST: i = operands[0]; cpIndex = operands[1]; j = operands[2]; k = operands[3]; typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.refRegs[j] = null; sf.refRegs[k] = null; } else if (checkCast(bRefType, typeRefCPEntry.getType())) { sf.refRegs[j] = sf.refRegs[i]; sf.refRegs[k] = null; } else { sf.refRegs[j] = null; handleTypeCastError(sf, k, bRefType.getType(), typeRefCPEntry.getType()); } break; case InstructionCodes.NULL2JSON: j = operands[1]; sf.refRegs[j] = new BJSON("null"); break; case InstructionCodes.B2JSON: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BJSON(sf.intRegs[i] == 1 ? "true" : "false"); break; case InstructionCodes.JSON2I: castJSONToInt(operands, sf); break; case InstructionCodes.JSON2F: castJSONToFloat(operands, sf); break; case InstructionCodes.JSON2S: castJSONToString(operands, sf); break; case InstructionCodes.JSON2B: castJSONToBoolean(operands, sf); break; case InstructionCodes.NULL2S: j = operands[1]; sf.stringRegs[j] = null; break; default: throw new UnsupportedOperationException(); } } private void execTypeConversionOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; BRefType bRefType; String str; switch (opcode) { case InstructionCodes.I2F: i = operands[0]; j = operands[1]; sf.doubleRegs[j] = (double) sf.longRegs[i]; break; case InstructionCodes.I2S: i = operands[0]; j = operands[1]; sf.stringRegs[j] = Long.toString(sf.longRegs[i]); break; case InstructionCodes.I2B: i = operands[0]; j = operands[1]; sf.intRegs[j] = sf.longRegs[i] != 0 ? 1 : 0; break; case InstructionCodes.I2JSON: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BJSON(Long.toString(sf.longRegs[i])); break; case InstructionCodes.F2I: i = operands[0]; j = operands[1]; sf.longRegs[j] = (long) sf.doubleRegs[i]; break; case InstructionCodes.F2S: i = operands[0]; j = operands[1]; sf.stringRegs[j] = Double.toString(sf.doubleRegs[i]); break; case InstructionCodes.F2B: i = operands[0]; j = operands[1]; sf.intRegs[j] = sf.doubleRegs[i] != 0.0 ? 
1 : 0; break; case InstructionCodes.F2JSON: i = operands[0]; j = operands[1]; sf.refRegs[j] = new BJSON(Double.toString(sf.doubleRegs[i])); break; case InstructionCodes.S2I: i = operands[0]; j = operands[1]; k = operands[2]; str = sf.stringRegs[i]; if (str == null) { sf.longRegs[j] = 0; handleTypeConversionError(sf, k, null, TypeConstants.INT_TNAME); break; } try { sf.longRegs[j] = Long.parseLong(str); sf.refRegs[k] = null; } catch (NumberFormatException e) { sf.longRegs[j] = 0; handleTypeConversionError(sf, k, TypeConstants.STRING_TNAME, TypeConstants.INT_TNAME); } break; case InstructionCodes.S2F: i = operands[0]; j = operands[1]; k = operands[2]; str = sf.stringRegs[i]; if (str == null) { sf.doubleRegs[j] = 0; handleTypeConversionError(sf, k, null, TypeConstants.FLOAT_TNAME); break; } try { sf.doubleRegs[j] = Double.parseDouble(str); sf.refRegs[k] = null; } catch (NumberFormatException e) { sf.doubleRegs[j] = 0; handleTypeConversionError(sf, k, TypeConstants.STRING_TNAME, TypeConstants.FLOAT_TNAME); } break; case InstructionCodes.S2B: i = operands[0]; j = operands[1]; k = operands[2]; sf.intRegs[j] = Boolean.parseBoolean(sf.stringRegs[i]) ? 1 : 0; sf.refRegs[k] = null; break; case InstructionCodes.S2JSON: i = operands[0]; j = operands[1]; str = StringEscapeUtils.escapeJson(sf.stringRegs[i]); sf.refRegs[j] = str == null ? null : new BJSON("\"" + str + "\""); break; case InstructionCodes.B2I: i = operands[0]; j = operands[1]; sf.longRegs[j] = sf.intRegs[i]; break; case InstructionCodes.B2F: i = operands[0]; j = operands[1]; sf.doubleRegs[j] = sf.intRegs[i]; break; case InstructionCodes.B2S: i = operands[0]; j = operands[1]; sf.stringRegs[j] = sf.intRegs[i] == 1 ? "true" : "false"; break; case InstructionCodes.DT2XML: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { handleNullRefError(); break; } try { sf.refRegs[j] = XMLUtils.datatableToXML((BDataTable) bRefType, context.isInTransaction()); sf.refRegs[k] = null; } catch (Exception e) { sf.refRegs[j] = null; handleTypeConversionError(sf, k, TypeConstants.DATATABLE_TNAME, TypeConstants.XML_TNAME); } break; case InstructionCodes.DT2JSON: i = operands[0]; j = operands[1]; k = operands[2]; bRefType = sf.refRegs[i]; if (bRefType == null) { handleNullRefError(); break; } try { sf.refRegs[j] = JSONUtils.toJSON((BDataTable) bRefType, context.isInTransaction()); sf.refRegs[k] = null; } catch (Exception e) { sf.refRegs[j] = null; handleTypeConversionError(sf, k, TypeConstants.DATATABLE_TNAME, TypeConstants.JSON_TNAME); } break; case InstructionCodes.T2MAP: convertStructToMap(operands, sf); break; case InstructionCodes.T2JSON: convertStructToJSON(operands, sf); break; case InstructionCodes.MAP2T: convertMapToStruct(operands, sf); break; case InstructionCodes.JSON2T: convertJSONToStruct(operands, sf); break; case InstructionCodes.XMLATTRS2MAP: i = operands[0]; j = operands[1]; bRefType = sf.refRegs[i]; if (bRefType == null) { sf.refRegs[j] = null; break; } sf.refRegs[j] = ((BXMLAttributes) sf.refRegs[i]).value(); break; case InstructionCodes.S2XML: i = operands[0]; j = operands[1]; k = operands[2]; str = sf.stringRegs[i]; if (str == null) { sf.refRegs[j] = null; sf.refRegs[k] = null; break; } try { sf.refRegs[j] = XMLUtils.parse(str); sf.refRegs[k] = null; } catch (BallerinaException e) { sf.refRegs[j] = null; handleTypeConversionError(sf, k, e.getMessage(), TypeConstants.STRING_TNAME, TypeConstants.XML_TNAME); } break; case InstructionCodes.S2JSONX: i = operands[0]; j = operands[1]; k = operands[2]; 
str = sf.stringRegs[i]; try { sf.refRegs[j] = str == null ? null : new BJSON(str); sf.refRegs[k] = null; } catch (BallerinaException e) { sf.refRegs[j] = null; handleTypeConversionError(sf, k, e.getMessage(), TypeConstants.STRING_TNAME, TypeConstants.JSON_TNAME); } break; case InstructionCodes.XML2S: i = operands[0]; j = operands[1]; sf.stringRegs[j] = sf.refRegs[i].stringValue(); break; default: throw new UnsupportedOperationException(); } } private void execIteratorOperation(StackFrame sf, Instruction instruction) { int i, j; BCollection collection; BIterator iterator; InstructionIteratorNext nextInstruction; switch (instruction.getOpcode()) { case InstructionCodes.ITR_NEW: i = instruction.getOperands()[0]; j = instruction.getOperands()[1]; collection = (BCollection) sf.refRegs[i]; if (collection == null) { handleNullRefError(); return; } sf.refRegs[j] = collection.newIterator(); break; case InstructionCodes.ITR_HAS_NEXT: i = instruction.getOperands()[0]; j = instruction.getOperands()[1]; iterator = (BIterator) sf.refRegs[i]; if (iterator == null) { sf.intRegs[j] = 0; return; } sf.intRegs[j] = iterator.hasNext() ? 1 : 0; break; case InstructionCodes.ITR_NEXT: nextInstruction = (InstructionIteratorNext) instruction; iterator = (BIterator) sf.refRegs[nextInstruction.iteratorIndex]; if (iterator == null) { return; } BValue[] values = iterator.getNext(nextInstruction.arity); copyValuesToRegistries(nextInstruction.typeTags, nextInstruction.retRegs, values, sf); break; } } private void copyValuesToRegistries(int[] typeTags, int[] targetReg, BValue[] values, StackFrame sf) { for (int i = 0; i < typeTags.length; i++) { BValue source = values[i]; int target = targetReg[i]; switch (typeTags[i]) { case TypeTags.INT_TAG: sf.longRegs[target] = ((BInteger) source).intValue(); break; case TypeTags.FLOAT_TAG: sf.doubleRegs[target] = ((BFloat) source).floatValue(); break; case TypeTags.STRING_TAG: sf.stringRegs[target] = source.stringValue(); break; case TypeTags.BOOLEAN_TAG: sf.intRegs[target] = ((BBoolean) source).booleanValue() ? 
1 : 0; break; case TypeTags.BLOB_TAG: sf.byteRegs[target] = ((BBlob) source).blobValue(); break; default: sf.refRegs[target] = (BRefType) source; } } } private void execXMLCreationOpcodes(StackFrame sf, int opcode, int[] operands) { int i; int j; int k; int l; BXML<?> xmlVal; switch (opcode) { case InstructionCodes.NEWXMLELEMENT: i = operands[0]; j = operands[1]; k = operands[2]; l = operands[3]; BXMLQName startTagName = (BXMLQName) sf.refRegs[j]; BXMLQName endTagName = (BXMLQName) sf.refRegs[k]; try { sf.refRegs[i] = XMLUtils.createXMLElement(startTagName, endTagName, sf.stringRegs[l]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.NEWXMLCOMMENT: i = operands[0]; j = operands[1]; try { sf.refRegs[i] = XMLUtils.createXMLComment(sf.stringRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.NEWXMLTEXT: i = operands[0]; j = operands[1]; try { sf.refRegs[i] = XMLUtils.createXMLText(sf.stringRegs[j]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.NEWXMLPI: i = operands[0]; j = operands[1]; k = operands[2]; try { sf.refRegs[i] = XMLUtils.createXMLProcessingInstruction(sf.stringRegs[j], sf.stringRegs[k]); } catch (Exception e) { context.setError(BLangVMErrors.createError(context, ip, e.getMessage())); handleError(); } break; case InstructionCodes.XMLSTORE: i = operands[0]; j = operands[1]; xmlVal = (BXML<?>) sf.refRegs[i]; BXML<?> child = (BXML<?>) sf.refRegs[j]; xmlVal.addChildren(child); break; } } /** * Method to calculate and detect debug points when the instruction point is given. */ private void debug() { Debugger debugger = programFile.getDebugger(); if (!debugger.isClientSessionActive()) { return; } DebugContext debugContext = context.getDebugContext(); LineNumberInfo currentExecLine = debugger .getLineNumber(controlStack.currentFrame.packageInfo.getPkgPath(), ip); /* Below if check stops hitting the same debug line again and again in case that single line has multiple instructions. */ if (currentExecLine.equals(debugContext.getLastLine()) || debugPointCheck(currentExecLine, debugger, debugContext)) { return; } switch (debugContext.getCurrentCommand()) { case RESUME: /* In case of a for loop, need to clear the last hit line, so that, same line can get hit again. */ debugContext.clearLastDebugLine(); break; case STEP_IN: debugHit(currentExecLine, debugger, debugContext); break; case STEP_OVER: if (controlStack.currentFrame == debugContext.getStackFrame()) { debugHit(currentExecLine, debugger, debugContext); return; } /* This is either, 1) function call (instruction of the next function) 2) returning to the previous function below if condition checks the 2nd possibility, and if that's the case, then it's a debug hit. To check that, it needs to check whether last line contains return instruction or not. (return line may have multiple instructions, ex - return v1 + v2 * v3 + v4; */ if (debugContext.getLastLine().checkIpRangeForInstructionCode(code, InstructionCodes.RET) && controlStack.currentFrame == debugContext.getStackFrame().prevStackFrame) { debugHit(currentExecLine, debugger, debugContext); return; } /* This means it's a function call. So using intermediate step to wait until returning from that function call. 
*/ debugContext.setCurrentCommand(DebugCommand.STEP_OVER_INTMDT); break; case STEP_OVER_INTMDT: /* Here it checks whether it has returned to the previous stack frame (that is previous function) if so, then debug hit. */ interMediateDebugCheck(currentExecLine, debugger, debugContext); break; case STEP_OUT: /* This is the first instruction of immediate next line of the last debug hit point. So next debug hit point should be when it comes to the "previousStackFrame" of the "stackFrame" relevant to the last debug hit point. So here that stack frame is saved and using intermediate step to wait until a instruction for that stack frame. */ debugContext.setCurrentCommand(DebugCommand.STEP_OUT_INTMDT); debugContext.setStackFrame(debugContext.getStackFrame().prevStackFrame); interMediateDebugCheck(currentExecLine, debugger, debugContext); break; case STEP_OUT_INTMDT: interMediateDebugCheck(currentExecLine, debugger, debugContext); break; default: logger.warn("invalid debug command, exiting from debugging"); debugger.notifyExit(); debugger.stopDebugging(); } } /** * Inter mediate debug check to avoid switch case falling through. * * @param currentExecLine Current execution line. * @param debugger Debugger object. * @param debugContext Current debug context. */ private void interMediateDebugCheck(LineNumberInfo currentExecLine, Debugger debugger, DebugContext debugContext) { if (controlStack.currentFrame != debugContext.getStackFrame()) { return; } debugHit(currentExecLine, debugger, debugContext); } /** * Helper method to check whether given point is a debug point or not. * If it's a debug point, then notify the debugger. * * @param currentExecLine Current execution line. * @param debugger Debugger object. * @param debugContext Current debug context. * @return Boolean true if it's a debug point, false otherwise. */ private boolean debugPointCheck(LineNumberInfo currentExecLine, Debugger debugger, DebugContext debugContext) { if (!currentExecLine.isDebugPoint()) { return false; } debugHit(currentExecLine, debugger, debugContext); return true; } /** * Helper method to set required details when a debug point hits. * And also to notify the debugger. * * @param currentExecLine Current execution line. * @param debugger Debugger object. * @param debugContext Current debug context. 
*/ private void debugHit(LineNumberInfo currentExecLine, Debugger debugger, DebugContext debugContext) { if (!debugContext.isAtive() && !debugger.tryAcquireDebugSessionLock()) { return; } debugContext.setActive(true); debugContext.setLastLine(currentExecLine); debugContext.setStackFrame(controlStack.currentFrame); debugger.notifyDebugHit(controlStack.currentFrame, currentExecLine, debugContext.getThreadId()); debugger.waitTillDebuggeeResponds(); if (debugContext.getCurrentCommand() == DebugCommand.RESUME && debugContext.isAtive()) { debugContext.setActive(false); debugger.releaseDebugSessionLock(); } } private void handleAnyToRefTypeCast(StackFrame sf, int[] operands, BType targetType) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BRefType bRefType = sf.refRegs[i]; if (bRefType == null) { sf.refRegs[j] = null; sf.refRegs[k] = null; } else if (bRefType.getType() == targetType) { sf.refRegs[j] = bRefType; sf.refRegs[k] = null; } else { sf.refRegs[j] = null; handleTypeCastError(sf, k, bRefType.getType(), targetType); } } private void handleTypeCastError(StackFrame sf, int errorRegIndex, BType sourceType, BType targetType) { handleTypeCastError(sf, errorRegIndex, sourceType.toString(), targetType.toString()); } private void handleTypeCastError(StackFrame sf, int errorRegIndex, String sourceType, String targetType) { BStruct errorVal; errorVal = BLangVMErrors.createTypeCastError(context, ip, sourceType.toString(), targetType.toString()); if (errorRegIndex == -1) { context.setError(errorVal); handleError(); return; } sf.refRegs[errorRegIndex] = errorVal; } private void handleTypeConversionError(StackFrame sf, int errorRegIndex, String sourceTypeName, String targetTypeName) { String errorMsg = "'" + sourceTypeName + "' cannot be converted to '" + targetTypeName + "'"; handleTypeConversionError(sf, errorRegIndex, errorMsg, sourceTypeName, targetTypeName); } private void handleTypeConversionError(StackFrame sf, int errorRegIndex, String errorMessage, String sourceTypeName, String targetTypeName) { BStruct errorVal; errorVal = BLangVMErrors.createTypeConversionError(context, ip, errorMessage, sourceTypeName, targetTypeName); if (errorRegIndex == -1) { context.setError(errorVal); handleError(); return; } sf.refRegs[errorRegIndex] = errorVal; } private void createNewIntRange(int[] operands, StackFrame sf) { long startValue = sf.longRegs[operands[0]]; long endValue = sf.longRegs[operands[1]]; sf.refRegs[operands[2]] = new BIntRange(startValue, endValue); } private void createNewConnector(int[] operands, StackFrame sf) { int cpIndex = operands[0]; int i = operands[1]; StructureRefCPEntry structureRefCPEntry = (StructureRefCPEntry) constPool[cpIndex]; ConnectorInfo connectorInfo = (ConnectorInfo) structureRefCPEntry.getStructureTypeInfo(); BConnector bConnector = new BConnector(connectorInfo.getType()); sf.refRegs[i] = bConnector; } private void createNewStruct(int[] operands, StackFrame sf) { int cpIndex = operands[0]; int i = operands[1]; StructureRefCPEntry structureRefCPEntry = (StructureRefCPEntry) constPool[cpIndex]; StructInfo structInfo = (StructInfo) structureRefCPEntry.getStructureTypeInfo(); BStruct bStruct = new BStruct(structInfo.getType()); int longRegIndex = -1; int doubleRegIndex = -1; int stringRegIndex = -1; int booleanRegIndex = -1; for (StructFieldInfo fieldInfo : structInfo.getFieldInfoEntries()) { DefaultValueAttributeInfo defaultValueInfo = (DefaultValueAttributeInfo) fieldInfo.getAttributeInfo(AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE); switch 
(fieldInfo.getFieldType().getTag()) { case TypeTags.INT_TAG: longRegIndex++; if (defaultValueInfo != null) { bStruct.setIntField(longRegIndex, defaultValueInfo.getDefaultValue().getIntValue()); } break; case TypeTags.FLOAT_TAG: doubleRegIndex++; if (defaultValueInfo != null) { bStruct.setFloatField(doubleRegIndex, defaultValueInfo.getDefaultValue().getFloatValue()); } break; case TypeTags.STRING_TAG: stringRegIndex++; if (defaultValueInfo != null) { bStruct.setStringField(stringRegIndex, defaultValueInfo.getDefaultValue().getStringValue()); } break; case TypeTags.BOOLEAN_TAG: booleanRegIndex++; if (defaultValueInfo != null) { bStruct.setBooleanField(booleanRegIndex, defaultValueInfo.getDefaultValue().getBooleanValue() ? 1 : 0); } break; } } sf.refRegs[i] = bStruct; } private void endTransaction(int status) { BallerinaTransactionManager ballerinaTransactionManager = context.getBallerinaTransactionManager(); if (ballerinaTransactionManager != null) { try { if (status == TransactionStatus.SUCCESS.value()) { ballerinaTransactionManager.commitTransactionBlock(); } else if (status == TransactionStatus.FAILED.value()) { ballerinaTransactionManager.rollbackTransactionBlock(); } else { ballerinaTransactionManager.endTransactionBlock(); if (ballerinaTransactionManager.isOuterTransaction()) { context.setBallerinaTransactionManager(null); } } } catch (Throwable e) { context.setError(BLangVMErrors.createError(this.context, ip, e.getMessage())); handleError(); return; } } } private void beginTransaction(int transactionId, int retryCountRegIndex) { int retryCount = 3; if (retryCountRegIndex != -1) { retryCount = (int) controlStack.currentFrame.getLongRegs()[retryCountRegIndex]; if (retryCount < 0) { context.setError(BLangVMErrors.createError(this.context, ip, BLangExceptionHelper.getErrorMessage(RuntimeErrors.INVALID_RETRY_COUNT))); handleError(); return; } } BallerinaTransactionManager ballerinaTransactionManager = context.getBallerinaTransactionManager(); if (ballerinaTransactionManager == null) { ballerinaTransactionManager = new BallerinaTransactionManager(); context.setBallerinaTransactionManager(ballerinaTransactionManager); } ballerinaTransactionManager.beginTransactionBlock(transactionId, retryCount); } private void retryTransaction(int transactionId, int startOfAbortIP) { BallerinaTransactionManager ballerinaTransactionManager = context.getBallerinaTransactionManager(); int allowedRetryCount = ballerinaTransactionManager.getAllowedRetryCount(transactionId); int currentRetryCount = ballerinaTransactionManager.getCurrentRetryCount(transactionId); if (currentRetryCount >= allowedRetryCount) { if (currentRetryCount != 0) { ip = startOfAbortIP; } } ballerinaTransactionManager.incrementCurrentRetryCount(transactionId); } public void invokeCallableUnit(CallableUnitInfo callableUnitInfo, int[] argRegs, int[] retRegs) { BType[] paramTypes = callableUnitInfo.getParamTypes(); StackFrame callerSF = controlStack.currentFrame; WorkerInfo defaultWorkerInfo = callableUnitInfo.getDefaultWorkerInfo(); StackFrame calleeSF = new StackFrame(callableUnitInfo, defaultWorkerInfo, ip, retRegs); controlStack.pushFrame(calleeSF); copyArgValues(callerSF, calleeSF, argRegs, paramTypes); this.constPool = calleeSF.packageInfo.getConstPoolEntries(); this.code = calleeSF.packageInfo.getInstructions(); ip = defaultWorkerInfo.getCodeAttributeInfo().getCodeAddrs(); } public void invokeAction(String actionName, int[] argRegs, int[] retRegs) { StackFrame callerSF = controlStack.currentFrame; if (callerSF.refRegs[argRegs[0]] == null) 
{ context.setError(BLangVMErrors.createNullRefError(this.context, ip)); handleError(); return; } BConnectorType actualCon = (BConnectorType) ((BConnector) callerSF.refRegs[argRegs[0]]).getConnectorType(); ActionInfo newActionInfo = programFile.getPackageInfo(actualCon.getPackagePath()) .getConnectorInfo(actualCon.getName()).getActionInfo(actionName); if (newActionInfo.isNative()) { invokeNativeAction(newActionInfo, argRegs, retRegs); } else { invokeCallableUnit(newActionInfo, argRegs, retRegs); } } public void handleWorkerSend(WorkerDataChannelInfo workerDataChannel, BType[] types, int[] regs) { StackFrame currentFrame = controlStack.currentFrame; BValue[] arguments = new BValue[types.length]; copyArgValuesForWorkerSend(currentFrame, regs, types, arguments); workerDataChannel.setTypes(types); workerDataChannel.putData(arguments); } public void invokeForkJoin(InstructionFORKJOIN forkJoinIns) { ForkjoinInfo forkjoinInfo = forkJoinIns.forkJoinCPEntry.getForkjoinInfo(); List<BLangVMWorkers.WorkerExecutor> workerRunnerList = new ArrayList<>(); long timeout = Long.MAX_VALUE; if (forkjoinInfo.isTimeoutAvailable()) { timeout = this.controlStack.currentFrame.getLongRegs()[forkJoinIns.timeoutRegIndex]; } Queue<WorkerResult> resultMsgs = new ConcurrentLinkedQueue<>(); Map<String, BLangVMWorkers.WorkerExecutor> workers = new HashMap<>(); for (WorkerInfo workerInfo : forkjoinInfo.getWorkerInfoMap().values()) { Context workerContext = new WorkerContext(this.programFile, context); workerContext.blockingInvocation = true; StackFrame callerSF = this.controlStack.currentFrame; int[] argRegs = forkjoinInfo.getArgRegs(); ControlStack workerControlStack = workerContext.getControlStack(); StackFrame calleeSF = new StackFrame(this.controlStack.currentFrame.getCallableUnitInfo(), workerInfo, -1, new int[1]); workerControlStack.pushFrame(calleeSF); BLangVM.copyValuesForForkJoin(callerSF, calleeSF, argRegs); BLangVM bLangVM = new BLangVM(this.programFile); BLangVMWorkers.WorkerExecutor workerRunner = new BLangVMWorkers.WorkerExecutor(bLangVM, workerContext, workerInfo, resultMsgs); workerRunnerList.add(workerRunner); workerContext.startTrackWorker(); workers.put(workerInfo.getWorkerName(), workerRunner); } Set<String> joinWorkerNames = new LinkedHashSet<>(Lists.of(forkjoinInfo.getJoinWorkerNames())); if (joinWorkerNames.isEmpty()) { /* if no join workers are specified, that means, all should be considered */ joinWorkerNames.addAll(workers.keySet()); } int workerCount; if (forkjoinInfo.getJoinType().equalsIgnoreCase(JOIN_TYPE_SOME)) { workerCount = forkjoinInfo.getWorkerCount(); } else { workerCount = joinWorkerNames.size(); } boolean success = this.invokeJoinWorkers(workers, joinWorkerNames, workerCount, timeout); if (success) { this.ip = forkJoinIns.joinBlockAddr; /* assign values to join block message arrays */ int offsetJoin = forkJoinIns.joinVarRegIndex; BMap<String, BRefValueArray> mbMap = new BMap<>(); for (WorkerResult workerResult : resultMsgs) { mbMap.put(workerResult.getWorkerName(), workerResult.getResult()); } this.controlStack.currentFrame.getRefRegs()[offsetJoin] = mbMap; } else { /* timed out */ this.ip = forkJoinIns.timeoutBlockAddr; /* execute the timeout block */ int offsetTimeout = forkJoinIns.timeoutVarRegIndex; BMap<String, BRefValueArray> mbMap = new BMap<>(); for (WorkerResult workerResult : resultMsgs) { mbMap.put(workerResult.getWorkerName(), workerResult.getResult()); } this.controlStack.currentFrame.getRefRegs()[offsetTimeout] = mbMap; } } private boolean invokeJoinWorkers(Map<String, 
BLangVMWorkers.WorkerExecutor> workers, Set<String> joinWorkerNames, int joinCount, long timeout) { ExecutorService exec = ThreadPoolFactory.getInstance().getWorkerExecutor(); Semaphore resultCounter = new Semaphore(-joinCount + 1); workers.forEach((k, v) -> { if (joinWorkerNames.contains(k)) { v.setResultCounterSemaphore(resultCounter); } exec.submit(v); }); try { return resultCounter.tryAcquire(timeout, TimeUnit.SECONDS); } catch (InterruptedException ignore) { return false; } } private void startWorkers() { CallableUnitInfo callableUnitInfo = this.controlStack.currentFrame.callableUnitInfo; BLangVMWorkers.invoke(programFile, callableUnitInfo, this.context); } private void handleWorkerReturn() { WorkerContext workerContext = (WorkerContext) this.context; if (workerContext.parentSF.tryReturn()) { StackFrame workerCallerSF = workerContext.getControlStack().currentFrame; workerContext.parentSF.returnedWorker = workerCallerSF.workerInfo.getWorkerName(); StackFrame parentSF = workerContext.parentSF; copyWorkersReturnValues(workerCallerSF, parentSF); this.context = workerContext.parent; this.controlStack = this.context.getControlStack(); controlStack.popFrame(); this.constPool = this.controlStack.currentFrame.packageInfo.getConstPoolEntries(); this.code = this.controlStack.currentFrame.packageInfo.getInstructions(); ip = parentSF.retAddrs; } else { String msg = workerContext.parentSF.returnedWorker + " already returned."; context.setError(BLangVMErrors.createIllegalStateException(context, ip, msg)); handleError(); } } public void handleWorkerReceive(WorkerDataChannelInfo workerDataChannel, BType[] types, int[] regs) { BValue[] passedInValues = (BValue[]) workerDataChannel.takeData(); StackFrame currentFrame = controlStack.currentFrame; copyArgValuesForWorkerReceive(currentFrame, regs, types, passedInValues); } public static void copyArgValuesForWorkerSend(StackFrame callerSF, int[] argRegs, BType[] paramTypes, BValue[] arguments) { for (int i = 0; i < argRegs.length; i++) { BType paramType = paramTypes[i]; int argReg = argRegs[i]; switch (paramType.getTag()) { case TypeTags.INT_TAG: arguments[i] = new BInteger(callerSF.longRegs[argReg]); break; case TypeTags.FLOAT_TAG: arguments[i] = new BFloat(callerSF.doubleRegs[argReg]); break; case TypeTags.STRING_TAG: arguments[i] = new BString(callerSF.stringRegs[argReg]); break; case TypeTags.BOOLEAN_TAG: arguments[i] = new BBoolean(callerSF.intRegs[argReg] > 0); break; case TypeTags.BLOB_TAG: arguments[i] = new BBlob(callerSF.byteRegs[argReg]); break; default: arguments[i] = callerSF.refRegs[argReg]; } } } public static void copyArgValuesForWorkerReceive(StackFrame currentSF, int[] argRegs, BType[] paramTypes, BValue[] passedInValues) { for (int i = 0; i < argRegs.length; i++) { int regIndex = argRegs[i]; BType paramType = paramTypes[i]; switch (paramType.getTag()) { case TypeTags.INT_TAG: currentSF.getLongRegs()[regIndex] = ((BInteger) passedInValues[i]).intValue(); break; case TypeTags.FLOAT_TAG: currentSF.getDoubleRegs()[regIndex] = ((BFloat) passedInValues[i]).floatValue(); break; case TypeTags.STRING_TAG: currentSF.getStringRegs()[regIndex] = (passedInValues[i]).stringValue(); break; case TypeTags.BOOLEAN_TAG: currentSF.getIntRegs()[regIndex] = (((BBoolean) passedInValues[i]).booleanValue()) ? 
1 : 0; break; case TypeTags.BLOB_TAG: currentSF.getByteRegs()[regIndex] = ((BBlob) passedInValues[i]).blobValue(); break; default: currentSF.getRefRegs()[regIndex] = (BRefType) passedInValues[i]; } } } public static void copyValuesForForkJoin(StackFrame callerSF, StackFrame calleeSF, int[] argRegs) { int longLocalVals = argRegs[0]; int doubleLocalVals = argRegs[1]; int stringLocalVals = argRegs[2]; int booleanLocalVals = argRegs[3]; int blobLocalVals = argRegs[4]; int refLocalVals = argRegs[5]; for (int i = 0; i <= longLocalVals; i++) { calleeSF.getLongRegs()[i] = callerSF.getLongRegs()[i]; } for (int i = 0; i <= doubleLocalVals; i++) { calleeSF.getDoubleRegs()[i] = callerSF.getDoubleRegs()[i]; } for (int i = 0; i <= stringLocalVals; i++) { calleeSF.getStringRegs()[i] = callerSF.getStringRegs()[i]; } for (int i = 0; i <= booleanLocalVals; i++) { calleeSF.getIntRegs()[i] = callerSF.getIntRegs()[i]; } for (int i = 0; i <= refLocalVals; i++) { calleeSF.getRefRegs()[i] = callerSF.getRefRegs()[i]; } for (int i = 0; i <= blobLocalVals; i++) { calleeSF.getByteRegs()[i] = callerSF.getByteRegs()[i]; } } public static void copyValues(StackFrame parent, StackFrame workerSF) { CodeAttributeInfo codeInfo = parent.callableUnitInfo.getDefaultWorkerInfo().getCodeAttributeInfo(); System.arraycopy(parent.longRegs, 0, workerSF.longRegs, 0, codeInfo.getMaxLongLocalVars()); System.arraycopy(parent.doubleRegs, 0, workerSF.doubleRegs, 0, codeInfo.getMaxDoubleLocalVars()); System.arraycopy(parent.intRegs, 0, workerSF.intRegs, 0, codeInfo.getMaxIntLocalVars()); System.arraycopy(parent.stringRegs, 0, workerSF.stringRegs, 0, codeInfo.getMaxStringLocalVars()); System.arraycopy(parent.byteRegs, 0, workerSF.byteRegs, 0, codeInfo.getMaxByteLocalVars()); System.arraycopy(parent.refRegs, 0, workerSF.refRegs, 0, codeInfo.getMaxRefLocalVars()); } public static void copyArgValues(StackFrame callerSF, StackFrame calleeSF, int[] argRegs, BType[] paramTypes) { int longRegIndex = -1; int doubleRegIndex = -1; int stringRegIndex = -1; int booleanRegIndex = -1; int refRegIndex = -1; int blobRegIndex = -1; for (int i = 0; i < argRegs.length; i++) { BType paramType = paramTypes[i]; int argReg = argRegs[i]; switch (paramType.getTag()) { case TypeTags.INT_TAG: calleeSF.longRegs[++longRegIndex] = callerSF.longRegs[argReg]; break; case TypeTags.FLOAT_TAG: calleeSF.doubleRegs[++doubleRegIndex] = callerSF.doubleRegs[argReg]; break; case TypeTags.STRING_TAG: calleeSF.stringRegs[++stringRegIndex] = callerSF.stringRegs[argReg]; break; case TypeTags.BOOLEAN_TAG: calleeSF.intRegs[++booleanRegIndex] = callerSF.intRegs[argReg]; break; case TypeTags.BLOB_TAG: calleeSF.byteRegs[++blobRegIndex] = callerSF.byteRegs[argReg]; break; default: calleeSF.refRegs[++refRegIndex] = callerSF.refRegs[argReg]; } } } private void handleReturn() { StackFrame currentSF = controlStack.popFrame(); if (controlStack.currentFrame != null) { StackFrame callersSF = controlStack.currentFrame; this.constPool = callersSF.packageInfo.getConstPoolEntries(); this.code = callersSF.packageInfo.getInstructions(); } ip = currentSF.retAddrs; } private void copyWorkersReturnValues(StackFrame workerSF, StackFrame parentsSF) { int callersRetRegIndex; int longRegCount = 0; int doubleRegCount = 0; int stringRegCount = 0; int intRegCount = 0; int refRegCount = 0; int byteRegCount = 0; StackFrame workerCallerSF = workerSF.prevStackFrame; StackFrame parentCallersSF = parentsSF.prevStackFrame; BType[] retTypes = parentsSF.getCallableUnitInfo().getRetParamTypes(); for (int i = 0; i < 
retTypes.length; i++) { BType retType = retTypes[i]; callersRetRegIndex = parentsSF.retRegIndexes[i]; switch (retType.getTag()) { case TypeTags.INT_TAG: parentCallersSF.longRegs[callersRetRegIndex] = workerCallerSF.longRegs[longRegCount++]; break; case TypeTags.FLOAT_TAG: parentCallersSF.doubleRegs[callersRetRegIndex] = workerCallerSF.doubleRegs[doubleRegCount++]; break; case TypeTags.STRING_TAG: parentCallersSF.stringRegs[callersRetRegIndex] = workerCallerSF.stringRegs[stringRegCount++]; break; case TypeTags.BOOLEAN_TAG: parentCallersSF.intRegs[callersRetRegIndex] = workerCallerSF.intRegs[intRegCount++]; break; case TypeTags.BLOB_TAG: parentCallersSF.byteRegs[callersRetRegIndex] = workerCallerSF.byteRegs[byteRegCount++]; break; default: parentCallersSF.refRegs[callersRetRegIndex] = workerCallerSF.refRegs[refRegCount++]; break; } } } private String getOperandsLine(int[] operands) { if (operands.length == 0) { return ""; } if (operands.length == 1) { return "" + operands[0]; } StringBuilder sb = new StringBuilder(); sb.append(operands[0]); for (int i = 1; i < operands.length; i++) { sb.append(" "); sb.append(operands[i]); } return sb.toString(); } private void invokeNativeFunction(FunctionInfo functionInfo, int[] argRegs, int[] retRegs) { StackFrame callerSF = controlStack.currentFrame; BType[] retTypes = functionInfo.getRetParamTypes(); BValue[] returnValues = new BValue[retTypes.length]; StackFrame caleeSF = new StackFrame(functionInfo, functionInfo.getDefaultWorkerInfo(), ip, null, returnValues); copyArgValues(callerSF, caleeSF, argRegs, functionInfo.getParamTypes()); controlStack.pushFrame(caleeSF); AbstractNativeFunction nativeFunction = functionInfo.getNativeFunction(); try { nativeFunction.executeNative(context); } catch (BLangNullReferenceException e) { context.setError(BLangVMErrors.createNullRefError(context, ip)); handleError(); return; } catch (Throwable e) { context.setError(BLangVMErrors.createError(this.context, ip, e.getMessage())); handleError(); return; } controlStack.popFrame(); handleReturnFromNativeCallableUnit(callerSF, retRegs, returnValues, retTypes); } private void invokeNativeAction(ActionInfo actionInfo, int[] argRegs, int[] retRegs) { StackFrame callerSF = controlStack.currentFrame; WorkerInfo defaultWorkerInfo = actionInfo.getDefaultWorkerInfo(); AbstractNativeAction nativeAction = actionInfo.getNativeAction(); if (nativeAction == null) { return; } BType[] retTypes = actionInfo.getRetParamTypes(); BValue[] returnValues = new BValue[retTypes.length]; StackFrame caleeSF = new StackFrame(actionInfo, defaultWorkerInfo, ip, null, returnValues); copyArgValues(callerSF, caleeSF, argRegs, actionInfo.getParamTypes()); controlStack.pushFrame(caleeSF); try { boolean nonBlocking = !context.isInTransaction() && nativeAction.isNonBlockingAction() && !context.blockingInvocation; BClientConnectorFutureListener listener = new BClientConnectorFutureListener(context, nonBlocking); if (nonBlocking) { context.setStartIP(ip); if (caleeSF.packageInfo == null) { caleeSF.packageInfo = actionInfo.getPackageInfo(); } context.nonBlockingContext = new Context.NonBlockingContext(actionInfo, retRegs); ConnectorFuture future = nativeAction.execute(context); if (future == null) { throw new BallerinaException("Native action doesn't provide a future object to sync"); } future.setConnectorFutureListener(listener); ip = -1; } else { ConnectorFuture future = nativeAction.execute(context); if (future == null) { throw new BallerinaException("Native action doesn't provide a future object to sync"); } 
future.setConnectorFutureListener(listener); long timeout = 300000; boolean res = listener.sync(timeout); if (!res) { throw new BallerinaException("Action execution timed out, timeout period - " + timeout + ", Action - " + nativeAction.getPackagePath() + ":" + nativeAction.getName()); } if (context.getError() != null) { handleError(); } controlStack.popFrame(); handleReturnFromNativeCallableUnit(callerSF, retRegs, returnValues, retTypes); } } catch (Throwable e) { context.setError(BLangVMErrors.createError(this.context, ip, e.getMessage())); handleError(); } } public static void handleReturnFromNativeCallableUnit(StackFrame callerSF, int[] returnRegIndexes, BValue[] returnValues, BType[] retTypes) { for (int i = 0; i < returnValues.length; i++) { int callersRetRegIndex = returnRegIndexes[i]; BType retType = retTypes[i]; switch (retType.getTag()) { case TypeTags.INT_TAG: if (returnValues[i] == null) { callerSF.longRegs[callersRetRegIndex] = 0; break; } callerSF.longRegs[callersRetRegIndex] = ((BInteger) returnValues[i]).intValue(); break; case TypeTags.FLOAT_TAG: if (returnValues[i] == null) { callerSF.doubleRegs[callersRetRegIndex] = 0; break; } callerSF.doubleRegs[callersRetRegIndex] = ((BFloat) returnValues[i]).floatValue(); break; case TypeTags.STRING_TAG: if (returnValues[i] == null) { callerSF.stringRegs[callersRetRegIndex] = STRING_NULL_VALUE; break; } callerSF.stringRegs[callersRetRegIndex] = returnValues[i].stringValue(); break; case TypeTags.BOOLEAN_TAG: if (returnValues[i] == null) { callerSF.intRegs[callersRetRegIndex] = 0; break; } callerSF.intRegs[callersRetRegIndex] = ((BBoolean) returnValues[i]).booleanValue() ? 1 : 0; break; case TypeTags.BLOB_TAG: if (returnValues[i] == null) { callerSF.byteRegs[callersRetRegIndex] = new byte[0]; break; } callerSF.byteRegs[callersRetRegIndex] = ((BBlob) returnValues[i]).blobValue(); break; default: callerSF.refRegs[callersRetRegIndex] = (BRefType) returnValues[i]; } } } private boolean checkCast(BValue sourceValue, BType targetType) { BType sourceType = sourceValue.getType(); if (sourceType.equals(targetType)) { return true; } if (sourceType.getTag() == TypeTags.STRUCT_TAG && targetType.getTag() == TypeTags.STRUCT_TAG) { return checkStructEquivalency((BStructType) sourceType, (BStructType) targetType); } if (targetType.getTag() == TypeTags.ANY_TAG) { return true; } if (getElementType(sourceType).getTag() == TypeTags.JSON_TAG) { return checkJSONCast(((BJSON) sourceValue).value(), sourceType, targetType); } if (targetType.getTag() == TypeTags.ARRAY_TAG || sourceType.getTag() == TypeTags.ARRAY_TAG) { return checkArrayCast(sourceType, targetType); } return false; } private boolean checkArrayCast(BType sourceType, BType targetType) { if (targetType.getTag() == TypeTags.ARRAY_TAG && sourceType.getTag() == TypeTags.ARRAY_TAG) { BArrayType sourceArrayType = (BArrayType) sourceType; BArrayType targetArrayType = (BArrayType) targetType; if (targetArrayType.getDimensions() > sourceArrayType.getDimensions()) { return false; } return checkArrayCast(sourceArrayType.getElementType(), targetArrayType.getElementType()); } else if (sourceType.getTag() == TypeTags.ARRAY_TAG) { return targetType.getTag() == TypeTags.ANY_TAG; } return sourceType.equals(targetType); } private BType getElementType(BType type) { if (type.getTag() != TypeTags.ARRAY_TAG) { return type; } return getElementType(((BArrayType) type).getElementType()); } public static boolean checkStructEquivalency(BStructType sourceType, BStructType targetType) { BStructType.StructField[] sFields = 
sourceType.getStructFields(); BStructType.StructField[] tFields = targetType.getStructFields(); if (tFields.length > sFields.length) { return false; } for (int i = 0; i < tFields.length; i++) { if (isAssignable(tFields[i].getFieldType(), sFields[i].getFieldType()) && tFields[i].getFieldName().equals(sFields[i].getFieldName())) { continue; } return false; } return true; } private static boolean isAssignable(BType actualType, BType expType) { if (actualType == expType) { return true; } if (actualType.getTag() == expType.getTag() && isValueType(actualType)) { return true; } else if (actualType.getTag() == expType.getTag() && !isUserDefinedType(actualType) && !isConstrainedType(actualType)) { return true; } else if (actualType.getTag() == expType.getTag() && actualType.getTag() == TypeTags.ARRAY_TAG) { return checkArrayEquivalent(actualType, expType); } else if (actualType.getTag() == expType.getTag() && actualType.getTag() == TypeTags.STRUCT_TAG && checkStructEquivalency((BStructType) actualType, (BStructType) expType)) { return true; } return false; } private static boolean isValueType(BType type) { return type.getTag() <= TypeTags.BLOB_TAG; } private static boolean isUserDefinedType(BType type) { return type.getTag() == TypeTags.STRUCT_TAG || type.getTag() == TypeTags.CONNECTOR_TAG || type.getTag() == TypeTags.ENUM_TAG || type.getTag() == TypeTags.ARRAY_TAG; } private static boolean isConstrainedType(BType type) { return type.getTag() == TypeTags.JSON_TAG; } private static boolean checkArrayEquivalent(BType actualType, BType expType) { if (expType.getTag() == TypeTags.ARRAY_TAG && actualType.getTag() == TypeTags.ARRAY_TAG) { BArrayType lhrArrayType = (BArrayType) expType; BArrayType rhsArrayType = (BArrayType) actualType; return checkArrayEquivalent(lhrArrayType.getElementType(), rhsArrayType.getElementType()); } if (expType == actualType) { return true; } return false; } private void castJSONToInt(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BJSON jsonValue = (BJSON) sf.refRegs[i]; if (jsonValue == null) { handleNullRefError(); return; } JsonNode jsonNode; try { jsonNode = jsonValue.value(); } catch (BallerinaException e) { String errorMsg = BLangExceptionHelper.getErrorMessage(RuntimeErrors.CASTING_FAILED_WITH_CAUSE, BTypes.typeJSON, BTypes.typeInt, e.getMessage()); context.setError(BLangVMErrors.createError(context, ip, errorMsg)); handleError(); return; } if (jsonNode.isLong()) { sf.longRegs[j] = jsonNode.longValue(); sf.refRegs[k] = null; return; } sf.longRegs[j] = 0; handleTypeCastError(sf, k, JSONUtils.getTypeName(jsonNode), TypeConstants.INT_TNAME); } private void castJSONToFloat(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BJSON jsonValue = (BJSON) sf.refRegs[i]; if (jsonValue == null) { handleNullRefError(); return; } JsonNode jsonNode; try { jsonNode = jsonValue.value(); } catch (BallerinaException e) { String errorMsg = BLangExceptionHelper.getErrorMessage(RuntimeErrors.CASTING_FAILED_WITH_CAUSE, BTypes.typeJSON, BTypes.typeFloat, e.getMessage()); context.setError(BLangVMErrors.createError(context, ip, errorMsg)); handleError(); return; } if (jsonNode.isDouble()) { sf.doubleRegs[j] = jsonNode.doubleValue(); sf.refRegs[k] = null; return; } sf.doubleRegs[j] = 0; handleTypeCastError(sf, k, JSONUtils.getTypeName(jsonNode), TypeConstants.FLOAT_TNAME); } private void castJSONToString(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BJSON 
jsonValue = (BJSON) sf.refRegs[i]; if (jsonValue == null) { handleNullRefError(); return; } JsonNode jsonNode; try { jsonNode = jsonValue.value(); } catch (BallerinaException e) { sf.stringRegs[j] = ""; String errorMsg = BLangExceptionHelper.getErrorMessage(RuntimeErrors.CASTING_FAILED_WITH_CAUSE, BTypes.typeJSON, BTypes.typeString, e.getMessage()); context.setError(BLangVMErrors.createError(context, ip, errorMsg)); handleError(); return; } if (jsonNode.isString()) { sf.stringRegs[j] = jsonNode.stringValue(); sf.refRegs[k] = null; return; } sf.stringRegs[j] = STRING_NULL_VALUE; handleTypeCastError(sf, k, JSONUtils.getTypeName(jsonNode), TypeConstants.STRING_TNAME); } private void castJSONToBoolean(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BJSON jsonValue = (BJSON) sf.refRegs[i]; if (jsonValue == null) { handleNullRefError(); return; } JsonNode jsonNode; try { jsonNode = jsonValue.value(); } catch (BallerinaException e) { String errorMsg = BLangExceptionHelper.getErrorMessage(RuntimeErrors.CASTING_FAILED_WITH_CAUSE, BTypes.typeJSON, BTypes.typeBoolean, e.getMessage()); context.setError(BLangVMErrors.createError(context, ip, errorMsg)); handleError(); return; } if (jsonNode.isBoolean()) { sf.intRegs[j] = jsonNode.booleanValue() ? 1 : 0; sf.refRegs[k] = null; return; } sf.intRegs[j] = 0; handleTypeCastError(sf, k, JSONUtils.getTypeName(jsonNode), TypeConstants.BOOLEAN_TNAME); } private boolean checkJSONEquivalency(JsonNode json, BJSONType sourceType, BJSONType targetType) { BStructType sourceConstrainedType = (BStructType) sourceType.getConstrainedType(); BStructType targetConstrainedType = (BStructType) targetType.getConstrainedType(); if (targetConstrainedType == null) { return true; } if (sourceConstrainedType != null) { if (sourceConstrainedType.equals(targetConstrainedType)) { return true; } return checkStructEquivalency(sourceConstrainedType, targetConstrainedType); } BStructType.StructField[] tFields = targetConstrainedType.getStructFields(); for (int i = 0; i < tFields.length; i++) { String fieldName = tFields[i].getFieldName(); if (!json.has(fieldName)) { return false; } if (!checkJSONCast(json.get(fieldName), sourceType, tFields[i].getFieldType())) { return false; } } return true; } /** * Check the compatibility of casting a JSON to a target type. * * @param json JSON to cast * @param sourceType Type of the source JSON * @param targetType Target type * @return Runtime compatibility for casting */ private boolean checkJSONCast(JsonNode json, BType sourceType, BType targetType) { switch (targetType.getTag()) { case TypeTags.STRING_TAG: return json.isString(); case TypeTags.INT_TAG: return json.isLong(); case TypeTags.FLOAT_TAG: return json.isDouble(); case TypeTags.ARRAY_TAG: if (!json.isArray()) { return false; } BArrayType arrayType = (BArrayType) targetType; for (int i = 0; i < json.size(); i++) { BType sourceElementType = sourceType.getTag() == TypeTags.ARRAY_TAG ? 
((BArrayType) sourceType).getElementType() : sourceType; if (!checkJSONCast(json.get(i), sourceElementType, arrayType.getElementType())) { return false; } } return true; case TypeTags.JSON_TAG: if (sourceType.getTag() != TypeTags.JSON_TAG) { return false; } return checkJSONEquivalency(json, (BJSONType) sourceType, (BJSONType) targetType); default: return false; } } private void convertStructToMap(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; BStruct bStruct = (BStruct) sf.refRegs[i]; if (bStruct == null) { sf.refRegs[j] = null; return; } int longRegIndex = -1; int doubleRegIndex = -1; int stringRegIndex = -1; int booleanRegIndex = -1; int blobRegIndex = -1; int refRegIndex = -1; BStructType.StructField[] structFields = ((BStructType) bStruct.getType()).getStructFields(); BMap<String, BValue> map = BTypes.typeMap.getEmptyValue(); for (BStructType.StructField structField : structFields) { String key = structField.getFieldName(); BType fieldType = structField.getFieldType(); switch (fieldType.getTag()) { case TypeTags.INT_TAG: map.put(key, new BInteger(bStruct.getIntField(++longRegIndex))); break; case TypeTags.FLOAT_TAG: map.put(key, new BFloat(bStruct.getFloatField(++doubleRegIndex))); break; case TypeTags.STRING_TAG: map.put(key, new BString(bStruct.getStringField(++stringRegIndex))); break; case TypeTags.BOOLEAN_TAG: map.put(key, new BBoolean(bStruct.getBooleanField(++booleanRegIndex) == 1)); break; case TypeTags.BLOB_TAG: map.put(key, new BBlob(bStruct.getBlobField(++blobRegIndex))); break; default: BValue value = bStruct.getRefField(++refRegIndex); map.put(key, value == null ? null : value.copy()); } } sf.refRegs[j] = map; } private void convertStructToJSON(int[] operands, StackFrame sf) { int i = operands[0]; int j = operands[1]; int k = operands[2]; BStruct bStruct = (BStruct) sf.refRegs[i]; if (bStruct == null) { sf.refRegs[j] = null; return; } try { sf.refRegs[j] = JSONUtils.convertStructToJSON(bStruct); } catch (Exception e) { sf.refRegs[j] = null; String errorMsg = "cannot convert '" + bStruct.getType() + "' to type '" + BTypes.typeJSON + "': " + e.getMessage(); handleTypeConversionError(sf, k, errorMsg, bStruct.getType().toString(), TypeConstants.JSON_TNAME); } } private void convertMapToStruct(int[] operands, StackFrame sf) { int i = operands[0]; int cpIndex = operands[1]; int j = operands[2]; int k = operands[3]; TypeRefCPEntry typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; BMap<String, BValue> bMap = (BMap<String, BValue>) sf.refRegs[i]; if (bMap == null) { sf.refRegs[j] = null; return; } int longRegIndex = -1; int doubleRegIndex = -1; int stringRegIndex = -1; int booleanRegIndex = -1; int blobRegIndex = -1; int refRegIndex = -1; BStructType structType = (BStructType) typeRefCPEntry.getType(); BStruct bStruct = new BStruct(structType); StructInfo structInfo = sf.packageInfo.getStructInfo(structType.getName()); Set<String> keys = bMap.keySet(); for (StructFieldInfo fieldInfo : structInfo.getFieldInfoEntries()) { String key = fieldInfo.getName(); BType fieldType = fieldInfo.getFieldType(); BValue mapVal = null; try { boolean containsField = keys.contains(key); DefaultValueAttributeInfo defaultValAttrInfo = null; if (containsField) { mapVal = bMap.get(key); if (mapVal == null && BTypes.isValueType(fieldType)) { throw BLangExceptionHelper.getRuntimeException( RuntimeErrors.INCOMPATIBLE_FIELD_TYPE_FOR_CASTING, key, fieldType, null); } if (mapVal != null && !checkCast(mapVal, fieldType)) { throw BLangExceptionHelper.getRuntimeException( 
RuntimeErrors.INCOMPATIBLE_FIELD_TYPE_FOR_CASTING, key, fieldType, mapVal.getType()); } } else { defaultValAttrInfo = (DefaultValueAttributeInfo) getAttributeInfo(fieldInfo, AttributeInfo.Kind.DEFAULT_VALUE_ATTRIBUTE); } switch (fieldType.getTag()) { case TypeTags.INT_TAG: longRegIndex++; if (containsField) { bStruct.setIntField(longRegIndex, ((BInteger) mapVal).intValue()); } else if (defaultValAttrInfo != null) { bStruct.setIntField(longRegIndex, defaultValAttrInfo.getDefaultValue().getIntValue()); } break; case TypeTags.FLOAT_TAG: doubleRegIndex++; if (containsField) { bStruct.setFloatField(doubleRegIndex, ((BFloat) mapVal).floatValue()); } else if (defaultValAttrInfo != null) { bStruct.setFloatField(doubleRegIndex, defaultValAttrInfo.getDefaultValue().getFloatValue()); } break; case TypeTags.STRING_TAG: stringRegIndex++; if (containsField) { bStruct.setStringField(stringRegIndex, ((BString) mapVal).stringValue()); } else if (defaultValAttrInfo != null) { bStruct.setStringField(stringRegIndex, defaultValAttrInfo.getDefaultValue().getStringValue()); } break; case TypeTags.BOOLEAN_TAG: booleanRegIndex++; if (containsField) { bStruct.setBooleanField(booleanRegIndex, ((BBoolean) mapVal).booleanValue() ? 1 : 0); } else if (defaultValAttrInfo != null) { bStruct.setBooleanField(booleanRegIndex, defaultValAttrInfo.getDefaultValue().getBooleanValue() ? 1 : 0); } break; case TypeTags.BLOB_TAG: blobRegIndex++; if (containsField && mapVal != null) { bStruct.setBlobField(blobRegIndex, ((BBlob) mapVal).blobValue()); } break; default: bStruct.setRefField(++refRegIndex, (BRefType) mapVal); } } catch (BallerinaException e) { sf.refRegs[j] = null; String errorMsg = "cannot convert '" + bMap.getType() + "' to type '" + structType + ": " + e.getMessage(); handleTypeConversionError(sf, k, errorMsg, TypeConstants.MAP_TNAME, structType.toString()); return; } } sf.refRegs[j] = bStruct; sf.refRegs[k] = null; } private void convertJSONToStruct(int[] operands, StackFrame sf) { int i = operands[0]; int cpIndex = operands[1]; int j = operands[2]; int k = operands[3]; TypeRefCPEntry typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; BJSON bjson = (BJSON) sf.refRegs[i]; if (bjson == null) { sf.refRegs[j] = null; return; } try { sf.refRegs[j] = JSONUtils.convertJSONToStruct(bjson, (BStructType) typeRefCPEntry.getType(), sf.packageInfo); sf.refRegs[k] = null; } catch (Exception e) { sf.refRegs[j] = null; String errorMsg = "cannot convert '" + TypeConstants.JSON_TNAME + "' to type '" + typeRefCPEntry.getType() + "': " + e.getMessage(); handleTypeConversionError(sf, k, errorMsg, TypeConstants.JSON_TNAME, typeRefCPEntry.getType().toString()); } } private void handleNullRefError() { context.setError(BLangVMErrors.createNullRefError(context, ip)); handleError(); } private void handleError() { int currentIP = ip - 1; StackFrame currentFrame = controlStack.currentFrame; ErrorTableEntry match = null; while (controlStack.currentFrame != null) { match = ErrorTableEntry.getMatch(currentFrame.packageInfo, currentIP, context.getError()); if (match != null) { break; } controlStack.popFrame(); context.setError(currentFrame.errorThrown); if (controlStack.currentFrame == null) { break; } currentIP = currentFrame.retAddrs - 1; currentFrame = controlStack.currentFrame; } if (controlStack.currentFrame == null) { ip = -1; if (context.getServiceInfo() == null) { return; } BServerConnectorFuture connectorFuture = context.getConnectorFuture(); try { connectorFuture.notifyFailure(new BallerinaException(BLangVMErrors 
.getPrintableStackTrace(context.getError()))); } catch (Exception e) { logger.error("cannot handle error using the error handler: " + e.getMessage(), e); } return; } if (match != null) { PackageInfo packageInfo = currentFrame.packageInfo; this.constPool = packageInfo.getConstPoolEntries(); this.code = packageInfo.getInstructions(); ip = match.getIpTarget(); return; } ip = -1; logger.error("fatal error. incorrect error table entry."); } private AttributeInfo getAttributeInfo(AttributeInfoPool attrInfoPool, AttributeInfo.Kind attrInfoKind) { for (AttributeInfo attributeInfo : attrInfoPool.getAttributeInfoEntries()) { if (attributeInfo.getKind() == attrInfoKind) { return attributeInfo; } } return null; } private boolean isWaitingOnNonBlockingAction() { return context.nonBlockingContext != null; } private void calculateLength(int[] operands, StackFrame sf) { int i = operands[0]; int cpIndex = operands[1]; int j = operands[2]; TypeRefCPEntry typeRefCPEntry = (TypeRefCPEntry) constPool[cpIndex]; int typeTag = typeRefCPEntry.getType().getTag(); if (typeTag == TypeTags.STRING_TAG) { String value = sf.stringRegs[i]; if (value == null) { handleNullRefError(); } else { sf.longRegs[j] = value.length(); } return; } else if (typeTag == TypeTags.BLOB_TAG) { sf.longRegs[j] = sf.byteRegs[i].length; return; } BValue entity = sf.refRegs[i]; if (entity == null) { handleNullRefError(); return; } if (typeTag == TypeTags.XML_TAG) { sf.longRegs[j] = ((BXML) entity).length(); return; } else if (entity instanceof BJSON) { if (JSONUtils.isJSONArray((BJSON) entity)) { sf.longRegs[j] = JSONUtils.getJSONArrayLength((BJSON) sf.refRegs[i]); } else { sf.longRegs[j] = -1; } return; } else if (typeTag == TypeTags.MAP_TAG) { sf.longRegs[j] = ((BMap) entity).size(); return; } BNewArray newArray = (BNewArray) entity; sf.longRegs[j] = newArray.size(); return; } }
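One technique in the VM code above is easy to miss: invokeJoinWorkers waits for K of N fork/join workers by creating a Semaphore with `-joinCount + 1` permits, so a single tryAcquire only succeeds after joinCount workers have each called release. Below is a minimal, self-contained sketch of that counting trick; the class and variable names are mine, not from the Ballerina sources.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

public class JoinKOfN {
    public static void main(String[] args) throws InterruptedException {
        int workers = 5;
        int joinCount = 3; // join as soon as any 3 of the 5 workers finish
        // Negative initial permits: joinCount releases must happen before
        // one acquire can succeed (-joinCount + 1 + joinCount == 1).
        Semaphore resultCounter = new Semaphore(-joinCount + 1);
        ExecutorService exec = Executors.newFixedThreadPool(workers);
        for (int i = 0; i < workers; i++) {
            exec.submit(resultCounter::release); // each finished worker adds a permit
        }
        // Succeeds once joinCount workers have released; false on timeout.
        boolean joined = resultCounter.tryAcquire(1, TimeUnit.SECONDS);
        System.out.println("joined: " + joined);
        exec.shutdown();
    }
}

The same shape appears in invokeJoinWorkers: only the workers named in the join clause get the semaphore, and the timeout branch of the fork/join executes when tryAcquire returns false.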
No. `sqlBlockRule` in this place comes from `SqlBlockRule sqlBlockRule = SqlBlockRule.fromAlterStmt(stmt);`, where `stmt` is an instance of AlterSqlBlockRuleStmt. In the method called `setProperties` in AlterSqlBlockRuleStmt.java, all the properties are set to default values. And I've tested ALTER: 1. upgrading from a lower version to this version is compatible; 2. the ALTER operation works well; 3. the ALTER operation can report the relevant exceptions.
public void alterSqlBlockRule(AlterSqlBlockRuleStmt stmt) throws AnalysisException, DdlException { writeLock(); try { SqlBlockRule sqlBlockRule = SqlBlockRule.fromAlterStmt(stmt); String ruleName = sqlBlockRule.getName(); if (!existRule(ruleName)) { throw new DdlException("the sql block rule " + ruleName + " not exist"); } SqlBlockRule originRule = nameToSqlBlockRuleMap.get(ruleName); SqlBlockUtil.checkAlterValidate(sqlBlockRule, originRule); if (StringUtils.isEmpty(sqlBlockRule.getSql())) { sqlBlockRule.setSql(originRule.getSql()); } if (StringUtils.isEmpty(sqlBlockRule.getSqlHash())) { sqlBlockRule.setSqlHash(originRule.getSqlHash()); } if (StringUtils.isEmpty(sqlBlockRule.getPartitionNum().toString())) { sqlBlockRule.setPartitionNum(originRule.getPartitionNum()); } if (StringUtils.isEmpty(sqlBlockRule.getTabletNum().toString())) { sqlBlockRule.setTabletNum(originRule.getTabletNum()); } if (StringUtils.isEmpty(sqlBlockRule.getCardinality().toString())) { sqlBlockRule.setCardinality(originRule.getCardinality()); } if (sqlBlockRule.getGlobal() == null) { sqlBlockRule.setGlobal(originRule.getGlobal()); } if (sqlBlockRule.getEnable() == null) { sqlBlockRule.setEnable(originRule.getEnable()); } unprotectedUpdate(sqlBlockRule); Catalog.getCurrentCatalog().getEditLog().logAlterSqlBlockRule(sqlBlockRule); } finally { writeUnlock(); } }
if (StringUtils.isEmpty(sqlBlockRule.getPartitionNum().toString())) {
public void alterSqlBlockRule(AlterSqlBlockRuleStmt stmt) throws AnalysisException, DdlException { writeLock(); try { SqlBlockRule sqlBlockRule = SqlBlockRule.fromAlterStmt(stmt); String ruleName = sqlBlockRule.getName(); if (!existRule(ruleName)) { throw new DdlException("the sql block rule " + ruleName + " not exist"); } SqlBlockRule originRule = nameToSqlBlockRuleMap.get(ruleName); SqlBlockUtil.checkAlterValidate(sqlBlockRule, originRule); if (StringUtils.isEmpty(sqlBlockRule.getSql())) { sqlBlockRule.setSql(originRule.getSql()); } if (StringUtils.isEmpty(sqlBlockRule.getSqlHash())) { sqlBlockRule.setSqlHash(originRule.getSqlHash()); } if (StringUtils.isEmpty(sqlBlockRule.getPartitionNum().toString())) { sqlBlockRule.setPartitionNum(originRule.getPartitionNum()); } if (StringUtils.isEmpty(sqlBlockRule.getTabletNum().toString())) { sqlBlockRule.setTabletNum(originRule.getTabletNum()); } if (StringUtils.isEmpty(sqlBlockRule.getCardinality().toString())) { sqlBlockRule.setCardinality(originRule.getCardinality()); } if (sqlBlockRule.getGlobal() == null) { sqlBlockRule.setGlobal(originRule.getGlobal()); } if (sqlBlockRule.getEnable() == null) { sqlBlockRule.setEnable(originRule.getEnable()); } unprotectedUpdate(sqlBlockRule); Catalog.getCurrentCatalog().getEditLog().logAlterSqlBlockRule(sqlBlockRule); } finally { writeUnlock(); } }
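For context on the target line: `getPartitionNum()` returns a boxed Long, and `Long.toString()` can never produce an empty string, so `StringUtils.isEmpty(sqlBlockRule.getPartitionNum().toString())` is false whenever the value is non-null, and would throw a NullPointerException if it were null. As the comment above notes, `setProperties` always assigns defaults, so in practice the value is never null. The toy sketch below (names invented, using Apache Commons Lang's StringUtils) just demonstrates that guard behavior.

import org.apache.commons.lang3.StringUtils;

public class GuardDemo {
    public static void main(String[] args) {
        Long partitionNum = 0L; // default assigned by setProperties in the real code
        // Long.toString() is never empty, so this guard is false for any non-null Long.
        System.out.println(StringUtils.isEmpty(partitionNum.toString())); // false
        Long unset = null;
        // An explicit null check is the usual way to express "property not set":
        System.out.println(unset == null); // true
        // Calling unset.toString() here would throw NullPointerException.
    }
}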
class SqlBlockRuleMgr implements Writable { private static final Logger LOG = LogManager.getLogger(SqlBlockRuleMgr.class); private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true); @SerializedName(value = "nameToSqlBlockRuleMap") private Map<String, SqlBlockRule> nameToSqlBlockRuleMap = Maps.newConcurrentMap(); private void writeLock() { lock.writeLock().lock(); } private void writeUnlock() { lock.writeLock().unlock(); } public boolean existRule(String name) { return nameToSqlBlockRuleMap.containsKey(name); } public List<SqlBlockRule> getSqlBlockRule(ShowSqlBlockRuleStmt stmt) throws AnalysisException { String ruleName = stmt.getRuleName(); if (StringUtils.isNotEmpty(ruleName)) { if (nameToSqlBlockRuleMap.containsKey(ruleName)) { SqlBlockRule sqlBlockRule = nameToSqlBlockRuleMap.get(ruleName); return Lists.newArrayList(sqlBlockRule); } return Lists.newArrayList(); } return Lists.newArrayList(nameToSqlBlockRuleMap.values()); } public void createSqlBlockRule(CreateSqlBlockRuleStmt stmt) throws UserException { writeLock(); try { SqlBlockRule sqlBlockRule = SqlBlockRule.fromCreateStmt(stmt); String ruleName = sqlBlockRule.getName(); if (existRule(ruleName)) { throw new DdlException("the sql block rule " + ruleName + " already create"); } unprotectedAdd(sqlBlockRule); Catalog.getCurrentCatalog().getEditLog().logCreateSqlBlockRule(sqlBlockRule); } finally { writeUnlock(); } } public void replayCreate(SqlBlockRule sqlBlockRule) { unprotectedAdd(sqlBlockRule); LOG.info("replay create sql block rule: {}", sqlBlockRule); } public void replayAlter(SqlBlockRule sqlBlockRule) { unprotectedUpdate(sqlBlockRule); LOG.info("replay alter sql block rule: {}", sqlBlockRule); } public void unprotectedUpdate(SqlBlockRule sqlBlockRule) { nameToSqlBlockRuleMap.put(sqlBlockRule.getName(), sqlBlockRule); } public void unprotectedAdd(SqlBlockRule sqlBlockRule) { nameToSqlBlockRuleMap.put(sqlBlockRule.getName(), sqlBlockRule); } public void dropSqlBlockRule(DropSqlBlockRuleStmt stmt) throws DdlException { writeLock(); try { List<String> ruleNames = stmt.getRuleNames(); for (String ruleName : ruleNames) { if (!existRule(ruleName)) { throw new DdlException("the sql block rule " + ruleName + " not exist"); } } unprotectedDrop(ruleNames); Catalog.getCurrentCatalog().getEditLog().logDropSqlBlockRule(ruleNames); } finally { writeUnlock(); } } public void replayDrop(List<String> ruleNames) { unprotectedDrop(ruleNames); LOG.info("replay drop sql block ruleNames: {}", ruleNames); } public void unprotectedDrop(List<String> ruleNames) { ruleNames.forEach(name -> nameToSqlBlockRuleMap.remove(name)); } public void matchSql(String originSql, String sqlHash, String user) throws AnalysisException { List<SqlBlockRule> globalRules = nameToSqlBlockRuleMap.values().stream().filter(SqlBlockRule::getGlobal).collect(Collectors.toList()); for (SqlBlockRule rule : globalRules) { matchSql(rule, originSql, sqlHash); } String[] bindSqlBlockRules = Catalog.getCurrentCatalog().getAuth().getSqlBlockRules(user); for (String ruleName : bindSqlBlockRules) { SqlBlockRule rule = nameToSqlBlockRuleMap.get(ruleName); if (rule == null) { continue; } matchSql(rule, originSql, sqlHash); } } public void matchSql(SqlBlockRule rule, String originSql, String sqlHash) throws AnalysisException { if (rule.getEnable()) { if (StringUtils.isNotEmpty(rule.getSqlHash()) && (!CreateSqlBlockRuleStmt.STRING_NOT_SET.equals(rule.getSqlHash()) && rule.getSqlHash().equals(sqlHash))) { MetricRepo.COUNTER_HIT_SQL_BLOCK_RULE.increase(1L); throw new 
AnalysisException("sql match hash sql block rule: " + rule.getName()); } else if (StringUtils.isNotEmpty(rule.getSql()) && (!CreateSqlBlockRuleStmt.STRING_NOT_SET.equals(rule.getSql()) && rule.getSqlPattern().matcher(originSql).find())) { MetricRepo.COUNTER_HIT_SQL_BLOCK_RULE.increase(1L); throw new AnalysisException("sql match regex sql block rule: " + rule.getName()); } } } public void checkLimitaions(Long partitionNum, Long tabletNum, Long cardinality, String user) throws AnalysisException { List<SqlBlockRule> globalRules = nameToSqlBlockRuleMap.values().stream().filter(SqlBlockRule::getGlobal).collect(Collectors.toList()); for (SqlBlockRule rule : globalRules) { checkLimitaions(rule, partitionNum, tabletNum, cardinality); } String[] bindSqlBlockRules = Catalog.getCurrentCatalog().getAuth().getSqlBlockRules(user); for (String ruleName : bindSqlBlockRules) { SqlBlockRule rule = nameToSqlBlockRuleMap.get(ruleName); if (rule == null) { continue; } checkLimitaions(rule, partitionNum, tabletNum, cardinality); } } public void checkLimitaions(SqlBlockRule rule, Long partitionNum, Long tabletNum, Long cardinality) throws AnalysisException { if (rule.getPartitionNum() == 0 && rule.getTabletNum() == 0 && rule.getCardinality() == 0) { return; } else if (rule.getEnable()) { if ((rule.getPartitionNum() != 0 && rule.getPartitionNum() < partitionNum) || (rule.getTabletNum() != 0 && rule.getTabletNum() < tabletNum) || (rule.getCardinality() != 0 && rule.getCardinality() < cardinality)) { MetricRepo.COUNTER_HIT_SQL_BLOCK_RULE.increase(1L); if (rule.getPartitionNum() < partitionNum) { throw new AnalysisException("sql hits sql block rule: " + rule.getName() + ", reach partitionNum : " + rule.getPartitionNum()); } else if (rule.getTabletNum() < tabletNum) { throw new AnalysisException("sql hits sql block rule: " + rule.getName() + ", reach tabletNum : " + rule.getTabletNum()); } else if (rule.getCardinality() < cardinality) { throw new AnalysisException("sql hits sql block rule: " + rule.getName() + ", reach cardinality : " + rule.getCardinality()); } } } } @Override public void write(DataOutput out) throws IOException { Text.writeString(out, GsonUtils.GSON.toJson(this)); } public static SqlBlockRuleMgr read(DataInput in) throws IOException { String json = Text.readString(in); return GsonUtils.GSON.fromJson(json, SqlBlockRuleMgr.class); } }
class SqlBlockRuleMgr implements Writable { private static final Logger LOG = LogManager.getLogger(SqlBlockRuleMgr.class); private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true); @SerializedName(value = "nameToSqlBlockRuleMap") private Map<String, SqlBlockRule> nameToSqlBlockRuleMap = Maps.newConcurrentMap(); private void writeLock() { lock.writeLock().lock(); } private void writeUnlock() { lock.writeLock().unlock(); } public boolean existRule(String name) { return nameToSqlBlockRuleMap.containsKey(name); } public List<SqlBlockRule> getSqlBlockRule(ShowSqlBlockRuleStmt stmt) throws AnalysisException { String ruleName = stmt.getRuleName(); if (StringUtils.isNotEmpty(ruleName)) { if (nameToSqlBlockRuleMap.containsKey(ruleName)) { SqlBlockRule sqlBlockRule = nameToSqlBlockRuleMap.get(ruleName); return Lists.newArrayList(sqlBlockRule); } return Lists.newArrayList(); } return Lists.newArrayList(nameToSqlBlockRuleMap.values()); } public void createSqlBlockRule(CreateSqlBlockRuleStmt stmt) throws UserException { writeLock(); try { SqlBlockRule sqlBlockRule = SqlBlockRule.fromCreateStmt(stmt); String ruleName = sqlBlockRule.getName(); if (existRule(ruleName)) { throw new DdlException("the sql block rule " + ruleName + " already create"); } unprotectedAdd(sqlBlockRule); Catalog.getCurrentCatalog().getEditLog().logCreateSqlBlockRule(sqlBlockRule); } finally { writeUnlock(); } } public void replayCreate(SqlBlockRule sqlBlockRule) { unprotectedAdd(sqlBlockRule); LOG.info("replay create sql block rule: {}", sqlBlockRule); } public void replayAlter(SqlBlockRule sqlBlockRule) { unprotectedUpdate(sqlBlockRule); LOG.info("replay alter sql block rule: {}", sqlBlockRule); } public void unprotectedUpdate(SqlBlockRule sqlBlockRule) { nameToSqlBlockRuleMap.put(sqlBlockRule.getName(), sqlBlockRule); } public void unprotectedAdd(SqlBlockRule sqlBlockRule) { nameToSqlBlockRuleMap.put(sqlBlockRule.getName(), sqlBlockRule); } public void dropSqlBlockRule(DropSqlBlockRuleStmt stmt) throws DdlException { writeLock(); try { List<String> ruleNames = stmt.getRuleNames(); for (String ruleName : ruleNames) { if (!existRule(ruleName)) { throw new DdlException("the sql block rule " + ruleName + " not exist"); } } unprotectedDrop(ruleNames); Catalog.getCurrentCatalog().getEditLog().logDropSqlBlockRule(ruleNames); } finally { writeUnlock(); } } public void replayDrop(List<String> ruleNames) { unprotectedDrop(ruleNames); LOG.info("replay drop sql block ruleNames: {}", ruleNames); } public void unprotectedDrop(List<String> ruleNames) { ruleNames.forEach(name -> nameToSqlBlockRuleMap.remove(name)); } public void matchSql(String originSql, String sqlHash, String user) throws AnalysisException { List<SqlBlockRule> globalRules = nameToSqlBlockRuleMap.values().stream().filter(SqlBlockRule::getGlobal).collect(Collectors.toList()); for (SqlBlockRule rule : globalRules) { matchSql(rule, originSql, sqlHash); } String[] bindSqlBlockRules = Catalog.getCurrentCatalog().getAuth().getSqlBlockRules(user); for (String ruleName : bindSqlBlockRules) { SqlBlockRule rule = nameToSqlBlockRuleMap.get(ruleName); if (rule == null) { continue; } matchSql(rule, originSql, sqlHash); } } public void matchSql(SqlBlockRule rule, String originSql, String sqlHash) throws AnalysisException { if (rule.getEnable()) { if (StringUtils.isNotEmpty(rule.getSqlHash()) && (!CreateSqlBlockRuleStmt.STRING_NOT_SET.equals(rule.getSqlHash()) && rule.getSqlHash().equals(sqlHash))) { MetricRepo.COUNTER_HIT_SQL_BLOCK_RULE.increase(1L); throw new 
AnalysisException("sql match hash sql block rule: " + rule.getName()); } else if (StringUtils.isNotEmpty(rule.getSql()) && (!CreateSqlBlockRuleStmt.STRING_NOT_SET.equals(rule.getSql()) && rule.getSqlPattern().matcher(originSql).find())) { MetricRepo.COUNTER_HIT_SQL_BLOCK_RULE.increase(1L); throw new AnalysisException("sql match regex sql block rule: " + rule.getName()); } } } public void checkLimitaions(Long partitionNum, Long tabletNum, Long cardinality, String user) throws AnalysisException { List<SqlBlockRule> globalRules = nameToSqlBlockRuleMap.values().stream().filter(SqlBlockRule::getGlobal).collect(Collectors.toList()); for (SqlBlockRule rule : globalRules) { checkLimitaions(rule, partitionNum, tabletNum, cardinality); } String[] bindSqlBlockRules = Catalog.getCurrentCatalog().getAuth().getSqlBlockRules(user); for (String ruleName : bindSqlBlockRules) { SqlBlockRule rule = nameToSqlBlockRuleMap.get(ruleName); if (rule == null) { continue; } checkLimitaions(rule, partitionNum, tabletNum, cardinality); } } public void checkLimitaions(SqlBlockRule rule, Long partitionNum, Long tabletNum, Long cardinality) throws AnalysisException { if (rule.getPartitionNum() == 0 && rule.getTabletNum() == 0 && rule.getCardinality() == 0) { return; } else if (rule.getEnable()) { if ((rule.getPartitionNum() != 0 && rule.getPartitionNum() < partitionNum) || (rule.getTabletNum() != 0 && rule.getTabletNum() < tabletNum) || (rule.getCardinality() != 0 && rule.getCardinality() < cardinality)) { MetricRepo.COUNTER_HIT_SQL_BLOCK_RULE.increase(1L); if (rule.getPartitionNum() < partitionNum) { throw new AnalysisException("sql hits sql block rule: " + rule.getName() + ", reach partition_num : " + rule.getPartitionNum()); } else if (rule.getTabletNum() < tabletNum) { throw new AnalysisException("sql hits sql block rule: " + rule.getName() + ", reach tablet_num : " + rule.getTabletNum()); } else if (rule.getCardinality() < cardinality) { throw new AnalysisException("sql hits sql block rule: " + rule.getName() + ", reach cardinality : " + rule.getCardinality()); } } } } @Override public void write(DataOutput out) throws IOException { Text.writeString(out, GsonUtils.GSON.toJson(this)); } public static SqlBlockRuleMgr read(DataInput in) throws IOException { String json = Text.readString(in); return GsonUtils.GSON.fromJson(json, SqlBlockRuleMgr.class); } }
Thanks for the review @dawidwys! Hmm.. I think the current approach is better because: 1) There is less code and it is simpler to understand. 2) The approach makes fewer assumptions, in the sense that it just prevents closeOnFlush() from being called recursively (which we know will lead to a live lock). In comparison, the alternative approach assumes that closeOnFlush() will never need to be called more than once for the same Channel, which may be true but is (IMO) not that obvious. Is there any concern with allowing closeOnFlush() to be called multiple times for the same channel? If not, maybe it is better to take the simpler approach.
static void closeOnFlush(Channel channel) { if (channel.isConnected() && !channelsBeingClosed.containsKey(channel)) { channelsBeingClosed.put(channel, channel); channel.write(ChannelBuffers.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE); channelsBeingClosed.remove(channel); } }
}
static void closeOnFlush(Channel channel) { if (channel.isConnected() && !channelsBeingClosed.contains(channel)) { channelsBeingClosed.add(channel); channel.write(ChannelBuffers.EMPTY_BUFFER).addListener(CLOSE_WITH_BOOKKEEPING); } }
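To make the recursion concern above concrete, here is a minimal stand-alone sketch of the re-entrancy guard pattern the fixed closeOnFlush() relies on. It is an illustration, not the actual test harness code: it uses plain String ids in place of Netty Channels, and a contrived recursive call in place of the channelClosed callback.

```java
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

// Sketch of the guard: the set records channels whose close is already in
// flight, so a re-entrant closeOnFlush() becomes a no-op instead of looping.
class CloseGuardSketch {
    private static final Set<String> channelsBeingClosed = ConcurrentHashMap.newKeySet();

    static void closeOnFlush(String channelId) {
        if (!channelsBeingClosed.add(channelId)) {
            return; // close already in progress for this channel
        }
        try {
            // Flushing and closing would happen here. Closing can fire a
            // channelClosed event that calls closeOnFlush() again; the
            // contrived recursive call below stands in for that callback.
            closeOnFlush(channelId);
        } finally {
            channelsBeingClosed.remove(channelId);
        }
    }

    public static void main(String[] args) {
        closeOnFlush("channel-1"); // terminates instead of live-locking
    }
}
```

Note that the real fix removes the channel from the set only after the close future completes (the CLOSE_WITH_BOOKKEEPING listener), whereas this sketch removes it in a finally block; the point here is only the add()-as-guard short-circuit.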
class NetworkFailureHandler extends SimpleChannelUpstreamHandler { private static final Logger LOG = LoggerFactory.getLogger(NetworkFailureHandler.class); private static final String TARGET_CHANNEL_HANDLER_NAME = "target_channel_handler"; private static final Map<Channel, Channel> channelsBeingClosed = new ConcurrentHashMap<>(); private final Map<Channel, Channel> sourceToTargetChannels = new ConcurrentHashMap<>(); private final Consumer<NetworkFailureHandler> onClose; private final ClientSocketChannelFactory channelFactory; private final String remoteHost; private final int remotePort; private final AtomicBoolean blocked; public NetworkFailureHandler( AtomicBoolean blocked, Consumer<NetworkFailureHandler> onClose, ClientSocketChannelFactory channelFactory, String remoteHost, int remotePort) { this.blocked = blocked; this.onClose = onClose; this.channelFactory = channelFactory; this.remoteHost = remoteHost; this.remotePort = remotePort; } /** Closes the specified channel after all queued write requests are flushed. */ public void closeConnections() { for (Map.Entry<Channel, Channel> entry : sourceToTargetChannels.entrySet()) { entry.getKey().close(); } } @Override public void channelOpen(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { final Channel sourceChannel = event.getChannel(); sourceChannel.setReadable(false); boolean isBlocked = blocked.get(); LOG.debug( "Attempt to open proxy channel from [{}] to [{}:{}] in state [blocked = {}]", sourceChannel.getLocalAddress(), remoteHost, remotePort, isBlocked); if (isBlocked) { sourceChannel.close(); return; } ClientBootstrap targetConnectionBootstrap = new ClientBootstrap(channelFactory); targetConnectionBootstrap .getPipeline() .addLast( TARGET_CHANNEL_HANDLER_NAME, new TargetChannelHandler(event.getChannel(), blocked)); ChannelFuture connectFuture = targetConnectionBootstrap.connect(new InetSocketAddress(remoteHost, remotePort)); sourceToTargetChannels.put(sourceChannel, connectFuture.getChannel()); connectFuture.addListener( future -> { if (future.isSuccess()) { sourceChannel.setReadable(true); } else { sourceChannel.close(); } }); } @Override public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { if (blocked.get()) { return; } ChannelBuffer msg = (ChannelBuffer) event.getMessage(); Channel targetChannel = sourceToTargetChannels.get(event.getChannel()); if (targetChannel == null) { throw new IllegalStateException( "Could not find a target channel for the source channel"); } targetChannel.write(msg); } @Override public void channelClosed(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { Channel targetChannel = sourceToTargetChannels.get(event.getChannel()); if (targetChannel == null) { return; } closeOnFlush(targetChannel); sourceToTargetChannels.remove(event.getChannel()); onClose.accept(this); } @Override public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) throws Exception { LOG.error("Closing communication channel because of an exception", event.getCause()); closeOnFlush(event.getChannel()); } private static class TargetChannelHandler extends SimpleChannelUpstreamHandler { private final Channel sourceChannel; private final AtomicBoolean blocked; TargetChannelHandler(Channel sourceChannel, AtomicBoolean blocked) { this.sourceChannel = sourceChannel; this.blocked = blocked; } @Override public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { if (blocked.get()) { return; } 
ChannelBuffer msg = (ChannelBuffer) event.getMessage(); sourceChannel.write(msg); } @Override public void channelClosed(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { closeOnFlush(sourceChannel); } @Override public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) throws Exception { LOG.error("Closing communication channel because of an exception", event.getCause()); closeOnFlush(event.getChannel()); } } }
class NetworkFailureHandler extends SimpleChannelUpstreamHandler { private static final Logger LOG = LoggerFactory.getLogger(NetworkFailureHandler.class); private static final String TARGET_CHANNEL_HANDLER_NAME = "target_channel_handler"; private final Map<Channel, Channel> sourceToTargetChannels = new ConcurrentHashMap<>(); private final Consumer<NetworkFailureHandler> onClose; private final ClientSocketChannelFactory channelFactory; private final String remoteHost; private final int remotePort; private final AtomicBoolean blocked; private static final Set<Channel> channelsBeingClosed = ConcurrentHashMap.newKeySet(); private static final ChannelFutureListener CLOSE_WITH_BOOKKEEPING = new ChannelFutureListener() { public void operationComplete(ChannelFuture future) { future.getChannel() .close() .addListener( channelFuture -> channelsBeingClosed.remove(channelFuture.getChannel())); } }; public NetworkFailureHandler( AtomicBoolean blocked, Consumer<NetworkFailureHandler> onClose, ClientSocketChannelFactory channelFactory, String remoteHost, int remotePort) { this.blocked = blocked; this.onClose = onClose; this.channelFactory = channelFactory; this.remoteHost = remoteHost; this.remotePort = remotePort; } /** Closes the specified channel after all queued write requests are flushed. */ public void closeConnections() { for (Map.Entry<Channel, Channel> entry : sourceToTargetChannels.entrySet()) { entry.getKey().close(); } } @Override public void channelOpen(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { final Channel sourceChannel = event.getChannel(); sourceChannel.setReadable(false); boolean isBlocked = blocked.get(); LOG.debug( "Attempt to open proxy channel from [{}] to [{}:{}] in state [blocked = {}]", sourceChannel.getLocalAddress(), remoteHost, remotePort, isBlocked); if (isBlocked) { sourceChannel.close(); return; } ClientBootstrap targetConnectionBootstrap = new ClientBootstrap(channelFactory); targetConnectionBootstrap .getPipeline() .addLast( TARGET_CHANNEL_HANDLER_NAME, new TargetChannelHandler(event.getChannel(), blocked)); ChannelFuture connectFuture = targetConnectionBootstrap.connect(new InetSocketAddress(remoteHost, remotePort)); sourceToTargetChannels.put(sourceChannel, connectFuture.getChannel()); connectFuture.addListener( future -> { if (future.isSuccess()) { sourceChannel.setReadable(true); } else { sourceChannel.close(); } }); } @Override public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { if (blocked.get()) { return; } ChannelBuffer msg = (ChannelBuffer) event.getMessage(); Channel targetChannel = sourceToTargetChannels.get(event.getChannel()); if (targetChannel == null) { throw new IllegalStateException( "Could not find a target channel for the source channel"); } targetChannel.write(msg); } @Override public void channelClosed(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { Channel targetChannel = sourceToTargetChannels.get(event.getChannel()); if (targetChannel == null) { return; } closeOnFlush(targetChannel); sourceToTargetChannels.remove(event.getChannel()); onClose.accept(this); } @Override public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) throws Exception { LOG.error("Closing communication channel because of an exception", event.getCause()); closeOnFlush(event.getChannel()); } private static class TargetChannelHandler extends SimpleChannelUpstreamHandler { private final Channel sourceChannel; private final AtomicBoolean blocked; 
TargetChannelHandler(Channel sourceChannel, AtomicBoolean blocked) { this.sourceChannel = sourceChannel; this.blocked = blocked; } @Override public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { if (blocked.get()) { return; } ChannelBuffer msg = (ChannelBuffer) event.getMessage(); sourceChannel.write(msg); } @Override public void channelClosed(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { closeOnFlush(sourceChannel); } @Override public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) throws Exception { LOG.error("Closing communication channel because of an exception", event.getCause()); closeOnFlush(event.getChannel()); } } }
The original `Uni` is never cached by the extension. What is cached is, at first, an [UnresolvedUniValue](https://github.com/quarkusio/quarkus/blob/main/extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/UnresolvedUniValue.java), which is then replaced with the actual item emitted by the original `Uni` once it has been resolved. After that, when the cached value is accessed, a `Uni.createFrom().item(theResolvedValue)` is returned. This is another part of the doc update I'm working on 😄
void testCacheResult() { Uni<String> uni1 = cachedService.cacheResult1(KEY_1); assertEquals(0, cachedService.getCacheResultInvocations()); Uni<String> uni2 = cachedService.cacheResult1(KEY_1); assertEquals(0, cachedService.getCacheResultInvocations()); assertNotSame(uni1, uni2); String emittedItem1 = uni1.await().indefinitely(); assertEquals(1, cachedService.getCacheResultInvocations()); String emittedItem2 = uni2.await().indefinitely(); assertEquals(1, cachedService.getCacheResultInvocations()); assertSame(emittedItem1, emittedItem2); String emittedItem3 = cachedService.cacheResult1("another-key").await().indefinitely(); assertEquals(2, cachedService.getCacheResultInvocations()); assertNotSame(emittedItem2, emittedItem3); }
void testCacheResult() { Uni<String> uni1 = cachedService.cacheResult1(KEY_1); assertEquals(0, cachedService.getCacheResultInvocations()); Uni<String> uni2 = cachedService.cacheResult1(KEY_1); assertEquals(0, cachedService.getCacheResultInvocations()); assertNotSame(uni1, uni2); String emittedItem1 = uni1.await().indefinitely(); assertEquals(1, cachedService.getCacheResultInvocations()); String emittedItem2 = uni2.await().indefinitely(); assertEquals(1, cachedService.getCacheResultInvocations()); assertSame(emittedItem1, emittedItem2); String emittedItem3 = cachedService.cacheResult1("another-key").await().indefinitely(); assertEquals(2, cachedService.getCacheResultInvocations()); assertNotSame(emittedItem2, emittedItem3); }
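As a rough illustration of the flow described above, here is a minimal sketch of item-level `Uni` caching. It is deliberately simplified: it skips the `UnresolvedUniValue` placeholder and any handling of concurrent cache misses, and the `UniCacheSketch` class and its `valueLoader` parameter are made up for the example, not part of the Quarkus extension.

```java
import io.smallrye.mutiny.Uni;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// The Uni itself is never stored: a hit wraps the previously emitted item in
// a fresh Uni, and a miss resolves the original Uni and records its item.
class UniCacheSketch<K, V> {
    private final Map<K, V> cache = new ConcurrentHashMap<>();

    Uni<V> get(K key, Function<K, Uni<V>> valueLoader) {
        V cached = cache.get(key);
        if (cached != null) {
            return Uni.createFrom().item(cached); // new Uni around the cached item
        }
        return valueLoader.apply(key)
                .invoke(item -> cache.put(key, item)); // store the emitted item
    }
}
```

This also matches the test's assertions: two calls before subscription return distinct `Uni` instances (`assertNotSame(uni1, uni2)`), while awaiting both yields the same cached item (`assertSame(emittedItem1, emittedItem2)`).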
class UniReturnTypeTest { private static final String CACHE_NAME_1 = "test-cache-1"; private static final String CACHE_NAME_2 = "test-cache-2"; private static final String KEY_1 = "key-1"; private static final String KEY_2 = "key-2"; @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest().withApplicationRoot((jar) -> jar.addClass(CachedService.class)); @Inject CachedService cachedService; @Test @Test void testCacheInvalidate() { String value1 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value2 = cachedService.cacheResult2(KEY_1).await().indefinitely(); Object value3 = cachedService.cacheResult2(KEY_2).await().indefinitely(); Uni<Void> invalidateUni = cachedService.cacheInvalidate(KEY_1); assertEquals(0, cachedService.getCacheInvalidateInvocations()); String value4 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value5 = cachedService.cacheResult2(KEY_1).await().indefinitely(); Object value6 = cachedService.cacheResult2(KEY_2).await().indefinitely(); assertSame(value1, value4); assertSame(value2, value5); assertSame(value3, value6); invalidateUni.await().indefinitely(); assertEquals(1, cachedService.getCacheInvalidateInvocations()); String value7 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value8 = cachedService.cacheResult2(KEY_1).await().indefinitely(); Object value9 = cachedService.cacheResult2(KEY_2).await().indefinitely(); assertNotSame(value4, value7); assertNotSame(value5, value8); assertSame(value6, value9); } @Test void testCacheInvalidateAll() { String value1 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value2 = cachedService.cacheResult2(KEY_2).await().indefinitely(); Uni<Void> invalidateAllUni = cachedService.cacheInvalidateAll(); assertEquals(0, cachedService.getCacheInvalidateAllInvocations()); String value3 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value4 = cachedService.cacheResult2(KEY_2).await().indefinitely(); assertSame(value1, value3); assertSame(value2, value4); invalidateAllUni.await().indefinitely(); assertEquals(1, cachedService.getCacheInvalidateAllInvocations()); String value5 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value6 = cachedService.cacheResult2(KEY_2).await().indefinitely(); assertNotSame(value1, value5); assertNotSame(value2, value6); } @ApplicationScoped static class CachedService { private volatile int cacheResultInvocations; private volatile int cacheInvalidateInvocations; private volatile int cacheInvalidateAllInvocations; @CacheResult(cacheName = CACHE_NAME_1) public Uni<String> cacheResult1(String key) { cacheResultInvocations++; return Uni.createFrom().item(() -> new String()); } @CacheResult(cacheName = CACHE_NAME_2) public Uni<Object> cacheResult2(String key) { return Uni.createFrom().item(() -> new Object()); } @CacheInvalidate(cacheName = CACHE_NAME_1) @CacheInvalidate(cacheName = CACHE_NAME_2) public Uni<Void> cacheInvalidate(String key) { cacheInvalidateInvocations++; return Uni.createFrom().nullItem(); } @CacheInvalidateAll(cacheName = CACHE_NAME_1) @CacheInvalidateAll(cacheName = CACHE_NAME_2) public Uni<Void> cacheInvalidateAll() { cacheInvalidateAllInvocations++; return Uni.createFrom().nullItem(); } public int getCacheResultInvocations() { return cacheResultInvocations; } public int getCacheInvalidateInvocations() { return cacheInvalidateInvocations; } public int getCacheInvalidateAllInvocations() { return cacheInvalidateAllInvocations; } } }
class UniReturnTypeTest { private static final String CACHE_NAME_1 = "test-cache-1"; private static final String CACHE_NAME_2 = "test-cache-2"; private static final String KEY_1 = "key-1"; private static final String KEY_2 = "key-2"; @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest().withApplicationRoot((jar) -> jar.addClass(CachedService.class)); @Inject CachedService cachedService; @Test @Test void testCacheInvalidate() { String value1 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value2 = cachedService.cacheResult2(KEY_1).await().indefinitely(); Object value3 = cachedService.cacheResult2(KEY_2).await().indefinitely(); Uni<Void> invalidateUni = cachedService.cacheInvalidate(KEY_1); assertEquals(0, cachedService.getCacheInvalidateInvocations()); String value4 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value5 = cachedService.cacheResult2(KEY_1).await().indefinitely(); Object value6 = cachedService.cacheResult2(KEY_2).await().indefinitely(); assertSame(value1, value4); assertSame(value2, value5); assertSame(value3, value6); invalidateUni.await().indefinitely(); assertEquals(1, cachedService.getCacheInvalidateInvocations()); String value7 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value8 = cachedService.cacheResult2(KEY_1).await().indefinitely(); Object value9 = cachedService.cacheResult2(KEY_2).await().indefinitely(); assertNotSame(value4, value7); assertNotSame(value5, value8); assertSame(value6, value9); } @Test void testCacheInvalidateAll() { String value1 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value2 = cachedService.cacheResult2(KEY_2).await().indefinitely(); Uni<Void> invalidateAllUni = cachedService.cacheInvalidateAll(); assertEquals(0, cachedService.getCacheInvalidateAllInvocations()); String value3 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value4 = cachedService.cacheResult2(KEY_2).await().indefinitely(); assertSame(value1, value3); assertSame(value2, value4); invalidateAllUni.await().indefinitely(); assertEquals(1, cachedService.getCacheInvalidateAllInvocations()); String value5 = cachedService.cacheResult1(KEY_1).await().indefinitely(); Object value6 = cachedService.cacheResult2(KEY_2).await().indefinitely(); assertNotSame(value1, value5); assertNotSame(value2, value6); } @ApplicationScoped static class CachedService { private volatile int cacheResultInvocations; private volatile int cacheInvalidateInvocations; private volatile int cacheInvalidateAllInvocations; @CacheResult(cacheName = CACHE_NAME_1) public Uni<String> cacheResult1(String key) { cacheResultInvocations++; return Uni.createFrom().item(() -> new String()); } @CacheResult(cacheName = CACHE_NAME_2) public Uni<Object> cacheResult2(String key) { return Uni.createFrom().item(() -> new Object()); } @CacheInvalidate(cacheName = CACHE_NAME_1) @CacheInvalidate(cacheName = CACHE_NAME_2) public Uni<Void> cacheInvalidate(String key) { cacheInvalidateInvocations++; return Uni.createFrom().nullItem(); } @CacheInvalidateAll(cacheName = CACHE_NAME_1) @CacheInvalidateAll(cacheName = CACHE_NAME_2) public Uni<Void> cacheInvalidateAll() { cacheInvalidateAllInvocations++; return Uni.createFrom().nullItem(); } public int getCacheResultInvocations() { return cacheResultInvocations; } public int getCacheInvalidateInvocations() { return cacheInvalidateInvocations; } public int getCacheInvalidateAllInvocations() { return cacheInvalidateAllInvocations; } } }
This change makes the error message less clear than it was before, right?
public RexNode convertResolvedLiteral(ResolvedLiteral resolvedLiteral) { TypeKind kind = resolvedLiteral.getType().getKind(); RexNode ret; switch (kind) { case TYPE_BOOL: case TYPE_INT32: case TYPE_INT64: case TYPE_FLOAT: case TYPE_DOUBLE: case TYPE_STRING: case TYPE_TIMESTAMP: case TYPE_DATE: case TYPE_TIME: case TYPE_BYTES: case TYPE_ARRAY: case TYPE_STRUCT: case TYPE_ENUM: ret = convertValueToRexNode(resolvedLiteral.getType(), resolvedLiteral.getValue()); break; default: throw new RuntimeException("Unsupported ResolvedLiteral type."); } return ret; }
throw new RuntimeException("Unsupported ResolvedLiteral type.");
public RexNode convertResolvedLiteral(ResolvedLiteral resolvedLiteral) { TypeKind kind = resolvedLiteral.getType().getKind(); RexNode ret; switch (kind) { case TYPE_BOOL: case TYPE_INT32: case TYPE_INT64: case TYPE_FLOAT: case TYPE_DOUBLE: case TYPE_STRING: case TYPE_TIMESTAMP: case TYPE_DATE: case TYPE_TIME: case TYPE_BYTES: case TYPE_ARRAY: case TYPE_STRUCT: case TYPE_ENUM: ret = convertValueToRexNode(resolvedLiteral.getType(), resolvedLiteral.getValue()); break; default: throw new RuntimeException( MessageFormat.format( "Unsupported ResolvedLiteral type: {0}, kind: {1}, value: {2}, class: {3}", resolvedLiteral.getType().typeName(), kind, resolvedLiteral.getValue(), resolvedLiteral.getClass())); } return ret; }
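The revised method addresses that concern by putting the type name, kind, value, and class into the exception text. The snippet below shows the kind of diagnostic the `MessageFormat` pattern produces; the literal values are made-up placeholders, not output from a real ZetaSQL query.

```java
import java.text.MessageFormat;

// Demonstrates the richer diagnostic built by the rewritten error path.
public class LiteralErrorMessageDemo {
    public static void main(String[] args) {
        // Placeholder values standing in for a real ResolvedLiteral.
        String typeName = "GEOGRAPHY";
        String kind = "TYPE_GEOGRAPHY";
        String value = "POINT(0 0)";
        String clazz = "ResolvedLiteral";

        String message = MessageFormat.format(
                "Unsupported ResolvedLiteral type: {0}, kind: {1}, value: {2}, class: {3}",
                typeName, kind, value, clazz);
        System.out.println(message);
        // Unsupported ResolvedLiteral type: GEOGRAPHY, kind: TYPE_GEOGRAPHY,
        // value: POINT(0 0), class: ResolvedLiteral
    }
}
```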
class ExpressionConverter { private static final String PRE_DEFINED_WINDOW_FUNCTIONS = "pre_defined_window_functions"; private static final String WINDOW_START = "_START"; private static final String WINDOW_END = "_END"; private static final String FIXED_WINDOW = "TUMBLE"; private static final String FIXED_WINDOW_START = FIXED_WINDOW + WINDOW_START; private static final String FIXED_WINDOW_END = FIXED_WINDOW + WINDOW_END; private static final String SLIDING_WINDOW = "HOP"; private static final String SLIDING_WINDOW_START = SLIDING_WINDOW + WINDOW_START; private static final String SLIDING_WINDOW_END = SLIDING_WINDOW + WINDOW_END; private static final String SESSION_WINDOW = "SESSION"; private static final String SESSION_WINDOW_START = SESSION_WINDOW + WINDOW_START; private static final String SESSION_WINDOW_END = SESSION_WINDOW + WINDOW_END; private static final ImmutableMap<String, String> WINDOW_START_END_TO_WINDOW_MAP = ImmutableMap.<String, String>builder() .put(FIXED_WINDOW_START, FIXED_WINDOW) .put(FIXED_WINDOW_END, FIXED_WINDOW) .put(SLIDING_WINDOW_START, SLIDING_WINDOW) .put(SLIDING_WINDOW_END, SLIDING_WINDOW) .put(SESSION_WINDOW_START, SESSION_WINDOW) .put(SESSION_WINDOW_END, SESSION_WINDOW) .build(); private static final ImmutableSet<String> WINDOW_START_END_FUNCTION_SET = ImmutableSet.of( FIXED_WINDOW_START, FIXED_WINDOW_END, SLIDING_WINDOW_START, SLIDING_WINDOW_END, SESSION_WINDOW_START, SESSION_WINDOW_END); private static final ImmutableMap<TypeKind, ImmutableSet<TypeKind>> UNSUPPORTED_CASTING = ImmutableMap.<TypeKind, ImmutableSet<TypeKind>>builder() .put(TYPE_INT64, ImmutableSet.of(TYPE_DOUBLE)) .put(TYPE_BOOL, ImmutableSet.of(TYPE_STRING)) .put(TYPE_STRING, ImmutableSet.of(TYPE_BOOL, TYPE_DOUBLE)) .build(); private static final ImmutableMap<Integer, TimeUnit> TIME_UNIT_CASTING_MAP = ImmutableMap.<Integer, TimeUnit>builder() .put(DateTimestampPart.YEAR.getNumber(), TimeUnit.YEAR) .put(DateTimestampPart.MONTH.getNumber(), TimeUnit.MONTH) .put(DateTimestampPart.DAY.getNumber(), TimeUnit.DAY) .put(DateTimestampPart.DAYOFWEEK.getNumber(), TimeUnit.DOW) .put(DateTimestampPart.DAYOFYEAR.getNumber(), TimeUnit.DOY) .put(DateTimestampPart.QUARTER.getNumber(), TimeUnit.QUARTER) .put(DateTimestampPart.HOUR.getNumber(), TimeUnit.HOUR) .put(DateTimestampPart.MINUTE.getNumber(), TimeUnit.MINUTE) .put(DateTimestampPart.SECOND.getNumber(), TimeUnit.SECOND) .put(DateTimestampPart.MILLISECOND.getNumber(), TimeUnit.MILLISECOND) .put(DateTimestampPart.MICROSECOND.getNumber(), TimeUnit.MICROSECOND) .put(DateTimestampPart.NANOSECOND.getNumber(), TimeUnit.NANOSECOND) .put(DateTimestampPart.ISOYEAR.getNumber(), TimeUnit.ISOYEAR) .put(DateTimestampPart.ISOWEEK.getNumber(), TimeUnit.WEEK) .build(); private static final ImmutableSet<String> DATE_PART_UNITS_TO_MILLIS = ImmutableSet.of("DAY", "HOUR", "MINUTE", "SECOND"); private static final ImmutableSet<String> DATE_PART_UNITS_TO_MONTHS = ImmutableSet.of("YEAR"); private static final long ONE_SECOND_IN_MILLIS = 1000L; private static final long ONE_MINUTE_IN_MILLIS = 60L * ONE_SECOND_IN_MILLIS; private static final long ONE_HOUR_IN_MILLIS = 60L * ONE_MINUTE_IN_MILLIS; private static final long ONE_DAY_IN_MILLIS = 24L * ONE_HOUR_IN_MILLIS; @SuppressWarnings("unused") private static final long ONE_MONTH_IN_MILLIS = 30L * ONE_DAY_IN_MILLIS; @SuppressWarnings("unused") private static final long ONE_YEAR_IN_MILLIS = 365L * ONE_DAY_IN_MILLIS; private static final String INTERVAL_DATE_PART_MSG = "YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, 
MILLISECOND"; private static final String INTERVAL_FORMAT_MSG = "INTERVAL should be set as a STRING in the specific format: \"INTERVAL int64 date_part\"." + " The date_part includes: " + INTERVAL_DATE_PART_MSG; private final RelOptCluster cluster; private final Map<String, Value> queryParams; public ExpressionConverter(RelOptCluster cluster, Map<String, Value> params) { this.cluster = cluster; this.queryParams = params; } /** Extract expressions from a project scan node. */ public List<RexNode> retrieveRexNode(ResolvedProjectScan node, List<RelDataTypeField> fieldList) { List<RexNode> ret = new ArrayList<>(); for (ResolvedColumn column : node.getColumnList()) { int index = -1; if ((index = indexOfResolvedColumnInExprList(node.getExprList(), column)) != -1) { ResolvedComputedColumn computedColumn = node.getExprList().get(index); int windowFieldIndex = -1; if (computedColumn.getExpr().nodeKind() == RESOLVED_FUNCTION_CALL) { String functionName = ((ResolvedFunctionCall) computedColumn.getExpr()).getFunction().getName(); if (WINDOW_START_END_FUNCTION_SET.contains(functionName)) { ResolvedAggregateScan resolvedAggregateScan = (ResolvedAggregateScan) node.getInputScan(); windowFieldIndex = indexOfWindowField( resolvedAggregateScan.getGroupByList(), resolvedAggregateScan.getColumnList(), WINDOW_START_END_TO_WINDOW_MAP.get(functionName)); } } ret.add( convertRexNodeFromComputedColumnWithFieldList( computedColumn, node.getInputScan().getColumnList(), fieldList, windowFieldIndex)); } else { index = indexOfProjectionColumnRef(column.getId(), node.getInputScan().getColumnList()); if (index < 0 || index >= node.getInputScan().getColumnList().size()) { throw new RuntimeException( String.format("Cannot find %s in fieldList %s", column, fieldList)); } ret.add(rexBuilder().makeInputRef(fieldList.get(index).getType(), index)); } } return ret; } /** Extract expressions from order by scan node. 
*/ public List<RexNode> retrieveRexNodeFromOrderByScan( RelOptCluster cluster, ResolvedOrderByScan node, List<RelDataTypeField> fieldList) { final RexBuilder rexBuilder = cluster.getRexBuilder(); List<RexNode> ret = new ArrayList<>(); for (ResolvedColumn column : node.getColumnList()) { int index = indexOfProjectionColumnRef(column.getId(), node.getInputScan().getColumnList()); ret.add(rexBuilder.makeInputRef(fieldList.get(index).getType(), index)); } return ret; } private static int indexOfResolvedColumnInExprList( ImmutableList<ResolvedComputedColumn> exprList, ResolvedColumn column) { if (exprList == null || exprList.isEmpty()) { return -1; } for (int i = 0; i < exprList.size(); i++) { ResolvedComputedColumn computedColumn = exprList.get(i); if (computedColumn.getColumn().equals(column)) { return i; } } return -1; } private static int indexOfWindowField( List<ResolvedComputedColumn> groupByList, List<ResolvedColumn> columnList, String windowFn) { for (ResolvedComputedColumn groupByComputedColumn : groupByList) { if (groupByComputedColumn.getExpr().nodeKind() == RESOLVED_FUNCTION_CALL) { ResolvedFunctionCall functionCall = (ResolvedFunctionCall) groupByComputedColumn.getExpr(); if (functionCall.getFunction().getName().equals(windowFn)) { int ret = indexOfResolvedColumnInColumnList(columnList, groupByComputedColumn.getColumn()); if (ret == -1) { throw new RuntimeException("Cannot find " + windowFn + " in " + groupByList); } else { return ret; } } } } throw new RuntimeException("Cannot find " + windowFn + " in " + groupByList); } private static int indexOfResolvedColumnInColumnList( List<ResolvedColumn> columnList, ResolvedColumn column) { if (columnList == null || columnList.isEmpty()) { return -1; } for (int i = 0; i < columnList.size(); i++) { if (columnList.get(i).equals(column)) { return i; } } return -1; } /** Create a RexNode for a corresponding resolved expression node. */ public RexNode convertRexNodeFromResolvedExpr( ResolvedExpr expr, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { if (columnList == null || fieldList == null) { return convertRexNodeFromResolvedExpr(expr); } RexNode ret; switch (expr.nodeKind()) { case RESOLVED_LITERAL: ret = convertResolvedLiteral((ResolvedLiteral) expr); break; case RESOLVED_COLUMN_REF: ret = convertResolvedColumnRef((ResolvedColumnRef) expr, columnList, fieldList); break; case RESOLVED_FUNCTION_CALL: ret = convertResolvedFunctionCall((ResolvedFunctionCall) expr, columnList, fieldList); break; case RESOLVED_CAST: ret = convertResolvedCast((ResolvedCast) expr, columnList, fieldList); break; case RESOLVED_PARAMETER: ret = convertResolvedParameter((ResolvedParameter) expr); break; case RESOLVED_GET_STRUCT_FIELD: ret = convertResolvedStructFieldAccess((ResolvedGetStructField) expr, columnList, fieldList); break; default: ret = convertRexNodeFromResolvedExpr(expr); } return ret; } /** Create a RexNode for a corresponding resolved expression. 
*/ public RexNode convertRexNodeFromResolvedExpr(ResolvedExpr expr) { RexNode ret; switch (expr.nodeKind()) { case RESOLVED_LITERAL: ret = convertResolvedLiteral((ResolvedLiteral) expr); break; case RESOLVED_COLUMN_REF: ret = convertResolvedColumnRef((ResolvedColumnRef) expr); break; case RESOLVED_FUNCTION_CALL: ret = convertResolvedFunctionCall((ResolvedFunctionCall) expr, null, null); break; case RESOLVED_CAST: ret = convertResolvedCast((ResolvedCast) expr, null, null); break; case RESOLVED_PARAMETER: ret = convertResolvedParameter((ResolvedParameter) expr); break; case RESOLVED_GET_STRUCT_FIELD: ret = convertResolvedStructFieldAccess((ResolvedGetStructField) expr); break; case RESOLVED_SUBQUERY_EXPR: throw new IllegalArgumentException("Does not support sub-queries"); default: throw new RuntimeException("Does not support expr node kind " + expr.nodeKind()); } return ret; } /** Extract the RexNode from expression with ref scan. */ public RexNode convertRexNodeFromResolvedExprWithRefScan( ResolvedExpr expr, List<ResolvedColumn> refScanLeftColumnList, List<RelDataTypeField> leftFieldList, List<ResolvedColumn> originalLeftColumnList, List<ResolvedColumn> refScanRightColumnList, List<RelDataTypeField> rightFieldList, List<ResolvedColumn> originalRightColumnList) { RexNode ret; switch (expr.nodeKind()) { case RESOLVED_LITERAL: ret = convertResolvedLiteral((ResolvedLiteral) expr); break; case RESOLVED_COLUMN_REF: ResolvedColumnRef columnRef = (ResolvedColumnRef) expr; ret = convertRexNodeFromResolvedColumnRefWithRefScan( columnRef, refScanLeftColumnList, originalLeftColumnList, leftFieldList); if (ret == null) { ret = convertRexNodeFromResolvedColumnRefWithRefScan( columnRef, refScanRightColumnList, originalRightColumnList, rightFieldList); } break; case RESOLVED_FUNCTION_CALL: ResolvedFunctionCall resolvedFunctionCall = (ResolvedFunctionCall) expr; List<RexNode> operands = new ArrayList<>(); for (ResolvedExpr resolvedExpr : resolvedFunctionCall.getArgumentList()) { operands.add( convertRexNodeFromResolvedExprWithRefScan( resolvedExpr, refScanLeftColumnList, leftFieldList, originalLeftColumnList, refScanRightColumnList, rightFieldList, originalRightColumnList)); } SqlOperator op = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( resolvedFunctionCall.getFunction().getName()); ret = rexBuilder().makeCall(op, operands); break; case RESOLVED_CAST: ResolvedCast resolvedCast = (ResolvedCast) expr; RexNode operand = convertRexNodeFromResolvedExprWithRefScan( resolvedCast.getExpr(), refScanLeftColumnList, leftFieldList, originalLeftColumnList, refScanRightColumnList, rightFieldList, originalRightColumnList); TypeKind fromType = resolvedCast.getExpr().getType().getKind(); TypeKind toType = resolvedCast.getType().getKind(); isCastingSupported(fromType, toType); RelDataType outputType = TypeUtils.toSimpleRelDataType(toType, rexBuilder(), operand.getType().isNullable()); if (isZetaSQLCast(fromType, toType)) { ret = rexBuilder().makeCall(outputType, ZETASQL_CAST_OP, ImmutableList.of(operand)); } else { ret = rexBuilder().makeCast(outputType, operand); } break; default: throw new RuntimeException("Does not support expr node kind " + expr.nodeKind()); } return ret; } private RexNode convertRexNodeFromComputedColumnWithFieldList( ResolvedComputedColumn column, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList, int windowFieldIndex) { if (column.getExpr().nodeKind() != RESOLVED_FUNCTION_CALL) { return convertRexNodeFromResolvedExpr(column.getExpr(), columnList, 
fieldList); } ResolvedFunctionCall functionCall = (ResolvedFunctionCall) column.getExpr(); if (functionCall.getFunction().getName().equals(FIXED_WINDOW) || functionCall.getFunction().getName().equals(SLIDING_WINDOW) || functionCall.getFunction().getName().equals(SESSION_WINDOW)) { throw new RuntimeException( functionCall.getFunction().getName() + " shouldn't appear in SELECT exprlist."); } if (!functionCall.getFunction().getGroup().equals(PRE_DEFINED_WINDOW_FUNCTIONS)) { return convertRexNodeFromResolvedExpr(column.getExpr(), columnList, fieldList); } List<RexNode> operands = new ArrayList<>(); switch (functionCall.getFunction().getName()) { case FIXED_WINDOW_START: case SLIDING_WINDOW_START: case SESSION_WINDOW_START: case SESSION_WINDOW_END: return rexBuilder() .makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex); case FIXED_WINDOW_END: operands.add( rexBuilder().makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex)); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(0))); return rexBuilder().makeCall(SqlStdOperatorTable.PLUS, operands); case SLIDING_WINDOW_END: operands.add( rexBuilder().makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex)); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(1))); return rexBuilder().makeCall(SqlStdOperatorTable.PLUS, operands); default: throw new RuntimeException( "Does not support window start/end: " + functionCall.getFunction().getName()); } } /** Convert a resolved literal to a RexNode. */ private RexNode convertValueToRexNode(Type type, Value value) { RexNode ret; switch (type.getKind()) { case TYPE_BOOL: case TYPE_INT32: case TYPE_INT64: case TYPE_FLOAT: case TYPE_DOUBLE: case TYPE_STRING: case TYPE_TIMESTAMP: case TYPE_DATE: case TYPE_TIME: case TYPE_BYTES: ret = convertSimpleValueToRexNode(type.getKind(), value); break; case TYPE_ARRAY: ret = convertArrayValueToRexNode(type.asArray(), value); break; case TYPE_ENUM: ret = convertEnumToRexNode(type.asEnum(), value); break; default: throw new RuntimeException("Unsupported ResolvedLiteral kind."); } return ret; } private RexNode convertSimpleValueToRexNode(TypeKind kind, Value value) { if (value.isNull()) { return rexBuilder().makeNullLiteral(TypeUtils.toSimpleRelDataType(kind, rexBuilder())); } RexNode ret; switch (kind) { case TYPE_BOOL: ret = rexBuilder().makeLiteral(value.getBoolValue()); break; case TYPE_INT32: ret = rexBuilder() .makeExactLiteral( new BigDecimal(value.getInt32Value()), TypeUtils.toSimpleRelDataType(kind, rexBuilder())); break; case TYPE_INT64: ret = rexBuilder() .makeExactLiteral( new BigDecimal(value.getInt64Value()), TypeUtils.toSimpleRelDataType(kind, rexBuilder())); break; case TYPE_FLOAT: ret = rexBuilder() .makeApproxLiteral( new BigDecimal(value.getFloatValue()), TypeUtils.toSimpleRelDataType(kind, rexBuilder())); break; case TYPE_DOUBLE: ret = rexBuilder() .makeApproxLiteral( new BigDecimal(value.getDoubleValue()), TypeUtils.toSimpleRelDataType(kind, rexBuilder())); break; case TYPE_STRING: ret = rexBuilder() .makeLiteral( value.getStringValue(), typeFactory().createSqlType(SqlTypeName.VARCHAR), true); break; case TYPE_TIMESTAMP: ret = rexBuilder() .makeTimestampLiteral( TimestampString.fromMillisSinceEpoch( safeMicrosToMillis(value.getTimestampUnixMicros())), typeFactory().getTypeSystem().getMaxPrecision(SqlTypeName.TIMESTAMP)); break; case TYPE_DATE: ret = 
rexBuilder().makeDateLiteral(convertDateValueToDateString(value)); break; case TYPE_TIME: RelDataType timeType = typeFactory() .createSqlType( SqlTypeName.TIME, typeFactory().getTypeSystem().getMaxPrecision(SqlTypeName.TIME)); ret = rexBuilder().makeLiteral(convertTimeValueToTimeString(value), timeType, false); break; case TYPE_BYTES: ret = rexBuilder().makeBinaryLiteral(new ByteString(value.getBytesValue().toByteArray())); break; default: throw new RuntimeException("Unsupported column type."); } return ret; } private RexNode convertArrayValueToRexNode(ArrayType arrayType, Value value) { if (value.isNull()) { return rexBuilder() .makeNullLiteral(TypeUtils.toArrayRelDataType(rexBuilder(), arrayType, false)); } List<RexNode> operands = new ArrayList<>(); for (Value v : value.getElementList()) { operands.add(convertValueToRexNode(arrayType.getElementType(), v)); } return rexBuilder().makeCall(SqlStdOperatorTable.ARRAY_VALUE_CONSTRUCTOR, operands); } private RexNode convertEnumToRexNode(EnumType type, Value value) { if ("zetasql.functions.DateTimestampPart".equals(type.getDescriptor().getFullName())) { return convertTimeUnitRangeEnumToRexNode(type, value); } else { throw new RuntimeException("Unsupported enum."); } } private RexNode convertTimeUnitRangeEnumToRexNode(Type type, Value value) { TimeUnit mappedUnit = TIME_UNIT_CASTING_MAP.get(value.getEnumValue()); if (mappedUnit == null) { throw new RuntimeException("Unsupported enum value."); } TimeUnitRange mappedRange = TimeUnitRange.of(mappedUnit, null); return rexBuilder().makeFlag(mappedRange); } private RexNode convertResolvedColumnRef( ResolvedColumnRef columnRef, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { int index = indexOfProjectionColumnRef(columnRef.getColumn().getId(), columnList); if (index < 0 || index >= columnList.size()) { throw new RuntimeException( String.format("Cannot find %s in fieldList %s", columnRef.getColumn(), fieldList)); } return rexBuilder().makeInputRef(fieldList.get(index).getType(), index); } private RexNode convertResolvedColumnRef(ResolvedColumnRef columnRef) { return rexBuilder() .makeInputRef( TypeUtils.toRelDataType(rexBuilder(), columnRef.getType(), false), (int) columnRef.getColumn().getId() - 1); } /** Return an index of the projection column reference. 
*/ public int indexOfProjectionColumnRef(long colId, List<ResolvedColumn> columnList) { int ret = -1; for (int i = 0; i < columnList.size(); i++) { if (columnList.get(i).getId() == colId) { ret = i; break; } } return ret; } private RexNode convertResolvedFunctionCall( ResolvedFunctionCall functionCall, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { RexNode ret; SqlOperator op; List<RexNode> operands = new ArrayList<>(); if (functionCall.getFunction().getGroup().equals(PRE_DEFINED_WINDOW_FUNCTIONS)) { switch (functionCall.getFunction().getName()) { case FIXED_WINDOW: case SESSION_WINDOW: op = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( functionCall.getFunction().getName()); operands.add( convertRexNodeFromResolvedExpr( functionCall.getArgumentList().get(0), columnList, fieldList)); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(1))); break; case SLIDING_WINDOW: op = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( SLIDING_WINDOW); operands.add( convertRexNodeFromResolvedExpr( functionCall.getArgumentList().get(0), columnList, fieldList)); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(1))); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(2))); break; default: throw new RuntimeException("Only support TUMBLE, HOP AND SESSION functions right now."); } } else if (functionCall.getFunction().getGroup().equals("ZetaSQL")) { op = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( functionCall.getFunction().getName()); if (op == null) { throw new RuntimeException( "Does not support ZetaSQL function: " + functionCall.getFunction().getName()); } if (FUNCTION_FAMILY_DATE_ADD.contains(functionCall.getFunction().getName())) { return convertTimestampAddFunction(functionCall, columnList, fieldList); } else { for (ResolvedExpr expr : functionCall.getArgumentList()) { operands.add(convertRexNodeFromResolvedExpr(expr, columnList, fieldList)); } } } else { throw new RuntimeException( "Does not support function group: " + functionCall.getFunction().getGroup()); } SqlOperatorRewriter rewriter = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR_REWRITER.get( functionCall.getFunction().getName()); if (rewriter != null) { ret = rewriter.apply(rexBuilder(), operands); } else { ret = rexBuilder().makeCall(op, operands); } return ret; } private RexNode convertTimestampAddFunction( ResolvedFunctionCall functionCall, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { TimeUnit unit = TIME_UNIT_CASTING_MAP.get( ((ResolvedLiteral) functionCall.getArgumentList().get(2)).getValue().getEnumValue()); if ((unit == TimeUnit.MICROSECOND) || (unit == TimeUnit.NANOSECOND)) { throw Status.UNIMPLEMENTED .withDescription("Micro and Nanoseconds are not supported by Beam ZetaSQL") .asRuntimeException(); } SqlIntervalQualifier qualifier = new SqlIntervalQualifier(unit, null, SqlParserPos.ZERO); RexNode intervalArgumentNode = convertRexNodeFromResolvedExpr( functionCall.getArgumentList().get(1), columnList, fieldList); RexNode validatedIntervalArgument = rexBuilder() .makeCall( SqlOperators.VALIDATE_TIME_INTERVAL, intervalArgumentNode, rexBuilder().makeFlag(unit)); RexNode intervalNode = rexBuilder() .makeCall( SqlStdOperatorTable.MULTIPLY, rexBuilder().makeIntervalLiteral(unit.multiplier, qualifier), validatedIntervalArgument); RexNode 
timestampNode = convertRexNodeFromResolvedExpr( functionCall.getArgumentList().get(0), columnList, fieldList); RexNode dateTimePlusResult = rexBuilder().makeCall(SqlStdOperatorTable.DATETIME_PLUS, timestampNode, intervalNode); RexNode validatedTimestampResult = rexBuilder().makeCall(SqlOperators.VALIDATE_TIMESTAMP, dateTimePlusResult); return validatedTimestampResult; } private RexNode convertIntervalToRexIntervalLiteral(ResolvedLiteral resolvedLiteral) { if (resolvedLiteral.getType().getKind() != TYPE_STRING) { throw new IllegalArgumentException(INTERVAL_FORMAT_MSG); } String valStr = resolvedLiteral.getValue().getStringValue(); List<String> stringList = Arrays.stream(valStr.split(" ")).filter(s -> !s.isEmpty()).collect(Collectors.toList()); if (stringList.size() != 3) { throw new IllegalArgumentException(INTERVAL_FORMAT_MSG); } if (!Ascii.toUpperCase(stringList.get(0)).equals("INTERVAL")) { throw new IllegalArgumentException(INTERVAL_FORMAT_MSG); } long intervalValue; try { intervalValue = Long.parseLong(stringList.get(1)); } catch (NumberFormatException e) { throw new IllegalArgumentException(INTERVAL_FORMAT_MSG, e); } String intervalDatepart = Ascii.toUpperCase(stringList.get(2)); return createCalciteIntervalRexLiteral(intervalValue, intervalDatepart); } private RexLiteral createCalciteIntervalRexLiteral(long intervalValue, String intervalTimeUnit) { SqlIntervalQualifier sqlIntervalQualifier = convertIntervalDatepartToSqlIntervalQualifier(intervalTimeUnit); BigDecimal decimalValue; if (DATE_PART_UNITS_TO_MILLIS.contains(intervalTimeUnit)) { decimalValue = convertIntervalValueToMillis(sqlIntervalQualifier, intervalValue); } else if (DATE_PART_UNITS_TO_MONTHS.contains(intervalTimeUnit)) { decimalValue = new BigDecimal(intervalValue * 12); } else { decimalValue = new BigDecimal(intervalValue); } return rexBuilder().makeIntervalLiteral(decimalValue, sqlIntervalQualifier); } private static BigDecimal convertIntervalValueToMillis( SqlIntervalQualifier qualifier, long value) { switch (qualifier.typeName()) { case INTERVAL_DAY: return new BigDecimal(value * ONE_DAY_IN_MILLIS); case INTERVAL_HOUR: return new BigDecimal(value * ONE_HOUR_IN_MILLIS); case INTERVAL_MINUTE: return new BigDecimal(value * ONE_MINUTE_IN_MILLIS); case INTERVAL_SECOND: return new BigDecimal(value * ONE_SECOND_IN_MILLIS); default: throw new IllegalArgumentException(qualifier.typeName().toString()); } } private static SqlIntervalQualifier convertIntervalDatepartToSqlIntervalQualifier( String datePart) { switch (datePart) { case "YEAR": return new SqlIntervalQualifier(TimeUnit.YEAR, null, SqlParserPos.ZERO); case "MONTH": return new SqlIntervalQualifier(TimeUnit.MONTH, null, SqlParserPos.ZERO); case "DAY": return new SqlIntervalQualifier(TimeUnit.DAY, null, SqlParserPos.ZERO); case "HOUR": return new SqlIntervalQualifier(TimeUnit.HOUR, null, SqlParserPos.ZERO); case "MINUTE": return new SqlIntervalQualifier(TimeUnit.MINUTE, null, SqlParserPos.ZERO); case "SECOND": return new SqlIntervalQualifier(TimeUnit.SECOND, null, SqlParserPos.ZERO); case "WEEK": return new SqlIntervalQualifier(TimeUnit.WEEK, null, SqlParserPos.ZERO); case "QUARTER": return new SqlIntervalQualifier(TimeUnit.QUARTER, null, SqlParserPos.ZERO); case "MILLISECOND": return new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, SqlParserPos.ZERO); default: throw new RuntimeException( String.format( "Received an undefined INTERVAL unit: %s. 
Please specify unit from the following" + " list: %s.", datePart, INTERVAL_DATE_PART_MSG)); } } private RexNode convertResolvedCast( ResolvedCast resolvedCast, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { TypeKind fromType = resolvedCast.getExpr().getType().getKind(); TypeKind toType = resolvedCast.getType().getKind(); isCastingSupported(fromType, toType); RexNode inputNode = convertRexNodeFromResolvedExpr(resolvedCast.getExpr(), columnList, fieldList); RelDataType outputType = TypeUtils.toSimpleRelDataType( resolvedCast.getType().getKind(), rexBuilder(), inputNode.getType().isNullable()); if (isZetaSQLCast(fromType, toType)) { return rexBuilder().makeCall(outputType, ZETASQL_CAST_OP, ImmutableList.of(inputNode)); } else { return rexBuilder().makeCast(outputType, inputNode); } } private static void isCastingSupported(TypeKind fromType, TypeKind toType) { if (UNSUPPORTED_CASTING.containsKey(toType) && UNSUPPORTED_CASTING.get(toType).contains(fromType)) { throw new IllegalArgumentException( "Does not support CAST(" + fromType + " AS " + toType + ")"); } } private static boolean isZetaSQLCast(TypeKind fromType, TypeKind toType) { return (fromType.equals(TYPE_BYTES) && toType.equals(TYPE_STRING)) || (fromType.equals(TYPE_INT64) && toType.equals(TYPE_BOOL)) || (fromType.equals(TYPE_BOOL) && toType.equals(TYPE_INT64)) || (fromType.equals(TYPE_TIMESTAMP) && toType.equals(TYPE_STRING)); } private RexNode convertRexNodeFromResolvedColumnRefWithRefScan( ResolvedColumnRef columnRef, List<ResolvedColumn> refScanColumnList, List<ResolvedColumn> originalColumnList, List<RelDataTypeField> fieldList) { for (int i = 0; i < refScanColumnList.size(); i++) { if (refScanColumnList.get(i).getId() == columnRef.getColumn().getId()) { boolean nullable = fieldList.get(i).getType().isNullable(); int off = (int) originalColumnList.get(i).getId() - 1; return rexBuilder() .makeInputRef( TypeUtils.toSimpleRelDataType( columnRef.getType().getKind(), rexBuilder(), nullable), off); } } return null; } private RexNode convertResolvedParameter(ResolvedParameter parameter) { assert parameter.getType().equals(queryParams.get(parameter.getName()).getType()); return convertValueToRexNode( queryParams.get(parameter.getName()).getType(), queryParams.get(parameter.getName())); } private RexNode convertResolvedStructFieldAccess(ResolvedGetStructField resolvedGetStructField) { return rexBuilder() .makeFieldAccess( convertRexNodeFromResolvedExpr(resolvedGetStructField.getExpr()), (int) resolvedGetStructField.getFieldIdx()); } private RexNode convertResolvedStructFieldAccess( ResolvedGetStructField resolvedGetStructField, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { return rexBuilder() .makeFieldAccess( convertRexNodeFromResolvedExpr(resolvedGetStructField.getExpr(), columnList, fieldList), (int) resolvedGetStructField.getFieldIdx()); } private RexBuilder rexBuilder() { return cluster.getRexBuilder(); } private RelDataTypeFactory typeFactory() { return cluster.getTypeFactory(); } }
class ExpressionConverter { private static final String PRE_DEFINED_WINDOW_FUNCTIONS = "pre_defined_window_functions"; private static final String WINDOW_START = "_START"; private static final String WINDOW_END = "_END"; private static final String FIXED_WINDOW = "TUMBLE"; private static final String FIXED_WINDOW_START = FIXED_WINDOW + WINDOW_START; private static final String FIXED_WINDOW_END = FIXED_WINDOW + WINDOW_END; private static final String SLIDING_WINDOW = "HOP"; private static final String SLIDING_WINDOW_START = SLIDING_WINDOW + WINDOW_START; private static final String SLIDING_WINDOW_END = SLIDING_WINDOW + WINDOW_END; private static final String SESSION_WINDOW = "SESSION"; private static final String SESSION_WINDOW_START = SESSION_WINDOW + WINDOW_START; private static final String SESSION_WINDOW_END = SESSION_WINDOW + WINDOW_END; private static final ImmutableMap<String, String> WINDOW_START_END_TO_WINDOW_MAP = ImmutableMap.<String, String>builder() .put(FIXED_WINDOW_START, FIXED_WINDOW) .put(FIXED_WINDOW_END, FIXED_WINDOW) .put(SLIDING_WINDOW_START, SLIDING_WINDOW) .put(SLIDING_WINDOW_END, SLIDING_WINDOW) .put(SESSION_WINDOW_START, SESSION_WINDOW) .put(SESSION_WINDOW_END, SESSION_WINDOW) .build(); private static final ImmutableSet<String> WINDOW_START_END_FUNCTION_SET = ImmutableSet.of( FIXED_WINDOW_START, FIXED_WINDOW_END, SLIDING_WINDOW_START, SLIDING_WINDOW_END, SESSION_WINDOW_START, SESSION_WINDOW_END); private static final ImmutableMap<TypeKind, ImmutableSet<TypeKind>> UNSUPPORTED_CASTING = ImmutableMap.<TypeKind, ImmutableSet<TypeKind>>builder() .put(TYPE_INT64, ImmutableSet.of(TYPE_DOUBLE)) .put(TYPE_BOOL, ImmutableSet.of(TYPE_STRING)) .put(TYPE_STRING, ImmutableSet.of(TYPE_BOOL, TYPE_DOUBLE)) .build(); private static final ImmutableMap<Integer, TimeUnit> TIME_UNIT_CASTING_MAP = ImmutableMap.<Integer, TimeUnit>builder() .put(DateTimestampPart.YEAR.getNumber(), TimeUnit.YEAR) .put(DateTimestampPart.MONTH.getNumber(), TimeUnit.MONTH) .put(DateTimestampPart.DAY.getNumber(), TimeUnit.DAY) .put(DateTimestampPart.DAYOFWEEK.getNumber(), TimeUnit.DOW) .put(DateTimestampPart.DAYOFYEAR.getNumber(), TimeUnit.DOY) .put(DateTimestampPart.QUARTER.getNumber(), TimeUnit.QUARTER) .put(DateTimestampPart.HOUR.getNumber(), TimeUnit.HOUR) .put(DateTimestampPart.MINUTE.getNumber(), TimeUnit.MINUTE) .put(DateTimestampPart.SECOND.getNumber(), TimeUnit.SECOND) .put(DateTimestampPart.MILLISECOND.getNumber(), TimeUnit.MILLISECOND) .put(DateTimestampPart.MICROSECOND.getNumber(), TimeUnit.MICROSECOND) .put(DateTimestampPart.NANOSECOND.getNumber(), TimeUnit.NANOSECOND) .put(DateTimestampPart.ISOYEAR.getNumber(), TimeUnit.ISOYEAR) .put(DateTimestampPart.ISOWEEK.getNumber(), TimeUnit.WEEK) .build(); private static final ImmutableSet<String> DATE_PART_UNITS_TO_MILLIS = ImmutableSet.of("DAY", "HOUR", "MINUTE", "SECOND"); private static final ImmutableSet<String> DATE_PART_UNITS_TO_MONTHS = ImmutableSet.of("YEAR"); private static final long ONE_SECOND_IN_MILLIS = 1000L; private static final long ONE_MINUTE_IN_MILLIS = 60L * ONE_SECOND_IN_MILLIS; private static final long ONE_HOUR_IN_MILLIS = 60L * ONE_MINUTE_IN_MILLIS; private static final long ONE_DAY_IN_MILLIS = 24L * ONE_HOUR_IN_MILLIS; @SuppressWarnings("unused") private static final long ONE_MONTH_IN_MILLIS = 30L * ONE_DAY_IN_MILLIS; @SuppressWarnings("unused") private static final long ONE_YEAR_IN_MILLIS = 365L * ONE_DAY_IN_MILLIS; private static final String INTERVAL_DATE_PART_MSG = "YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, 
MILLISECOND"; private static final String INTERVAL_FORMAT_MSG = "INTERVAL should be set as a STRING in the specific format: \"INTERVAL int64 date_part\"." + " The date_part includes: " + INTERVAL_DATE_PART_MSG; private final RelOptCluster cluster; private final Map<String, Value> queryParams; public ExpressionConverter(RelOptCluster cluster, Map<String, Value> params) { this.cluster = cluster; this.queryParams = params; } /** Extract expressions from a project scan node. */ public List<RexNode> retrieveRexNode(ResolvedProjectScan node, List<RelDataTypeField> fieldList) { List<RexNode> ret = new ArrayList<>(); for (ResolvedColumn column : node.getColumnList()) { int index = -1; if ((index = indexOfResolvedColumnInExprList(node.getExprList(), column)) != -1) { ResolvedComputedColumn computedColumn = node.getExprList().get(index); int windowFieldIndex = -1; if (computedColumn.getExpr().nodeKind() == RESOLVED_FUNCTION_CALL) { String functionName = ((ResolvedFunctionCall) computedColumn.getExpr()).getFunction().getName(); if (WINDOW_START_END_FUNCTION_SET.contains(functionName)) { ResolvedAggregateScan resolvedAggregateScan = (ResolvedAggregateScan) node.getInputScan(); windowFieldIndex = indexOfWindowField( resolvedAggregateScan.getGroupByList(), resolvedAggregateScan.getColumnList(), WINDOW_START_END_TO_WINDOW_MAP.get(functionName)); } } ret.add( convertRexNodeFromComputedColumnWithFieldList( computedColumn, node.getInputScan().getColumnList(), fieldList, windowFieldIndex)); } else { index = indexOfProjectionColumnRef(column.getId(), node.getInputScan().getColumnList()); if (index < 0 || index >= node.getInputScan().getColumnList().size()) { throw new RuntimeException( String.format("Cannot find %s in fieldList %s", column, fieldList)); } ret.add(rexBuilder().makeInputRef(fieldList.get(index).getType(), index)); } } return ret; } /** Extract expressions from order by scan node. 
*/ public List<RexNode> retrieveRexNodeFromOrderByScan( RelOptCluster cluster, ResolvedOrderByScan node, List<RelDataTypeField> fieldList) { final RexBuilder rexBuilder = cluster.getRexBuilder(); List<RexNode> ret = new ArrayList<>(); for (ResolvedColumn column : node.getColumnList()) { int index = indexOfProjectionColumnRef(column.getId(), node.getInputScan().getColumnList()); ret.add(rexBuilder.makeInputRef(fieldList.get(index).getType(), index)); } return ret; } private static int indexOfResolvedColumnInExprList( ImmutableList<ResolvedComputedColumn> exprList, ResolvedColumn column) { if (exprList == null || exprList.isEmpty()) { return -1; } for (int i = 0; i < exprList.size(); i++) { ResolvedComputedColumn computedColumn = exprList.get(i); if (computedColumn.getColumn().equals(column)) { return i; } } return -1; } private static int indexOfWindowField( List<ResolvedComputedColumn> groupByList, List<ResolvedColumn> columnList, String windowFn) { for (ResolvedComputedColumn groupByComputedColumn : groupByList) { if (groupByComputedColumn.getExpr().nodeKind() == RESOLVED_FUNCTION_CALL) { ResolvedFunctionCall functionCall = (ResolvedFunctionCall) groupByComputedColumn.getExpr(); if (functionCall.getFunction().getName().equals(windowFn)) { int ret = indexOfResolvedColumnInColumnList(columnList, groupByComputedColumn.getColumn()); if (ret == -1) { throw new RuntimeException("Cannot find " + windowFn + " in " + groupByList); } else { return ret; } } } } throw new RuntimeException("Cannot find " + windowFn + " in " + groupByList); } private static int indexOfResolvedColumnInColumnList( List<ResolvedColumn> columnList, ResolvedColumn column) { if (columnList == null || columnList.isEmpty()) { return -1; } for (int i = 0; i < columnList.size(); i++) { if (columnList.get(i).equals(column)) { return i; } } return -1; } /** Create a RexNode for a corresponding resolved expression node. */ public RexNode convertRexNodeFromResolvedExpr( ResolvedExpr expr, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { if (columnList == null || fieldList == null) { return convertRexNodeFromResolvedExpr(expr); } RexNode ret; switch (expr.nodeKind()) { case RESOLVED_LITERAL: ret = convertResolvedLiteral((ResolvedLiteral) expr); break; case RESOLVED_COLUMN_REF: ret = convertResolvedColumnRef((ResolvedColumnRef) expr, columnList, fieldList); break; case RESOLVED_FUNCTION_CALL: ret = convertResolvedFunctionCall((ResolvedFunctionCall) expr, columnList, fieldList); break; case RESOLVED_CAST: ret = convertResolvedCast((ResolvedCast) expr, columnList, fieldList); break; case RESOLVED_PARAMETER: ret = convertResolvedParameter((ResolvedParameter) expr); break; case RESOLVED_GET_STRUCT_FIELD: ret = convertResolvedStructFieldAccess((ResolvedGetStructField) expr, columnList, fieldList); break; default: ret = convertRexNodeFromResolvedExpr(expr); } return ret; } /** Create a RexNode for a corresponding resolved expression. 
*/ public RexNode convertRexNodeFromResolvedExpr(ResolvedExpr expr) { RexNode ret; switch (expr.nodeKind()) { case RESOLVED_LITERAL: ret = convertResolvedLiteral((ResolvedLiteral) expr); break; case RESOLVED_COLUMN_REF: ret = convertResolvedColumnRef((ResolvedColumnRef) expr); break; case RESOLVED_FUNCTION_CALL: ret = convertResolvedFunctionCall((ResolvedFunctionCall) expr, null, null); break; case RESOLVED_CAST: ret = convertResolvedCast((ResolvedCast) expr, null, null); break; case RESOLVED_PARAMETER: ret = convertResolvedParameter((ResolvedParameter) expr); break; case RESOLVED_GET_STRUCT_FIELD: ret = convertResolvedStructFieldAccess((ResolvedGetStructField) expr); break; case RESOLVED_SUBQUERY_EXPR: throw new IllegalArgumentException("Does not support sub-queries"); default: throw new RuntimeException("Does not support expr node kind " + expr.nodeKind()); } return ret; } /** Extract the RexNode from expression with ref scan. */ public RexNode convertRexNodeFromResolvedExprWithRefScan( ResolvedExpr expr, List<ResolvedColumn> refScanLeftColumnList, List<RelDataTypeField> leftFieldList, List<ResolvedColumn> originalLeftColumnList, List<ResolvedColumn> refScanRightColumnList, List<RelDataTypeField> rightFieldList, List<ResolvedColumn> originalRightColumnList) { RexNode ret; switch (expr.nodeKind()) { case RESOLVED_LITERAL: ret = convertResolvedLiteral((ResolvedLiteral) expr); break; case RESOLVED_COLUMN_REF: ResolvedColumnRef columnRef = (ResolvedColumnRef) expr; Optional<RexNode> colRexNode = convertRexNodeFromResolvedColumnRefWithRefScan( columnRef, refScanLeftColumnList, originalLeftColumnList, leftFieldList); if (colRexNode.isPresent()) { ret = colRexNode.get(); break; } colRexNode = convertRexNodeFromResolvedColumnRefWithRefScan( columnRef, refScanRightColumnList, originalRightColumnList, rightFieldList); if (colRexNode.isPresent()) { ret = colRexNode.get(); break; } throw new IllegalArgumentException( String.format( "Could not find column reference %s in %s or %s", columnRef, refScanLeftColumnList, refScanRightColumnList)); case RESOLVED_FUNCTION_CALL: ResolvedFunctionCall resolvedFunctionCall = (ResolvedFunctionCall) expr; List<RexNode> operands = new ArrayList<>(); for (ResolvedExpr resolvedExpr : resolvedFunctionCall.getArgumentList()) { operands.add( convertRexNodeFromResolvedExprWithRefScan( resolvedExpr, refScanLeftColumnList, leftFieldList, originalLeftColumnList, refScanRightColumnList, rightFieldList, originalRightColumnList)); } SqlOperator op = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( resolvedFunctionCall.getFunction().getName()); ret = rexBuilder().makeCall(op, operands); break; case RESOLVED_CAST: ResolvedCast resolvedCast = (ResolvedCast) expr; RexNode operand = convertRexNodeFromResolvedExprWithRefScan( resolvedCast.getExpr(), refScanLeftColumnList, leftFieldList, originalLeftColumnList, refScanRightColumnList, rightFieldList, originalRightColumnList); TypeKind fromType = resolvedCast.getExpr().getType().getKind(); TypeKind toType = resolvedCast.getType().getKind(); isCastingSupported(fromType, toType); RelDataType outputType = TypeUtils.toSimpleRelDataType(toType, rexBuilder(), operand.getType().isNullable()); if (isZetaSQLCast(fromType, toType)) { ret = rexBuilder().makeCall(outputType, ZETASQL_CAST_OP, ImmutableList.of(operand)); } else { ret = rexBuilder().makeCast(outputType, operand); } break; default: throw new RuntimeException("Does not support expr node kind " + expr.nodeKind()); } return ret; } private RexNode 
convertRexNodeFromComputedColumnWithFieldList( ResolvedComputedColumn column, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList, int windowFieldIndex) { if (column.getExpr().nodeKind() != RESOLVED_FUNCTION_CALL) { return convertRexNodeFromResolvedExpr(column.getExpr(), columnList, fieldList); } ResolvedFunctionCall functionCall = (ResolvedFunctionCall) column.getExpr(); if (functionCall.getFunction().getName().equals(FIXED_WINDOW) || functionCall.getFunction().getName().equals(SLIDING_WINDOW) || functionCall.getFunction().getName().equals(SESSION_WINDOW)) { throw new RuntimeException( functionCall.getFunction().getName() + " shouldn't appear in SELECT exprlist."); } if (!functionCall.getFunction().getGroup().equals(PRE_DEFINED_WINDOW_FUNCTIONS)) { return convertRexNodeFromResolvedExpr(column.getExpr(), columnList, fieldList); } List<RexNode> operands = new ArrayList<>(); switch (functionCall.getFunction().getName()) { case FIXED_WINDOW_START: case SLIDING_WINDOW_START: case SESSION_WINDOW_START: case SESSION_WINDOW_END: return rexBuilder() .makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex); case FIXED_WINDOW_END: operands.add( rexBuilder().makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex)); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(0))); return rexBuilder().makeCall(SqlStdOperatorTable.PLUS, operands); case SLIDING_WINDOW_END: operands.add( rexBuilder().makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex)); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(1))); return rexBuilder().makeCall(SqlStdOperatorTable.PLUS, operands); default: throw new RuntimeException( "Does not support window start/end: " + functionCall.getFunction().getName()); } } /** Convert a resolved literal to a RexNode. 
*/ private RexNode convertValueToRexNode(Type type, Value value) { RexNode ret; switch (type.getKind()) { case TYPE_BOOL: case TYPE_INT32: case TYPE_INT64: case TYPE_FLOAT: case TYPE_DOUBLE: case TYPE_STRING: case TYPE_TIMESTAMP: case TYPE_DATE: case TYPE_TIME: case TYPE_BYTES: ret = convertSimpleValueToRexNode(type.getKind(), value); break; case TYPE_ARRAY: ret = convertArrayValueToRexNode(type.asArray(), value); break; case TYPE_ENUM: ret = convertEnumToRexNode(type.asEnum(), value); break; default: throw new RuntimeException( "Unsupported ResolvedLiteral kind: " + type.getKind() + " type: " + type.typeName()); } return ret; } private RexNode convertSimpleValueToRexNode(TypeKind kind, Value value) { if (value.isNull()) { return rexBuilder().makeNullLiteral(TypeUtils.toSimpleRelDataType(kind, rexBuilder())); } RexNode ret; switch (kind) { case TYPE_BOOL: ret = rexBuilder().makeLiteral(value.getBoolValue()); break; case TYPE_INT32: ret = rexBuilder() .makeExactLiteral( new BigDecimal(value.getInt32Value()), TypeUtils.toSimpleRelDataType(kind, rexBuilder())); break; case TYPE_INT64: ret = rexBuilder() .makeExactLiteral( new BigDecimal(value.getInt64Value()), TypeUtils.toSimpleRelDataType(kind, rexBuilder())); break; case TYPE_FLOAT: ret = rexBuilder() .makeApproxLiteral( new BigDecimal(value.getFloatValue()), TypeUtils.toSimpleRelDataType(kind, rexBuilder())); break; case TYPE_DOUBLE: ret = rexBuilder() .makeApproxLiteral( new BigDecimal(value.getDoubleValue()), TypeUtils.toSimpleRelDataType(kind, rexBuilder())); break; case TYPE_STRING: ret = rexBuilder() .makeLiteral( value.getStringValue(), typeFactory().createSqlType(SqlTypeName.VARCHAR), true); break; case TYPE_TIMESTAMP: ret = rexBuilder() .makeTimestampLiteral( TimestampString.fromMillisSinceEpoch( safeMicrosToMillis(value.getTimestampUnixMicros())), typeFactory().getTypeSystem().getMaxPrecision(SqlTypeName.TIMESTAMP)); break; case TYPE_DATE: ret = rexBuilder().makeDateLiteral(convertDateValueToDateString(value)); break; case TYPE_TIME: RelDataType timeType = typeFactory() .createSqlType( SqlTypeName.TIME, typeFactory().getTypeSystem().getMaxPrecision(SqlTypeName.TIME)); ret = rexBuilder().makeLiteral(convertTimeValueToTimeString(value), timeType, false); break; case TYPE_BYTES: ret = rexBuilder().makeBinaryLiteral(new ByteString(value.getBytesValue().toByteArray())); break; default: throw new RuntimeException("Unsupported column type: " + kind); } return ret; } private RexNode convertArrayValueToRexNode(ArrayType arrayType, Value value) { if (value.isNull()) { return rexBuilder() .makeNullLiteral(TypeUtils.toArrayRelDataType(rexBuilder(), arrayType, false)); } List<RexNode> operands = new ArrayList<>(); for (Value v : value.getElementList()) { operands.add(convertValueToRexNode(arrayType.getElementType(), v)); } return rexBuilder().makeCall(SqlStdOperatorTable.ARRAY_VALUE_CONSTRUCTOR, operands); } private RexNode convertEnumToRexNode(EnumType type, Value value) { if ("zetasql.functions.DateTimestampPart".equals(type.getDescriptor().getFullName())) { return convertTimeUnitRangeEnumToRexNode(type, value); } else { throw new RuntimeException( MessageFormat.format( "Unsupported enum. Kind: {0} Type: {1}", type.getKind(), type.typeName())); } } private RexNode convertTimeUnitRangeEnumToRexNode(Type type, Value value) { TimeUnit mappedUnit = TIME_UNIT_CASTING_MAP.get(value.getEnumValue()); if (mappedUnit == null) { throw new RuntimeException( MessageFormat.format( "Unsupported enum value. 
Kind: {0} Type: {1} Value: {2} EnumName: {3}", type.getKind(), type.typeName(), value.getEnumName(), value.getEnumValue())); } TimeUnitRange mappedRange = TimeUnitRange.of(mappedUnit, null); return rexBuilder().makeFlag(mappedRange); } private RexNode convertResolvedColumnRef( ResolvedColumnRef columnRef, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { int index = indexOfProjectionColumnRef(columnRef.getColumn().getId(), columnList); if (index < 0 || index >= columnList.size()) { throw new RuntimeException( String.format("Cannot find %s in fieldList %s", columnRef.getColumn(), fieldList)); } return rexBuilder().makeInputRef(fieldList.get(index).getType(), index); } private RexNode convertResolvedColumnRef(ResolvedColumnRef columnRef) { return rexBuilder() .makeInputRef( TypeUtils.toRelDataType(rexBuilder(), columnRef.getType(), false), (int) columnRef.getColumn().getId() - 1); } /** Return an index of the projection column reference. */ public int indexOfProjectionColumnRef(long colId, List<ResolvedColumn> columnList) { int ret = -1; for (int i = 0; i < columnList.size(); i++) { if (columnList.get(i).getId() == colId) { ret = i; break; } } return ret; } private RexNode convertResolvedFunctionCall( ResolvedFunctionCall functionCall, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { RexNode ret; SqlOperator op; List<RexNode> operands = new ArrayList<>(); if (functionCall.getFunction().getGroup().equals(PRE_DEFINED_WINDOW_FUNCTIONS)) { switch (functionCall.getFunction().getName()) { case FIXED_WINDOW: case SESSION_WINDOW: op = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( functionCall.getFunction().getName()); operands.add( convertRexNodeFromResolvedExpr( functionCall.getArgumentList().get(0), columnList, fieldList)); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(1))); break; case SLIDING_WINDOW: op = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( SLIDING_WINDOW); operands.add( convertRexNodeFromResolvedExpr( functionCall.getArgumentList().get(0), columnList, fieldList)); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(1))); operands.add( convertIntervalToRexIntervalLiteral( (ResolvedLiteral) functionCall.getArgumentList().get(2))); break; default: throw new RuntimeException("Only support TUMBLE, HOP AND SESSION functions right now."); } } else if (functionCall.getFunction().getGroup().equals("ZetaSQL")) { op = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( functionCall.getFunction().getName()); if (op == null) { throw new RuntimeException( "Does not support ZetaSQL function: " + functionCall.getFunction().getName()); } if (FUNCTION_FAMILY_DATE_ADD.contains(functionCall.getFunction().getName())) { return convertTimestampAddFunction(functionCall, columnList, fieldList); } else { for (ResolvedExpr expr : functionCall.getArgumentList()) { operands.add(convertRexNodeFromResolvedExpr(expr, columnList, fieldList)); } } } else { throw new RuntimeException( "Does not support function group: " + functionCall.getFunction().getGroup()); } SqlOperatorRewriter rewriter = SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR_REWRITER.get( functionCall.getFunction().getName()); if (rewriter != null) { ret = rewriter.apply(rexBuilder(), operands); } else { ret = rexBuilder().makeCall(op, operands); } return ret; } private RexNode convertTimestampAddFunction( 
ResolvedFunctionCall functionCall, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { TimeUnit unit = TIME_UNIT_CASTING_MAP.get( ((ResolvedLiteral) functionCall.getArgumentList().get(2)).getValue().getEnumValue()); if ((unit == TimeUnit.MICROSECOND) || (unit == TimeUnit.NANOSECOND)) { throw Status.UNIMPLEMENTED .withDescription("Micro and Nanoseconds are not supported by Beam ZetaSQL") .asRuntimeException(); } SqlIntervalQualifier qualifier = new SqlIntervalQualifier(unit, null, SqlParserPos.ZERO); RexNode intervalArgumentNode = convertRexNodeFromResolvedExpr( functionCall.getArgumentList().get(1), columnList, fieldList); RexNode validatedIntervalArgument = rexBuilder() .makeCall( SqlOperators.VALIDATE_TIME_INTERVAL, intervalArgumentNode, rexBuilder().makeFlag(unit)); RexNode intervalNode = rexBuilder() .makeCall( SqlStdOperatorTable.MULTIPLY, rexBuilder().makeIntervalLiteral(unit.multiplier, qualifier), validatedIntervalArgument); RexNode timestampNode = convertRexNodeFromResolvedExpr( functionCall.getArgumentList().get(0), columnList, fieldList); RexNode dateTimePlusResult = rexBuilder().makeCall(SqlStdOperatorTable.DATETIME_PLUS, timestampNode, intervalNode); RexNode validatedTimestampResult = rexBuilder().makeCall(SqlOperators.VALIDATE_TIMESTAMP, dateTimePlusResult); return validatedTimestampResult; } private RexNode convertIntervalToRexIntervalLiteral(ResolvedLiteral resolvedLiteral) { if (resolvedLiteral.getType().getKind() != TYPE_STRING) { throw new IllegalArgumentException(INTERVAL_FORMAT_MSG); } String valStr = resolvedLiteral.getValue().getStringValue(); List<String> stringList = Arrays.stream(valStr.split(" ")).filter(s -> !s.isEmpty()).collect(Collectors.toList()); if (stringList.size() != 3) { throw new IllegalArgumentException(INTERVAL_FORMAT_MSG); } if (!Ascii.toUpperCase(stringList.get(0)).equals("INTERVAL")) { throw new IllegalArgumentException(INTERVAL_FORMAT_MSG); } long intervalValue; try { intervalValue = Long.parseLong(stringList.get(1)); } catch (NumberFormatException e) { throw new IllegalArgumentException(INTERVAL_FORMAT_MSG, e); } String intervalDatepart = Ascii.toUpperCase(stringList.get(2)); return createCalciteIntervalRexLiteral(intervalValue, intervalDatepart); } private RexLiteral createCalciteIntervalRexLiteral(long intervalValue, String intervalTimeUnit) { SqlIntervalQualifier sqlIntervalQualifier = convertIntervalDatepartToSqlIntervalQualifier(intervalTimeUnit); BigDecimal decimalValue; if (DATE_PART_UNITS_TO_MILLIS.contains(intervalTimeUnit)) { decimalValue = convertIntervalValueToMillis(sqlIntervalQualifier, intervalValue); } else if (DATE_PART_UNITS_TO_MONTHS.contains(intervalTimeUnit)) { decimalValue = new BigDecimal(intervalValue * 12); } else { decimalValue = new BigDecimal(intervalValue); } return rexBuilder().makeIntervalLiteral(decimalValue, sqlIntervalQualifier); } private static BigDecimal convertIntervalValueToMillis( SqlIntervalQualifier qualifier, long value) { switch (qualifier.typeName()) { case INTERVAL_DAY: return new BigDecimal(value * ONE_DAY_IN_MILLIS); case INTERVAL_HOUR: return new BigDecimal(value * ONE_HOUR_IN_MILLIS); case INTERVAL_MINUTE: return new BigDecimal(value * ONE_MINUTE_IN_MILLIS); case INTERVAL_SECOND: return new BigDecimal(value * ONE_SECOND_IN_MILLIS); default: throw new IllegalArgumentException(qualifier.typeName().toString()); } } private static SqlIntervalQualifier convertIntervalDatepartToSqlIntervalQualifier( String datePart) { switch (datePart) { case "YEAR": return new 
SqlIntervalQualifier(TimeUnit.YEAR, null, SqlParserPos.ZERO); case "MONTH": return new SqlIntervalQualifier(TimeUnit.MONTH, null, SqlParserPos.ZERO); case "DAY": return new SqlIntervalQualifier(TimeUnit.DAY, null, SqlParserPos.ZERO); case "HOUR": return new SqlIntervalQualifier(TimeUnit.HOUR, null, SqlParserPos.ZERO); case "MINUTE": return new SqlIntervalQualifier(TimeUnit.MINUTE, null, SqlParserPos.ZERO); case "SECOND": return new SqlIntervalQualifier(TimeUnit.SECOND, null, SqlParserPos.ZERO); case "WEEK": return new SqlIntervalQualifier(TimeUnit.WEEK, null, SqlParserPos.ZERO); case "QUARTER": return new SqlIntervalQualifier(TimeUnit.QUARTER, null, SqlParserPos.ZERO); case "MILLISECOND": return new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, SqlParserPos.ZERO); default: throw new RuntimeException( String.format( "Received an undefined INTERVAL unit: %s. Please specify unit from the following" + " list: %s.", datePart, INTERVAL_DATE_PART_MSG)); } } private RexNode convertResolvedCast( ResolvedCast resolvedCast, List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { TypeKind fromType = resolvedCast.getExpr().getType().getKind(); TypeKind toType = resolvedCast.getType().getKind(); isCastingSupported(fromType, toType); RexNode inputNode = convertRexNodeFromResolvedExpr(resolvedCast.getExpr(), columnList, fieldList); RelDataType outputType = TypeUtils.toSimpleRelDataType( resolvedCast.getType().getKind(), rexBuilder(), inputNode.getType().isNullable()); if (isZetaSQLCast(fromType, toType)) { return rexBuilder().makeCall(outputType, ZETASQL_CAST_OP, ImmutableList.of(inputNode)); } else { return rexBuilder().makeCast(outputType, inputNode); } } private static void isCastingSupported(TypeKind fromType, TypeKind toType) { if (UNSUPPORTED_CASTING.containsKey(toType) && UNSUPPORTED_CASTING.get(toType).contains(fromType)) { throw new IllegalArgumentException( "Does not support CAST(" + fromType + " AS " + toType + ")"); } } private static boolean isZetaSQLCast(TypeKind fromType, TypeKind toType) { return (fromType.equals(TYPE_BYTES) && toType.equals(TYPE_STRING)) || (fromType.equals(TYPE_INT64) && toType.equals(TYPE_BOOL)) || (fromType.equals(TYPE_BOOL) && toType.equals(TYPE_INT64)) || (fromType.equals(TYPE_TIMESTAMP) && toType.equals(TYPE_STRING)); } private Optional<RexNode> convertRexNodeFromResolvedColumnRefWithRefScan( ResolvedColumnRef columnRef, List<ResolvedColumn> refScanColumnList, List<ResolvedColumn> originalColumnList, List<RelDataTypeField> fieldList) { for (int i = 0; i < refScanColumnList.size(); i++) { if (refScanColumnList.get(i).getId() == columnRef.getColumn().getId()) { boolean nullable = fieldList.get(i).getType().isNullable(); int off = (int) originalColumnList.get(i).getId() - 1; return Optional.of( rexBuilder() .makeInputRef( TypeUtils.toSimpleRelDataType( columnRef.getType().getKind(), rexBuilder(), nullable), off)); } } return Optional.empty(); } private RexNode convertResolvedParameter(ResolvedParameter parameter) { assert parameter.getType().equals(queryParams.get(parameter.getName()).getType()); return convertValueToRexNode( queryParams.get(parameter.getName()).getType(), queryParams.get(parameter.getName())); } private RexNode convertResolvedStructFieldAccess(ResolvedGetStructField resolvedGetStructField) { return rexBuilder() .makeFieldAccess( convertRexNodeFromResolvedExpr(resolvedGetStructField.getExpr()), (int) resolvedGetStructField.getFieldIdx()); } private RexNode convertResolvedStructFieldAccess( ResolvedGetStructField resolvedGetStructField, 
List<ResolvedColumn> columnList, List<RelDataTypeField> fieldList) { return rexBuilder() .makeFieldAccess( convertRexNodeFromResolvedExpr(resolvedGetStructField.getExpr(), columnList, fieldList), (int) resolvedGetStructField.getFieldIdx()); } private RexBuilder rexBuilder() { return cluster.getRexBuilder(); } private RelDataTypeFactory typeFactory() { return cluster.getTypeFactory(); } }
Sorry, actually this cannot be synchronized (either using this or isReady), because a synchronized block blocks other threads from entering any block synchronized on the same monitor. Here it would wait for the signal indefinitely and cause a deadlock.
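A minimal, self-contained sketch (hypothetical class name, not part of the test) of the failure mode this comment describes: busy-waiting inside a synchronized block never releases the monitor, so the signalling thread can never enter its own synchronized block on the same lock. Object.wait(), by contrast, releases the monitor while waiting, which is why the wait()/notify() pattern in the test below does not deadlock.

import java.util.concurrent.atomic.AtomicBoolean;

public class MonitorDeadlockSketch {
    public static void main(String[] args) throws InterruptedException {
        final Object lock = new Object();
        final AtomicBoolean ready = new AtomicBoolean(false);

        Thread waiter = new Thread(() -> {
            synchronized (lock) {
                while (!ready.get()) {
                    // busy-wait: unlike lock.wait(), this keeps holding the monitor
                }
            }
        });
        Thread signaller = new Thread(() -> {
            synchronized (lock) { // blocks forever: the waiter never releases the lock
                ready.set(true);
                lock.notify();
            }
        });
        waiter.setDaemon(true);
        signaller.setDaemon(true);
        waiter.start();
        signaller.start();

        waiter.join(1000);
        // after the timeout both threads are still stuck -> the deadlock is visible
        System.out.println(
                "waiter alive: " + waiter.isAlive() + ", signaller alive: " + signaller.isAlive());
    }
}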
public void testCloseVisibleToAwaitCompletionCallerAndProducer() throws Exception { BeamFnDataInboundObserver observer = BeamFnDataInboundObserver.forConsumers( Arrays.asList(DataEndpoint.create(TRANSFORM_ID, CODER, (value) -> {})), Collections.emptyList()); AtomicBoolean isReady = new AtomicBoolean(false); Future<?> future = executor.submit( () -> { observer.accept(dataWith("ABC")); synchronized (isReady) { isReady.set(true); isReady.notify(); } assertThrows( BeamFnDataInboundObserver.CloseException.class, () -> { while (true) { observer.accept(dataWith("ABC")); } }); return null; }); Future<?> future2 = executor.submit( () -> { synchronized (isReady) { while (!isReady.get()) { isReady.wait(); } } observer.close(); return null; }); assertThrows(BeamFnDataInboundObserver.CloseException.class, () -> observer.awaitCompletion()); future.get(); future2.get(); }
synchronized (isReady) {
public void testCloseVisibleToAwaitCompletionCallerAndProducer() throws Exception { BeamFnDataInboundObserver observer = BeamFnDataInboundObserver.forConsumers( Arrays.asList(DataEndpoint.create(TRANSFORM_ID, CODER, (value) -> {})), Collections.emptyList()); AtomicBoolean isReady = new AtomicBoolean(false); Future<?> future = executor.submit( () -> { observer.accept(dataWith("ABC")); synchronized (isReady) { isReady.set(true); isReady.notify(); } assertThrows( BeamFnDataInboundObserver.CloseException.class, () -> { while (true) { observer.accept(dataWith("ABC")); } }); return null; }); Future<?> future2 = executor.submit( () -> { synchronized (isReady) { while (!isReady.get()) { isReady.wait(); } } observer.close(); return null; }); assertThrows(BeamFnDataInboundObserver.CloseException.class, () -> observer.awaitCompletion()); future.get(); future2.get(); }
class BeamFnDataInboundObserverTest { private static final Coder<WindowedValue<String>> CODER = WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE); private static final String TRANSFORM_ID = "transformId"; private static final String TIMER_FAMILY_ID = "timerFamilyId"; @Rule public final TestExecutorService executor = TestExecutors.from(Executors::newCachedThreadPool); @Test public void testConsumptionOfValuesHappensOnAwaitCompletionCallersThread() throws Exception { Thread thread = Thread.currentThread(); Collection<WindowedValue<String>> values = new ArrayList<>(); Collection<WindowedValue<String>> timers = new ArrayList<>(); BeamFnDataInboundObserver observer = BeamFnDataInboundObserver.forConsumers( Arrays.asList( DataEndpoint.create( TRANSFORM_ID, CODER, (value) -> { assertSame(thread, Thread.currentThread()); values.add(value); })), Arrays.asList( TimerEndpoint.create( TRANSFORM_ID, TIMER_FAMILY_ID, CODER, (value) -> { assertSame(thread, Thread.currentThread()); timers.add(value); }))); Future<?> future = executor.submit( () -> { observer.accept(dataWith("ABC", "DEF", "GHI")); observer.accept(lastData()); observer.accept(timerWith("UVW")); observer.accept(timerWith("XYZ")); observer.accept(lastTimer()); return null; }); observer.awaitCompletion(); assertThat( values, contains( WindowedValue.valueInGlobalWindow("ABC"), WindowedValue.valueInGlobalWindow("DEF"), WindowedValue.valueInGlobalWindow("GHI"))); assertThat( timers, contains( WindowedValue.valueInGlobalWindow("UVW"), WindowedValue.valueInGlobalWindow("XYZ"))); future.get(); } @Test public void testAwaitCompletionFailureVisibleToAwaitCompletionCallerAndProducer() throws Exception { BeamFnDataInboundObserver observer = BeamFnDataInboundObserver.forConsumers( Arrays.asList( DataEndpoint.create( TRANSFORM_ID, CODER, (value) -> { throw new Exception("test consumer failed"); })), Collections.emptyList()); Future<?> future = executor.submit( () -> { observer.accept(dataWith("ABC")); assertThrows( "test consumer failed", Exception.class, () -> { while (true) { observer.accept(dataWith("ABC")); } }); return null; }); assertThrows("test consumer failed", Exception.class, () -> observer.awaitCompletion()); future.get(); } @Test @Test public void testBadProducerDataFailureVisibleToAwaitCompletionCallerAndProducer() throws Exception { BeamFnDataInboundObserver observer = BeamFnDataInboundObserver.forConsumers( Arrays.asList(DataEndpoint.create(TRANSFORM_ID, CODER, (value) -> {})), Collections.emptyList()); Future<?> future = executor.submit( () -> { observer.accept(timerWith("DEF")); assertThrows( "Unable to find inbound timer receiver for instruction", IllegalStateException.class, () -> { while (true) { observer.accept(dataWith("ABC")); } }); return null; }); assertThrows( "Unable to find inbound timer receiver for instruction", IllegalStateException.class, () -> observer.awaitCompletion()); future.get(); } private BeamFnApi.Elements dataWith(String... 
values) throws Exception { ByteStringOutputStream output = new ByteStringOutputStream(); for (String value : values) { CODER.encode(WindowedValue.valueInGlobalWindow(value), output); } return BeamFnApi.Elements.newBuilder() .addData( BeamFnApi.Elements.Data.newBuilder() .setTransformId(TRANSFORM_ID) .setData(output.toByteString())) .build(); } private BeamFnApi.Elements lastData() throws Exception { return BeamFnApi.Elements.newBuilder() .addData(BeamFnApi.Elements.Data.newBuilder().setTransformId(TRANSFORM_ID).setIsLast(true)) .build(); } private BeamFnApi.Elements timerWith(String... values) throws Exception { ByteStringOutputStream output = new ByteStringOutputStream(); for (String value : values) { CODER.encode(WindowedValue.valueInGlobalWindow(value), output); } return BeamFnApi.Elements.newBuilder() .addTimers( BeamFnApi.Elements.Timers.newBuilder() .setTransformId(TRANSFORM_ID) .setTimerFamilyId(TIMER_FAMILY_ID) .setTimers(output.toByteString())) .build(); } private BeamFnApi.Elements lastTimer() throws Exception { return BeamFnApi.Elements.newBuilder() .addTimers( BeamFnApi.Elements.Timers.newBuilder() .setTransformId(TRANSFORM_ID) .setTimerFamilyId(TIMER_FAMILY_ID) .setIsLast(true)) .build(); } }
class BeamFnDataInboundObserverTest { private static final Coder<WindowedValue<String>> CODER = WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE); private static final String TRANSFORM_ID = "transformId"; private static final String TIMER_FAMILY_ID = "timerFamilyId"; @Rule public final TestExecutorService executor = TestExecutors.from(Executors::newCachedThreadPool); @Test public void testConsumptionOfValuesHappensOnAwaitCompletionCallersThread() throws Exception { Thread thread = Thread.currentThread(); Collection<WindowedValue<String>> values = new ArrayList<>(); Collection<WindowedValue<String>> timers = new ArrayList<>(); BeamFnDataInboundObserver observer = BeamFnDataInboundObserver.forConsumers( Arrays.asList( DataEndpoint.create( TRANSFORM_ID, CODER, (value) -> { assertSame(thread, Thread.currentThread()); values.add(value); })), Arrays.asList( TimerEndpoint.create( TRANSFORM_ID, TIMER_FAMILY_ID, CODER, (value) -> { assertSame(thread, Thread.currentThread()); timers.add(value); }))); Future<?> future = executor.submit( () -> { observer.accept(dataWith("ABC", "DEF", "GHI")); observer.accept(lastData()); observer.accept(timerWith("UVW")); observer.accept(timerWith("XYZ")); observer.accept(lastTimer()); return null; }); observer.awaitCompletion(); assertThat( values, contains( WindowedValue.valueInGlobalWindow("ABC"), WindowedValue.valueInGlobalWindow("DEF"), WindowedValue.valueInGlobalWindow("GHI"))); assertThat( timers, contains( WindowedValue.valueInGlobalWindow("UVW"), WindowedValue.valueInGlobalWindow("XYZ"))); future.get(); } @Test public void testAwaitCompletionFailureVisibleToAwaitCompletionCallerAndProducer() throws Exception { BeamFnDataInboundObserver observer = BeamFnDataInboundObserver.forConsumers( Arrays.asList( DataEndpoint.create( TRANSFORM_ID, CODER, (value) -> { throw new Exception("test consumer failed"); })), Collections.emptyList()); Future<?> future = executor.submit( () -> { observer.accept(dataWith("ABC")); assertThrows( "test consumer failed", Exception.class, () -> { while (true) { observer.accept(dataWith("ABC")); } }); return null; }); assertThrows("test consumer failed", Exception.class, () -> observer.awaitCompletion()); future.get(); } @Test @Test public void testBadProducerDataFailureVisibleToAwaitCompletionCallerAndProducer() throws Exception { BeamFnDataInboundObserver observer = BeamFnDataInboundObserver.forConsumers( Arrays.asList(DataEndpoint.create(TRANSFORM_ID, CODER, (value) -> {})), Collections.emptyList()); Future<?> future = executor.submit( () -> { observer.accept(timerWith("DEF")); assertThrows( "Unable to find inbound timer receiver for instruction", IllegalStateException.class, () -> { while (true) { observer.accept(dataWith("ABC")); } }); return null; }); assertThrows( "Unable to find inbound timer receiver for instruction", IllegalStateException.class, () -> observer.awaitCompletion()); future.get(); } private BeamFnApi.Elements dataWith(String... 
values) throws Exception { ByteStringOutputStream output = new ByteStringOutputStream(); for (String value : values) { CODER.encode(WindowedValue.valueInGlobalWindow(value), output); } return BeamFnApi.Elements.newBuilder() .addData( BeamFnApi.Elements.Data.newBuilder() .setTransformId(TRANSFORM_ID) .setData(output.toByteString())) .build(); } private BeamFnApi.Elements lastData() throws Exception { return BeamFnApi.Elements.newBuilder() .addData(BeamFnApi.Elements.Data.newBuilder().setTransformId(TRANSFORM_ID).setIsLast(true)) .build(); } private BeamFnApi.Elements timerWith(String... values) throws Exception { ByteStringOutputStream output = new ByteStringOutputStream(); for (String value : values) { CODER.encode(WindowedValue.valueInGlobalWindow(value), output); } return BeamFnApi.Elements.newBuilder() .addTimers( BeamFnApi.Elements.Timers.newBuilder() .setTransformId(TRANSFORM_ID) .setTimerFamilyId(TIMER_FAMILY_ID) .setTimers(output.toByteString())) .build(); } private BeamFnApi.Elements lastTimer() throws Exception { return BeamFnApi.Elements.newBuilder() .addTimers( BeamFnApi.Elements.Timers.newBuilder() .setTransformId(TRANSFORM_ID) .setTimerFamilyId(TIMER_FAMILY_ID) .setIsLast(true)) .build(); } }
nit: I feel like splitting up ZK paths is also something that should go into ZooKeeperUtils, analogously to generating the path.
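For reference, a rough sketch of the helper the revised code calls; the signature is an assumption inferred from its call sites here, and Flink's actual ZooKeeperUtils may differ. Keeping it next to generateZookeeperPath makes splitting and generating paths symmetric.

public final class ZooKeeperUtilsSketch {
    // Hypothetical stand-in for ZooKeeperUtils.splitZooKeeperPath.
    // "/leader_name/connection_info" -> ["", "leader_name", "connection_info"];
    // callers index from the end of the array, so the leading empty segment
    // produced by the leading "/" is harmless.
    public static String[] splitZooKeeperPath(String path) {
        return path.split("/");
    }

    private ZooKeeperUtilsSketch() {}
}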
private void handleChangedLeaderInformation(ChildData childData) { if (running.get() && leaderLatch.hasLeadership() && isConnectionInfoNode(childData)) { final String path = childData.getPath(); final String[] splits = path.split("/"); Preconditions.checkState( splits.length >= 2, String.format( "Expecting path consisting of <leader_name>/connection_info. Got path '%s'", path)); final String leaderName = splits[splits.length - 2]; final LeaderInformation leaderInformation = tryReadingLeaderInformation(childData, leaderName); leaderElectionListener.notifyLeaderInformationChange(leaderName, leaderInformation); } }
final String[] splits = path.split("/");
private void handleChangedLeaderInformation(ChildData childData) { if (shouldHandleLeaderInformationEvent(childData.getPath())) { final String leaderName = extractLeaderName(childData.getPath()); final LeaderInformation leaderInformation = tryReadingLeaderInformation(childData, leaderName); leaderElectionListener.notifyLeaderInformationChange(leaderName, leaderInformation); } }
class ZooKeeperMultipleComponentLeaderElectionDriver implements MultipleComponentLeaderElectionDriver, LeaderLatchListener { private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperMultipleComponentLeaderElectionDriver.class); private final CuratorFramework curatorFramework; private final String leaderContenderDescription; private final MultipleComponentLeaderElectionDriver.Listener leaderElectionListener; private final LeaderLatch leaderLatch; private final TreeCache treeCache; private final ConnectionStateListener listener = (client, newState) -> handleStateChange(newState); private AtomicBoolean running = new AtomicBoolean(true); public ZooKeeperMultipleComponentLeaderElectionDriver( CuratorFramework curatorFramework, String leaderContenderDescription, MultipleComponentLeaderElectionDriver.Listener leaderElectionListener) throws Exception { this.curatorFramework = curatorFramework; this.leaderContenderDescription = leaderContenderDescription; this.leaderElectionListener = leaderElectionListener; this.leaderLatch = new LeaderLatch(curatorFramework, ZooKeeperUtils.getLeaderLatchNode()); this.treeCache = TreeCache.newBuilder(curatorFramework, "/") .setCacheData(true) .setCreateParentNodes(false) .setSelector( new ZooKeeperMultipleComponentLeaderElectionDriver .ConnectionInfoNodeSelector()) .setExecutor(Executors.newDirectExecutorService()) .build(); treeCache .getListenable() .addListener( (client, event) -> { switch (event.getType()) { case NODE_ADDED: case NODE_REMOVED: case NODE_UPDATED: if (event.getData() != null) { handleChangedLeaderInformation(event.getData()); } } }); leaderLatch.addListener(this); curatorFramework.getConnectionStateListenable().addListener(listener); leaderLatch.start(); treeCache.start(); } @Override public void close() throws Exception { if (running.compareAndSet(true, false)) { LOG.info("Closing {}.", this); curatorFramework.getConnectionStateListenable().removeListener(listener); Exception exception = null; try { treeCache.close(); } catch (Exception e) { exception = e; } try { leaderLatch.close(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } ExceptionUtils.tryRethrowException(exception); } } @Override public boolean hasLeadership() { return leaderLatch.hasLeadership(); } @Override public void publishLeaderInformation(String componentId, LeaderInformation leaderInformation) throws Exception { Preconditions.checkState(running.get()); if (LOG.isDebugEnabled()) { LOG.debug("Write leader information {} for {}.", leaderInformation, componentId); } if (!leaderLatch.hasLeadership() || leaderInformation.isEmpty()) { return; } final String connectionInformationPath = ZooKeeperUtils.generateConnectionInformationPath(componentId); ZooKeeperUtils.writeLeaderInformationToZooKeeper( leaderInformation, curatorFramework, leaderLatch::hasLeadership, connectionInformationPath); } @Override public void deleteLeaderInformation(String leaderName) throws Exception { ZooKeeperUtils.deleteZNode(curatorFramework, ZooKeeperUtils.makeZooKeeperPath(leaderName)); } private void handleStateChange(ConnectionState newState) { switch (newState) { case CONNECTED: LOG.debug("Connected to ZooKeeper quorum. Leader election can start."); break; case SUSPENDED: LOG.warn("Connection to ZooKeeper suspended, waiting for reconnection."); break; case RECONNECTED: LOG.info( "Connection to ZooKeeper was reconnected. Leader election can be restarted."); break; case LOST: LOG.warn( "Connection to ZooKeeper lost. 
The contender " + leaderContenderDescription + " no longer participates in the leader election."); break; } } @Override public void isLeader() { LOG.debug("{} obtained the leadership.", this); leaderElectionListener.isLeader(); } @Override public void notLeader() { LOG.debug("{} lost the leadership.", this); leaderElectionListener.notLeader(); } private boolean isConnectionInfoNode(ChildData childData) { return childData.getPath().endsWith(ZooKeeperUtils.CONNECTION_INFO_NODE); } private LeaderInformation tryReadingLeaderInformation(ChildData childData, String id) { LeaderInformation leaderInformation; try { leaderInformation = ZooKeeperUtils.readLeaderInformation(childData.getData()); LOG.debug("Leader information for {} has changed to {}.", id, leaderInformation); } catch (IOException | ClassNotFoundException e) { LOG.debug( "Could not read leader information for {}. Rewriting the information.", id, e); leaderInformation = LeaderInformation.empty(); } return leaderInformation; } private static class ConnectionInfoNodeSelector implements TreeCacheSelector { @Override public boolean traverseChildren(String fullPath) { return true; } @Override public boolean acceptChild(String fullPath) { return !fullPath.endsWith(ZooKeeperUtils.getLeaderLatchNode()); } } @Override public String toString() { return "ZooKeeperMultipleComponentLeaderElectionDriver(description = \"" + leaderContenderDescription + "\")"; } }
class ZooKeeperMultipleComponentLeaderElectionDriver implements MultipleComponentLeaderElectionDriver, LeaderLatchListener { private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperMultipleComponentLeaderElectionDriver.class); private final CuratorFramework curatorFramework; private final MultipleComponentLeaderElectionDriver.Listener leaderElectionListener; private final LeaderLatch leaderLatch; private final TreeCache treeCache; private final ConnectionStateListener listener = (client, newState) -> handleStateChange(newState); private AtomicBoolean running = new AtomicBoolean(true); public ZooKeeperMultipleComponentLeaderElectionDriver( CuratorFramework curatorFramework, MultipleComponentLeaderElectionDriver.Listener leaderElectionListener) throws Exception { this.curatorFramework = Preconditions.checkNotNull(curatorFramework); this.leaderElectionListener = Preconditions.checkNotNull(leaderElectionListener); this.leaderLatch = new LeaderLatch(curatorFramework, ZooKeeperUtils.getLeaderLatchPath()); this.treeCache = TreeCache.newBuilder(curatorFramework, "/") .setCacheData(true) .setCreateParentNodes(false) .setSelector( new ZooKeeperMultipleComponentLeaderElectionDriver .ConnectionInfoNodeSelector()) .setExecutor(Executors.newDirectExecutorService()) .build(); treeCache .getListenable() .addListener( (client, event) -> { switch (event.getType()) { case NODE_ADDED: case NODE_UPDATED: Preconditions.checkNotNull( event.getData(), "The ZooKeeper event data must not be null."); handleChangedLeaderInformation(event.getData()); break; case NODE_REMOVED: Preconditions.checkNotNull( event.getData(), "The ZooKeeper event data must not be null."); handleRemovedLeaderInformation(event.getData().getPath()); break; } }); leaderLatch.addListener(this); curatorFramework.getConnectionStateListenable().addListener(listener); leaderLatch.start(); treeCache.start(); } @Override public void close() throws Exception { if (running.compareAndSet(true, false)) { LOG.info("Closing {}.", this); curatorFramework.getConnectionStateListenable().removeListener(listener); Exception exception = null; try { treeCache.close(); } catch (Exception e) { exception = e; } try { leaderLatch.close(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } ExceptionUtils.tryRethrowException(exception); } } @Override public boolean hasLeadership() { return leaderLatch.hasLeadership(); } @Override public void publishLeaderInformation(String componentId, LeaderInformation leaderInformation) throws Exception { Preconditions.checkState(running.get()); if (LOG.isDebugEnabled()) { LOG.debug("Write leader information {} for {}.", leaderInformation, componentId); } if (!leaderLatch.hasLeadership()) { return; } final String connectionInformationPath = ZooKeeperUtils.generateConnectionInformationPath(componentId); ZooKeeperUtils.writeLeaderInformationToZooKeeper( leaderInformation, curatorFramework, leaderLatch::hasLeadership, connectionInformationPath); } @Override public void deleteLeaderInformation(String leaderName) throws Exception { ZooKeeperUtils.deleteZNode( curatorFramework, ZooKeeperUtils.generateZookeeperPath(leaderName)); } private void handleStateChange(ConnectionState newState) { switch (newState) { case CONNECTED: LOG.debug("Connected to ZooKeeper quorum. Leader election can start."); break; case SUSPENDED: LOG.warn("Connection to ZooKeeper suspended, waiting for reconnection."); break; case RECONNECTED: LOG.info( "Connection to ZooKeeper was reconnected. 
Leader election can be restarted."); break; case LOST: LOG.warn( "Connection to ZooKeeper lost. The contender no longer participates in the leader election."); break; } } @Override public void isLeader() { LOG.debug("{} obtained the leadership.", this); leaderElectionListener.isLeader(); } @Override public void notLeader() { LOG.debug("{} lost the leadership.", this); leaderElectionListener.notLeader(); } private String extractLeaderName(String path) { final String[] splits = ZooKeeperUtils.splitZooKeeperPath(path); Preconditions.checkState( splits.length >= 2, String.format( "Expecting path consisting of /<leader_name>/connection_info. Got path '%s'", path)); return splits[splits.length - 2]; } private void handleRemovedLeaderInformation(String removedNodePath) { if (shouldHandleLeaderInformationEvent(removedNodePath)) { final String leaderName = extractLeaderName(removedNodePath); leaderElectionListener.notifyLeaderInformationChange( leaderName, LeaderInformation.empty()); } } private boolean shouldHandleLeaderInformationEvent(String path) { return running.get() && leaderLatch.hasLeadership() && isConnectionInfoNode(path); } private boolean isConnectionInfoNode(String path) { return path.endsWith(ZooKeeperUtils.CONNECTION_INFO_NODE); } private LeaderInformation tryReadingLeaderInformation(ChildData childData, String id) { LeaderInformation leaderInformation; try { leaderInformation = ZooKeeperUtils.readLeaderInformation(childData.getData()); LOG.debug("Leader information for {} has changed to {}.", id, leaderInformation); } catch (IOException | ClassNotFoundException e) { LOG.debug( "Could not read leader information for {}. Rewriting the information.", id, e); leaderInformation = LeaderInformation.empty(); } return leaderInformation; } /** * This selector finds all connection info nodes. See {@link * org.apache.flink.runtime.highavailability.zookeeper.ZooKeeperMultipleComponentLeaderElectionHaServices} * for more details on the Znode layout. */ private static class ConnectionInfoNodeSelector implements TreeCacheSelector { @Override public boolean traverseChildren(String fullPath) { return true; } @Override public boolean acceptChild(String fullPath) { return !fullPath.endsWith(ZooKeeperUtils.getLeaderLatchPath()); } } @Override public String toString() { return "ZooKeeperMultipleComponentLeaderElectionDriver"; } }
why are you changing this?
private static Configuration getConfiguration() { Configuration configuration = new Configuration(); configuration.set(KubernetesConfigOptions.CLUSTER_ID, CLUSTER_ID); configuration.set( HighAvailabilityOptions.HA_MODE, KubernetesHaServicesFactory.class.getCanonicalName()); try { temporaryPath = Files.createTempDirectory("haStorage"); } catch (IOException e) { throw new RuntimeException("can't create ha storage path."); } configuration.set( HighAvailabilityOptions.HA_STORAGE_PATH, temporaryPath.toAbsolutePath().toString()); return configuration; }
throw new RuntimeException("can't create ha storage path.");
private static Configuration getConfiguration() { Configuration configuration = new Configuration(); configuration.set(KubernetesConfigOptions.CLUSTER_ID, CLUSTER_ID); configuration.set( HighAvailabilityOptions.HA_MODE, KubernetesHaServicesFactory.class.getCanonicalName()); try { temporaryPath = Files.createTempDirectory("haStorage"); } catch (IOException e) { throw new FlinkRuntimeException("Failed to create HA storage", e); } configuration.set( HighAvailabilityOptions.HA_STORAGE_PATH, temporaryPath.toAbsolutePath().toString()); return configuration; }
class KubernetesHighAvailabilityRecoverFromSavepointITCase { private static final long TIMEOUT = 60 * 1000; private static final String CLUSTER_ID = "flink-on-k8s-cluster-" + System.currentTimeMillis(); private static final String FLAT_MAP_UID = "my-flat-map"; private static Path temporaryPath; @RegisterExtension @Order(1) private static final MiniClusterExtension miniClusterExtension = new MiniClusterExtension( new MiniClusterResourceConfiguration.Builder() .setConfiguration(getConfiguration()) .setNumberTaskManagers(1) .setNumberSlotsPerTaskManager(1) .build()); @RegisterExtension @Order(2) private static final KubernetesExtension kubernetesExtension = new KubernetesExtension(); private ClusterClient<?> clusterClient; private String savepointPath; @BeforeEach private void setup(@InjectClusterClient ClusterClient<?> clusterClient) throws Exception { this.clusterClient = clusterClient; this.savepointPath = Files.createDirectory(temporaryPath.resolve("savepoints")) .toAbsolutePath() .toString(); } @Test void testRecoverFromSavepoint() throws Exception { Path stateBackend1 = Files.createDirectory(temporaryPath.resolve("stateBackend1")); final JobGraph jobGraph = createJobGraph(stateBackend1.toFile()); clusterClient .submitJob(jobGraph) .get(TestingUtils.infiniteTime().toMilliseconds(), TimeUnit.MILLISECONDS); CommonTestUtils.waitUntilCondition( () -> triggerSavepoint(clusterClient, jobGraph.getJobID(), savepointPath) != null, 1000); final String savepoint2Path = triggerSavepoint(clusterClient, jobGraph.getJobID(), savepointPath); clusterClient.cancel(jobGraph.getJobID()); CommonTestUtils.waitUntilCondition( () -> clusterClient.getJobStatus(jobGraph.getJobID()).get() == JobStatus.CANCELED, 1000); Path stateBackend2 = Files.createDirectory(temporaryPath.resolve("stateBackend2")); final JobGraph jobGraphWithSavepoint = createJobGraph(stateBackend2.toFile()); final JobID jobId = jobGraphWithSavepoint.getJobID(); jobGraphWithSavepoint.setSavepointRestoreSettings( SavepointRestoreSettings.forPath(savepoint2Path)); clusterClient.submitJob(jobGraphWithSavepoint).get(TIMEOUT, TimeUnit.MILLISECONDS); assertThat(clusterClient.requestJobResult(jobId).join().isSuccess()).isTrue(); } private String triggerSavepoint(ClusterClient<?> clusterClient, JobID jobID, String path) { try { return String.valueOf( clusterClient .triggerSavepoint(jobID, path, SavepointFormatType.CANONICAL) .get(TIMEOUT, TimeUnit.MILLISECONDS)); } catch (Exception ex) { } return null; } private JobGraph createJobGraph(File stateBackendFolder) throws Exception { final StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment(); final StateBackend stateBackend = new FsStateBackend(stateBackendFolder.toURI(), 1); sEnv.setStateBackend(stateBackend); sEnv.addSource(new InfiniteSourceFunction()) .keyBy(e -> e) .flatMap( new RichFlatMapFunction<Integer, Integer>() { private static final long serialVersionUID = 1L; ValueState<Integer> state; @Override public void open(Configuration parameters) throws Exception { super.open(parameters); ValueStateDescriptor<Integer> descriptor = new ValueStateDescriptor<>("total", Types.INT); state = getRuntimeContext().getState(descriptor); } @Override public void flatMap(Integer value, Collector<Integer> out) throws Exception { final Integer current = state.value(); if (current != null) { value += current; } state.update(value); out.collect(value); } }) .uid(FLAT_MAP_UID) .addSink(new DiscardingSink<>()); return sEnv.getStreamGraph().getJobGraph(); } private static final class 
InfiniteSourceFunction extends RichParallelSourceFunction<Integer> implements CheckpointedFunction { private static final long serialVersionUID = 1L; private final ListStateDescriptor<Integer> hasExecutedBeforeStateDescriptor = new ListStateDescriptor<>("hasExecutedBefore", BasicTypeInfo.INT_TYPE_INFO); private volatile boolean running = true; @Override public void run(SourceContext<Integer> ctx) throws Exception { final Random random = new Random(); while (running) { synchronized (ctx.getCheckpointLock()) { ctx.collect(random.nextInt()); } Thread.sleep(5L); } } @Override public void cancel() { running = false; } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception {} @Override public void initializeState(FunctionInitializationContext context) throws Exception { final ListState<Integer> stateFromSavepoint = context.getOperatorStateStore() .getUnionListState(hasExecutedBeforeStateDescriptor); if (stateFromSavepoint.get().iterator().hasNext()) { running = false; } stateFromSavepoint.clear(); stateFromSavepoint.add(getRuntimeContext().getIndexOfThisSubtask()); } } }
class KubernetesHighAvailabilityRecoverFromSavepointITCase { private static final long TIMEOUT = 60 * 1000; private static final String CLUSTER_ID = "flink-on-k8s-cluster-" + System.currentTimeMillis(); private static final String FLAT_MAP_UID = "my-flat-map"; private static Path temporaryPath; @RegisterExtension private static final MiniClusterExtension miniClusterExtension = new MiniClusterExtension( new MiniClusterResourceConfiguration.Builder() .setConfiguration(getConfiguration()) .setNumberTaskManagers(1) .setNumberSlotsPerTaskManager(1) .build()); @RegisterExtension private static final KubernetesExtension kubernetesExtension = new KubernetesExtension(); private ClusterClient<?> clusterClient; private String savepointPath; @BeforeEach void setup(@InjectClusterClient ClusterClient<?> clusterClient) throws Exception { this.clusterClient = clusterClient; this.savepointPath = Files.createDirectory(temporaryPath.resolve("savepoints")) .toAbsolutePath() .toString(); } @Test void testRecoverFromSavepoint() throws Exception { Path stateBackend1 = Files.createDirectory(temporaryPath.resolve("stateBackend1")); final JobGraph jobGraph = createJobGraph(stateBackend1.toFile()); clusterClient .submitJob(jobGraph) .get(TestingUtils.infiniteTime().toMilliseconds(), TimeUnit.MILLISECONDS); CommonTestUtils.waitUntilCondition( () -> triggerSavepoint(clusterClient, jobGraph.getJobID(), savepointPath) != null, 1000); final String savepoint2Path = triggerSavepoint(clusterClient, jobGraph.getJobID(), savepointPath); clusterClient.cancel(jobGraph.getJobID()); CommonTestUtils.waitUntilCondition( () -> clusterClient.getJobStatus(jobGraph.getJobID()).get() == JobStatus.CANCELED, 1000); Path stateBackend2 = Files.createDirectory(temporaryPath.resolve("stateBackend2")); final JobGraph jobGraphWithSavepoint = createJobGraph(stateBackend2.toFile()); final JobID jobId = jobGraphWithSavepoint.getJobID(); jobGraphWithSavepoint.setSavepointRestoreSettings( SavepointRestoreSettings.forPath(savepoint2Path)); clusterClient.submitJob(jobGraphWithSavepoint).get(TIMEOUT, TimeUnit.MILLISECONDS); assertThat(clusterClient.requestJobResult(jobId).join().isSuccess()).isTrue(); } private String triggerSavepoint(ClusterClient<?> clusterClient, JobID jobID, String path) { try { return String.valueOf( clusterClient .triggerSavepoint(jobID, path, SavepointFormatType.CANONICAL) .get(TIMEOUT, TimeUnit.MILLISECONDS)); } catch (Exception ex) { } return null; } private JobGraph createJobGraph(File stateBackendFolder) throws Exception { final StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment(); final StateBackend stateBackend = new FsStateBackend(stateBackendFolder.toURI(), 1); sEnv.setStateBackend(stateBackend); sEnv.addSource(new InfiniteSourceFunction()) .keyBy(e -> e) .flatMap( new RichFlatMapFunction<Integer, Integer>() { private static final long serialVersionUID = 1L; ValueState<Integer> state; @Override public void open(Configuration parameters) throws Exception { super.open(parameters); ValueStateDescriptor<Integer> descriptor = new ValueStateDescriptor<>("total", Types.INT); state = getRuntimeContext().getState(descriptor); } @Override public void flatMap(Integer value, Collector<Integer> out) throws Exception { final Integer current = state.value(); if (current != null) { value += current; } state.update(value); out.collect(value); } }) .uid(FLAT_MAP_UID) .addSink(new DiscardingSink<>()); return sEnv.getStreamGraph().getJobGraph(); } private static final class InfiniteSourceFunction extends 
RichParallelSourceFunction<Integer> implements CheckpointedFunction { private static final long serialVersionUID = 1L; private final ListStateDescriptor<Integer> hasExecutedBeforeStateDescriptor = new ListStateDescriptor<>("hasExecutedBefore", BasicTypeInfo.INT_TYPE_INFO); private volatile boolean running = true; @Override public void run(SourceContext<Integer> ctx) throws Exception { final Random random = new Random(); while (running) { synchronized (ctx.getCheckpointLock()) { ctx.collect(random.nextInt()); } Thread.sleep(5L); } } @Override public void cancel() { running = false; } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception {} @Override public void initializeState(FunctionInitializationContext context) throws Exception { final ListState<Integer> stateFromSavepoint = context.getOperatorStateStore() .getUnionListState(hasExecutedBeforeStateDescriptor); if (stateFromSavepoint.get().iterator().hasNext()) { running = false; } stateFromSavepoint.clear(); stateFromSavepoint.add(getRuntimeContext().getIndexOfThisSubtask()); } } }
Use Arrays.asList instead of depending on Guava (see the sketch after this record).
public void testSimpleTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 1"), Row.of("val 2"), Row.of("val 3")); assertThat(results).containsSequence(expectedRows); }
ImmutableList.of(Row.of("val 1"), Row.of("val 2"), Row.of("val 3"));
public void testSimpleTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = Arrays.asList(Row.of("val 1"), Row.of("val 2"), Row.of("val 3")); assertThat(results).containsSequence(expectedRows); }
class AsyncCalcITCase extends AbstractTestBase { private StreamExecutionEnvironment env; private TableEnvironment tEnv; @BeforeEach public void before() throws Exception { env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); env.setMaxParallelism(1); tEnv = StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode()); tEnv.getConfig().set(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1); tEnv.getConfig().set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_BUFFER_CAPACITY, 1); tEnv.getConfig() .set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_TIMEOUT, Duration.ofMinutes(1)); } @Test @Test public void testLiteralPlusTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select 'foo', func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of( Row.of("foo", "val 1"), Row.of("foo", "val 2"), Row.of("foo", "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldPlusTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select f1, func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of(1, "val 1"), Row.of(2, "val 2"), Row.of(3, "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testTwoCalls() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1), func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of( Row.of("val 1", "val 1"), Row.of("val 2", "val 2"), Row.of("val 3", "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testNestedCalls() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFuncAdd10()); final List<Row> results = executeSql("select func(func(func(f1))) from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of(31), Row.of(32), Row.of(33)); assertThat(results).containsSequence(expectedRows); } @Test public void testThreeNestedCalls() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFuncAdd10()); final List<Row> results = executeSql("select func(func(f1)), func(func(func(f1))), func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of(21, 31, 11), Row.of(22, 32, 12), Row.of(23, 33, 13)); assertThat(results).containsSequence(expectedRows); } @Test public void testPassedToOtherUDF() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select Concat(func(f1), 'foo') from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 1foo"), Row.of("val 2foo"), Row.of("val 3foo")); assertThat(results).containsSequence(expectedRows); } @Test public void testJustCall() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(1)"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 1")); assertThat(results).containsSequence(expectedRows); } @Test 
public void testWhereCondition() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select f1 from t1 where REGEXP(func(f1), 'val (2|3)')"); final List<Row> expectedRows = ImmutableList.of(Row.of(2), Row.of(3)); assertThat(results).containsSequence(expectedRows); } @Test public void testWhereConditionAndProjection() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1 where REGEXP(func(f1), 'val (2|3)')"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 2"), Row.of("val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testWhereConditionWithInts() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFuncAdd10()); final List<Row> results = executeSql("select f1 from t1 where func(f1) >= 12"); final List<Row> expectedRows = ImmutableList.of(Row.of(2), Row.of(3)); assertThat(results).containsSequence(expectedRows); } @Test public void testAggregate() { Table t1 = tEnv.fromValues(1, 2, 3, 1, 3, 4).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFuncAdd10Long()); final List<Row> results = executeSql("select f1, func(count(*)) from t1 group by f1"); final List<Row> expectedRows = ImmutableList.of( Row.of(1, 11L), Row.of(2, 11L), Row.of(3, 11L), Row.ofKind(RowKind.UPDATE_BEFORE, 1, 11L), Row.ofKind(RowKind.UPDATE_AFTER, 1, 12L), Row.ofKind(RowKind.UPDATE_BEFORE, 3, 11L), Row.ofKind(RowKind.UPDATE_AFTER, 3, 12L), Row.of(4, 11L)); assertThat(results).containsSequence(expectedRows); } @Test public void testSelectCallWithIntArray() { Table t1 = tEnv.fromValues(new int[] {1, 2}, new int[] {3, 4}).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFuncAdd10IntArray()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of( Row.of(new Object[] {new Integer[] {11, 12}}), Row.of(new Object[] {new Integer[] {13, 14}})); assertThat(results).containsExactlyInAnyOrderElementsOf(expectedRows); } @Test public void testInnerJoinWithFuncInOn() { Table t1 = tEnv.fromValues(1, 2, 3, 4).as("f1"); Table t2 = tEnv.fromValues(2, 4).as("f2"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryView("t2", t2); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql( "select f1 from t1 INNER JOIN t2 ON func(f1) = func(f2) AND REGEXP(func(f1), 'val (2|4)')"); final List<Row> expectedRows = ImmutableList.of(Row.of(2), Row.of(4)); assertThat(results).containsSequence(expectedRows); } @Test public void testInnerJoinWithFuncProjection() { Table t1 = tEnv.fromValues(1, 2, 3, 4).as("f1"); Table t2 = tEnv.fromValues(2, 4).as("f2"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryView("t2", t2); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1 INNER JOIN t2 ON f1 = f2"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 2"), Row.of("val 4")); assertThat(results).containsSequence(expectedRows); } @Test public void testInnerJoinWithFuncInWhere() { Table t1 = tEnv.fromValues(1, 2, 3, 4).as("f1"); Table t2 = tEnv.fromValues(2, 4).as("f2"); tEnv.createTemporaryView("t1", t1); 
tEnv.createTemporaryView("t2", t2); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql( "select f1 from t1 INNER JOIN t2 ON f1 = f2 WHERE REGEXP(func(f1), 'val (2|3)')"); final List<Row> expectedRows = ImmutableList.of(Row.of(2)); assertThat(results).containsSequence(expectedRows); } @Test public void testLeftJoinWithFuncInOn() { Table t1 = tEnv.fromValues(1, 2, 3, 4).as("f1"); Table t2 = tEnv.fromValues(2, 4).as("f2"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryView("t2", t2); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select f1, f2 from t1 LEFT JOIN t2 ON func(f1) = func(f2)"); final List<Row> expectedRows = ImmutableList.of(Row.of(1, null), Row.of(2, 2), Row.of(3, null), Row.of(4, 4)); assertThat(results).containsSequence(expectedRows); } @Test public void testLeftJoinWithFuncInWhere() { Table t1 = tEnv.fromValues(1, 2, 3, 4).as("f1"); Table t2 = tEnv.fromValues(2, 4).as("f2"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryView("t2", t2); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql( "select f1, f2 from t1 LEFT JOIN t2 ON f1 = f2 WHERE REGEXP(func(f1), 'val (2|3)')"); final List<Row> expectedRows = ImmutableList.of(Row.of(2, 2), Row.of(3, null)); assertThat(results).containsSequence(expectedRows); } @Test public void testRightJoinWithFuncInOn() { Table t1 = tEnv.fromValues(1, 2, 3, 4).as("f1"); Table t2 = tEnv.fromValues(2, 4).as("f2"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryView("t2", t2); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select f1, f2 from t1 FULL OUTER JOIN t2 ON func(f1) = func(f2)"); assertThat(results).hasSize(8); } @Test public void testSelectWithConfigs() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.getConfig() .set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_BUFFER_CAPACITY.key(), "10"); tEnv.getConfig().set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_TIMEOUT.key(), "1m"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 1"), Row.of("val 2"), Row.of("val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testProjectCallInSubquery() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql( "select blah FROM (select func(f1) as blah from t1) " + "WHERE REGEXP(blah, 'val (2|3)')"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 2"), Row.of("val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testWhereConditionCallInSubquery() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql( "select blah FROM (select f1 as blah from t1 " + "WHERE REGEXP(func(f1), 'val (2|3)'))"); final List<Row> expectedRows = ImmutableList.of(Row.of(2), Row.of(3)); assertThat(results).containsSequence(expectedRows); } @Test public void testWhereNotInSubquery() { Table t1 = tEnv.fromValues(1, 2, 3, 4).as("f1"); Table t2 = tEnv.fromValues(2, 4).as("f2"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryView("t2", t2); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> 
results = executeSql("select func(f1) FROM t1 where f1 not in (select f2 from t2)"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 1"), Row.of("val 3")); assertThat(results).containsSubsequence(expectedRows); } @Test public void testSimpleTableSelectWithFallback() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); Table t2 = tEnv.fromValues(2, 4).as("f2"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryView("t2", t2); tEnv.createTemporaryFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1 INNER JOIN t2 ON f1 = f2"); final List<Row> expectedRows = ImmutableList.of(Row.of("val 2")); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldAccessAfter() { Table t1 = tEnv.fromValues(2).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFuncRow()); final List<Row> results = executeSql("select func(f1).f0 from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of(3)); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldOperand() { Table t1 = tEnv.fromValues(2).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFuncRow()); tEnv.createTemporaryFunction("func2", new AsyncFuncAdd10()); Table structs = tEnv.sqlQuery("select func(f1) from t1"); tEnv.createTemporaryView("t2", structs); final List<Row> results = executeSql("select func2(t2.f0) from t2"); final List<Row> expectedRows = ImmutableList.of(Row.of(13)); assertThat(results).containsSequence(expectedRows); } @Test public void testOverload() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new AsyncFuncOverload()); final List<Row> results = executeSql("select func(f1), func(cast(f1 as String)) from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of("int version 1", "string version 1")); assertThat(results).containsSequence(expectedRows); } @Test public void testMultiLayerGeneric() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryFunction("func", new LongAsyncFuncGeneric()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of(11L)); assertThat(results).containsSequence(expectedRows); } @Test public void testFailures() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); AsyncFuncFail func = new AsyncFuncFail(2); tEnv.createTemporaryFunction("func", func); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = ImmutableList.of(Row.of(3)); assertThat(results).containsSequence(expectedRows); } private List<Row> executeSql(String sql) { TableResult result = tEnv.executeSql(sql); final List<Row> rows = new ArrayList<>(); result.collect().forEachRemaining(rows::add); return rows; } /** Test function. */ public static class AsyncFunc extends AsyncFuncBase { private static final long serialVersionUID = 1L; public void eval(CompletableFuture<String> future, Integer param) { executor.schedule(() -> future.complete("val " + param), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class AsyncFuncAdd10 extends AsyncFuncBase { private static final long serialVersionUID = 2L; public void eval(CompletableFuture<Integer> future, Integer param) { executor.schedule(() -> future.complete(param + 10), 10, TimeUnit.MILLISECONDS); } } /** Test function. 
*/ public static class AsyncFuncOverload extends AsyncFuncBase { private static final long serialVersionUID = 3L; public void eval(CompletableFuture<String> future, Integer param) { executor.schedule( () -> future.complete("int version " + param), 10, TimeUnit.MILLISECONDS); } public void eval(CompletableFuture<String> future, String param) { executor.schedule( () -> future.complete("string version " + param), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class AsyncFuncAdd10Long extends AsyncFuncBase { private static final long serialVersionUID = 2L; public void eval(CompletableFuture<Long> future, Long param) { executor.schedule(() -> future.complete(param + 10), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class AsyncFuncAdd10IntArray extends AsyncFuncBase { private static final long serialVersionUID = 3L; public void eval(CompletableFuture<int[]> future, int[] param) { for (int i = 0; i < param.length; i++) { param[i] += 10; } executor.schedule(() -> future.complete(param), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class AsyncFuncRow extends AsyncScalarFunction { @DataTypeHint("ROW<f0 INT, f1 String>") public void eval(CompletableFuture<Row> future, int a) { future.complete(Row.of(a + 1, "" + (a * a))); } } /** Test function. */ public static class AsyncFuncFail extends AsyncFuncBase implements Serializable { private static final long serialVersionUID = 8996145425452974113L; private final int numFailures; private final AtomicInteger failures = new AtomicInteger(0); public AsyncFuncFail(int numFailures) { this.numFailures = numFailures; } public void eval(CompletableFuture<Integer> future, int a) { if (failures.getAndIncrement() < numFailures) { future.completeExceptionally(new RuntimeException("Error " + failures.get())); return; } future.complete(failures.get()); } } /** Test function. */ public abstract static class AsyncFuncGeneric<T> extends AsyncFuncBase { private static final long serialVersionUID = 3L; abstract T newT(int param); public void eval(CompletableFuture<T> future, Integer param) { executor.schedule(() -> future.complete(newT(param)), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class LongAsyncFuncGeneric extends AsyncFuncGeneric<Long> { @Override Long newT(int param) { return 10L + param; } } /** Test function. */ public static class AsyncFuncBase extends AsyncScalarFunction { protected ScheduledExecutorService executor; @Override public void open(FunctionContext context) { executor = Executors.newSingleThreadScheduledExecutor(); } @Override public void close() { if (null != executor && !executor.isShutdown()) { executor.shutdownNow(); } } } }
class AsyncCalcITCase extends StreamingTestBase { private TableEnvironment tEnv; @BeforeEach public void before() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(2); tEnv = StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode()); tEnv.getConfig().set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_BUFFER_CAPACITY, 2); tEnv.getConfig() .set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_TIMEOUT, Duration.ofMinutes(1)); } @Test @Test public void testLiteralPlusTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select 'foo', func(f1) from t1"); final List<Row> expectedRows = Arrays.asList( Row.of("foo", "val 1"), Row.of("foo", "val 2"), Row.of("foo", "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldPlusTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select f1, func(f1) from t1"); final List<Row> expectedRows = Arrays.asList(Row.of(1, "val 1"), Row.of(2, "val 2"), Row.of(3, "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testTwoCalls() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1), func(f1) from t1"); final List<Row> expectedRows = Arrays.asList( Row.of("val 1", "val 1"), Row.of("val 2", "val 2"), Row.of("val 3", "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testThreeNestedCalls() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncAdd10()); final List<Row> results = executeSql("select func(func(f1)), func(func(func(f1))), func(f1) from t1"); final List<Row> expectedRows = Arrays.asList(Row.of(21, 31, 11), Row.of(22, 32, 12), Row.of(23, 33, 13)); assertThat(results).containsSequence(expectedRows); } @Test public void testPassedToOtherUDF() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select Concat(func(f1), 'foo') from t1"); final List<Row> expectedRows = Arrays.asList(Row.of("val 1foo"), Row.of("val 2foo"), Row.of("val 3foo")); assertThat(results).containsSequence(expectedRows); } @Test public void testJustCall() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(1)"); final List<Row> expectedRows = Collections.singletonList(Row.of("val 1")); assertThat(results).containsSequence(expectedRows); } @Test public void testWhereConditionAndProjection() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1 where REGEXP(func(f1), 'val (2|3)')"); final List<Row> expectedRows = Arrays.asList(Row.of("val 2"), Row.of("val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldAccessAfter() { Table t1 = 
tEnv.fromValues(2).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncRow()); final List<Row> results = executeSql("select func(f1).f0 from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of(3)); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldOperand() { Table t1 = tEnv.fromValues(2).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncRow()); tEnv.createTemporarySystemFunction("func2", new AsyncFuncAdd10()); Table structs = tEnv.sqlQuery("select func(f1) from t1"); tEnv.createTemporaryView("t2", structs); final List<Row> results = executeSql("select func2(t2.f0) from t2"); final List<Row> expectedRows = Collections.singletonList(Row.of(13)); assertThat(results).containsSequence(expectedRows); } @Test public void testOverload() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncOverload()); final List<Row> results = executeSql("select func(f1), func(cast(f1 as String)) from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of("int version 1", "string version 1")); assertThat(results).containsSequence(expectedRows); } @Test public void testMultiLayerGeneric() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new LongAsyncFuncGeneric()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of((Object) new Long[] {11L})); assertThat(results).containsSequence(expectedRows); } @Test public void testMultiLayerMoreGeneric() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new LongAsyncFuncMoreGeneric()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of((Object) new Long[] {11L})); assertThat(results).containsSequence(expectedRows); } @Test public void testFailures() { tEnv.getConfig().set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_BUFFER_CAPACITY, 1); Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); AsyncFuncFail func = new AsyncFuncFail(2); tEnv.createTemporarySystemFunction("func", func); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of(3)); assertThat(results).containsSequence(expectedRows); } private List<Row> executeSql(String sql) { TableResult result = tEnv.executeSql(sql); final List<Row> rows = new ArrayList<>(); result.collect().forEachRemaining(rows::add); return rows; } /** Test function. */ public static class AsyncFunc extends AsyncFuncBase { private static final long serialVersionUID = 1L; public void eval(CompletableFuture<String> future, Integer param) { executor.schedule(() -> future.complete("val " + param), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class AsyncFuncAdd10 extends AsyncFuncBase { private static final long serialVersionUID = 2L; public void eval(CompletableFuture<Integer> future, Integer param) { executor.schedule(() -> future.complete(param + 10), 10, TimeUnit.MILLISECONDS); } } /** Test function. 
*/ public static class AsyncFuncOverload extends AsyncFuncBase { private static final long serialVersionUID = 3L; public void eval(CompletableFuture<String> future, Integer param) { executor.schedule( () -> future.complete("int version " + param), 10, TimeUnit.MILLISECONDS); } public void eval(CompletableFuture<String> future, String param) { executor.schedule( () -> future.complete("string version " + param), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class AsyncFuncRow extends AsyncScalarFunction { @DataTypeHint("ROW<f0 INT, f1 String>") public void eval(CompletableFuture<Row> future, int a) { future.complete(Row.of(a + 1, "" + (a * a))); } } /** Test function. */ public static class AsyncFuncFail extends AsyncFuncBase implements Serializable { private static final long serialVersionUID = 8996145425452974113L; private final int numFailures; private final AtomicInteger failures = new AtomicInteger(0); public AsyncFuncFail(int numFailures) { this.numFailures = numFailures; } public void eval(CompletableFuture<Integer> future, int ignoredA) { if (failures.getAndIncrement() < numFailures) { future.completeExceptionally(new RuntimeException("Error " + failures.get())); return; } future.complete(failures.get()); } } /** Test function. */ public abstract static class AsyncFuncGeneric<T> extends AsyncFuncBase { private static final long serialVersionUID = 3L; abstract T[] newT(int param); public void eval(CompletableFuture<T[]> future, Integer param) { executor.schedule(() -> future.complete(newT(param)), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class LongAsyncFuncGeneric extends AsyncFuncGeneric<Long> { @Override Long[] newT(int param) { Long[] result = new Long[1]; result[0] = 10L + param; return result; } } /** Test function. */ public abstract static class AsyncFuncMoreGeneric<T> extends AsyncFuncBase { private static final long serialVersionUID = 3L; abstract void finish(T future, int param); public void eval(T future, Integer param) { executor.schedule(() -> finish(future, param), 10, TimeUnit.MILLISECONDS); } } /** Test function. */ public static class LongAsyncFuncMoreGeneric extends AsyncFuncMoreGeneric<CompletableFuture<Long[]>> { @Override void finish(CompletableFuture<Long[]> future, int param) { Long[] result = new Long[1]; result[0] = 10L + param; future.complete(result); } } /** Test function. */ public static class AsyncFuncBase extends AsyncScalarFunction { protected ScheduledExecutorService executor; @Override public void open(FunctionContext context) { executor = Executors.newSingleThreadScheduledExecutor(); } @Override public void close() { if (null != executor && !executor.isShutdown()) { executor.shutdownNow(); } } } }
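The resolution above drops Guava's ImmutableList in favor of JDK collections for test expectations. Below is a minimal, JDK-only sketch of the two idioms the resolved code uses (plain strings stand in for Flink Row values so the snippet is self-contained; ExpectedRowsSketch is a hypothetical demo class). Note that Arrays.asList returns a fixed-size view backed by its array, which is sufficient for assertion inputs but not deeply immutable the way Guava's ImmutableList is.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Hypothetical demo class; only the Arrays.asList/Collections.singletonList idioms mirror the resolution.
public class ExpectedRowsSketch {
    public static void main(String[] args) {
        // Multi-element expectation without a Guava dependency.
        List<String> expected = Arrays.asList("val 1", "val 2", "val 3");
        // Single-element expectation; the resolved test class uses this for one-row results.
        List<String> single = Collections.singletonList("val 1");
        System.out.println(expected);
        System.out.println(single);
    }
}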
I see your point, but I think that preventing users from creating the project just because the wrapper is missing would be more of a problem than (slightly) incorrect instructions. I am willing to bet that most users will understand that the Maven wrapper is missing once they run `./mvnw ...` and get the generic OS error message. WDYT? (A sketch of the resulting log-and-continue policy follows this record.)
private void createMavenWrapper() { try { executeMojo( plugin( groupId("io.takari"), artifactId("maven"), version(MojoUtils.getMavenWrapperVersion())), goal("wrapper"), configuration( element(name("maven"), MojoUtils.getProposedMavenVersion())), executionEnvironment( project, session, pluginManager)); } catch (Exception e) { getLog().debug("Unable to create Maven Wrapper"); } }
getLog().debug("Unable to create Maven Wrapper");
private void createMavenWrapper() { try { executeMojo( plugin( groupId("io.takari"), artifactId("maven"), version(MojoUtils.getMavenWrapperVersion())), goal("wrapper"), configuration( element(name("maven"), MojoUtils.getProposedMavenVersion())), executionEnvironment( project, session, pluginManager)); } catch (Exception e) { getLog().error("Unable to install the Maven wrapper (./mvnw) in the project"); } }
class CreateProjectMojo extends AbstractMojo { public static final String PLUGIN_KEY = MojoUtils.getPluginGroupId() + ":" + MojoUtils.getPluginArtifactId(); private static final String DEFAULT_GROUP_ID = "org.acme.quarkus.sample"; @Parameter(defaultValue = "${project}") protected MavenProject project; @Parameter(property = "projectGroupId") private String projectGroupId; @Parameter(property = "projectArtifactId") private String projectArtifactId; @Parameter(property = "projectVersion") private String projectVersion; @Parameter(property = "path") private String path; @Parameter(property = "className") private String className; @Parameter(property = "extensions") private Set<String> extensions; @Parameter(defaultValue = "${session}") private MavenSession session; @Component private Prompter prompter; @Component private MavenVersionEnforcer mavenVersionEnforcer; @Component private BuildPluginManager pluginManager; @Override public void execute() throws MojoExecutionException { mavenVersionEnforcer.ensureMavenVersion(getLog(), session); File projectRoot = new File("."); File pom = new File(projectRoot, "pom.xml"); if (pom.isFile()) { if (!StringUtils.isBlank(projectGroupId) || !StringUtils.isBlank(projectArtifactId) || !StringUtils.isBlank(projectVersion)) { throw new MojoExecutionException("Unable to generate the project, the `projectGroupId`, " + "`projectArtifactId` and `projectVersion` parameters are not supported when applied to an " + "existing `pom.xml` file"); } projectGroupId = project.getGroupId(); projectArtifactId = project.getArtifactId(); projectVersion = project.getVersion(); } else { askTheUserForMissingValues(); if (!isDirectoryEmpty(projectRoot)) { projectRoot = new File(projectArtifactId); if (projectRoot.exists()) { throw new MojoExecutionException("Unable to create the project - the current directory is not empty and" + " the directory " + projectArtifactId + " exists"); } } } boolean success; try { sanitizeOptions(); final Map<String, Object> context = new HashMap<>(); context.put("className", className); context.put("path", path); success = new CreateProject(projectRoot) .groupId(projectGroupId) .artifactId(projectArtifactId) .version(projectVersion) .sourceType(determineSourceType(extensions)) .doCreateProject(context); if (success) { new AddExtensions(new File(projectRoot, "pom.xml")) .addExtensions(extensions); } createMavenWrapper(); } catch (IOException e) { throw new MojoExecutionException(e.getMessage(), e); } if (success) { printUserInstructions(projectRoot); } } private SourceType determineSourceType(Set<String> extensions) { return extensions.stream().anyMatch(e -> e.toLowerCase().contains("kotlin")) ? 
SourceType.KOTLIN : SourceType.JAVA; } private void askTheUserForMissingValues() throws MojoExecutionException { if (!session.getRequest().isInteractiveMode() || shouldUseDefaults()) { if (StringUtils.isBlank(projectGroupId)) { projectGroupId = DEFAULT_GROUP_ID; } if (StringUtils.isBlank(projectArtifactId)) { projectArtifactId = "my-quarkus-project"; } if (StringUtils.isBlank(projectVersion)) { projectVersion = "1.0-SNAPSHOT"; } return; } try { if (StringUtils.isBlank(projectGroupId)) { projectGroupId = prompter.promptWithDefaultValue("Set the project groupId", DEFAULT_GROUP_ID); } if (StringUtils.isBlank(projectArtifactId)) { projectArtifactId = prompter.promptWithDefaultValue("Set the project artifactId", "my-quarkus-project"); } if (StringUtils.isBlank(projectVersion)) { projectVersion = prompter.promptWithDefaultValue("Set the Quarkus version", "1.0-SNAPSHOT"); } if (StringUtils.isBlank(className)) { String answer = prompter.promptWithDefaultValue("Do you want to create a REST resource? (y/n)", "no"); if (isTrueOrYes(answer)) { String defaultResourceName = projectGroupId.replace("-", ".") .replace("_", ".") + ".HelloResource"; className = prompter.promptWithDefaultValue("Set the resource classname", defaultResourceName); if (StringUtils.isBlank(path)) { path = prompter.promptWithDefaultValue("Set the resource path ", "/hello"); } } else { className = null; path = null; } } } catch (IOException e) { throw new MojoExecutionException("Unable to get user input", e); } } private boolean shouldUseDefaults() { return projectArtifactId != null; } private boolean isTrueOrYes(String answer) { if (answer == null) { return false; } String content = answer.trim().toLowerCase(); return "true".equalsIgnoreCase(content) || "yes".equalsIgnoreCase(content) || "y".equalsIgnoreCase(content); } private void sanitizeOptions() { if (className != null) { if (className.endsWith(MojoUtils.JAVA_EXTENSION)) { className = className.substring(0, className.length() - MojoUtils.JAVA_EXTENSION.length()); } else if (className.endsWith(MojoUtils.KOTLIN_EXTENSION)) { className = className.substring(0, className.length() - MojoUtils.KOTLIN_EXTENSION.length()); } if (!className.contains(".")) { className = projectGroupId.replace("-", ".").replace("_", ".") + "." 
+ className; } if (StringUtils.isBlank(path)) { path = "/hello"; } if (!path.startsWith("/")) { path = "/" + path; } } extensions = extensions.stream().map(String::trim).collect(Collectors.toSet()); } private void printUserInstructions(File root) { getLog().info(""); getLog().info("========================================================================================"); getLog().info( ansi().a("Your new application has been created in ").bold().a(root.getAbsolutePath()).boldOff().toString()); getLog().info(ansi().a("Navigate into this directory and launch your application with ") .bold() .fg(Ansi.Color.CYAN) .a("mvn compile quarkus:dev") .reset() .toString()); getLog().info( ansi().a("Your application will be accessible on ").bold().fg(Ansi.Color.CYAN).a("http: .reset().toString()); getLog().info("========================================================================================"); getLog().info(""); } private boolean isDirectoryEmpty(File dir) { if (!dir.isDirectory()) { throw new IllegalArgumentException("The specified file must be a directory: " + dir.getAbsolutePath()); } String[] children = dir.list(); if (children == null) { throw new IllegalArgumentException("The specified directory cannot be accessed: " + dir.getAbsolutePath()); } return children.length == 0; } }
class CreateProjectMojo extends AbstractMojo { public static final String PLUGIN_KEY = MojoUtils.getPluginGroupId() + ":" + MojoUtils.getPluginArtifactId(); private static final String DEFAULT_GROUP_ID = "org.acme.quarkus.sample"; @Parameter(defaultValue = "${project}") protected MavenProject project; @Parameter(property = "projectGroupId") private String projectGroupId; @Parameter(property = "projectArtifactId") private String projectArtifactId; @Parameter(property = "projectVersion") private String projectVersion; @Parameter(property = "path") private String path; @Parameter(property = "className") private String className; @Parameter(property = "extensions") private Set<String> extensions; @Parameter(defaultValue = "${session}") private MavenSession session; @Component private Prompter prompter; @Component private MavenVersionEnforcer mavenVersionEnforcer; @Component private BuildPluginManager pluginManager; @Override public void execute() throws MojoExecutionException { mavenVersionEnforcer.ensureMavenVersion(getLog(), session); File projectRoot = new File("."); File pom = new File(projectRoot, "pom.xml"); if (pom.isFile()) { if (!StringUtils.isBlank(projectGroupId) || !StringUtils.isBlank(projectArtifactId) || !StringUtils.isBlank(projectVersion)) { throw new MojoExecutionException("Unable to generate the project, the `projectGroupId`, " + "`projectArtifactId` and `projectVersion` parameters are not supported when applied to an " + "existing `pom.xml` file"); } projectGroupId = project.getGroupId(); projectArtifactId = project.getArtifactId(); projectVersion = project.getVersion(); } else { askTheUserForMissingValues(); if (!isDirectoryEmpty(projectRoot)) { projectRoot = new File(projectArtifactId); if (projectRoot.exists()) { throw new MojoExecutionException("Unable to create the project - the current directory is not empty and" + " the directory " + projectArtifactId + " exists"); } } } boolean success; try { sanitizeOptions(); final Map<String, Object> context = new HashMap<>(); context.put("className", className); context.put("path", path); success = new CreateProject(projectRoot) .groupId(projectGroupId) .artifactId(projectArtifactId) .version(projectVersion) .sourceType(determineSourceType(extensions)) .doCreateProject(context); if (success) { new AddExtensions(new File(projectRoot, "pom.xml")) .addExtensions(extensions); } createMavenWrapper(); } catch (IOException e) { throw new MojoExecutionException(e.getMessage(), e); } if (success) { printUserInstructions(projectRoot); } } private SourceType determineSourceType(Set<String> extensions) { return extensions.stream().anyMatch(e -> e.toLowerCase().contains("kotlin")) ? 
SourceType.KOTLIN : SourceType.JAVA; } private void askTheUserForMissingValues() throws MojoExecutionException { if (!session.getRequest().isInteractiveMode() || shouldUseDefaults()) { if (StringUtils.isBlank(projectGroupId)) { projectGroupId = DEFAULT_GROUP_ID; } if (StringUtils.isBlank(projectArtifactId)) { projectArtifactId = "my-quarkus-project"; } if (StringUtils.isBlank(projectVersion)) { projectVersion = "1.0-SNAPSHOT"; } return; } try { if (StringUtils.isBlank(projectGroupId)) { projectGroupId = prompter.promptWithDefaultValue("Set the project groupId", DEFAULT_GROUP_ID); } if (StringUtils.isBlank(projectArtifactId)) { projectArtifactId = prompter.promptWithDefaultValue("Set the project artifactId", "my-quarkus-project"); } if (StringUtils.isBlank(projectVersion)) { projectVersion = prompter.promptWithDefaultValue("Set the Quarkus version", "1.0-SNAPSHOT"); } if (StringUtils.isBlank(className)) { String answer = prompter.promptWithDefaultValue("Do you want to create a REST resource? (y/n)", "no"); if (isTrueOrYes(answer)) { String defaultResourceName = projectGroupId.replace("-", ".") .replace("_", ".") + ".HelloResource"; className = prompter.promptWithDefaultValue("Set the resource classname", defaultResourceName); if (StringUtils.isBlank(path)) { path = prompter.promptWithDefaultValue("Set the resource path ", "/hello"); } } else { className = null; path = null; } } } catch (IOException e) { throw new MojoExecutionException("Unable to get user input", e); } } private boolean shouldUseDefaults() { return projectArtifactId != null; } private boolean isTrueOrYes(String answer) { if (answer == null) { return false; } String content = answer.trim().toLowerCase(); return "true".equalsIgnoreCase(content) || "yes".equalsIgnoreCase(content) || "y".equalsIgnoreCase(content); } private void sanitizeOptions() { if (className != null) { if (className.endsWith(MojoUtils.JAVA_EXTENSION)) { className = className.substring(0, className.length() - MojoUtils.JAVA_EXTENSION.length()); } else if (className.endsWith(MojoUtils.KOTLIN_EXTENSION)) { className = className.substring(0, className.length() - MojoUtils.KOTLIN_EXTENSION.length()); } if (!className.contains(".")) { className = projectGroupId.replace("-", ".").replace("_", ".") + "." 
+ className; } if (StringUtils.isBlank(path)) { path = "/hello"; } if (!path.startsWith("/")) { path = "/" + path; } } extensions = extensions.stream().map(String::trim).collect(Collectors.toSet()); } private void printUserInstructions(File root) { getLog().info(""); getLog().info("========================================================================================"); getLog().info( ansi().a("Your new application has been created in ").bold().a(root.getAbsolutePath()).boldOff().toString()); getLog().info(ansi().a("Navigate into this directory and launch your application with ") .bold() .fg(Ansi.Color.CYAN) .a("mvn compile quarkus:dev") .reset() .toString()); getLog().info( ansi().a("Your application will be accessible on ").bold().fg(Ansi.Color.CYAN).a("http: .reset().toString()); getLog().info("========================================================================================"); getLog().info(""); } private boolean isDirectoryEmpty(File dir) { if (!dir.isDirectory()) { throw new IllegalArgumentException("The specified file must be a directory: " + dir.getAbsolutePath()); } String[] children = dir.list(); if (children == null) { throw new IllegalArgumentException("The specified directory cannot be accessed: " + dir.getAbsolutePath()); } return children.length == 0; } }
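The resolution above turns a wrapper-installation failure into an error-level log instead of aborting, matching the comment's reasoning that blocking project creation is worse than a missing ./mvnw. Below is a hypothetical, framework-free helper sketching that log-and-continue policy (BestEffort and its names are illustrative, not Maven or Quarkus API):

import java.util.function.Consumer;

public final class BestEffort {
    private BestEffort() {}

    // Runs an optional setup step; failures are reported to the handler but never abort the caller.
    public static void run(Runnable step, Consumer<Exception> onFailure) {
        try {
            step.run();
        } catch (Exception e) {
            onFailure.accept(e); // e.g. getLog().error(...) in a mojo
        }
    }

    public static void main(String[] args) {
        BestEffort.run(
                () -> { throw new IllegalStateException("wrapper install failed"); },
                e -> System.err.println("Unable to install the Maven wrapper (./mvnw): " + e.getMessage()));
        System.out.println("project creation continues");
    }
}

The trade-off, as the comment notes, is that the printed instructions may still mention ./mvnw even when installation failed; logging at error level (rather than debug) at least makes the failure visible.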
I think we could create two TM pods and verify that fetching the JobManager deployment happens only once (see the test sketch after this record).
public CompletableFuture<Void> createTaskManagerPod(KubernetesPod kubernetesPod) { if (masterDeployment == null) { masterDeployment = this.internalClient .apps() .deployments() .withName(KubernetesUtils.getDeploymentName(clusterId)) .get(); } return CompletableFuture.runAsync( () -> { if (masterDeployment == null) { throw new RuntimeException( "Failed to find Deployment named " + clusterId + " in namespace " + this.namespace); } setOwnerReference( masterDeployment, Collections.singletonList(kubernetesPod.getInternalResource())); LOG.debug( "Start to create pod with spec {}{}", System.lineSeparator(), KubernetesUtils.tryToGetPrettyPrintYaml( kubernetesPod.getInternalResource())); this.internalClient.pods().create(kubernetesPod.getInternalResource()); }, kubeClientExecutorService); }
masterDeployment =
public CompletableFuture<Void> createTaskManagerPod(KubernetesPod kubernetesPod) { return CompletableFuture.runAsync( () -> { if (masterDeploymentRef.get() == null) { final Deployment masterDeployment = this.internalClient .apps() .deployments() .withName(KubernetesUtils.getDeploymentName(clusterId)) .get(); if (masterDeployment == null) { throw new RuntimeException( "Failed to find Deployment named " + clusterId + " in namespace " + this.namespace); } masterDeploymentRef.compareAndSet(null, masterDeployment); } setOwnerReference( checkNotNull(masterDeploymentRef.get()), Collections.singletonList(kubernetesPod.getInternalResource())); LOG.debug( "Start to create pod with spec {}{}", System.lineSeparator(), KubernetesUtils.tryToGetPrettyPrintYaml( kubernetesPod.getInternalResource())); this.internalClient.pods().create(kubernetesPod.getInternalResource()); }, kubeClientExecutorService); }
class Fabric8FlinkKubeClient implements FlinkKubeClient { private static final Logger LOG = LoggerFactory.getLogger(Fabric8FlinkKubeClient.class); private final String clusterId; private final String namespace; private final int maxRetryAttempts; private final KubernetesConfigOptions.NodePortAddressType nodePortAddressType; private final NamespacedKubernetesClient internalClient; private final ExecutorService kubeClientExecutorService; private Deployment masterDeployment; public Fabric8FlinkKubeClient( Configuration flinkConfig, NamespacedKubernetesClient client, ExecutorService executorService) { this.clusterId = flinkConfig .getOptional(KubernetesConfigOptions.CLUSTER_ID) .orElseThrow( () -> new IllegalArgumentException( String.format( "Configuration option '%s' is not set.", KubernetesConfigOptions.CLUSTER_ID.key()))); this.namespace = flinkConfig.getString(KubernetesConfigOptions.NAMESPACE); this.maxRetryAttempts = flinkConfig.getInteger( KubernetesConfigOptions.KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES); this.nodePortAddressType = flinkConfig.get( KubernetesConfigOptions.REST_SERVICE_EXPOSED_NODE_PORT_ADDRESS_TYPE); this.internalClient = checkNotNull(client); this.kubeClientExecutorService = checkNotNull(executorService); } @Override public void createJobManagerComponent(KubernetesJobManagerSpecification kubernetesJMSpec) { final Deployment deployment = kubernetesJMSpec.getDeployment(); final List<HasMetadata> accompanyingResources = kubernetesJMSpec.getAccompanyingResources(); LOG.debug( "Start to create deployment with spec {}{}", System.lineSeparator(), KubernetesUtils.tryToGetPrettyPrintYaml(deployment)); final Deployment createdDeployment = this.internalClient.apps().deployments().create(deployment); setOwnerReference(createdDeployment, accompanyingResources); this.internalClient.resourceList(accompanyingResources).createOrReplace(); } @Override @Override public CompletableFuture<Void> stopPod(String podName) { return CompletableFuture.runAsync( () -> this.internalClient.pods().withName(podName).delete(), kubeClientExecutorService); } @Override public Optional<Endpoint> getRestEndpoint(String clusterId) { Optional<KubernetesService> restService = getService(KubernetesService.ServiceType.REST_SERVICE, clusterId); if (!restService.isPresent()) { return Optional.empty(); } final Service service = restService.get().getInternalResource(); final int restPort = getRestPortFromExternalService(service); final KubernetesConfigOptions.ServiceExposedType serviceExposedType = ServiceType.classify(service); if (serviceExposedType.isClusterIP()) { return Optional.of( new Endpoint( ExternalServiceDecorator.getNamespacedExternalServiceName( clusterId, namespace), restPort)); } return getRestEndPointFromService(service, restPort); } @Override public List<KubernetesPod> getPodsWithLabels(Map<String, String> labels) { final List<Pod> podList = this.internalClient.pods().withLabels(labels).list().getItems(); if (podList == null || podList.isEmpty()) { return new ArrayList<>(); } return podList.stream().map(KubernetesPod::new).collect(Collectors.toList()); } @Override public void stopAndCleanupCluster(String clusterId) { this.internalClient .apps() .deployments() .withName(KubernetesUtils.getDeploymentName(clusterId)) .cascading(true) .delete(); } @Override public Optional<KubernetesService> getService( KubernetesService.ServiceType serviceType, String clusterId) { final String serviceName = getServiceName(serviceType, clusterId); final Service service = 
this.internalClient.services().withName(serviceName).fromServer().get(); if (service == null) { LOG.debug("Service {} does not exist", serviceName); return Optional.empty(); } return Optional.of(new KubernetesService(service)); } @Override public KubernetesWatch watchPodsAndDoCallback( Map<String, String> labels, WatchCallbackHandler<KubernetesPod> podCallbackHandler) throws Exception { return FutureUtils.retry( () -> CompletableFuture.supplyAsync( () -> new KubernetesWatch( this.internalClient .pods() .withLabels(labels) .watch( new KubernetesPodsWatcher( podCallbackHandler))), kubeClientExecutorService), maxRetryAttempts, t -> ExceptionUtils.findThrowable(t, KubernetesClientException.class) .isPresent(), kubeClientExecutorService) .get(); } @Override public KubernetesLeaderElector createLeaderElector( KubernetesLeaderElectionConfiguration leaderElectionConfiguration, KubernetesLeaderElector.LeaderCallbackHandler leaderCallbackHandler) { return new KubernetesLeaderElector( this.internalClient, leaderElectionConfiguration, leaderCallbackHandler); } @Override public CompletableFuture<Void> createConfigMap(KubernetesConfigMap configMap) { final String configMapName = configMap.getName(); return CompletableFuture.runAsync( () -> this.internalClient .configMaps() .create(configMap.getInternalResource()), kubeClientExecutorService) .exceptionally( throwable -> { throw new CompletionException( new KubernetesException( "Failed to create ConfigMap " + configMapName, throwable)); }); } @Override public Optional<KubernetesConfigMap> getConfigMap(String name) { final ConfigMap configMap = this.internalClient.configMaps().withName(name).get(); return configMap == null ? Optional.empty() : Optional.of(new KubernetesConfigMap(configMap)); } @Override public CompletableFuture<Boolean> checkAndUpdateConfigMap( String configMapName, Function<KubernetesConfigMap, Optional<KubernetesConfigMap>> updateFunction) { return FutureUtils.retry( () -> attemptCheckAndUpdateConfigMap(configMapName, updateFunction), maxRetryAttempts, t -> ExceptionUtils.findThrowable(t, KubernetesClientException.class).isPresent(), kubeClientExecutorService); } private CompletableFuture<Boolean> attemptCheckAndUpdateConfigMap( String configMapName, Function<KubernetesConfigMap, Optional<KubernetesConfigMap>> updateFunction) { return CompletableFuture.supplyAsync( () -> { final KubernetesConfigMap configMap = getConfigMap(configMapName) .orElseThrow( () -> new CompletionException( new KubernetesException( "Cannot retry checkAndUpdateConfigMap with configMap " + configMapName + " because it does not exist."))); final Optional<KubernetesConfigMap> maybeUpdate = updateFunction.apply(configMap); if (maybeUpdate.isPresent()) { try { internalClient .configMaps() .withName(configMapName) .lockResourceVersion(maybeUpdate.get().getResourceVersion()) .replace(maybeUpdate.get().getInternalResource()); return true; } catch (Throwable throwable) { LOG.debug( "Failed to update ConfigMap {} with data {}. 
Trying again.", configMap.getName(), configMap.getData()); throw new CompletionException( new PossibleInconsistentStateException(throwable)); } } return false; }, kubeClientExecutorService); } @Override public CompletableFuture<Void> deleteConfigMapsByLabels(Map<String, String> labels) { return CompletableFuture.runAsync( () -> this.internalClient.configMaps().withLabels(labels).delete(), kubeClientExecutorService); } @Override public CompletableFuture<Void> deleteConfigMap(String configMapName) { return CompletableFuture.runAsync( () -> this.internalClient.configMaps().withName(configMapName).delete(), kubeClientExecutorService); } @Override public KubernetesConfigMapSharedWatcher createConfigMapSharedWatcher( Map<String, String> labels) { return new KubernetesConfigMapSharedInformer(this.internalClient, labels); } @Override public void close() { this.internalClient.close(); ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, this.kubeClientExecutorService); } @Override public KubernetesPod loadPodFromTemplateFile(File file) { if (!file.exists()) { throw new FlinkRuntimeException( String.format("Pod template file %s does not exist.", file)); } return new KubernetesPod(this.internalClient.pods().load(file).get()); } @Override public CompletableFuture<Void> updateServiceTargetPort( KubernetesService.ServiceType serviceType, String clusterId, String portName, int targetPort) { LOG.debug("Update {} target port to {}", portName, targetPort); return CompletableFuture.runAsync( () -> getService(serviceType, clusterId) .ifPresent( service -> { final Service updatedService = new ServiceBuilder( service.getInternalResource()) .editSpec() .editMatchingPort( servicePortBuilder -> servicePortBuilder .build() .getName() .equals( portName)) .withTargetPort( new IntOrString(targetPort)) .endPort() .endSpec() .build(); this.internalClient .services() .withName( getServiceName(serviceType, clusterId)) .replace(updatedService); }), kubeClientExecutorService); } /** * Get the Kubernetes service name. * * @param serviceType The service type * @param clusterId The cluster id * @return Return the Kubernetes service name if the service type is known. */ private String getServiceName(KubernetesService.ServiceType serviceType, String clusterId) { switch (serviceType) { case REST_SERVICE: return ExternalServiceDecorator.getExternalServiceName(clusterId); case INTERNAL_SERVICE: return InternalServiceDecorator.getInternalServiceName(clusterId); default: throw new IllegalArgumentException( "Unrecognized service type: " + serviceType.name()); } } private void setOwnerReference(Deployment deployment, List<HasMetadata> resources) { final OwnerReference deploymentOwnerReference = new OwnerReferenceBuilder() .withName(deployment.getMetadata().getName()) .withApiVersion(deployment.getApiVersion()) .withUid(deployment.getMetadata().getUid()) .withKind(deployment.getKind()) .withController(true) .withBlockOwnerDeletion(true) .build(); resources.forEach( resource -> resource.getMetadata() .setOwnerReferences( Collections.singletonList(deploymentOwnerReference))); } /** Get rest port from the external Service. 
*/ private int getRestPortFromExternalService(Service externalService) { final List<ServicePort> servicePortCandidates = externalService.getSpec().getPorts().stream() .filter(x -> x.getName().equals(Constants.REST_PORT_NAME)) .collect(Collectors.toList()); if (servicePortCandidates.isEmpty()) { throw new RuntimeException( "Failed to find port \"" + Constants.REST_PORT_NAME + "\" in Service \"" + ExternalServiceDecorator.getExternalServiceName(this.clusterId) + "\""); } final ServicePort externalServicePort = servicePortCandidates.get(0); final KubernetesConfigOptions.ServiceExposedType externalServiceType = KubernetesConfigOptions.ServiceExposedType.valueOf( externalService.getSpec().getType()); switch (externalServiceType) { case ClusterIP: case LoadBalancer: return externalServicePort.getPort(); case NodePort: return externalServicePort.getNodePort(); default: throw new RuntimeException("Unrecognized Service type: " + externalServiceType); } } private Optional<Endpoint> getRestEndPointFromService(Service service, int restPort) { if (service.getStatus() == null) { return Optional.empty(); } LoadBalancerStatus loadBalancer = service.getStatus().getLoadBalancer(); boolean hasExternalIP = service.getSpec() != null && service.getSpec().getExternalIPs() != null && !service.getSpec().getExternalIPs().isEmpty(); if (loadBalancer != null) { return getLoadBalancerRestEndpoint(loadBalancer, restPort); } else if (hasExternalIP) { final String address = service.getSpec().getExternalIPs().get(0); if (address != null && !address.isEmpty()) { return Optional.of(new Endpoint(address, restPort)); } } return Optional.empty(); } private Optional<Endpoint> getLoadBalancerRestEndpoint( LoadBalancerStatus loadBalancer, int restPort) { boolean hasIngress = loadBalancer.getIngress() != null && !loadBalancer.getIngress().isEmpty(); String address; if (hasIngress) { address = loadBalancer.getIngress().get(0).getIp(); if (address == null || address.isEmpty()) { address = loadBalancer.getIngress().get(0).getHostname(); } } else { address = internalClient.nodes().list().getItems().stream() .flatMap(node -> node.getStatus().getAddresses().stream()) .filter( nodeAddress -> nodePortAddressType .name() .equals(nodeAddress.getType())) .map(NodeAddress::getAddress) .filter(ip -> !ip.isEmpty()) .findAny() .orElse(null); if (address == null) { LOG.warn( "Unable to find any node ip with type [{}]. Please see [{}] config option for more details.", nodePortAddressType, KubernetesConfigOptions.REST_SERVICE_EXPOSED_NODE_PORT_ADDRESS_TYPE.key()); } } boolean noAddress = address == null || address.isEmpty(); return noAddress ? Optional.empty() : Optional.of(new Endpoint(address, restPort)); } }
class Fabric8FlinkKubeClient implements FlinkKubeClient { private static final Logger LOG = LoggerFactory.getLogger(Fabric8FlinkKubeClient.class); private final String clusterId; private final String namespace; private final int maxRetryAttempts; private final KubernetesConfigOptions.NodePortAddressType nodePortAddressType; private final NamespacedKubernetesClient internalClient; private final ExecutorService kubeClientExecutorService; private final AtomicReference<Deployment> masterDeploymentRef; public Fabric8FlinkKubeClient( Configuration flinkConfig, NamespacedKubernetesClient client, ExecutorService executorService) { this.clusterId = flinkConfig .getOptional(KubernetesConfigOptions.CLUSTER_ID) .orElseThrow( () -> new IllegalArgumentException( String.format( "Configuration option '%s' is not set.", KubernetesConfigOptions.CLUSTER_ID.key()))); this.namespace = flinkConfig.getString(KubernetesConfigOptions.NAMESPACE); this.maxRetryAttempts = flinkConfig.getInteger( KubernetesConfigOptions.KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES); this.nodePortAddressType = flinkConfig.get( KubernetesConfigOptions.REST_SERVICE_EXPOSED_NODE_PORT_ADDRESS_TYPE); this.internalClient = checkNotNull(client); this.kubeClientExecutorService = checkNotNull(executorService); this.masterDeploymentRef = new AtomicReference<>(); } @Override public void createJobManagerComponent(KubernetesJobManagerSpecification kubernetesJMSpec) { final Deployment deployment = kubernetesJMSpec.getDeployment(); final List<HasMetadata> accompanyingResources = kubernetesJMSpec.getAccompanyingResources(); LOG.debug( "Start to create deployment with spec {}{}", System.lineSeparator(), KubernetesUtils.tryToGetPrettyPrintYaml(deployment)); final Deployment createdDeployment = this.internalClient.apps().deployments().create(deployment); setOwnerReference(createdDeployment, accompanyingResources); this.internalClient.resourceList(accompanyingResources).createOrReplace(); } @Override @Override public CompletableFuture<Void> stopPod(String podName) { return CompletableFuture.runAsync( () -> this.internalClient.pods().withName(podName).delete(), kubeClientExecutorService); } @Override public Optional<Endpoint> getRestEndpoint(String clusterId) { Optional<KubernetesService> restService = getService(KubernetesService.ServiceType.REST_SERVICE, clusterId); if (!restService.isPresent()) { return Optional.empty(); } final Service service = restService.get().getInternalResource(); final int restPort = getRestPortFromExternalService(service); final KubernetesConfigOptions.ServiceExposedType serviceExposedType = ServiceType.classify(service); if (serviceExposedType.isClusterIP()) { return Optional.of( new Endpoint( ExternalServiceDecorator.getNamespacedExternalServiceName( clusterId, namespace), restPort)); } return getRestEndPointFromService(service, restPort); } @Override public List<KubernetesPod> getPodsWithLabels(Map<String, String> labels) { final List<Pod> podList = this.internalClient.pods().withLabels(labels).list().getItems(); if (podList == null || podList.isEmpty()) { return new ArrayList<>(); } return podList.stream().map(KubernetesPod::new).collect(Collectors.toList()); } @Override public void stopAndCleanupCluster(String clusterId) { this.internalClient .apps() .deployments() .withName(KubernetesUtils.getDeploymentName(clusterId)) .cascading(true) .delete(); } @Override public Optional<KubernetesService> getService( KubernetesService.ServiceType serviceType, String clusterId) { final String serviceName = getServiceName(serviceType, 
clusterId); final Service service = this.internalClient.services().withName(serviceName).fromServer().get(); if (service == null) { LOG.debug("Service {} does not exist", serviceName); return Optional.empty(); } return Optional.of(new KubernetesService(service)); } @Override public KubernetesWatch watchPodsAndDoCallback( Map<String, String> labels, WatchCallbackHandler<KubernetesPod> podCallbackHandler) throws Exception { return FutureUtils.retry( () -> CompletableFuture.supplyAsync( () -> new KubernetesWatch( this.internalClient .pods() .withLabels(labels) .watch( new KubernetesPodsWatcher( podCallbackHandler))), kubeClientExecutorService), maxRetryAttempts, t -> ExceptionUtils.findThrowable(t, KubernetesClientException.class) .isPresent(), kubeClientExecutorService) .get(); } @Override public KubernetesLeaderElector createLeaderElector( KubernetesLeaderElectionConfiguration leaderElectionConfiguration, KubernetesLeaderElector.LeaderCallbackHandler leaderCallbackHandler) { return new KubernetesLeaderElector( this.internalClient, leaderElectionConfiguration, leaderCallbackHandler); } @Override public CompletableFuture<Void> createConfigMap(KubernetesConfigMap configMap) { final String configMapName = configMap.getName(); return CompletableFuture.runAsync( () -> this.internalClient .configMaps() .create(configMap.getInternalResource()), kubeClientExecutorService) .exceptionally( throwable -> { throw new CompletionException( new KubernetesException( "Failed to create ConfigMap " + configMapName, throwable)); }); } @Override public Optional<KubernetesConfigMap> getConfigMap(String name) { final ConfigMap configMap = this.internalClient.configMaps().withName(name).get(); return configMap == null ? Optional.empty() : Optional.of(new KubernetesConfigMap(configMap)); } @Override public CompletableFuture<Boolean> checkAndUpdateConfigMap( String configMapName, Function<KubernetesConfigMap, Optional<KubernetesConfigMap>> updateFunction) { return FutureUtils.retry( () -> attemptCheckAndUpdateConfigMap(configMapName, updateFunction), maxRetryAttempts, t -> ExceptionUtils.findThrowable(t, KubernetesClientException.class).isPresent(), kubeClientExecutorService); } private CompletableFuture<Boolean> attemptCheckAndUpdateConfigMap( String configMapName, Function<KubernetesConfigMap, Optional<KubernetesConfigMap>> updateFunction) { return CompletableFuture.supplyAsync( () -> { final KubernetesConfigMap configMap = getConfigMap(configMapName) .orElseThrow( () -> new CompletionException( new KubernetesException( "Cannot retry checkAndUpdateConfigMap with configMap " + configMapName + " because it does not exist."))); final Optional<KubernetesConfigMap> maybeUpdate = updateFunction.apply(configMap); if (maybeUpdate.isPresent()) { try { internalClient .configMaps() .withName(configMapName) .lockResourceVersion(maybeUpdate.get().getResourceVersion()) .replace(maybeUpdate.get().getInternalResource()); return true; } catch (Throwable throwable) { LOG.debug( "Failed to update ConfigMap {} with data {}. 
Trying again.", configMap.getName(), configMap.getData()); throw new CompletionException( new PossibleInconsistentStateException(throwable)); } } return false; }, kubeClientExecutorService); } @Override public CompletableFuture<Void> deleteConfigMapsByLabels(Map<String, String> labels) { return CompletableFuture.runAsync( () -> this.internalClient.configMaps().withLabels(labels).delete(), kubeClientExecutorService); } @Override public CompletableFuture<Void> deleteConfigMap(String configMapName) { return CompletableFuture.runAsync( () -> this.internalClient.configMaps().withName(configMapName).delete(), kubeClientExecutorService); } @Override public KubernetesConfigMapSharedWatcher createConfigMapSharedWatcher( Map<String, String> labels) { return new KubernetesConfigMapSharedInformer(this.internalClient, labels); } @Override public void close() { this.internalClient.close(); ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, this.kubeClientExecutorService); } @Override public KubernetesPod loadPodFromTemplateFile(File file) { if (!file.exists()) { throw new FlinkRuntimeException( String.format("Pod template file %s does not exist.", file)); } return new KubernetesPod(this.internalClient.pods().load(file).get()); } @Override public CompletableFuture<Void> updateServiceTargetPort( KubernetesService.ServiceType serviceType, String clusterId, String portName, int targetPort) { LOG.debug("Update {} target port to {}", portName, targetPort); return CompletableFuture.runAsync( () -> getService(serviceType, clusterId) .ifPresent( service -> { final Service updatedService = new ServiceBuilder( service.getInternalResource()) .editSpec() .editMatchingPort( servicePortBuilder -> servicePortBuilder .build() .getName() .equals( portName)) .withTargetPort( new IntOrString(targetPort)) .endPort() .endSpec() .build(); this.internalClient .services() .withName( getServiceName(serviceType, clusterId)) .replace(updatedService); }), kubeClientExecutorService); } /** * Get the Kubernetes service name. * * @param serviceType The service type * @param clusterId The cluster id * @return Return the Kubernetes service name if the service type is known. */ private String getServiceName(KubernetesService.ServiceType serviceType, String clusterId) { switch (serviceType) { case REST_SERVICE: return ExternalServiceDecorator.getExternalServiceName(clusterId); case INTERNAL_SERVICE: return InternalServiceDecorator.getInternalServiceName(clusterId); default: throw new IllegalArgumentException( "Unrecognized service type: " + serviceType.name()); } } private void setOwnerReference(Deployment deployment, List<HasMetadata> resources) { final OwnerReference deploymentOwnerReference = new OwnerReferenceBuilder() .withName(deployment.getMetadata().getName()) .withApiVersion(deployment.getApiVersion()) .withUid(deployment.getMetadata().getUid()) .withKind(deployment.getKind()) .withController(true) .withBlockOwnerDeletion(true) .build(); resources.forEach( resource -> resource.getMetadata() .setOwnerReferences( Collections.singletonList(deploymentOwnerReference))); } /** Get rest port from the external Service. 
*/ private int getRestPortFromExternalService(Service externalService) { final List<ServicePort> servicePortCandidates = externalService.getSpec().getPorts().stream() .filter(x -> x.getName().equals(Constants.REST_PORT_NAME)) .collect(Collectors.toList()); if (servicePortCandidates.isEmpty()) { throw new RuntimeException( "Failed to find port \"" + Constants.REST_PORT_NAME + "\" in Service \"" + ExternalServiceDecorator.getExternalServiceName(this.clusterId) + "\""); } final ServicePort externalServicePort = servicePortCandidates.get(0); final KubernetesConfigOptions.ServiceExposedType externalServiceType = KubernetesConfigOptions.ServiceExposedType.valueOf( externalService.getSpec().getType()); switch (externalServiceType) { case ClusterIP: case LoadBalancer: return externalServicePort.getPort(); case NodePort: return externalServicePort.getNodePort(); default: throw new RuntimeException("Unrecognized Service type: " + externalServiceType); } } private Optional<Endpoint> getRestEndPointFromService(Service service, int restPort) { if (service.getStatus() == null) { return Optional.empty(); } LoadBalancerStatus loadBalancer = service.getStatus().getLoadBalancer(); boolean hasExternalIP = service.getSpec() != null && service.getSpec().getExternalIPs() != null && !service.getSpec().getExternalIPs().isEmpty(); if (loadBalancer != null) { return getLoadBalancerRestEndpoint(loadBalancer, restPort); } else if (hasExternalIP) { final String address = service.getSpec().getExternalIPs().get(0); if (address != null && !address.isEmpty()) { return Optional.of(new Endpoint(address, restPort)); } } return Optional.empty(); } private Optional<Endpoint> getLoadBalancerRestEndpoint( LoadBalancerStatus loadBalancer, int restPort) { boolean hasIngress = loadBalancer.getIngress() != null && !loadBalancer.getIngress().isEmpty(); String address; if (hasIngress) { address = loadBalancer.getIngress().get(0).getIp(); if (address == null || address.isEmpty()) { address = loadBalancer.getIngress().get(0).getHostname(); } } else { address = internalClient.nodes().list().getItems().stream() .flatMap(node -> node.getStatus().getAddresses().stream()) .filter( nodeAddress -> nodePortAddressType .name() .equals(nodeAddress.getType())) .map(NodeAddress::getAddress) .filter(ip -> !ip.isEmpty()) .findAny() .orElse(null); if (address == null) { LOG.warn( "Unable to find any node ip with type [{}]. Please see [{}] config option for more details.", nodePortAddressType, KubernetesConfigOptions.REST_SERVICE_EXPOSED_NODE_PORT_ADDRESS_TYPE.key()); } } boolean noAddress = address == null || address.isEmpty(); return noAddress ? Optional.empty() : Optional.of(new Endpoint(address, restPort)); } }
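The `attemptCheckAndUpdateConfigMap` method in the Flink Kubernetes client above implements optimistic concurrency: read the ConfigMap, apply an update function, and replace it only if the resource version still matches, retrying on conflict. A minimal, framework-free sketch of that compare-and-swap loop; the `VersionedStore` interface is hypothetical, standing in for the Kubernetes API server and its resourceVersion check:
```java
import java.util.Optional;
import java.util.function.Function;

// Sketch of the optimistic compare-and-swap retry used by
// checkAndUpdateConfigMap: read the current value, apply the update
// function, and retry from a fresh read if the versioned write fails.
final class OptimisticUpdater<T> {

    // Hypothetical store abstraction; a real implementation would be
    // backed by the API server's resourceVersion comparison.
    interface VersionedStore<T> {
        T read(String key);
        // Returns false if the stored version no longer matches 'expected'.
        boolean replaceIfVersionMatches(String key, T expected, T updated);
    }

    private final VersionedStore<T> store;
    private final int maxAttempts;

    OptimisticUpdater(VersionedStore<T> store, int maxAttempts) {
        this.store = store;
        this.maxAttempts = maxAttempts;
    }

    boolean checkAndUpdate(String key, Function<T, Optional<T>> updateFn) {
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            T current = store.read(key);
            Optional<T> updated = updateFn.apply(current);
            if (!updated.isPresent()) {
                return false; // nothing to change
            }
            if (store.replaceIfVersionMatches(key, current, updated.get())) {
                return true; // CAS succeeded
            }
            // Lost the race: another writer changed the resource; retry.
        }
        throw new IllegalStateException("Gave up after " + maxAttempts + " attempts");
    }
}
```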
The argument is simple: it's been a (loose) policy all along, so if we want to change it, we should at least do the bare minimum of figuring out whether the change actually makes sense, rather than just putting forth generic arguments that could apply to almost any situation.
public Uni<WebSocketClientConnection> connect() { if (baseUri == null) { throw new WebSocketClientException("Endpoint URI not set!"); } WebSocketClient client = vertx.createWebSocketClient(populateClientOptions()); WebSocketConnectOptions connectOptions = new WebSocketConnectOptions() .setSsl(baseUri.getScheme().equals("https")) .setHost(baseUri.getHost()) .setPort(baseUri.getPort()); StringBuilder requestUri = new StringBuilder(); String mergedPath = mergePath(baseUri.getPath(), replacePathParameters(path)); requestUri.append(mergedPath); if (baseUri.getQuery() != null) { requestUri.append("?").append(baseUri.getQuery()); } connectOptions.setURI(requestUri.toString()); for (Entry<String, List<String>> e : headers.entrySet()) { for (String val : e.getValue()) { connectOptions.addHeader(e.getKey(), val); } } subprotocols.forEach(connectOptions::addSubProtocol); URI serverEndpointUri; try { serverEndpointUri = new URI(baseUri.getScheme(), baseUri.getUserInfo(), baseUri.getHost(), baseUri.getPort(), mergedPath, baseUri.getQuery(), baseUri.getFragment()); } catch (URISyntaxException e) { throw new WebSocketClientException(e); } return Uni.createFrom().completionStage(() -> client.connect(connectOptions).toCompletionStage()) .map(ws -> { String clientId = BasicWebSocketConnector.class.getName(); TrafficLogger trafficLogger = TrafficLogger.forClient(config); WebSocketClientConnectionImpl connection = new WebSocketClientConnectionImpl(clientId, ws, codecs, pathParams, serverEndpointUri, headers, trafficLogger); if (trafficLogger != null) { trafficLogger.connectionOpened(connection); } connectionManager.add(BasicWebSocketConnectorImpl.class.getName(), connection); if (openHandler != null) { doExecute(connection, null, (c, ignored) -> openHandler.accept(c)); } if (textMessageHandler != null) { ws.textMessageHandler(new Handler<String>() { @Override public void handle(String message) { if (trafficLogger != null) { trafficLogger.textMessageReceived(connection, message); } doExecute(connection, message, textMessageHandler); } }); } if (binaryMessageHandler != null) { ws.binaryMessageHandler(new Handler<Buffer>() { @Override public void handle(Buffer message) { if (trafficLogger != null) { trafficLogger.binaryMessageReceived(connection, message); } doExecute(connection, message, binaryMessageHandler); } }); } if (pongMessageHandler != null) { ws.pongHandler(new Handler<Buffer>() { @Override public void handle(Buffer event) { doExecute(connection, event, pongMessageHandler); } }); } if (errorHandler != null) { ws.exceptionHandler(new Handler<Throwable>() { @Override public void handle(Throwable event) { doExecute(connection, event, errorHandler); } }); } ws.closeHandler(new Handler<Void>() { @Override public void handle(Void event) { if (trafficLogger != null) { trafficLogger.connectionClosed(connection); } if (closeHandler != null) { doExecute(connection, new CloseReason(ws.closeStatusCode(), ws.closeReason()), closeHandler); } connectionManager.remove(BasicWebSocketConnectorImpl.class.getName(), connection); client.close(); } }); return connection; }); }
return Uni.createFrom().completionStage(() -> client.connect(connectOptions).toCompletionStage())
public Uni<WebSocketClientConnection> connect() { if (baseUri == null) { throw new WebSocketClientException("Endpoint URI not set!"); } WebSocketClient client = vertx.createWebSocketClient(populateClientOptions()); WebSocketConnectOptions connectOptions = new WebSocketConnectOptions() .setSsl(baseUri.getScheme().equals("https")) .setHost(baseUri.getHost()) .setPort(baseUri.getPort()); StringBuilder requestUri = new StringBuilder(); String mergedPath = mergePath(baseUri.getPath(), replacePathParameters(path)); requestUri.append(mergedPath); if (baseUri.getQuery() != null) { requestUri.append("?").append(baseUri.getQuery()); } connectOptions.setURI(requestUri.toString()); for (Entry<String, List<String>> e : headers.entrySet()) { for (String val : e.getValue()) { connectOptions.addHeader(e.getKey(), val); } } subprotocols.forEach(connectOptions::addSubProtocol); URI serverEndpointUri; try { serverEndpointUri = new URI(baseUri.getScheme(), baseUri.getUserInfo(), baseUri.getHost(), baseUri.getPort(), mergedPath, baseUri.getQuery(), baseUri.getFragment()); } catch (URISyntaxException e) { throw new WebSocketClientException(e); } return Uni.createFrom().completionStage(() -> client.connect(connectOptions).toCompletionStage()) .map(ws -> { String clientId = BasicWebSocketConnector.class.getName(); TrafficLogger trafficLogger = TrafficLogger.forClient(config); WebSocketClientConnectionImpl connection = new WebSocketClientConnectionImpl(clientId, ws, codecs, pathParams, serverEndpointUri, headers, trafficLogger); if (trafficLogger != null) { trafficLogger.connectionOpened(connection); } connectionManager.add(BasicWebSocketConnectorImpl.class.getName(), connection); if (openHandler != null) { doExecute(connection, null, (c, ignored) -> openHandler.accept(c)); } if (textMessageHandler != null) { ws.textMessageHandler(new Handler<String>() { @Override public void handle(String message) { if (trafficLogger != null) { trafficLogger.textMessageReceived(connection, message); } doExecute(connection, message, textMessageHandler); } }); } if (binaryMessageHandler != null) { ws.binaryMessageHandler(new Handler<Buffer>() { @Override public void handle(Buffer message) { if (trafficLogger != null) { trafficLogger.binaryMessageReceived(connection, message); } doExecute(connection, message, binaryMessageHandler); } }); } if (pongMessageHandler != null) { ws.pongHandler(new Handler<Buffer>() { @Override public void handle(Buffer event) { doExecute(connection, event, pongMessageHandler); } }); } if (errorHandler != null) { ws.exceptionHandler(new Handler<Throwable>() { @Override public void handle(Throwable event) { doExecute(connection, event, errorHandler); } }); } ws.closeHandler(new Handler<Void>() { @Override public void handle(Void event) { if (trafficLogger != null) { trafficLogger.connectionClosed(connection); } if (closeHandler != null) { doExecute(connection, new CloseReason(ws.closeStatusCode(), ws.closeReason()), closeHandler); } connectionManager.remove(BasicWebSocketConnectorImpl.class.getName(), connection); client.close(); } }); return connection; }); }
class BasicWebSocketConnectorImpl extends WebSocketConnectorBase<BasicWebSocketConnectorImpl> implements BasicWebSocketConnector { private static final Logger LOG = Logger.getLogger(BasicWebSocketConnectorImpl.class); private ExecutionModel executionModel = ExecutionModel.BLOCKING; private Consumer<WebSocketClientConnection> openHandler; private BiConsumer<WebSocketClientConnection, String> textMessageHandler; private BiConsumer<WebSocketClientConnection, Buffer> binaryMessageHandler; private BiConsumer<WebSocketClientConnection, Buffer> pongMessageHandler; private BiConsumer<WebSocketClientConnection, CloseReason> closeHandler; private BiConsumer<WebSocketClientConnection, Throwable> errorHandler; BasicWebSocketConnectorImpl(Vertx vertx, Codecs codecs, ClientConnectionManager connectionManager, WebSocketsClientRuntimeConfig config, TlsConfigurationRegistry tlsConfigurationRegistry) { super(vertx, codecs, connectionManager, config, tlsConfigurationRegistry); } @Override public BasicWebSocketConnector executionModel(ExecutionModel model) { this.executionModel = Objects.requireNonNull(model); return self(); } @Override public BasicWebSocketConnector path(String path) { setPath(Objects.requireNonNull(path)); return self(); } @Override public BasicWebSocketConnector onOpen(Consumer<WebSocketClientConnection> consumer) { this.openHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onTextMessage(BiConsumer<WebSocketClientConnection, String> consumer) { this.textMessageHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onBinaryMessage(BiConsumer<WebSocketClientConnection, Buffer> consumer) { this.binaryMessageHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onPong(BiConsumer<WebSocketClientConnection, Buffer> consumer) { this.pongMessageHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onClose(BiConsumer<WebSocketClientConnection, CloseReason> consumer) { this.closeHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onError(BiConsumer<WebSocketClientConnection, Throwable> consumer) { this.errorHandler = Objects.requireNonNull(consumer); return self(); } @Override private <MESSAGE> void doExecute(WebSocketClientConnectionImpl connection, MESSAGE message, BiConsumer<WebSocketClientConnection, MESSAGE> consumer) { Context context = vertx.getOrCreateContext(); ContextSupport.createNewDuplicatedContext(context, connection).runOnContext(new Handler<Void>() { @Override public void handle(Void event) { if (executionModel == ExecutionModel.VIRTUAL_THREAD) { VirtualThreadsRecorder.getCurrent().execute(new Runnable() { public void run() { try { consumer.accept(connection, message); } catch (Exception e) { LOG.errorf(e, "Unable to call handler: " + connection); } } }); } else if (executionModel == ExecutionModel.BLOCKING) { vertx.executeBlocking(new Callable<Void>() { @Override public Void call() { try { consumer.accept(connection, message); } catch (Exception e) { LOG.errorf(e, "Unable to call handler: " + connection); } return null; } }, false); } else { try { consumer.accept(connection, message); } catch (Exception e) { LOG.errorf(e, "Unable to call handler: " + connection); } } } }); } private String mergePath(String path1, String path2) { StringBuilder path = new StringBuilder(); if (path1 != null) { path.append(path1); } if (path2 != null) { if 
(path1.endsWith("/")) { if (path2.startsWith("/")) { path.append(path2.substring(1)); } else { path.append(path2); } } else { if (path2.startsWith("/")) { path.append(path2); } else { path.append(path2.substring(1)); } } } return path.toString(); } }
class BasicWebSocketConnectorImpl extends WebSocketConnectorBase<BasicWebSocketConnectorImpl> implements BasicWebSocketConnector { private static final Logger LOG = Logger.getLogger(BasicWebSocketConnectorImpl.class); private ExecutionModel executionModel = ExecutionModel.BLOCKING; private Consumer<WebSocketClientConnection> openHandler; private BiConsumer<WebSocketClientConnection, String> textMessageHandler; private BiConsumer<WebSocketClientConnection, Buffer> binaryMessageHandler; private BiConsumer<WebSocketClientConnection, Buffer> pongMessageHandler; private BiConsumer<WebSocketClientConnection, CloseReason> closeHandler; private BiConsumer<WebSocketClientConnection, Throwable> errorHandler; BasicWebSocketConnectorImpl(Vertx vertx, Codecs codecs, ClientConnectionManager connectionManager, WebSocketsClientRuntimeConfig config, TlsConfigurationRegistry tlsConfigurationRegistry) { super(vertx, codecs, connectionManager, config, tlsConfigurationRegistry); } @Override public BasicWebSocketConnector executionModel(ExecutionModel model) { this.executionModel = Objects.requireNonNull(model); return self(); } @Override public BasicWebSocketConnector path(String path) { setPath(Objects.requireNonNull(path)); return self(); } @Override public BasicWebSocketConnector onOpen(Consumer<WebSocketClientConnection> consumer) { this.openHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onTextMessage(BiConsumer<WebSocketClientConnection, String> consumer) { this.textMessageHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onBinaryMessage(BiConsumer<WebSocketClientConnection, Buffer> consumer) { this.binaryMessageHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onPong(BiConsumer<WebSocketClientConnection, Buffer> consumer) { this.pongMessageHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onClose(BiConsumer<WebSocketClientConnection, CloseReason> consumer) { this.closeHandler = Objects.requireNonNull(consumer); return self(); } @Override public BasicWebSocketConnector onError(BiConsumer<WebSocketClientConnection, Throwable> consumer) { this.errorHandler = Objects.requireNonNull(consumer); return self(); } @Override private <MESSAGE> void doExecute(WebSocketClientConnectionImpl connection, MESSAGE message, BiConsumer<WebSocketClientConnection, MESSAGE> consumer) { Context context = vertx.getOrCreateContext(); ContextSupport.createNewDuplicatedContext(context, connection).runOnContext(new Handler<Void>() { @Override public void handle(Void event) { if (executionModel == ExecutionModel.VIRTUAL_THREAD) { VirtualThreadsRecorder.getCurrent().execute(new Runnable() { public void run() { try { consumer.accept(connection, message); } catch (Exception e) { LOG.errorf(e, "Unable to call handler: " + connection); } } }); } else if (executionModel == ExecutionModel.BLOCKING) { vertx.executeBlocking(new Callable<Void>() { @Override public Void call() { try { consumer.accept(connection, message); } catch (Exception e) { LOG.errorf(e, "Unable to call handler: " + connection); } return null; } }, false); } else { try { consumer.accept(connection, message); } catch (Exception e) { LOG.errorf(e, "Unable to call handler: " + connection); } } } }); } private String mergePath(String path1, String path2) { StringBuilder path = new StringBuilder(); if (path1 != null) { path.append(path1); } if (path2 != null) { if 
(path1.endsWith("/")) { if (path2.startsWith("/")) { path.append(path2.substring(1)); } else { path.append(path2); } } else { if (path2.startsWith("/")) { path.append(path2); } else { path.append(path2.substring(1)); } } } return path.toString(); } }
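The `Uni.createFrom().completionStage(() -> ...)` shape highlighted in the record above defers the connection attempt until subscription time; passing the stage directly would create it eagerly, once, at assembly time. A small sketch of the difference, assuming SmallRye Mutiny is on the classpath:
```java
import java.util.concurrent.CompletableFuture;
import io.smallrye.mutiny.Uni;

public class DeferredConnectExample {
    public static void main(String[] args) {
        // Eager: the CompletionStage is created once, when the Uni is
        // assembled, so every subscription observes that single result.
        CompletableFuture<String> eager = CompletableFuture.completedFuture("connected");
        Uni<String> uniEager = Uni.createFrom().completionStage(eager);

        // Deferred: the supplier runs on each subscription, so each
        // subscriber triggers a fresh "connect" attempt -- the shape used
        // by the connector above.
        Uni<String> uniDeferred = Uni.createFrom().completionStage(
                () -> CompletableFuture.supplyAsync(() -> "connected"));

        // Block for the results so the demo output is deterministic.
        System.out.println(uniDeferred.await().indefinitely());
        System.out.println(uniEager.await().indefinitely());
    }
}
```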
```suggestion .allMatch(bVarSymbol -> bVarSymbol.defaultableParam)) { ```
private boolean checkFillerValue(BObjectType type) { BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc; if (initFunction == null) { if ((type.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) { return false; } } else { if (initFunction.symbol.getReturnType().getKind() == TypeKind.ERROR) { return false; } if (!hasFillerValue(initFunction.symbol.getReturnType())) { return false; } if (!initFunction.symbol.getParameters().stream() .allMatch(bVarSymbol -> bVarSymbol.defaultableParam == true)) { return false; } } return true; }
.allMatch(bVarSymbol -> bVarSymbol.defaultableParam == true)) {
private boolean checkFillerValue(BObjectType type) { if ((type.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) { return false; } BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc; if (initFunction == null) { return true; } if (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) { return false; } for (BVarSymbol bVarSymbol : initFunction.symbol.getParameters()) { if (!bVarSymbol.defaultableParam) { return false; } } return true; }
class TypePair { BType sourceType; BType targetType; public TypePair(BType sourceType, BType targetType) { this.sourceType = sourceType; this.targetType = targetType; } @Override public boolean equals(Object obj) { if (!(obj instanceof TypePair)) { return false; } TypePair other = (TypePair) obj; return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType); } @Override public int hashCode() { return Objects.hash(sourceType, targetType); } }
class TypePair { BType sourceType; BType targetType; public TypePair(BType sourceType, BType targetType) { this.sourceType = sourceType; this.targetType = targetType; } @Override public boolean equals(Object obj) { if (!(obj instanceof TypePair)) { return false; } TypePair other = (TypePair) obj; return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType); } @Override public int hashCode() { return Objects.hash(sourceType, targetType); } }
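The suggestion above drops the redundant `== true` comparison; a boolean field can serve as the predicate result directly. A tiny sketch of the two equivalent forms, where the `Param` class is purely illustrative:
```java
import java.util.List;

public class BooleanPredicateExample {
    static class Param {
        final boolean defaultable;
        Param(boolean defaultable) { this.defaultable = defaultable; }
    }

    public static void main(String[] args) {
        List<Param> params = List.of(new Param(true), new Param(false));

        // Redundant: comparing a boolean to a boolean literal.
        boolean allDefaultable = params.stream()
                .allMatch(p -> p.defaultable == true);

        // Equivalent and idiomatic: use the boolean directly.
        boolean allDefaultableIdiomatic = params.stream()
                .allMatch(p -> p.defaultable);

        System.out.println(allDefaultable == allDefaultableIdiomatic); // true
    }
}
```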
Here is a code snippet to capture a duplicated context, creating one if needed: ```java private Context captureTheRightContext(Vertx vertx) { Context context = Vertx.currentContext(); if (context == null) { // Create both a root and a duplicated context context = ((ContextInternal) vertx.getOrCreateContext()).duplicate(); } else { if (isRootContext(context)) { context = ((ContextInternal) context).duplicate(); } } return context; } private boolean isRootContext(Context context) { return context instanceof EventLoopContext || context instanceof WorkerContext; } ```
public String uri() { return httpRequest.uri(); }
return httpRequest.uri();
public String uri() { return httpRequest.uri(); }
class HttpRequestSpan implements HttpRequest { private final HttpRequest httpRequest; private final MultiMap headers; private final Context context; private final io.opentelemetry.context.Context spanContext; HttpRequestSpan( final HttpRequest httpRequest, final MultiMap headers, final Context context, final io.opentelemetry.context.Context spanContext) { this.httpRequest = httpRequest; this.headers = headers; this.context = context; this.spanContext = spanContext; } @Override public int id() { return httpRequest.id(); } @Override @Override public String absoluteURI() { return httpRequest.absoluteURI(); } @Override public HttpMethod method() { return httpRequest.method(); } @Override public MultiMap headers() { return headers; } @Override public SocketAddress remoteAddress() { return httpRequest.remoteAddress(); } public Context getContext() { if (context == null) { throw new IllegalStateException(); } return context; } public io.opentelemetry.context.Context getSpanContext() { if (spanContext == null) { throw new IllegalStateException(); } return spanContext; } static HttpRequestSpan request(HttpRequest httpRequest, MultiMap headers, Context context, io.opentelemetry.context.Context spanContext) { return new HttpRequestSpan(httpRequest, headers, context, spanContext); } }
class HttpRequestSpan implements HttpRequest { private final HttpRequest httpRequest; private final MultiMap headers; private final Context context; private final io.opentelemetry.context.Context spanContext; HttpRequestSpan( final HttpRequest httpRequest, final MultiMap headers, final Context context, final io.opentelemetry.context.Context spanContext) { this.httpRequest = httpRequest; this.headers = headers; this.context = context; this.spanContext = spanContext; } @Override public int id() { return httpRequest.id(); } @Override @Override public String absoluteURI() { return httpRequest.absoluteURI(); } @Override public HttpMethod method() { return httpRequest.method(); } @Override public MultiMap headers() { return headers; } @Override public SocketAddress remoteAddress() { return httpRequest.remoteAddress(); } public Context getContext() { if (context == null) { throw new IllegalStateException("The Vert.x Context is not set"); } return context; } public io.opentelemetry.context.Context getSpanContext() { if (spanContext == null) { throw new IllegalStateException("The OpenTelemetry Context is not set"); } return spanContext; } static HttpRequestSpan request(HttpRequest httpRequest, MultiMap headers, Context context, io.opentelemetry.context.Context spanContext) { return new HttpRequestSpan(httpRequest, headers, context, spanContext); } }
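As a follow-up to the context-capture snippet in the comment above, here is a minimal sketch of how such a duplicated context might be used. Note that `ContextInternal` and `duplicate()` are Vert.x internal API, so this mirrors the reviewer's snippet rather than a supported public contract:
```java
import io.vertx.core.Context;
import io.vertx.core.Vertx;
import io.vertx.core.impl.ContextInternal;

public class DuplicatedContextExample {
    public static void main(String[] args) {
        Vertx vertx = Vertx.vertx();

        // Duplicate the root context so request-scoped data stored on the
        // duplicated context does not leak between unrelated operations.
        Context root = vertx.getOrCreateContext();
        Context duplicated = ((ContextInternal) root).duplicate();

        duplicated.runOnContext(v -> {
            // Runs on the same event loop as the root context, but with an
            // isolated local-data map.
            System.out.println("on duplicated context: " + Vertx.currentContext());
            vertx.close();
        });
    }
}
```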
@sberyozkin I can add something like that, but I can't imagine how it could happen, you see: - the default token state manager has lower priority, so it's not used - we assert that the token state is in the database (before = 1, after = 0; that's exactly the `assertTokenStateCount` you are commenting on), and it simply can't be that two token state managers are used, due to how CDI works. Anyway, if you are convinced it is needed, I'll do that.
public void testCodeFlow() throws IOException { try (final WebClient webClient = createWebClient()) { TextPage textPage = webClient.getPage(url.toString() + "unprotected"); assertEquals("unprotected", textPage.getContent()); HtmlPage page; page = webClient.getPage(url.toString() + "protected"); assertEquals("Sign in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); textPage = loginForm.getInputByName("login").click(); assertEquals("alice", textPage.getContent()); assertTokenStateCount(1); webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create(url.toString() + "protected/logout").toURL())); assertEquals(302, webResponse.getStatusCode()); assertNull(webClient.getCookieManager().getCookie("q_session")); webClient.getCookieManager().clearCookies(); assertTokenStateCount(0); } }
assertTokenStateCount(1);
public void testCodeFlow() throws IOException { try (final WebClient webClient = createWebClient()) { TextPage textPage = webClient.getPage(url.toString() + "unprotected"); assertEquals("unprotected", textPage.getContent()); HtmlPage page; page = webClient.getPage(url.toString() + "protected"); assertEquals("Sign in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); textPage = loginForm.getInputByName("login").click(); assertEquals("alice", textPage.getContent()); assertTokenStateCount(1); webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create(url.toString() + "protected/logout").toURL())); assertEquals(302, webResponse.getStatusCode()); assertNull(webClient.getCookieManager().getCookie("q_session")); webClient.getCookieManager().clearCookies(); assertTokenStateCount(0); } }
class AbstractDbTokenStateManagerTest { protected static QuarkusUnitTest createQuarkusUnitTest(String reactiveSqlClientExtension) { return createQuarkusUnitTest(reactiveSqlClientExtension, null); } protected static QuarkusUnitTest createQuarkusUnitTest(String reactiveSqlClientExtension, Consumer<JavaArchive> customizer) { return new QuarkusUnitTest() .withApplicationRoot((jar) -> { jar .addClasses(ProtectedResource.class, UnprotectedResource.class, PublicResource.class) .addAsResource("application.properties"); if (customizer != null) { customizer.accept(jar); } }) .setForcedDependencies( List.of(Dependency.of("io.quarkus", reactiveSqlClientExtension, Version.getVersion()))); } @TestHTTPResource URL url; @Test protected static void assertTokenStateCount(Integer tokenStateCount) { RestAssured .given() .get("public/db-state-manager-table-content") .then() .statusCode(200) .body(Matchers.is(tokenStateCount.toString())); } protected static WebClient createWebClient() { WebClient webClient = new WebClient(); webClient.setCssErrorHandler(new SilentCssErrorHandler()); return webClient; } }
class AbstractDbTokenStateManagerTest { protected static QuarkusUnitTest createQuarkusUnitTest(String reactiveSqlClientExtension) { return createQuarkusUnitTest(reactiveSqlClientExtension, null); } protected static QuarkusUnitTest createQuarkusUnitTest(String reactiveSqlClientExtension, Consumer<JavaArchive> customizer) { return new QuarkusUnitTest() .withApplicationRoot((jar) -> { jar .addClasses(ProtectedResource.class, UnprotectedResource.class, PublicResource.class) .addAsResource("application.properties"); if (customizer != null) { customizer.accept(jar); } }) .setForcedDependencies( List.of(Dependency.of("io.quarkus", reactiveSqlClientExtension, Version.getVersion()))); } @TestHTTPResource URL url; @Test protected static void assertTokenStateCount(Integer tokenStateCount) { RestAssured .given() .get("public/db-state-manager-table-content") .then() .statusCode(200) .body(Matchers.is(tokenStateCount.toString())); } protected static WebClient createWebClient() { WebClient webClient = new WebClient(); webClient.setCssErrorHandler(new SilentCssErrorHandler()); return webClient; } }
Builder construction is not expected to be expensive, i.e. this loading should not run until `build()` is called. Here it should only set a flag.
public BeamSqlEnvBuilder loadUdfUdafFromProvider() { ServiceLoader.<UdfUdafProvider>load(UdfUdafProvider.class) .forEach( ins -> { ins.getBeamSqlUdfs().forEach((udfName, udfClass) -> registerUdf(udfName, udfClass)); ins.getSerializableFunctionUdfs() .forEach((udfName, udfFn) -> registerUdf(udfName, udfFn)); ins.getUdafs().forEach((udafName, udafFn) -> registerUdaf(udafName, udafFn)); }); return this; }
ServiceLoader.<UdfUdafProvider>load(UdfUdafProvider.class)
public BeamSqlEnvBuilder loadUdfUdafFromProvider() { ServiceLoader.<UdfUdafProvider>load(UdfUdafProvider.class) .forEach( ins -> { ins.getBeamSqlUdfs().forEach((udfName, udfClass) -> registerUdf(udfName, udfClass)); ins.getSerializableFunctionUdfs() .forEach((udfName, udfFn) -> registerUdf(udfName, udfFn)); ins.getUdafs().forEach((udafName, udafFn) -> registerUdaf(udafName, udafFn)); }); return this; }
class BeamSqlEnvBuilder { private String queryPlannerClassName = "org.apache.beam.sdk.extensions.sql.impl.CalciteQueryPlanner"; private TableProvider initialTableProvider; private String currentSchemaName; private Map<String, TableProvider> schemaMap = new HashMap<>(); private Set<Map.Entry<String, Function>> functionSet = new HashSet<>(); public BeamSqlEnvBuilder setInitializeTableProvider(TableProvider tableProvider) { initialTableProvider = tableProvider; return this; } public BeamSqlEnvBuilder registerBuiltinUdf(Map<String, List<Method>> methods) { for (Map.Entry<String, List<Method>> entry : methods.entrySet()) { for (Method method : entry.getValue()) { functionSet.add(new SimpleEntry<>(entry.getKey(), UdfImpl.create(method))); } } return this; } public BeamSqlEnvBuilder addSchema(String name, TableProvider tableProvider) { if (schemaMap.containsKey(name)) { throw new RuntimeException("Schema " + name + " is registered twice."); } schemaMap.put(name, tableProvider); return this; } public BeamSqlEnvBuilder setCurrentSchema(String name) { currentSchemaName = name; return this; } /** Register a UDF function which can be used in SQL expression. */ public BeamSqlEnvBuilder registerUdf(String functionName, Class<?> clazz, String method) { functionSet.add(new SimpleEntry<>(functionName, UdfImpl.create(clazz, method))); return this; } /** Register a UDF function which can be used in SQL expression. */ public BeamSqlEnvBuilder registerUdf(String functionName, Class<? extends BeamSqlUdf> clazz) { return registerUdf(functionName, clazz, BeamSqlUdf.UDF_METHOD); } public BeamSqlEnvBuilder registerUdf(String functionName, SerializableFunction sfn) { return registerUdf(functionName, sfn.getClass(), "apply"); } /** * Register a UDAF function which can be used in GROUP-BY expression. See {@link * org.apache.beam.sdk.transforms.Combine.CombineFn} on how to implement a UDAF. */ public BeamSqlEnvBuilder registerUdaf(String functionName, Combine.CombineFn combineFn) { functionSet.add(new SimpleEntry<>(functionName, new UdafImpl(combineFn))); return this; } /** Load all UDF/UDAF from {@link UdfUdafProvider}. */ public BeamSqlEnvBuilder loadBeamBuiltinFunctions() { for (BeamBuiltinFunctionProvider provider : ServiceLoader.load(BeamBuiltinFunctionProvider.class)) { registerBuiltinUdf(provider.getBuiltinMethods()); } return this; } public BeamSqlEnvBuilder setQueryPlannerClassName(String name) { queryPlannerClassName = name; return this; } /** * Build function to create an instance of BeamSqlEnv based on preset fields. * * @return BeamSqlEnv. 
*/ public BeamSqlEnv build() { if (initialTableProvider == null) { throw new RuntimeException("initialTableProvider must be set in BeamSqlEnvBuilder."); } JdbcConnection jdbcConnection = JdbcDriver.connect(initialTableProvider); for (Map.Entry<String, TableProvider> schemaEntry : schemaMap.entrySet()) { jdbcConnection.setSchema(schemaEntry.getKey(), schemaEntry.getValue()); } if (currentSchemaName != null) { try { jdbcConnection.setSchema(currentSchemaName); } catch (SQLException e) { throw new RuntimeException(e); } } for (Map.Entry<String, Function> functionEntry : functionSet) { jdbcConnection.getCurrentSchemaPlus().add(functionEntry.getKey(), functionEntry.getValue()); } QueryPlanner planner; if (queryPlannerClassName.equals( "org.apache.beam.sdk.extensions.sql.impl.CalciteQueryPlanner")) { planner = new CalciteQueryPlanner(jdbcConnection); } else { try { planner = (QueryPlanner) Class.forName(queryPlannerClassName) .getConstructor(JdbcConnection.class) .newInstance(jdbcConnection); } catch (NoSuchMethodException | ClassNotFoundException | InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new RuntimeException( String.format("Cannot construct query planner %s", queryPlannerClassName), e); } } return new BeamSqlEnv(jdbcConnection, planner); } }
class BeamSqlEnvBuilder { private String queryPlannerClassName = "org.apache.beam.sdk.extensions.sql.impl.CalciteQueryPlanner"; private TableProvider initialTableProvider; private String currentSchemaName; private Map<String, TableProvider> schemaMap = new HashMap<>(); private Set<Map.Entry<String, Function>> functionSet = new HashSet<>(); public BeamSqlEnvBuilder setInitializeTableProvider(TableProvider tableProvider) { initialTableProvider = tableProvider; return this; } public BeamSqlEnvBuilder registerBuiltinUdf(Map<String, List<Method>> methods) { for (Map.Entry<String, List<Method>> entry : methods.entrySet()) { for (Method method : entry.getValue()) { functionSet.add(new SimpleEntry<>(entry.getKey(), UdfImpl.create(method))); } } return this; } public BeamSqlEnvBuilder addSchema(String name, TableProvider tableProvider) { if (schemaMap.containsKey(name)) { throw new RuntimeException("Schema " + name + " is registered twice."); } schemaMap.put(name, tableProvider); return this; } public BeamSqlEnvBuilder setCurrentSchema(String name) { currentSchemaName = name; return this; } /** Register a UDF function which can be used in SQL expression. */ public BeamSqlEnvBuilder registerUdf(String functionName, Class<?> clazz, String method) { functionSet.add(new SimpleEntry<>(functionName, UdfImpl.create(clazz, method))); return this; } /** Register a UDF function which can be used in SQL expression. */ public BeamSqlEnvBuilder registerUdf(String functionName, Class<? extends BeamSqlUdf> clazz) { return registerUdf(functionName, clazz, BeamSqlUdf.UDF_METHOD); } public BeamSqlEnvBuilder registerUdf(String functionName, SerializableFunction sfn) { return registerUdf(functionName, sfn.getClass(), "apply"); } /** * Register a UDAF function which can be used in GROUP-BY expression. See {@link * org.apache.beam.sdk.transforms.Combine.CombineFn} on how to implement a UDAF. */ public BeamSqlEnvBuilder registerUdaf(String functionName, Combine.CombineFn combineFn) { functionSet.add(new SimpleEntry<>(functionName, new UdafImpl(combineFn))); return this; } /** Load all UDF/UDAF from {@link UdfUdafProvider}. */ public BeamSqlEnvBuilder loadBeamBuiltinFunctions() { for (BeamBuiltinFunctionProvider provider : ServiceLoader.load(BeamBuiltinFunctionProvider.class)) { registerBuiltinUdf(provider.getBuiltinMethods()); } return this; } public BeamSqlEnvBuilder setQueryPlannerClassName(String name) { queryPlannerClassName = name; return this; } /** * Build function to create an instance of BeamSqlEnv based on preset fields. * * @return BeamSqlEnv. 
*/ public BeamSqlEnv build() { if (initialTableProvider == null) { throw new RuntimeException("initialTableProvider must be set in BeamSqlEnvBuilder."); } JdbcConnection jdbcConnection = JdbcDriver.connect(initialTableProvider); for (Map.Entry<String, TableProvider> schemaEntry : schemaMap.entrySet()) { jdbcConnection.setSchema(schemaEntry.getKey(), schemaEntry.getValue()); } if (currentSchemaName != null) { try { jdbcConnection.setSchema(currentSchemaName); } catch (SQLException e) { throw new RuntimeException(e); } } for (Map.Entry<String, Function> functionEntry : functionSet) { jdbcConnection.getCurrentSchemaPlus().add(functionEntry.getKey(), functionEntry.getValue()); } QueryPlanner planner; if (queryPlannerClassName.equals( "org.apache.beam.sdk.extensions.sql.impl.CalciteQueryPlanner")) { planner = new CalciteQueryPlanner(jdbcConnection); } else { try { planner = (QueryPlanner) Class.forName(queryPlannerClassName) .getConstructor(JdbcConnection.class) .newInstance(jdbcConnection); } catch (NoSuchMethodException | ClassNotFoundException | InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new RuntimeException( String.format("Cannot construct query planner %s", queryPlannerClassName), e); } } return new BeamSqlEnv(jdbcConnection, planner); } }
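Per the review comment, the fluent method should stay cheap and the `ServiceLoader` scan should be deferred to `build()`. A minimal sketch of that flag-then-load shape; `FunctionProvider` and `LazyEnvBuilder` are hypothetical stand-ins, not the Beam API:
```java
import java.util.ServiceLoader;

// Hypothetical provider interface, standing in for UdfUdafProvider.
interface FunctionProvider {
    void registerInto(LazyEnvBuilder builder);
}

// Sketch: the fluent method only records intent; the (potentially
// expensive) ServiceLoader scan runs once, inside build().
class LazyEnvBuilder {
    private boolean loadFromProviders;

    LazyEnvBuilder loadUdfUdafFromProvider() {
        this.loadFromProviders = true; // cheap: just set the flag
        return this;
    }

    Object build() {
        if (loadFromProviders) {
            // Deferred: classpath scanning happens at build time only.
            for (FunctionProvider provider : ServiceLoader.load(FunctionProvider.class)) {
                provider.registerInto(this);
            }
        }
        return new Object(); // stand-in for the built environment
    }
}
```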
Let's treat this as a no-op, then. Thanks for the explanation.
private Mac getMacInstance() { int masterKeyLatestHashCode = this.cosmosKeyCredential.getMasterKey().hashCode(); if (masterKeyLatestHashCode != this.masterKeyHashCode) { byte[] masterKeyBytes = this.cosmosKeyCredential.getMasterKey().getBytes(); byte[] masterKeyDecodedBytes = Utils.Base64Decoder.decode(masterKeyBytes); SecretKey signingKey = new SecretKeySpec(masterKeyDecodedBytes, "HMACSHA256"); try { Mac macInstance = Mac.getInstance("HMACSHA256"); macInstance.init(signingKey); this.masterKeyHashCode = masterKeyLatestHashCode; return macInstance; } catch (NoSuchAlgorithmException | InvalidKeyException e) { throw new IllegalStateException(e); } } else { try { return (Mac)this.macInstance.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException(e); } } }
int masterKeyLatestHashCode = this.cosmosKeyCredential.getMasterKey().hashCode();
private Mac getMacInstance() { int masterKeyLatestHashCode = this.cosmosKeyCredential.keyHashCode(); if (masterKeyLatestHashCode != this.masterKeyHashCode) { byte[] masterKeyBytes = this.cosmosKeyCredential.key().getBytes(); byte[] masterKeyDecodedBytes = Utils.Base64Decoder.decode(masterKeyBytes); SecretKey signingKey = new SecretKeySpec(masterKeyDecodedBytes, "HMACSHA256"); try { Mac macInstance = Mac.getInstance("HMACSHA256"); macInstance.init(signingKey); this.masterKeyHashCode = masterKeyLatestHashCode; return macInstance; } catch (NoSuchAlgorithmException | InvalidKeyException e) { throw new IllegalStateException(e); } } else { try { return (Mac)this.macInstance.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException(e); } } }
class BaseAuthorizationTokenProvider implements AuthorizationTokenProvider { private static final String AUTH_PREFIX = "type=master&ver=1.0&sig="; private final CosmosKeyCredential cosmosKeyCredential; private Mac macInstance; private int masterKeyHashCode; public BaseAuthorizationTokenProvider(CosmosKeyCredential cosmosKeyCredential) { this.cosmosKeyCredential = cosmosKeyCredential; this.macInstance = getMacInstance(); } private static String getResourceSegment(ResourceType resourceType) { switch (resourceType) { case Attachment: return Paths.ATTACHMENTS_PATH_SEGMENT; case Database: return Paths.DATABASES_PATH_SEGMENT; case Conflict: return Paths.CONFLICTS_PATH_SEGMENT; case Document: return Paths.DOCUMENTS_PATH_SEGMENT; case DocumentCollection: return Paths.COLLECTIONS_PATH_SEGMENT; case Offer: return Paths.OFFERS_PATH_SEGMENT; case Permission: return Paths.PERMISSIONS_PATH_SEGMENT; case StoredProcedure: return Paths.STORED_PROCEDURES_PATH_SEGMENT; case Trigger: return Paths.TRIGGERS_PATH_SEGMENT; case UserDefinedFunction: return Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT; case User: return Paths.USERS_PATH_SEGMENT; case PartitionKeyRange: return Paths.PARTITION_KEY_RANGES_PATH_SEGMENT; case Media: return Paths.MEDIA_PATH_SEGMENT; case DatabaseAccount: return ""; default: return null; } } /** * This API is a helper method to create auth header based on client request using masterkey. * * @param verb the verb. * @param resourceIdOrFullName the resource id or full name * @param resourceType the resource type. * @param headers the request headers. * @return the key authorization signature. */ public String generateKeyAuthorizationSignature(String verb, String resourceIdOrFullName, ResourceType resourceType, Map<String, String> headers) { return this.generateKeyAuthorizationSignature(verb, resourceIdOrFullName, BaseAuthorizationTokenProvider.getResourceSegment(resourceType).toLowerCase(), headers); } /** * This API is a helper method to create auth header based on client request using masterkey. 
* * @param verb the verb * @param resourceIdOrFullName the resource id or full name * @param resourceSegment the resource segment * @param headers the request headers * @return the key authorization signature */ public String generateKeyAuthorizationSignature(String verb, String resourceIdOrFullName, String resourceSegment, Map<String, String> headers) { if (verb == null || verb.isEmpty()) { throw new IllegalArgumentException("verb"); } if (resourceIdOrFullName == null) { resourceIdOrFullName = ""; } if (resourceSegment == null) { throw new IllegalArgumentException("resourceSegment"); } if (headers == null) { throw new IllegalArgumentException("headers"); } if (StringUtils.isEmpty(this.cosmosKeyCredential.getMasterKey())) { throw new IllegalArgumentException("key credentials cannot be empty"); } if(!PathsHelper.isNameBased(resourceIdOrFullName)) { resourceIdOrFullName = resourceIdOrFullName.toLowerCase(Locale.ROOT); } StringBuilder body = new StringBuilder(); body.append(verb.toLowerCase()) .append('\n') .append(resourceSegment) .append('\n') .append(resourceIdOrFullName) .append('\n'); if (headers.containsKey(HttpConstants.HttpHeaders.X_DATE)) { body.append(headers.get(HttpConstants.HttpHeaders.X_DATE).toLowerCase()); } body.append('\n'); if (headers.containsKey(HttpConstants.HttpHeaders.HTTP_DATE)) { body.append(headers.get(HttpConstants.HttpHeaders.HTTP_DATE).toLowerCase()); } body.append('\n'); Mac mac = getMacInstance(); byte[] digest = mac.doFinal(body.toString().getBytes()); String auth = Utils.encodeBase64String(digest); return AUTH_PREFIX + auth; } /** * This API is a helper method to create auth header based on client request using resourceTokens. * * @param resourceTokens the resource tokens. * @param path the path. * @param resourceId the resource id. * @return the authorization token. 
*/ public String getAuthorizationTokenUsingResourceTokens(Map<String, String> resourceTokens, String path, String resourceId) { if (resourceTokens == null) { throw new IllegalArgumentException("resourceTokens"); } String resourceToken = null; if (resourceTokens.containsKey(resourceId) && resourceTokens.get(resourceId) != null) { resourceToken = resourceTokens.get(resourceId); } else if (StringUtils.isEmpty(path) || StringUtils.isEmpty(resourceId)) { if (resourceTokens.size() > 0) { resourceToken = resourceTokens.values().iterator().next(); } } else { String[] pathParts = StringUtils.split(path, "/"); String[] resourceTypes = {"dbs", "colls", "docs", "sprocs", "udfs", "triggers", "users", "permissions", "attachments", "media", "conflicts"}; HashSet<String> resourceTypesSet = new HashSet<String>(); Collections.addAll(resourceTypesSet, resourceTypes); for (int i = pathParts.length - 1; i >= 0; --i) { if (!resourceTypesSet.contains(pathParts[i]) && resourceTokens.containsKey(pathParts[i])) { resourceToken = resourceTokens.get(pathParts[i]); } } } return resourceToken; } public String generateKeyAuthorizationSignature(String verb, URI uri, Map<String, String> headers) { if (StringUtils.isEmpty(verb)) { throw new IllegalArgumentException(String.format(RMResources.StringArgumentNullOrEmpty, "verb")); } if (uri == null) { throw new IllegalArgumentException("uri"); } if (headers == null) { throw new IllegalArgumentException("headers"); } PathInfo pathInfo = new PathInfo(false, StringUtils.EMPTY, StringUtils.EMPTY, false); getResourceTypeAndIdOrFullName(uri, pathInfo); return generateKeyAuthorizationSignatureNew(verb, pathInfo.resourceIdOrFullName, pathInfo.resourcePath, headers); } private String generateKeyAuthorizationSignatureNew(String verb, String resourceIdValue, String resourceType, Map<String, String> headers) { if (StringUtils.isEmpty(verb)) { throw new IllegalArgumentException(String.format(RMResources.StringArgumentNullOrEmpty, "verb")); } if (resourceType == null) { throw new IllegalArgumentException(String.format(RMResources.StringArgumentNullOrEmpty, "resourceType")); } if (headers == null) { throw new IllegalArgumentException("headers"); } String authResourceId = getAuthorizationResourceIdOrFullName(resourceType, resourceIdValue); String payLoad = generateMessagePayload(verb, authResourceId, resourceType, headers); Mac mac = this.getMacInstance(); byte[] digest = mac.doFinal(payLoad.getBytes()); String authorizationToken = Utils.encodeBase64String(digest); String authtoken = AUTH_PREFIX + authorizationToken; return HttpUtils.urlEncode(authtoken); } private String generateMessagePayload(String verb, String resourceId, String resourceType, Map<String, String> headers) { String xDate = headers.get(HttpConstants.HttpHeaders.X_DATE); String date = headers.get(HttpConstants.HttpHeaders.HTTP_DATE); if (StringUtils.isEmpty(xDate) && (StringUtils.isEmpty(date) || StringUtils.isWhitespace(date))) { headers.put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); xDate = Utils.nowAsRFC1123(); } if (!PathsHelper.isNameBased(resourceId)) { resourceId = resourceId.toLowerCase(); } StringBuilder payload = new StringBuilder(); payload.append(verb.toLowerCase()) .append('\n') .append(resourceType.toLowerCase()) .append('\n') .append(resourceId) .append('\n') .append(xDate.toLowerCase()) .append('\n') .append(StringUtils.isEmpty(xDate) ? 
date.toLowerCase() : "") .append('\n'); return payload.toString(); } private String getAuthorizationResourceIdOrFullName(String resourceType, String resourceIdOrFullName) { if (StringUtils.isEmpty(resourceType) || StringUtils.isEmpty(resourceIdOrFullName)) { return resourceIdOrFullName; } if (PathsHelper.isNameBased(resourceIdOrFullName)) { return resourceIdOrFullName; } if (resourceType.equalsIgnoreCase(Paths.OFFERS_PATH_SEGMENT) || resourceType.equalsIgnoreCase(Paths.PARTITIONS_PATH_SEGMENT) || resourceType.equalsIgnoreCase(Paths.TOPOLOGY_PATH_SEGMENT) || resourceType.equalsIgnoreCase(Paths.RID_RANGE_PATH_SEGMENT)) { return resourceIdOrFullName; } ResourceId parsedRId = ResourceId.parse(resourceIdOrFullName); if (resourceType.equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) { return parsedRId.getDatabaseId().toString(); } else if (resourceType.equalsIgnoreCase(Paths.USERS_PATH_SEGMENT)) { return parsedRId.getUserId().toString(); } else if (resourceType.equalsIgnoreCase(Paths.COLLECTIONS_PATH_SEGMENT)) { return parsedRId.getDocumentCollectionId().toString(); } else if (resourceType.equalsIgnoreCase(Paths.DOCUMENTS_PATH_SEGMENT)) { return parsedRId.getDocumentId().toString(); } else { return resourceIdOrFullName; } } private void getResourceTypeAndIdOrFullName(URI uri, PathInfo pathInfo) { if (uri == null) { throw new IllegalArgumentException("uri"); } pathInfo.resourcePath = StringUtils.EMPTY; pathInfo.resourceIdOrFullName = StringUtils.EMPTY; String[] segments = StringUtils.split(uri.toString(), Constants.Properties.PATH_SEPARATOR); if (segments == null || segments.length < 1) { throw new IllegalArgumentException(RMResources.InvalidUrl); } String pathAndQuery = StringUtils.EMPTY ; if(StringUtils.isNotEmpty(uri.getPath())) { pathAndQuery+= uri.getPath(); } if(StringUtils.isNotEmpty(uri.getQuery())) { pathAndQuery+="?"; pathAndQuery+= uri.getQuery(); } if (!PathsHelper.tryParsePathSegments(pathAndQuery, pathInfo, null)) { pathInfo.resourcePath = StringUtils.EMPTY; pathInfo.resourceIdOrFullName = StringUtils.EMPTY; } } }
class BaseAuthorizationTokenProvider implements AuthorizationTokenProvider { private static final String AUTH_PREFIX = "type=master&ver=1.0&sig="; private final CosmosKeyCredential cosmosKeyCredential; private final Mac macInstance; private int masterKeyHashCode; public BaseAuthorizationTokenProvider(CosmosKeyCredential cosmosKeyCredential) { this.cosmosKeyCredential = cosmosKeyCredential; this.macInstance = getMacInstance(); } private static String getResourceSegment(ResourceType resourceType) { switch (resourceType) { case Attachment: return Paths.ATTACHMENTS_PATH_SEGMENT; case Database: return Paths.DATABASES_PATH_SEGMENT; case Conflict: return Paths.CONFLICTS_PATH_SEGMENT; case Document: return Paths.DOCUMENTS_PATH_SEGMENT; case DocumentCollection: return Paths.COLLECTIONS_PATH_SEGMENT; case Offer: return Paths.OFFERS_PATH_SEGMENT; case Permission: return Paths.PERMISSIONS_PATH_SEGMENT; case StoredProcedure: return Paths.STORED_PROCEDURES_PATH_SEGMENT; case Trigger: return Paths.TRIGGERS_PATH_SEGMENT; case UserDefinedFunction: return Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT; case User: return Paths.USERS_PATH_SEGMENT; case PartitionKeyRange: return Paths.PARTITION_KEY_RANGES_PATH_SEGMENT; case Media: return Paths.MEDIA_PATH_SEGMENT; case DatabaseAccount: return ""; default: return null; } } /** * This API is a helper method to create auth header based on client request using masterkey. * * @param verb the verb. * @param resourceIdOrFullName the resource id or full name * @param resourceType the resource type. * @param headers the request headers. * @return the key authorization signature. */ public String generateKeyAuthorizationSignature(String verb, String resourceIdOrFullName, ResourceType resourceType, Map<String, String> headers) { return this.generateKeyAuthorizationSignature(verb, resourceIdOrFullName, BaseAuthorizationTokenProvider.getResourceSegment(resourceType).toLowerCase(), headers); } /** * This API is a helper method to create auth header based on client request using masterkey. 
* * @param verb the verb * @param resourceIdOrFullName the resource id or full name * @param resourceSegment the resource segment * @param headers the request headers * @return the key authorization signature */ public String generateKeyAuthorizationSignature(String verb, String resourceIdOrFullName, String resourceSegment, Map<String, String> headers) { if (verb == null || verb.isEmpty()) { throw new IllegalArgumentException("verb"); } if (resourceIdOrFullName == null) { resourceIdOrFullName = ""; } if (resourceSegment == null) { throw new IllegalArgumentException("resourceSegment"); } if (headers == null) { throw new IllegalArgumentException("headers"); } if (StringUtils.isEmpty(this.cosmosKeyCredential.key())) { throw new IllegalArgumentException("key credentials cannot be empty"); } if(!PathsHelper.isNameBased(resourceIdOrFullName)) { resourceIdOrFullName = resourceIdOrFullName.toLowerCase(Locale.ROOT); } StringBuilder body = new StringBuilder(); body.append(verb.toLowerCase()) .append('\n') .append(resourceSegment) .append('\n') .append(resourceIdOrFullName) .append('\n'); if (headers.containsKey(HttpConstants.HttpHeaders.X_DATE)) { body.append(headers.get(HttpConstants.HttpHeaders.X_DATE).toLowerCase()); } body.append('\n'); if (headers.containsKey(HttpConstants.HttpHeaders.HTTP_DATE)) { body.append(headers.get(HttpConstants.HttpHeaders.HTTP_DATE).toLowerCase()); } body.append('\n'); Mac mac = getMacInstance(); byte[] digest = mac.doFinal(body.toString().getBytes()); String auth = Utils.encodeBase64String(digest); return AUTH_PREFIX + auth; } /** * This API is a helper method to create auth header based on client request using resourceTokens. * * @param resourceTokens the resource tokens. * @param path the path. * @param resourceId the resource id. * @return the authorization token. 
*/ public String getAuthorizationTokenUsingResourceTokens(Map<String, String> resourceTokens, String path, String resourceId) { if (resourceTokens == null) { throw new IllegalArgumentException("resourceTokens"); } String resourceToken = null; if (resourceTokens.containsKey(resourceId) && resourceTokens.get(resourceId) != null) { resourceToken = resourceTokens.get(resourceId); } else if (StringUtils.isEmpty(path) || StringUtils.isEmpty(resourceId)) { if (resourceTokens.size() > 0) { resourceToken = resourceTokens.values().iterator().next(); } } else { String[] pathParts = StringUtils.split(path, "/"); String[] resourceTypes = {"dbs", "colls", "docs", "sprocs", "udfs", "triggers", "users", "permissions", "attachments", "media", "conflicts"}; HashSet<String> resourceTypesSet = new HashSet<String>(); Collections.addAll(resourceTypesSet, resourceTypes); for (int i = pathParts.length - 1; i >= 0; --i) { if (!resourceTypesSet.contains(pathParts[i]) && resourceTokens.containsKey(pathParts[i])) { resourceToken = resourceTokens.get(pathParts[i]); } } } return resourceToken; } public String generateKeyAuthorizationSignature(String verb, URI uri, Map<String, String> headers) { if (StringUtils.isEmpty(verb)) { throw new IllegalArgumentException(String.format(RMResources.StringArgumentNullOrEmpty, "verb")); } if (uri == null) { throw new IllegalArgumentException("uri"); } if (headers == null) { throw new IllegalArgumentException("headers"); } PathInfo pathInfo = new PathInfo(false, StringUtils.EMPTY, StringUtils.EMPTY, false); getResourceTypeAndIdOrFullName(uri, pathInfo); return generateKeyAuthorizationSignatureNew(verb, pathInfo.resourceIdOrFullName, pathInfo.resourcePath, headers); } private String generateKeyAuthorizationSignatureNew(String verb, String resourceIdValue, String resourceType, Map<String, String> headers) { if (StringUtils.isEmpty(verb)) { throw new IllegalArgumentException(String.format(RMResources.StringArgumentNullOrEmpty, "verb")); } if (resourceType == null) { throw new IllegalArgumentException(String.format(RMResources.StringArgumentNullOrEmpty, "resourceType")); } if (headers == null) { throw new IllegalArgumentException("headers"); } String authResourceId = getAuthorizationResourceIdOrFullName(resourceType, resourceIdValue); String payLoad = generateMessagePayload(verb, authResourceId, resourceType, headers); Mac mac = this.getMacInstance(); byte[] digest = mac.doFinal(payLoad.getBytes()); String authorizationToken = Utils.encodeBase64String(digest); String authtoken = AUTH_PREFIX + authorizationToken; return HttpUtils.urlEncode(authtoken); } private String generateMessagePayload(String verb, String resourceId, String resourceType, Map<String, String> headers) { String xDate = headers.get(HttpConstants.HttpHeaders.X_DATE); String date = headers.get(HttpConstants.HttpHeaders.HTTP_DATE); if (StringUtils.isEmpty(xDate) && (StringUtils.isEmpty(date) || StringUtils.isWhitespace(date))) { headers.put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); xDate = Utils.nowAsRFC1123(); } if (!PathsHelper.isNameBased(resourceId)) { resourceId = resourceId.toLowerCase(); } StringBuilder payload = new StringBuilder(); payload.append(verb.toLowerCase()) .append('\n') .append(resourceType.toLowerCase()) .append('\n') .append(resourceId) .append('\n') .append(xDate.toLowerCase()) .append('\n') .append(StringUtils.isEmpty(xDate) ? 
date.toLowerCase() : "") .append('\n'); return payload.toString(); } private String getAuthorizationResourceIdOrFullName(String resourceType, String resourceIdOrFullName) { if (StringUtils.isEmpty(resourceType) || StringUtils.isEmpty(resourceIdOrFullName)) { return resourceIdOrFullName; } if (PathsHelper.isNameBased(resourceIdOrFullName)) { return resourceIdOrFullName; } if (resourceType.equalsIgnoreCase(Paths.OFFERS_PATH_SEGMENT) || resourceType.equalsIgnoreCase(Paths.PARTITIONS_PATH_SEGMENT) || resourceType.equalsIgnoreCase(Paths.TOPOLOGY_PATH_SEGMENT) || resourceType.equalsIgnoreCase(Paths.RID_RANGE_PATH_SEGMENT)) { return resourceIdOrFullName; } ResourceId parsedRId = ResourceId.parse(resourceIdOrFullName); if (resourceType.equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) { return parsedRId.getDatabaseId().toString(); } else if (resourceType.equalsIgnoreCase(Paths.USERS_PATH_SEGMENT)) { return parsedRId.getUserId().toString(); } else if (resourceType.equalsIgnoreCase(Paths.COLLECTIONS_PATH_SEGMENT)) { return parsedRId.getDocumentCollectionId().toString(); } else if (resourceType.equalsIgnoreCase(Paths.DOCUMENTS_PATH_SEGMENT)) { return parsedRId.getDocumentId().toString(); } else { return resourceIdOrFullName; } } private void getResourceTypeAndIdOrFullName(URI uri, PathInfo pathInfo) { if (uri == null) { throw new IllegalArgumentException("uri"); } pathInfo.resourcePath = StringUtils.EMPTY; pathInfo.resourceIdOrFullName = StringUtils.EMPTY; String[] segments = StringUtils.split(uri.toString(), Constants.Properties.PATH_SEPARATOR); if (segments == null || segments.length < 1) { throw new IllegalArgumentException(RMResources.InvalidUrl); } String pathAndQuery = StringUtils.EMPTY ; if(StringUtils.isNotEmpty(uri.getPath())) { pathAndQuery+= uri.getPath(); } if(StringUtils.isNotEmpty(uri.getQuery())) { pathAndQuery+="?"; pathAndQuery+= uri.getQuery(); } if (!PathsHelper.tryParsePathSegments(pathAndQuery, pathInfo, null)) { pathInfo.resourcePath = StringUtils.EMPTY; pathInfo.resourceIdOrFullName = StringUtils.EMPTY; } } }
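For readers following the record above: the signature construction follows the Cosmos DB master-key scheme, where the lower-cased verb, resource type, and date (plus the resource id) are each newline-terminated, then signed with HMAC-SHA256 and base64-encoded. A self-contained sketch of that payload shape; the key and values are placeholders, and it assumes (as for Cosmos DB master keys) that the key string is itself base64-encoded:

```java
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.util.Base64;

// Minimal sketch of the payload assembled by generateMessagePayload above.
// base64Key, verb, resourceType, resourceId and xDate are placeholders.
static String sign(String base64Key, String verb, String resourceType,
                   String resourceId, String xDate) throws Exception {
    String payload = verb.toLowerCase() + '\n'
            + resourceType.toLowerCase() + '\n'
            + resourceId + '\n'          // name-based ids keep their case
            + xDate.toLowerCase() + '\n'
            + '\n';                      // empty slot for the HTTP date header
    Mac mac = Mac.getInstance("HmacSHA256");
    mac.init(new SecretKeySpec(Base64.getDecoder().decode(base64Key), "HmacSHA256"));
    return Base64.getEncoder().encodeToString(mac.doFinal(payload.getBytes()));
}
```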
I would love to, and in both MongoDB and Hibernate Panache! I'll open an issue for it, so we'll see if someone else would love it too :)
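For context, the improvement being volunteered here appears to be a delete-by-id shortcut, so the resource would not need to load the entity just to delete it. A purely hypothetical sketch of what that could look like; `deleteById` is an assumed helper name, not an API confirmed by this exchange:

```java
// Hypothetical convenience, for illustration only:
@DELETE
@Path("/{id}")
public CompletionStage<Void> deleteBook(@PathParam("id") String id) {
    // one round-trip instead of findById(...).thenCompose(book -> book.delete())
    return ReactiveBookEntity.deleteById(new ObjectId(id)).thenApply(deleted -> null);
}
```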
public CompletionStage<Void> deleteBook(@PathParam("id") String id) { return ReactiveBookEntity.findById(new ObjectId(id)).thenCompose(book -> book.delete()); }
return ReactiveBookEntity.findById(new ObjectId(id)).thenCompose(book -> book.delete());
public CompletionStage<Void> deleteBook(@PathParam("id") String id) { return ReactiveBookEntity.findById(new ObjectId(id)).thenCompose(book -> book.delete()); }
class ReactiveBookEntityResource { private static final Logger LOGGER = Logger.getLogger(ReactiveBookEntityResource.class); @PostConstruct void init() { String databaseName = ReactiveBookEntity.mongoDatabase().getName(); String collectionName = ReactiveBookEntity.mongoCollection().getNamespace().getCollectionName(); LOGGER.infov("Using BookEntity[database={0}, collection={1}]", databaseName, collectionName); } @GET public CompletionStage<List<ReactiveBookEntity>> getBooks(@QueryParam("sort") String sort) { if (sort != null) { return ReactiveBookEntity.listAll(Sort.ascending(sort)); } return ReactiveBookEntity.listAll(); } @GET @Path("/stream") @Produces(MediaType.SERVER_SENT_EVENTS) @SseElementType(MediaType.APPLICATION_JSON) public Publisher<ReactiveBookEntity> streamBooks(@QueryParam("sort") String sort) { if (sort != null) { return ReactiveBookEntity.streamAll(Sort.ascending(sort)); } return ReactiveBookEntity.streamAll(); } @POST public CompletionStage<Response> addBook(ReactiveBookEntity book) { return book.persist().thenApply(v -> { String id = book.id.toString(); return Response.created(URI.create("/books/entity" + id)).build(); }); } @PUT public CompletionStage<Response> updateBook(ReactiveBookEntity book) { return book.update().thenApply(v -> Response.accepted().build()); } @PATCH public CompletionStage<Response> upsertBook(ReactiveBookEntity book) { return book.persistOrUpdate().thenApply(v -> Response.accepted().build()); } @DELETE @Path("/{id}") @GET @Path("/{id}") public CompletionStage<ReactiveBookEntity> getBook(@PathParam("id") String id) { return ReactiveBookEntity.findById(new ObjectId(id)); } @GET @Path("/search/{author}") public CompletionStage<List<ReactiveBookEntity>> getBooksByAuthor(@PathParam("author") String author) { return ReactiveBookEntity.list("author", author); } @GET @Path("/search") public CompletionStage<ReactiveBookEntity> search(@QueryParam("author") String author, @QueryParam("title") String title, @QueryParam("dateFrom") String dateFrom, @QueryParam("dateTo") String dateTo) { if (author != null) { return ReactiveBookEntity.find("{'author': ?1,'bookTitle': ?2}", author, title).firstResult(); } return ReactiveBookEntity .find("{'creationDate': {$gte: ?1}, 'creationDate': {$lte: ?2}}", LocalDate.parse(dateFrom), LocalDate.parse(dateTo)) .firstResult(); } @GET @Path("/search2") public CompletionStage<ReactiveBookEntity> search2(@QueryParam("author") String author, @QueryParam("title") String title, @QueryParam("dateFrom") String dateFrom, @QueryParam("dateTo") String dateTo) { if (author != null) { return ReactiveBookEntity.find("{'author': :author,'bookTitle': :title}", Parameters.with("author", author).and("title", title)).firstResult(); } return ReactiveBookEntity.find("{'creationDate': {$gte: :dateFrom}, 'creationDate': {$lte: :dateTo}}", Parameters.with("dateFrom", LocalDate.parse(dateFrom)).and("dateTo", LocalDate.parse(dateTo))).firstResult(); } }
class ReactiveBookEntityResource { private static final Logger LOGGER = Logger.getLogger(ReactiveBookEntityResource.class); @PostConstruct void init() { String databaseName = ReactiveBookEntity.mongoDatabase().getName(); String collectionName = ReactiveBookEntity.mongoCollection().getNamespace().getCollectionName(); LOGGER.infov("Using BookEntity[database={0}, collection={1}]", databaseName, collectionName); } @GET public CompletionStage<List<ReactiveBookEntity>> getBooks(@QueryParam("sort") String sort) { if (sort != null) { return ReactiveBookEntity.listAll(Sort.ascending(sort)); } return ReactiveBookEntity.listAll(); } @GET @Path("/stream") @Produces(MediaType.SERVER_SENT_EVENTS) @SseElementType(MediaType.APPLICATION_JSON) public Publisher<ReactiveBookEntity> streamBooks(@QueryParam("sort") String sort) { if (sort != null) { return ReactiveBookEntity.streamAll(Sort.ascending(sort)); } return ReactiveBookEntity.streamAll(); } @POST public CompletionStage<Response> addBook(ReactiveBookEntity book) { return book.persist().thenApply(v -> { String id = book.id.toString(); return Response.created(URI.create("/books/entity" + id)).build(); }); } @PUT public CompletionStage<Response> updateBook(ReactiveBookEntity book) { return book.update().thenApply(v -> Response.accepted().build()); } @PATCH public CompletionStage<Response> upsertBook(ReactiveBookEntity book) { return book.persistOrUpdate().thenApply(v -> Response.accepted().build()); } @DELETE @Path("/{id}") @GET @Path("/{id}") public CompletionStage<ReactiveBookEntity> getBook(@PathParam("id") String id) { return ReactiveBookEntity.findById(new ObjectId(id)); } @GET @Path("/search/{author}") public CompletionStage<List<ReactiveBookEntity>> getBooksByAuthor(@PathParam("author") String author) { return ReactiveBookEntity.list("author", author); } @GET @Path("/search") public CompletionStage<ReactiveBookEntity> search(@QueryParam("author") String author, @QueryParam("title") String title, @QueryParam("dateFrom") String dateFrom, @QueryParam("dateTo") String dateTo) { if (author != null) { return ReactiveBookEntity.find("{'author': ?1,'bookTitle': ?2}", author, title).firstResult(); } return ReactiveBookEntity .find("{'creationDate': {$gte: ?1}, 'creationDate': {$lte: ?2}}", LocalDate.parse(dateFrom), LocalDate.parse(dateTo)) .firstResult(); } @GET @Path("/search2") public CompletionStage<ReactiveBookEntity> search2(@QueryParam("author") String author, @QueryParam("title") String title, @QueryParam("dateFrom") String dateFrom, @QueryParam("dateTo") String dateTo) { if (author != null) { return ReactiveBookEntity.find("{'author': :author,'bookTitle': :title}", Parameters.with("author", author).and("title", title)).firstResult(); } return ReactiveBookEntity.find("{'creationDate': {$gte: :dateFrom}, 'creationDate': {$lte: :dateTo}}", Parameters.with("dateFrom", LocalDate.parse(dateFrom)).and("dateTo", LocalDate.parse(dateTo))).firstResult(); } @DELETE public CompletionStage<Void> deleteAll() { return ReactiveBookEntity.deleteAll().thenApply(l -> null); } }
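As an aside for readers of this record: the resource above demonstrates both of MongoDB with Panache's query-binding styles, positional (`?1`, `?2`) and named (`:author` plus `Parameters.with(...)`). A minimal side-by-side extract, with entity and field names taken from the record:

```java
// Positional parameters: bound in the order they appear in the query.
ReactiveBookEntity.find("{'author': ?1, 'bookTitle': ?2}", author, title);

// Named parameters: bound by name through the Parameters builder.
ReactiveBookEntity.find("{'author': :author, 'bookTitle': :title}",
        Parameters.with("author", author).and("title", title));
```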
You'll want to remove this before merging, as live tests will fail. I've seen something similar to this in multiple projects; mind filing an issue to add infrastructure to `azure-core-test` which enables test proxying when an environment variable is set? #Resolved
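As for what the suggested `azure-core-test` infrastructure might look like, here is a minimal sketch: a client that only routes through a local debugging proxy when an environment variable is present, so live CI runs stay untouched. The variable name is an assumption, not an existing convention:

```java
import com.azure.core.http.HttpClient;
import com.azure.core.http.ProxyOptions;
import com.azure.core.http.netty.NettyAsyncHttpClientBuilder;
import java.net.InetSocketAddress;

// Hypothetical opt-in proxying: AZURE_TEST_PROXY_HOST is an assumed variable name.
static HttpClient createHttpClient() {
    String proxyHost = System.getenv("AZURE_TEST_PROXY_HOST");
    NettyAsyncHttpClientBuilder builder = new NettyAsyncHttpClientBuilder();
    if (proxyHost != null) {
        // 8888 mirrors the Fiddler-style port hardcoded in the snippet below.
        builder.proxy(new ProxyOptions(ProxyOptions.Type.HTTP,
                new InetSocketAddress(proxyHost, 8888)));
    }
    return builder.build();
}
```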
ContainerRegistryClientBuilder getContainerRegistryBuilder(HttpClient httpClient) { Configuration configuration = new Configuration() .put("java.net.useSystemProxies", "true") .put("http.proxyHost", "localhost") .put("http.proxyPort", "8888") .put("http.proxyUser", "1") .put("http.proxyPassword", "1"); List<Function<String, String>> redactors = new ArrayList<>(); redactors.add(data -> redact(data, JSON_PROPERTY_VALUE_REDACTION_PATTERN.matcher(data), "REDACTED")); ContainerRegistryClientBuilder builder = new ContainerRegistryClientBuilder() .endpoint(getEndpoint()) .configuration(configuration) .httpClient(httpClient == null ? interceptorManager.getPlaybackClient() : httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY)) .addPolicy(interceptorManager.getRecordPolicy(redactors)); if (getTestMode() == TestMode.PLAYBACK) { builder.credential(new FakeCredentials()); } else { builder.credential(new DefaultAzureCredentialBuilder().build()); } return builder; }
.put("http.proxyPassword", "1");
ContainerRegistryClientBuilder getContainerRegistryBuilder(HttpClient httpClient) { List<Function<String, String>> redactors = new ArrayList<>(); redactors.add(data -> redact(data, JSON_PROPERTY_VALUE_REDACTION_PATTERN.matcher(data), "REDACTED")); ContainerRegistryClientBuilder builder = new ContainerRegistryClientBuilder() .endpoint(getEndpoint()) .httpClient(httpClient == null ? interceptorManager.getPlaybackClient() : httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY)) .addPolicy(interceptorManager.getRecordPolicy(redactors)); if (getTestMode() == TestMode.PLAYBACK) { builder.credential(new FakeCredentials()); } else { builder.credential(new DefaultAzureCredentialBuilder().build()); } return builder; }
class ContainerRegistryClientTestBase extends TestBase { private static final String AZURE_FORM_RECOGNIZER_ENDPOINT = "CONTAINERREGISTRY_ENDPOINT"; private static final String INVALID_KEY = "invalid key"; private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile("(\".*_token\":\"(.*)\".*)"); private Duration durationTestMode; @Override protected void beforeTest() { if (interceptorManager.isPlaybackMode()) { durationTestMode = ONE_NANO_DURATION; } else { durationTestMode = DEFAULT_POLL_INTERVAL; } } static class FakeCredentials implements TokenCredential { @Override public Mono<AccessToken> getToken(TokenRequestContext tokenRequestContext) { return Mono.just(new AccessToken("someFakeToken", OffsetDateTime.MAX)); } } protected String getEndpoint() { return interceptorManager.isPlaybackMode() ? "https: : Configuration.getGlobalConfiguration().get(AZURE_FORM_RECOGNIZER_ENDPOINT); } private String redact(String content, Matcher matcher, String replacement) { while (matcher.find()) { if (matcher.groupCount() == 2) { String captureGroup = matcher.group(1); if (!CoreUtils.isNullOrEmpty(captureGroup)) { content = content.replace(matcher.group(2), replacement); } } } return content; } }
class ContainerRegistryClientTestBase extends TestBase { private static final String AZURE_FORM_RECOGNIZER_ENDPOINT = "CONTAINERREGISTRY_ENDPOINT"; private static final String INVALID_KEY = "invalid key"; private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile("(\".*_token\":\"(.*)\".*)"); private Duration durationTestMode; @Override protected void beforeTest() { if (interceptorManager.isPlaybackMode()) { durationTestMode = ONE_NANO_DURATION; } else { durationTestMode = DEFAULT_POLL_INTERVAL; } } static class FakeCredentials implements TokenCredential { @Override public Mono<AccessToken> getToken(TokenRequestContext tokenRequestContext) { return Mono.just(new AccessToken("someFakeToken", OffsetDateTime.MAX)); } } protected String getEndpoint() { return interceptorManager.isPlaybackMode() ? "https: : Configuration.getGlobalConfiguration().get(AZURE_FORM_RECOGNIZER_ENDPOINT); } private String redact(String content, Matcher matcher, String replacement) { while (matcher.find()) { if (matcher.groupCount() == 2) { String captureGroup = matcher.group(1); if (!CoreUtils.isNullOrEmpty(captureGroup)) { content = content.replace(matcher.group(2), replacement); } } } return content; } }
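A standalone illustration of the redaction helper used in the test base above; the token value is a placeholder:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Same pattern as JSON_PROPERTY_VALUE_REDACTION_PATTERN: group(2) captures the
// token value between the quotes, which is then replaced wholesale.
static String redactTokens(String content) {
    Pattern pattern = Pattern.compile("(\".*_token\":\"(.*)\".*)");
    Matcher matcher = pattern.matcher(content);
    while (matcher.find()) {
        content = content.replace(matcher.group(2), "REDACTED");
    }
    return content;
}
// redactTokens("{\"refresh_token\":\"abc123\"}") -> {"refresh_token":"REDACTED"}
```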
@stuartwdouglas Sure, but returning `403` would be consistent with how we handle mismatched Origins for the preflights. This filter used to return `200` for mismatched origins on preflight requests, but we changed it to `403` after some discussions which you may recall (including for consistency with Vert.x). I see: if we block the method for an actual POST/etc. request anyway, then for the preflight request it does not really matter whether it is 403 or not; but if a 403 can prevent some browsers from even trying to follow up with an actual request, then it should be good. Can we agree on `403` in case of preflight method or header mismatches?
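If that `403` contract were adopted, the disallowed `PUT` preflight in this record's test would be asserted roughly as follows. This is a sketch of the proposed behavior, not of the test as it stands (the test below still expects `200`):

```java
// Sketch: a preflight requesting a method outside quarkus.http.cors.methods
// would be rejected outright under the proposed contract.
given().header("Origin", origin)
        .header("Access-Control-Request-Method", "PUT") // not in GET,OPTIONS,POST
        .options("/test").then()
        .statusCode(403);
```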
public void corsPreflightTest() { String origin = "http: String headers = "X-Custom"; given().header("Origin", origin) .header("Access-Control-Request-Method", "GET") .header("Access-Control-Request-Headers", headers) .when() .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST") .header("Access-Control-Allow-Headers", headers); given().header("Origin", origin) .header("Access-Control-Request-Method", "POST") .header("Access-Control-Request-Headers", headers) .when() .auth().basic("test", "test") .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST") .header("Access-Control-Allow-Headers", headers); given().header("Origin", origin) .header("Access-Control-Request-Method", "GET") .header("Access-Control-Request-Headers", headers) .when() .auth().basic("test", "wrongpassword") .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST") .header("Access-Control-Allow-Headers", headers); given().header("Origin", origin) .header("Access-Control-Request-Method", "POST") .header("Access-Control-Request-Headers", headers) .when() .auth().basic("user", "user") .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST") .header("Access-Control-Allow-Headers", headers); given().header("Origin", origin) .header("Access-Control-Request-Method", "PUT") .header("Access-Control-Request-Headers", headers) .when() .auth().basic("user", "user") .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST") .header("Access-Control-Allow-Headers", headers); }
.statusCode(200)
public void corsPreflightTest() { String origin = "http: String headers = "X-Custom"; given().header("Origin", origin) .header("Access-Control-Request-Method", "GET") .header("Access-Control-Request-Headers", headers) .when() .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST") .header("Access-Control-Allow-Headers", headers); given().header("Origin", origin) .header("Access-Control-Request-Method", "POST") .header("Access-Control-Request-Headers", headers) .when() .auth().basic("test", "test") .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST") .header("Access-Control-Allow-Headers", headers); given().header("Origin", origin) .header("Access-Control-Request-Method", "GET") .header("Access-Control-Request-Headers", headers) .when() .auth().basic("test", "wrongpassword") .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST") .header("Access-Control-Allow-Headers", headers); given().header("Origin", origin) .header("Access-Control-Request-Method", "POST") .header("Access-Control-Request-Headers", headers) .when() .auth().basic("user", "user") .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST") .header("Access-Control-Allow-Headers", headers); given().header("Origin", origin) .header("Access-Control-Request-Method", "PUT") .header("Access-Control-Request-Headers", headers) .when() .auth().basic("user", "user") .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST") .header("Access-Control-Allow-Headers", headers); }
class CORSSecurityTestCase { private static final String APP_PROPS = "" + "quarkus.http.cors=true\n" + "quarkus.http.cors.origins=*\n" + "quarkus.http.cors.methods=GET, OPTIONS, POST\n" + "quarkus.http.auth.basic=true\n" + "quarkus.http.auth.policy.r1.roles-allowed=test\n" + "quarkus.http.auth.permission.roles1.paths=/test\n" + "quarkus.http.auth.permission.roles1.policy=r1\n"; @RegisterExtension static QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(new Supplier<>() { @Override public JavaArchive get() { return ShrinkWrap.create(JavaArchive.class) .addClasses(TestIdentityProvider.class, TestIdentityController.class, PathHandler.class) .addAsResource(new StringAsset(APP_PROPS), "application.properties"); } }); @BeforeAll public static void setup() { TestIdentityController.resetRoles().add("test", "test", "test").add("user", "user", "user"); } @Test @DisplayName("Handles a preflight CORS request correctly") @Test @DisplayName("Handles a direct CORS request correctly") public void corsNoPreflightTest() { String origin = "http: given().header("Origin", origin) .when() .get("/test").then() .statusCode(401) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST"); given().header("Origin", origin) .when() .auth().basic("test", "test") .get("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST") .body(Matchers.equalTo("test:/test")); given().header("Origin", origin) .when() .auth().basic("test", "wrongpassword") .get("/test").then() .statusCode(401) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST"); given().header("Origin", origin) .when() .auth().basic("user", "user") .get("/test").then() .statusCode(403) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET, OPTIONS, POST"); } }
class CORSSecurityTestCase { private static final String APP_PROPS = "" + "quarkus.http.cors=true\n" + "quarkus.http.cors.origins=*\n" + "quarkus.http.cors.methods=GET,OPTIONS,POST\n" + "quarkus.http.auth.basic=true\n" + "quarkus.http.auth.policy.r1.roles-allowed=test\n" + "quarkus.http.auth.permission.roles1.paths=/test\n" + "quarkus.http.auth.permission.roles1.policy=r1\n"; @RegisterExtension static QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(new Supplier<>() { @Override public JavaArchive get() { return ShrinkWrap.create(JavaArchive.class) .addClasses(TestIdentityProvider.class, TestIdentityController.class, PathHandler.class) .addAsResource(new StringAsset(APP_PROPS), "application.properties"); } }); @BeforeAll public static void setup() { TestIdentityController.resetRoles().add("test", "test", "test").add("user", "user", "user"); } @Test @DisplayName("Handles a preflight CORS request correctly") @Test @DisplayName("Handles a direct CORS request correctly") public void corsNoPreflightTest() { String origin = "http: given().header("Origin", origin) .when() .get("/test").then() .statusCode(401) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST"); given().header("Origin", origin) .when() .auth().basic("test", "test") .get("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST") .body(Matchers.equalTo("test:/test")); given().header("Origin", origin) .when() .auth().basic("test", "wrongpassword") .get("/test").then() .statusCode(401) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST"); given().header("Origin", origin) .when() .auth().basic("user", "user") .get("/test").then() .statusCode(403) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", "GET,OPTIONS,POST"); } }
My original idea of the contract was: higher priority wins, but other ``AutoAddScopeBuildItem``s can log a warning or throw an exception. And if a scope was added in another transformer before the ``AutoAddScopeBuildItem``s are processed, then just log a debug message (note that all ``AutoAddScopeBuildItem``s are processed in one annotation transformer).
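The callback half of that contract shows up in the updated code below as `getScopeAlreadyAdded()`, a `BiConsumer<DotName, String>`. A build item that prefers to fail hard rather than log could supply something like this sketch (the surrounding build-item wiring is omitted):

```java
import java.util.function.BiConsumer;
import org.jboss.jandex.DotName;

// Sketch: invoked when a higher-priority AutoAddScopeBuildItem (or an earlier
// annotation transformer) has already added a scope to the class.
BiConsumer<DotName, String> failOnConflict = (scope, reason) -> {
    throw new IllegalStateException(
            "Scope " + scope + " was already added for reason: " + reason);
};
```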
public void transform(TransformationContext context) { ClassInfo clazz = context.getTarget().asClass(); if (scopes.isScopeDeclaredOn(clazz)) { return; } DotName scope = scopes.getScope(context.getAnnotations()).map(AnnotationInstance::name).orElse(null); if (scope != null) { LOGGER.debugf("Scope %s was already added by another annotation transformer", scope); return; } Boolean requiresContainerServices = null; String reason = null; for (AutoAddScopeBuildItem autoScope : sortedAutoScopes) { if (autoScope.isContainerServicesRequired()) { if (requiresContainerServices == null) { requiresContainerServices = requiresContainerServices(clazz, containerAnnotationNames, beanArchiveIndex.getIndex()); } if (!requiresContainerServices) { continue; } } if (autoScope.test(clazz, context.getAnnotations(), beanArchiveIndex.getIndex())) { if (scope != null) { LOGGER.debugf("Scope %s was already added for reason: %s", scope, reason); continue; } scope = autoScope.getDefaultScope(); reason = autoScope.getReason(); context.transform().add(scope).done(); if (autoScope.isUnremovable()) { unremovables.add(clazz.name()); } LOGGER.debugf("Automatically added scope %s to class %s: %s", scope, clazz, autoScope.getReason()); break; } } }
LOGGER.debugf("Scope %s was already added for reason: %s", scope, reason);
public void transform(TransformationContext context) { if (scopes.isScopeIn(context.getAnnotations())) { return; } ClassInfo clazz = context.getTarget().asClass(); DotName scope = null; Boolean requiresContainerServices = null; String reason = null; for (AutoAddScopeBuildItem autoScope : sortedAutoScopes) { if (autoScope.isContainerServicesRequired()) { if (requiresContainerServices == null) { requiresContainerServices = requiresContainerServices(clazz, containerAnnotationNames, beanArchiveIndex.getIndex()); } if (!requiresContainerServices) { continue; } } if (autoScope.test(clazz, context.getAnnotations(), beanArchiveIndex.getIndex())) { if (scope != null) { BiConsumer<DotName, String> consumer = autoScope.getScopeAlreadyAdded(); if (consumer != null) { consumer.accept(scope, reason); } else { LOGGER.debugf("Scope %s was already added for reason: %s", scope, reason); } continue; } scope = autoScope.getDefaultScope(); reason = autoScope.getReason(); context.transform().add(scope).done(); if (autoScope.isUnremovable()) { unremovables.add(clazz.name()); } LOGGER.debugf("Automatically added scope %s to class %s: %s", scope, clazz, autoScope.getReason()); } } }
class AutoAddScopeProcessor { private static final Logger LOGGER = Logger.getLogger(AutoAddScopeProcessor.class); @BuildStep void annotationTransformer(List<AutoAddScopeBuildItem> autoScopes, CustomScopeAnnotationsBuildItem scopes, List<AutoInjectAnnotationBuildItem> autoInjectAnnotations, BuildProducer<AnnotationsTransformerBuildItem> annotationsTransformers, BuildProducer<UnremovableBeanBuildItem> unremovableBeans, BeanArchiveIndexBuildItem beanArchiveIndex) throws Exception { if (autoScopes.isEmpty()) { return; } List<AutoAddScopeBuildItem> sortedAutoScopes = autoScopes.stream() .sorted(Comparator.comparingInt(AutoAddScopeBuildItem::getPriority).reversed()).collect(Collectors.toList()); Set<DotName> containerAnnotationNames = autoInjectAnnotations.stream().flatMap(a -> a.getAnnotationNames().stream()) .collect(Collectors.toSet()); containerAnnotationNames.add(DotNames.POST_CONSTRUCT); containerAnnotationNames.add(DotNames.PRE_DESTROY); containerAnnotationNames.add(DotNames.INJECT); Set<DotName> unremovables = new HashSet<>(); annotationsTransformers.produce(new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { @Override public boolean appliesTo(Kind kind) { return kind == Kind.CLASS; } @Override public int getPriority() { return DEFAULT_PRIORITY * 2; } @Override })); if (!unremovables.isEmpty()) { unremovableBeans.produce(new UnremovableBeanBuildItem(new Predicate<BeanInfo>() { @Override public boolean test(BeanInfo bean) { return bean.isClassBean() && unremovables.contains(bean.getBeanClass()); } })); } } private boolean requiresContainerServices(ClassInfo clazz, Set<DotName> containerAnnotationNames, IndexView index) { if (hasContainerAnnotation(clazz, containerAnnotationNames)) { return true; } if (index != null) { DotName superName = clazz.superName(); while (superName != null && !superName.equals(DotNames.OBJECT)) { final ClassInfo superClass = index.getClassByName(superName); if (superClass != null) { if (hasContainerAnnotation(superClass, containerAnnotationNames)) { return true; } superName = superClass.superName(); } else { superName = null; } } } return false; } private boolean hasContainerAnnotation(ClassInfo clazz, Set<DotName> containerAnnotationNames) { if (clazz.annotations().isEmpty() || containerAnnotationNames.isEmpty()) { return false; } return containsAny(clazz, containerAnnotationNames); } private boolean containsAny(ClassInfo clazz, Set<DotName> annotationNames) { for (DotName annotation : clazz.annotations().keySet()) { if (annotationNames.contains(annotation)) { return true; } } return false; } }
class AutoAddScopeProcessor { private static final Logger LOGGER = Logger.getLogger(AutoAddScopeProcessor.class); @BuildStep void annotationTransformer(List<AutoAddScopeBuildItem> autoScopes, CustomScopeAnnotationsBuildItem scopes, List<AutoInjectAnnotationBuildItem> autoInjectAnnotations, BuildProducer<AnnotationsTransformerBuildItem> annotationsTransformers, BuildProducer<UnremovableBeanBuildItem> unremovableBeans, BeanArchiveIndexBuildItem beanArchiveIndex) throws Exception { if (autoScopes.isEmpty()) { return; } List<AutoAddScopeBuildItem> sortedAutoScopes = autoScopes.stream() .sorted(Comparator.comparingInt(AutoAddScopeBuildItem::getPriority).reversed()).collect(Collectors.toList()); Set<DotName> containerAnnotationNames = autoInjectAnnotations.stream().flatMap(a -> a.getAnnotationNames().stream()) .collect(Collectors.toSet()); containerAnnotationNames.add(DotNames.POST_CONSTRUCT); containerAnnotationNames.add(DotNames.PRE_DESTROY); containerAnnotationNames.add(DotNames.INJECT); Set<DotName> unremovables = new HashSet<>(); annotationsTransformers.produce(new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { @Override public boolean appliesTo(Kind kind) { return kind == Kind.CLASS; } @Override public int getPriority() { return DEFAULT_PRIORITY + 1000; } @Override })); if (!unremovables.isEmpty()) { unremovableBeans.produce(new UnremovableBeanBuildItem(new Predicate<BeanInfo>() { @Override public boolean test(BeanInfo bean) { return bean.isClassBean() && unremovables.contains(bean.getBeanClass()); } })); } } private boolean requiresContainerServices(ClassInfo clazz, Set<DotName> containerAnnotationNames, IndexView index) { if (hasContainerAnnotation(clazz, containerAnnotationNames)) { return true; } if (index != null) { DotName superName = clazz.superName(); while (superName != null && !superName.equals(DotNames.OBJECT)) { final ClassInfo superClass = index.getClassByName(superName); if (superClass != null) { if (hasContainerAnnotation(superClass, containerAnnotationNames)) { return true; } superName = superClass.superName(); } else { superName = null; } } } return false; } private boolean hasContainerAnnotation(ClassInfo clazz, Set<DotName> containerAnnotationNames) { if (clazz.annotations().isEmpty() || containerAnnotationNames.isEmpty()) { return false; } return containsAny(clazz, containerAnnotationNames); } private boolean containsAny(ClassInfo clazz, Set<DotName> annotationNames) { for (DotName annotation : clazz.annotations().keySet()) { if (annotationNames.contains(annotation)) { return true; } } return false; } }
just to note - make sure `inner().retentionPolicy()` is initialized before accessing its properties (e.g. `withEnabled(..)`)
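The guard this note asks for can be a small lazy initializer; the sketch below matches the `ensureRetentionPolicy()` helper that the updated code in this record introduces:

```java
// Initialize the retention policy on first access so withEnabled(..)/withDays(..)
// never dereference a null inner object.
private void ensureRetentionPolicy() {
    if (this.inner().retentionPolicy() == null) {
        this.inner().withRetentionPolicy(new RetentionPolicyParameters());
    }
}
```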
public Update withRetentionPolicyEnabled() { this.inner().retentionPolicy().withEnabled(true); return this; }
return this;
public Update withRetentionPolicyEnabled() { ensureRetentionPolicy(); this.inner().retentionPolicy().withEnabled(true); return this; }
class FlowLogSettingsImpl extends RefreshableWrapperImpl<FlowLogInformationInner, FlowLogSettings> implements FlowLogSettings, FlowLogSettings.Update { private final NetworkWatcherImpl parent; FlowLogSettingsImpl(NetworkWatcherImpl parent, FlowLogInformationInner inner) { super(inner); this.parent = parent; } @Override public FlowLogSettings apply() { return applyAsync().toBlocking().last(); } @Override public Observable<FlowLogSettings> applyAsync() { return this.parent().manager().inner().networkWatchers() .setFlowLogConfigurationAsync(parent().resourceGroupName(), parent().name(), this.inner()) .map(new Func1<FlowLogInformationInner, FlowLogSettings>() { @Override public FlowLogSettings call(FlowLogInformationInner flowLogInformationInner) { return new FlowLogSettingsImpl(FlowLogSettingsImpl.this.parent, flowLogInformationInner); } }); } @Override public ServiceFuture<FlowLogSettings> applyAsync(ServiceCallback<FlowLogSettings> callback) { return ServiceFuture.fromBody(applyAsync(), callback); } @Override public Update withLoggingEnabled() { this.inner().withEnabled(true); return this; } @Override public Update withLoggingDisabled() { this.inner().withEnabled(false); return this; } @Override public Update withStorageAccount(String storageId) { this.inner().withStorageId(storageId); return this; } @Override @Override public Update withRetentionPolicyDisabled() { this.inner().retentionPolicy().withEnabled(false); return this; } @Override public Update withRetentionPolicyDays(int days) { this.inner().retentionPolicy().withDays(days); return this; } @Override public Update update() { return this; } @Override protected Observable<FlowLogInformationInner> getInnerAsync() { return this.parent().manager().inner().networkWatchers() .getFlowLogStatusAsync(parent().resourceGroupName(), parent().name(), inner().targetResourceId()); } @Override public NetworkWatcherImpl parent() { return parent; } @Override public String key() { return null; } @Override public String targetResourceId() { return inner().targetResourceId(); } @Override public String storageId() { return inner().storageId(); } @Override public boolean enabled() { return inner().enabled(); } @Override public boolean isRetentionEnabled() { return inner().retentionPolicy().enabled(); } @Override public int retentionDays() { return inner().retentionPolicy().days(); } }
class FlowLogSettingsImpl extends RefreshableWrapperImpl<FlowLogInformationInner, FlowLogSettings> implements FlowLogSettings, FlowLogSettings.Update { private final NetworkWatcherImpl parent; private final String nsgId; FlowLogSettingsImpl(NetworkWatcherImpl parent, FlowLogInformationInner inner, String nsgId) { super(inner); this.parent = parent; this.nsgId = nsgId; } @Override public FlowLogSettings apply() { return applyAsync().toBlocking().last(); } @Override public Observable<FlowLogSettings> applyAsync() { return this.parent().manager().inner().networkWatchers() .setFlowLogConfigurationAsync(parent().resourceGroupName(), parent().name(), this.inner()) .map(new Func1<FlowLogInformationInner, FlowLogSettings>() { @Override public FlowLogSettings call(FlowLogInformationInner flowLogInformationInner) { return new FlowLogSettingsImpl(FlowLogSettingsImpl.this.parent, flowLogInformationInner, nsgId); } }); } @Override public ServiceFuture<FlowLogSettings> applyAsync(ServiceCallback<FlowLogSettings> callback) { return ServiceFuture.fromBody(applyAsync(), callback); } @Override public Update withLogging() { this.inner().withEnabled(true); return this; } @Override public Update withoutLogging() { this.inner().withEnabled(false); return this; } @Override public Update withStorageAccount(String storageId) { this.inner().withStorageId(storageId); return this; } @Override @Override public Update withRetentionPolicyDisabled() { ensureRetentionPolicy(); this.inner().retentionPolicy().withEnabled(false); return this; } @Override public Update withRetentionPolicyDays(int days) { ensureRetentionPolicy(); this.inner().retentionPolicy().withDays(days); return this; } private void ensureRetentionPolicy() { if (this.inner().retentionPolicy() == null) { this.inner().withRetentionPolicy(new RetentionPolicyParameters()); } } @Override public Update update() { return this; } @Override protected Observable<FlowLogInformationInner> getInnerAsync() { return this.parent().manager().inner().networkWatchers() .getFlowLogStatusAsync(parent().resourceGroupName(), parent().name(), inner().targetResourceId()); } @Override public NetworkWatcherImpl parent() { return parent; } @Override public String key() { return null; } @Override public String targetResourceId() { return inner().targetResourceId(); } @Override public String storageId() { return inner().storageId(); } @Override public boolean enabled() { return inner().enabled(); } @Override public boolean isRetentionEnabled() { ensureRetentionPolicy(); return inner().retentionPolicy().enabled(); } @Override public int retentionDays() { ensureRetentionPolicy(); return inner().retentionPolicy().days(); } @Override public String networkSecurityGroupId() { return nsgId; } }
`outputCompletedElements` is called after an element has been completed by the asyncUserFunction. Only at that point do the queue entries actually have a changed state. There is no other code path that can actually make a queue entry completed, so consuming the queue at any other location will not yield a different result. The only other way an item can actually be completed is if we insert a watermark into an empty queue, as watermarks are always completed.
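The watermark special case mentioned at the end corresponds directly to the operator's watermark path, shown later in this record, which is why the queue is also drained there:

```java
// Watermark entries are completed on insertion, so a watermark added to an
// otherwise empty queue may immediately become emittable.
public void processWatermark(Watermark mark) throws Exception {
    addToWorkQueue(mark);
    outputCompletedElements(); // renamed outputCompletedElement() in the updated version
}
```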
private ResultFuture<OUT> addToWorkQueue(StreamElement streamElement) throws InterruptedException { assert(Thread.holdsLock(checkpointingLock)); pendingStreamElement = streamElement; Optional<ResultFuture<OUT>> queueEntry; while (!(queueEntry = queue.tryPut(streamElement)).isPresent()) { mailboxExecutor.yield(); } pendingStreamElement = null; return queueEntry.get(); }
assert(Thread.holdsLock(checkpointingLock));
private ResultFuture<OUT> addToWorkQueue(StreamElement streamElement) throws InterruptedException { assert(Thread.holdsLock(checkpointingLock)); Optional<ResultFuture<OUT>> queueEntry; while (!(queueEntry = queue.tryPut(streamElement)).isPresent()) { mailboxExecutor.yield(); } return queueEntry.get(); }
class AsyncWaitOperator<IN, OUT> extends AbstractUdfStreamOperator<OUT, AsyncFunction<IN, OUT>> implements OneInputStreamOperator<IN, OUT>, BoundedOneInput { private static final long serialVersionUID = 1L; private static final String STATE_NAME = "_async_wait_operator_state_"; /** Capacity of the stream element queue. */ private final int capacity; /** Output mode for this operator. */ private final AsyncDataStream.OutputMode outputMode; /** Timeout for the async collectors. */ private final long timeout; private transient Object checkpointingLock; /** {@link TypeSerializer} for inputs while making snapshots. */ private transient StreamElementSerializer<IN> inStreamElementSerializer; /** Recovered input stream elements. */ private transient ListState<StreamElement> recoveredStreamElements; /** Queue, into which to store the currently in-flight stream elements. */ private transient StreamElementQueue<OUT> queue; /** Pending stream element which could not yet added to the queue. */ private transient StreamElement pendingStreamElement; /** Mailbox executor used to yield while waiting for buffers to empty. */ private final transient MailboxExecutor mailboxExecutor; public AsyncWaitOperator( @Nonnull AsyncFunction<IN, OUT> asyncFunction, long timeout, int capacity, @Nonnull AsyncDataStream.OutputMode outputMode, @Nonnull MailboxExecutor mailboxExecutor) { super(asyncFunction); setChainingStrategy(ChainingStrategy.HEAD); Preconditions.checkArgument(capacity > 0, "The number of concurrent async operation should be greater than 0."); this.capacity = capacity; this.outputMode = Preconditions.checkNotNull(outputMode, "outputMode"); this.timeout = timeout; this.mailboxExecutor = mailboxExecutor; } @Override public void setup(StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) { super.setup(containingTask, config, output); this.checkpointingLock = getContainingTask().getCheckpointLock(); this.inStreamElementSerializer = new StreamElementSerializer<>( getOperatorConfig().<IN>getTypeSerializerIn1(getUserCodeClassloader())); switch (outputMode) { case ORDERED: queue = new OrderedStreamElementQueue<>(capacity); break; case UNORDERED: queue = new UnorderedStreamElementQueue<>(capacity); break; default: throw new IllegalStateException("Unknown async mode: " + outputMode + '.'); } } @Override public void open() throws Exception { super.open(); if (recoveredStreamElements != null) { for (StreamElement element : recoveredStreamElements.get()) { if (element.isRecord()) { processElement(element.<IN>asRecord()); } else if (element.isWatermark()) { processWatermark(element.asWatermark()); } else if (element.isLatencyMarker()) { processLatencyMarker(element.asLatencyMarker()); } else { throw new IllegalStateException("Unknown record type " + element.getClass() + " encountered while opening the operator."); } } recoveredStreamElements = null; } } @Override public void processElement(final StreamRecord<IN> element) throws Exception { final ResultFuture<OUT> entry = addToWorkQueue(element); final ResultHandler resultHandler = new ResultHandler(element, entry); if (timeout > 0L) { final long timeoutTimestamp = timeout + getProcessingTimeService().getCurrentProcessingTime(); final ScheduledFuture<?> timeoutTimer = getProcessingTimeService().registerTimer( timeoutTimestamp, timestamp -> userFunction.timeout(element.getValue(), resultHandler)); resultHandler.setTimeoutTimer(timeoutTimer); } userFunction.asyncInvoke(element.getValue(), resultHandler); } @Override public void 
processWatermark(Watermark mark) throws Exception { addToWorkQueue(mark); outputCompletedElements(); } @Override public void snapshotState(StateSnapshotContext context) throws Exception { super.snapshotState(context); ListState<StreamElement> partitionableState = getOperatorStateBackend().getListState(new ListStateDescriptor<>(STATE_NAME, inStreamElementSerializer)); partitionableState.clear(); try { partitionableState.addAll(queue.values()); if (pendingStreamElement != null) { partitionableState.add(pendingStreamElement); } } catch (Exception e) { partitionableState.clear(); throw new Exception("Could not add stream element queue entries to operator state " + "backend of operator " + getOperatorName() + '.', e); } } @Override public void initializeState(StateInitializationContext context) throws Exception { super.initializeState(context); recoveredStreamElements = context .getOperatorStateStore() .getListState(new ListStateDescriptor<>(STATE_NAME, inStreamElementSerializer)); } @Override public void endInput() throws Exception { waitInFlightInputsFinished(); } @Override public void close() throws Exception { try { waitInFlightInputsFinished(); } finally { super.close(); } } /** * Add the given stream element to the operator's stream element queue. This operation blocks until the element * has been added. * * <p>Between two insertion attempts, this method yields the execution to the mailbox, such that events as well * as asynchronous results can be processed. * * @param streamElement to add to the operator's queue * @throws InterruptedException if the current thread has been interrupted while yielding to mailbox * @return a handle that allows to set the result of the async computation for the given element. */ private void waitInFlightInputsFinished() throws InterruptedException { assert(Thread.holdsLock(checkpointingLock)); while (!queue.isEmpty()) { mailboxExecutor.yield(); } } /** * Batch output of all completed elements. Watermarks are always completed if it's their turn to be processed. * * <p>This method will be called from {@link * of an async function call.</p> */ private void outputCompletedElements() { if (queue.hasCompleted()) { synchronized (checkpointingLock) { queue.emitCompleted(output); } } } /** * A handler for the results of a specific input record. */ private class ResultHandler implements ResultFuture<OUT> { /** * Optional timeout timer used to signal the timeout to the asyncUserFunction. */ private ScheduledFuture<?> timeoutTimer; /** * Record for which this result handler exists. Used only to report errors. */ private final StreamRecord<IN> inputRecord; /** * The handle received from the queue to update the entry. Should only be used to inject the result; * exceptions are handled here. */ private final ResultFuture<OUT> resultFuture; ResultHandler(StreamRecord<IN> inputRecord, ResultFuture<OUT> resultFuture) { this.inputRecord = inputRecord; this.resultFuture = resultFuture; } void setTimeoutTimer(ScheduledFuture<?> timeoutTimer) { this.timeoutTimer = timeoutTimer; } @Override public void complete(Collection<OUT> result) { mailboxExecutor.execute(() -> { if (timeoutTimer != null) { timeoutTimer.cancel(true); } resultFuture.complete(result); outputCompletedElements(); }); } @Override public void completeExceptionally(Throwable error) { getContainingTask().getEnvironment().failExternally(new Exception( "Could not complete the stream element: " + inputRecord + '.', error)); complete(Collections.emptyList()); } } }
class AsyncWaitOperator<IN, OUT> extends AbstractUdfStreamOperator<OUT, AsyncFunction<IN, OUT>> implements OneInputStreamOperator<IN, OUT>, BoundedOneInput { private static final long serialVersionUID = 1L; private static final String STATE_NAME = "_async_wait_operator_state_"; /** Capacity of the stream element queue. */ private final int capacity; /** Output mode for this operator. */ private final AsyncDataStream.OutputMode outputMode; /** Timeout for the async collectors. */ private final long timeout; private transient Object checkpointingLock; /** {@link TypeSerializer} for inputs while making snapshots. */ private transient StreamElementSerializer<IN> inStreamElementSerializer; /** Recovered input stream elements. */ private transient ListState<StreamElement> recoveredStreamElements; /** Queue, into which to store the currently in-flight stream elements. */ private transient StreamElementQueue<OUT> queue; /** Mailbox executor used to yield while waiting for buffers to empty. */ private final transient MailboxExecutor mailboxExecutor; private transient TimestampedCollector<OUT> timestampedCollector; public AsyncWaitOperator( @Nonnull AsyncFunction<IN, OUT> asyncFunction, long timeout, int capacity, @Nonnull AsyncDataStream.OutputMode outputMode, @Nonnull MailboxExecutor mailboxExecutor) { super(asyncFunction); setChainingStrategy(ChainingStrategy.HEAD); Preconditions.checkArgument(capacity > 0, "The number of concurrent async operation should be greater than 0."); this.capacity = capacity; this.outputMode = Preconditions.checkNotNull(outputMode, "outputMode"); this.timeout = timeout; this.mailboxExecutor = mailboxExecutor; } @Override public void setup(StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) { super.setup(containingTask, config, output); this.checkpointingLock = getContainingTask().getCheckpointLock(); this.inStreamElementSerializer = new StreamElementSerializer<>( getOperatorConfig().<IN>getTypeSerializerIn1(getUserCodeClassloader())); switch (outputMode) { case ORDERED: queue = new OrderedStreamElementQueue<>(capacity); break; case UNORDERED: queue = new UnorderedStreamElementQueue<>(capacity); break; default: throw new IllegalStateException("Unknown async mode: " + outputMode + '.'); } this.timestampedCollector = new TimestampedCollector<>(output); } @Override public void open() throws Exception { super.open(); if (recoveredStreamElements != null) { for (StreamElement element : recoveredStreamElements.get()) { if (element.isRecord()) { processElement(element.<IN>asRecord()); } else if (element.isWatermark()) { processWatermark(element.asWatermark()); } else if (element.isLatencyMarker()) { processLatencyMarker(element.asLatencyMarker()); } else { throw new IllegalStateException("Unknown record type " + element.getClass() + " encountered while opening the operator."); } } recoveredStreamElements = null; } } @Override public void processElement(StreamRecord<IN> element) throws Exception { final ResultFuture<OUT> entry = addToWorkQueue(element); final ResultHandler resultHandler = new ResultHandler(element, entry); if (timeout > 0L) { final long timeoutTimestamp = timeout + getProcessingTimeService().getCurrentProcessingTime(); final ScheduledFuture<?> timeoutTimer = getProcessingTimeService().registerTimer( timeoutTimestamp, timestamp -> userFunction.timeout(element.getValue(), resultHandler)); resultHandler.setTimeoutTimer(timeoutTimer); } userFunction.asyncInvoke(element.getValue(), resultHandler); } @Override public void 
processWatermark(Watermark mark) throws Exception { addToWorkQueue(mark); outputCompletedElement(); } @Override public void snapshotState(StateSnapshotContext context) throws Exception { super.snapshotState(context); ListState<StreamElement> partitionableState = getOperatorStateBackend().getListState(new ListStateDescriptor<>(STATE_NAME, inStreamElementSerializer)); partitionableState.clear(); try { partitionableState.addAll(queue.values()); } catch (Exception e) { partitionableState.clear(); throw new Exception("Could not add stream element queue entries to operator state " + "backend of operator " + getOperatorName() + '.', e); } } @Override public void initializeState(StateInitializationContext context) throws Exception { super.initializeState(context); recoveredStreamElements = context .getOperatorStateStore() .getListState(new ListStateDescriptor<>(STATE_NAME, inStreamElementSerializer)); } @Override public void endInput() throws Exception { waitInFlightInputsFinished(); } @Override public void close() throws Exception { try { waitInFlightInputsFinished(); } finally { super.close(); } } /** * Add the given stream element to the operator's stream element queue. This operation blocks until the element * has been added. * * <p>Between two insertion attempts, this method yields the execution to the mailbox, such that events as well * as asynchronous results can be processed. * * @param streamElement to add to the operator's queue * @throws InterruptedException if the current thread has been interrupted while yielding to mailbox * @return a handle that allows to set the result of the async computation for the given element. */ private void waitInFlightInputsFinished() throws InterruptedException { assert(Thread.holdsLock(checkpointingLock)); while (!queue.isEmpty()) { mailboxExecutor.yield(); } } /** * Outputs one completed element. Watermarks are always completed if it's their turn to be processed. * * <p>This method will be called from {@link * of an async function call. */ private void outputCompletedElement() { if (queue.hasCompletedElements()) { synchronized (checkpointingLock) { queue.emitCompletedElement(timestampedCollector); } if (queue.hasCompletedElements()) { mailboxExecutor.execute(this::outputCompletedElement); } } } /** * A handler for the results of a specific input record. */ private class ResultHandler implements ResultFuture<OUT> { /** * Optional timeout timer used to signal the timeout to the AsyncFunction. */ private ScheduledFuture<?> timeoutTimer; /** * Record for which this result handler exists. Used only to report errors. */ private final StreamRecord<IN> inputRecord; /** * The handle received from the queue to update the entry. Should only be used to inject the result; * exceptions are handled here. */ private final ResultFuture<OUT> resultFuture; /** * A guard against ill-written AsyncFunction. Additional (parallel) invokations of * {@link * also helps for cases where proper results and timeouts happen at the same time. 
*/ private final AtomicBoolean completed = new AtomicBoolean(false); ResultHandler(StreamRecord<IN> inputRecord, ResultFuture<OUT> resultFuture) { this.inputRecord = inputRecord; this.resultFuture = resultFuture; } void setTimeoutTimer(ScheduledFuture<?> timeoutTimer) { this.timeoutTimer = timeoutTimer; } @Override public void complete(Collection<OUT> results) { Preconditions.checkNotNull(results, "Results must not be null, use empty collection to emit nothing"); if (!completed.compareAndSet(false, true)) { return; } processInMailbox(results); } private void processInMailbox(Collection<OUT> results) { mailboxExecutor.execute(() -> { if (timeoutTimer != null) { timeoutTimer.cancel(true); } resultFuture.complete(results); outputCompletedElement(); }); } @Override public void completeExceptionally(Throwable error) { if (!completed.compareAndSet(false, true)) { return; } getContainingTask().getEnvironment().failExternally(new Exception( "Could not complete the stream element: " + inputRecord + '.', error)); processInMailbox(Collections.emptyList()); } } }
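The `AtomicBoolean completed` guard introduced above is a common idempotency pattern; reduced to its essence, it looks like this sketch:

```java
import java.util.concurrent.atomic.AtomicBoolean;

// Only the first caller wins; later (racing) completions become no-ops.
// This is what protects against an AsyncFunction that completes twice,
// or a proper result racing a timeout.
final AtomicBoolean completed = new AtomicBoolean(false);

void completeOnce(Runnable action) {
    if (completed.compareAndSet(false, true)) {
        action.run();
    }
}
```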
The riskiest bug in this code is: incorrect logic in the `checkComplexTypeInvalid` function may lead to incorrect pruning of columns based on their types and access paths, potentially causing data integrity issues or incorrect query results. You can modify the code like this:

```diff
+ if (checkComplexTypeInvalid(scan, column)) {
-     continue;
+     // Depending on your intended logic, either handle the invalid state or correct the condition.
+     // For example, removing 'continue;' stops the loop from erroneously skipping certain columns,
+     // but validate the desired behavior and adjust the method implementation accordingly.
  }
```

And ensure that `checkComplexTypeInvalid` accurately reflects the intended logic, especially the conditions under which a complex type is considered "invalid" for pruning. The current logic deems a column invalid when all children of its root access path have the `TAccessPathType.OFFSET` type, but the real intention may require revising these checks or clarifying what defines an invalid state for processing in this context.
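One way to act on that advice is to make the predicate's polarity explicit, so the call site no longer reads through a double negative. The following is a behavior-preserving sketch; the new method name is an assumption:

```java
// Hypothetical rename of checkComplexTypeInvalid: returns true exactly when every
// child of the column's ROOT access path is an OFFSET, i.e. only element positions
// are accessed, so the low-cardinality optimization should skip the column.
private boolean accessesOnlyOffsets(PhysicalOlapScanOperator scan, ColumnRefOperator column) {
    String colName = scan.getColRefToColumnMetaMap().get(column).getName();
    for (ColumnAccessPath path : scan.getColumnAccessPaths()) {
        if (StringUtils.equalsIgnoreCase(colName, path.getPath())
                && path.getType() == TAccessPathType.ROOT
                && path.getChildren().stream().allMatch(p -> p.getType() == TAccessPathType.OFFSET)) {
            return true;
        }
    }
    return false;
}
// call site: if (accessesOnlyOffsets(scan, column)) { continue; }
```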
private void collectPredicate(Operator operator, DecodeInfo info) { if (operator.getPredicate() == null) { return; } DictExpressionCollector dictExpressionCollector = new DictExpressionCollector(info.outputStringColumns); dictExpressionCollector.collect(operator.getPredicate()); info.outputStringColumns.getStream().forEach(c -> { List<ScalarOperator> expressions = dictExpressionCollector.getDictExpressions(c); if (!expressions.isEmpty()) { stringExpressions.computeIfAbsent(c, l -> Lists.newArrayList()).addAll(expressions); } }); }
return;
private void collectPredicate(Operator operator, DecodeInfo info) { if (operator.getPredicate() == null) { return; } DictExpressionCollector dictExpressionCollector = new DictExpressionCollector(info.outputStringColumns); dictExpressionCollector.collect(operator.getPredicate()); info.outputStringColumns.getStream().forEach(c -> { List<ScalarOperator> expressions = dictExpressionCollector.getDictExpressions(c); if (!expressions.isEmpty()) { stringExpressions.computeIfAbsent(c, l -> Lists.newArrayList()).addAll(expressions); } }); }
class DecodeCollector extends OptExpressionVisitor<DecodeInfo, DecodeInfo> { private static final Logger LOG = LogManager.getLogger(DecodeCollector.class); public static final Set<String> LOW_CARD_AGGREGATE_FUNCTIONS = Sets.newHashSet(FunctionSet.COUNT, FunctionSet.MULTI_DISTINCT_COUNT, FunctionSet.MAX, FunctionSet.MIN, FunctionSet.APPROX_COUNT_DISTINCT); public static final Set<String> LOW_CARD_STRING_FUNCTIONS = ImmutableSet.of(FunctionSet.APPEND_TRAILING_CHAR_IF_ABSENT, FunctionSet.CONCAT, FunctionSet.CONCAT_WS, FunctionSet.HEX, FunctionSet.LEFT, FunctionSet.LIKE, FunctionSet.LOWER, FunctionSet.LPAD, FunctionSet.LTRIM, FunctionSet.REGEXP_EXTRACT, FunctionSet.REGEXP_REPLACE, FunctionSet.REPEAT, FunctionSet.REPLACE, FunctionSet.REVERSE, FunctionSet.RIGHT, FunctionSet.RPAD, FunctionSet.RTRIM, FunctionSet.SPLIT_PART, FunctionSet.SUBSTR, FunctionSet.SUBSTRING, FunctionSet.SUBSTRING_INDEX, FunctionSet.TRIM, FunctionSet.UPPER, FunctionSet.IF); public static final Set<String> LOW_CARD_ARRAY_FUNCTIONS = ImmutableSet.of( FunctionSet.ARRAY_MIN, FunctionSet.ARRAY_MAX, FunctionSet.ARRAY_DISTINCT, FunctionSet.ARRAY_SORT, FunctionSet.REVERSE, FunctionSet.ARRAY_SLICE, FunctionSet.ARRAY_FILTER, FunctionSet.ARRAY_LENGTH, FunctionSet.CARDINALITY); private final SessionVariable sessionVariable; private final Map<Operator, DecodeInfo> allOperatorDecodeInfo = Maps.newIdentityHashMap(); private final Map<Integer, ColumnDict> globalDicts = Maps.newHashMap(); private final Map<Integer, List<ScalarOperator>> stringExpressions = Maps.newHashMap(); private final Map<Integer, List<CallOperator>> stringAggregateExpressions = Maps.newHashMap(); private final Map<Integer, ScalarOperator> stringRefToDefineExprMap = Maps.newHashMap(); private final Map<Integer, Integer> expressionStringRefCounter = Maps.newHashMap(); private final List<Integer> scanStringColumns = Lists.newArrayList(); public DecodeCollector(SessionVariable session) { this.sessionVariable = session; } public void collect(OptExpression root, DecodeContext context) { collectImpl(root, null); initContext(context); } private void initContext(DecodeContext context) { for (Integer cid : scanStringColumns) { if (expressionStringRefCounter.getOrDefault(cid, 0) > 1) { context.allStringColumns.add(cid); continue; } List<ScalarOperator> dictExprList = stringExpressions.getOrDefault(cid, Collections.emptyList()); long allExprNum = dictExprList.size(); long worthless = dictExprList.stream().filter(ScalarOperator::isColumnRef).count(); if (worthless == 0 && allExprNum != 0) { context.allStringColumns.add(cid); } else if (allExprNum > worthless && allExprNum >= worthless * 2) { context.allStringColumns.add(cid); } } for (Integer cid : stringRefToDefineExprMap.keySet()) { if (context.allStringColumns.contains(cid)) { continue; } if (!checkDependOnExpr(cid, context.allStringColumns)) { continue; } if (globalDicts.containsKey(cid) || expressionStringRefCounter.getOrDefault(cid, 0) != 0) { context.allStringColumns.add(cid); } } for (Integer cid : context.allStringColumns) { if (globalDicts.containsKey(cid)) { context.stringRefToDicts.put(cid, globalDicts.get(cid)); } if (stringRefToDefineExprMap.containsKey(cid)) { context.stringRefToDefineExprMap.put(cid, stringRefToDefineExprMap.get(cid)); } if (stringExpressions.containsKey(cid)) { context.stringExprsMap.put(cid, stringExpressions.get(cid)); } } for (Integer aggregateId : stringAggregateExpressions.keySet()) { List<CallOperator> aggregateExprs = stringAggregateExpressions.get(aggregateId); for (CallOperator agg : 
aggregateExprs) { if (agg.getColumnRefs().stream().map(ColumnRefOperator::getId) .anyMatch(context.allStringColumns::contains)) { context.stringAggregateExprs.addAll(aggregateExprs); context.allStringColumns.add(aggregateId); break; } } } ColumnRefSet alls = new ColumnRefSet(); context.allStringColumns.forEach(alls::union); for (Operator operator : allOperatorDecodeInfo.keySet()) { DecodeInfo info = allOperatorDecodeInfo.get(operator); info.outputStringColumns.intersect(alls); info.decodeStringColumns.intersect(alls); info.inputStringColumns.intersect(alls); if (!info.isEmpty()) { context.operatorDecodeInfo.put(operator, info); } } } private boolean checkDependOnExpr(int cid, Collection<Integer> checkList) { if (checkList.contains(cid)) { return true; } if (!stringRefToDefineExprMap.containsKey(cid)) { return false; } ScalarOperator define = stringRefToDefineExprMap.get(cid); for (ColumnRefOperator ref : define.getColumnRefs()) { if (ref.getId() == cid) { return false; } if (!checkDependOnExpr(ref.getId(), checkList)) { return false; } } return true; } private DecodeInfo collectImpl(OptExpression optExpression, OptExpression parent) { DecodeInfo context; if (optExpression.arity() == 1) { OptExpression child = optExpression.inputAt(0); context = collectImpl(child, optExpression); } else { context = new DecodeInfo(); for (int i = 0; i < optExpression.arity(); ++i) { OptExpression child = optExpression.inputAt(i); context.addChildInfo(collectImpl(child, optExpression)); } } context.parent = parent; DecodeInfo info = optExpression.getOp().accept(this, optExpression, context); if (info.isEmpty()) { return info; } info.decodeStringColumns.getStream().forEach(c -> { if (expressionStringRefCounter.getOrDefault(c, -1) == 0) { expressionStringRefCounter.remove(c); } }); info.inputStringColumns.getStream().forEach(c -> { if (expressionStringRefCounter.containsKey(c)) { expressionStringRefCounter.put(c, expressionStringRefCounter.get(c) + 1); } }); allOperatorDecodeInfo.put(optExpression.getOp(), info); collectPredicate(optExpression.getOp(), info); collectProjection(optExpression.getOp(), info); return info; } @Override public DecodeInfo visit(OptExpression optExpression, DecodeInfo context) { return context.createDecodeInfo(); } @Override public DecodeInfo visitPhysicalLimit(OptExpression optExpression, DecodeInfo context) { return context.createOutputInfo(); } @Override public DecodeInfo visitPhysicalTopN(OptExpression optExpression, DecodeInfo context) { return context.createOutputInfo(); } @Override public DecodeInfo visitPhysicalJoin(OptExpression optExpression, DecodeInfo context) { if (context.outputStringColumns.isEmpty()) { return DecodeInfo.EMPTY; } PhysicalJoinOperator join = optExpression.getOp().cast(); DecodeInfo result = context.createOutputInfo(); if (join.getOnPredicate() == null) { return result; } ColumnRefSet onColumns = join.getOnPredicate().getUsedColumns(); if (!result.inputStringColumns.containsAny(onColumns)) { return result; } result.outputStringColumns.clear(); result.inputStringColumns.getStream().forEach(c -> { if (onColumns.contains(c)) { result.decodeStringColumns.union(c); } else { result.outputStringColumns.union(c); } }); result.inputStringColumns.except(result.decodeStringColumns); return result; } @Override public DecodeInfo visitPhysicalHashAggregate(OptExpression optExpression, DecodeInfo context) { if (context.outputStringColumns.isEmpty()) { return DecodeInfo.EMPTY; } PhysicalHashAggregateOperator aggregate = optExpression.getOp().cast(); DecodeInfo info = 
context.createOutputInfo(); ColumnRefSet disableColumns = new ColumnRefSet(); for (ColumnRefOperator key : aggregate.getAggregations().keySet()) { CallOperator agg = aggregate.getAggregations().get(key); if (!LOW_CARD_AGGREGATE_FUNCTIONS.contains(agg.getFnName())) { disableColumns.union(agg.getUsedColumns()); disableColumns.union(key); continue; } if (agg.getChildren().size() != 1 || !agg.getChildren().get(0).isColumnRef()) { disableColumns.union(agg.getUsedColumns()); disableColumns.union(key); } } if (!disableColumns.isEmpty()) { info.decodeStringColumns.union(info.inputStringColumns); info.decodeStringColumns.intersect(disableColumns); info.inputStringColumns.except(info.decodeStringColumns); } info.outputStringColumns.clear(); for (ColumnRefOperator key : aggregate.getAggregations().keySet()) { if (disableColumns.contains(key)) { continue; } CallOperator value = aggregate.getAggregations().get(key); if (!info.inputStringColumns.containsAll(value.getUsedColumns())) { continue; } stringAggregateExpressions.computeIfAbsent(key.getId(), x -> Lists.newArrayList()).add(value); if (FunctionSet.MAX.equals(value.getFnName()) || FunctionSet.MIN.equals(value.getFnName())) { info.outputStringColumns.union(key.getId()); stringRefToDefineExprMap.putIfAbsent(key.getId(), value); expressionStringRefCounter.put(key.getId(), 1); } else if (aggregate.getType().isLocal() || aggregate.getType().isDistinctLocal()) { info.outputStringColumns.union(key.getId()); } } for (ColumnRefOperator groupBy : aggregate.getGroupBys()) { if (info.inputStringColumns.contains(groupBy) && !info.decodeStringColumns.contains(groupBy)) { info.outputStringColumns.union(groupBy); } } for (ColumnRefOperator partition : aggregate.getPartitionByColumns()) { if (info.inputStringColumns.contains(partition) && !info.decodeStringColumns.contains(partition)) { info.outputStringColumns.union(partition); } } return info; } @Override public DecodeInfo visitPhysicalDistribution(OptExpression optExpression, DecodeInfo context) { if (context.outputStringColumns.isEmpty()) { return DecodeInfo.EMPTY; } DecodeInfo result = context.createOutputInfo(); if (context.parent != null && context.parent.getOp() instanceof PhysicalJoinOperator) { return visitPhysicalJoin(context.parent, context); } return result; } @Override public DecodeInfo visitPhysicalOlapScan(OptExpression optExpression, DecodeInfo context) { PhysicalOlapScanOperator scan = optExpression.getOp().cast(); OlapTable table = (OlapTable) scan.getTable(); long version = table.getPartitions().stream().map(Partition::getVisibleVersionTime).max(Long::compareTo) .orElse(0L); if ((table.getKeysType().equals(KeysType.PRIMARY_KEYS))) { return DecodeInfo.EMPTY; } if (table.hasForbiddenGlobalDict()) { return DecodeInfo.EMPTY; } DecodeInfo info = new DecodeInfo(); for (ColumnRefOperator column : scan.getColRefToColumnMetaMap().keySet()) { if (!supportLowCardinality(column.getType())) { continue; } if (!sessionVariable.isEnableArrayLowCardinalityOptimize() && column.getType().isArrayType()) { continue; } if (!checkComplexTypeInvalid(scan, column)) { continue; } ColumnStatistic columnStatistic = GlobalStateMgr.getCurrentState().getStatisticStorage() .getColumnStatistic(table, column.getName()); if (!FeConstants.USE_MOCK_DICT_MANAGER && (columnStatistic.isUnknown() || columnStatistic.getDistinctValuesCount() > CacheDictManager.LOW_CARDINALITY_THRESHOLD)) { LOG.debug("{} isn't low cardinality string column", column.getName()); continue; } if (!IDictManager.getInstance().hasGlobalDict(table.getId(), 
column.getName(), version)) { LOG.debug("{} doesn't have global dict", column.getName()); continue; } Optional<ColumnDict> dict = IDictManager.getInstance().getGlobalDict(table.getId(), column.getName()); if (dict.isEmpty()) { continue; } info.outputStringColumns.union(column); info.inputStringColumns.union(column); stringRefToDefineExprMap.put(column.getId(), column); scanStringColumns.add(column.getId()); expressionStringRefCounter.put(column.getId(), 0); globalDicts.put(column.getId(), dict.get()); } if (info.outputStringColumns.isEmpty()) { return DecodeInfo.EMPTY; } return info; } private boolean checkComplexTypeInvalid(PhysicalOlapScanOperator scan, ColumnRefOperator column) { String colName = scan.getColRefToColumnMetaMap().get(column).getName(); for (ColumnAccessPath path : scan.getColumnAccessPaths()) { if (!StringUtils.equalsIgnoreCase(colName, path.getPath()) || path.getType() != TAccessPathType.ROOT) { continue; } if (path.getChildren().stream().allMatch(p -> p.getType() == TAccessPathType.OFFSET)) { return false; } } return true; } private void collectProjection(Operator operator, DecodeInfo info) { if (operator.getProjection() == null) { return; } ColumnRefSet decodeInput = info.outputStringColumns; info.outputStringColumns = new ColumnRefSet(); for (ColumnRefOperator key : operator.getProjection().getColumnRefMap().keySet()) { if (decodeInput.contains(key)) { info.outputStringColumns.union(key.getId()); continue; } DictExpressionCollector dictExpressionCollector = new DictExpressionCollector(decodeInput); ScalarOperator value = operator.getProjection().getColumnRefMap().get(key); dictExpressionCollector.collect(value); decodeInput.getStream().forEach(c -> { List<ScalarOperator> exprs = dictExpressionCollector.getDictExpressions(c); if (!exprs.isEmpty()) { stringExpressions.computeIfAbsent(c, l -> Lists.newArrayList()).addAll(exprs); } if (exprs.contains(value) && supportLowCardinality(value.getType())) { stringRefToDefineExprMap.put(key.getId(), value); expressionStringRefCounter.putIfAbsent(key.getId(), 0); info.outputStringColumns.union(key.getId()); } }); } } private static boolean supportLowCardinality(Type type) { return type.isVarchar() || (type.isArrayType() && ((ArrayType) type).getItemType().isVarchar()); } private static class DictExpressionCollector extends ScalarOperatorVisitor<ScalarOperator, Void> { private static final ScalarOperator CONSTANTS = ConstantOperator.TRUE; private static final ScalarOperator VARIABLES = ConstantOperator.FALSE; private final ColumnRefSet allDictColumnRefs; private final Map<Integer, List<ScalarOperator>> dictExpressions = Maps.newHashMap(); public DictExpressionCollector(ColumnRefSet allDictColumnRefs) { this.allDictColumnRefs = allDictColumnRefs; } public void collect(ScalarOperator scalarOperator) { ScalarOperator dictColumn = scalarOperator.accept(this, null); saveDictExpr(dictColumn, scalarOperator); } private void saveDictExpr(ScalarOperator dictColumn, ScalarOperator dictExpr) { if (dictColumn.isColumnRef()) { dictExpressions.computeIfAbsent(((ColumnRefOperator) dictColumn).getId(), x -> Lists.newArrayList()).add(dictExpr); } else if (!dictColumn.isConstant()) { List<ColumnRefOperator> used = dictColumn.getColumnRefs(); Preconditions.checkState(used.stream().distinct().count() == 1); this.dictExpressions.computeIfAbsent(used.get(0).getId(), x -> Lists.newArrayList()).add(dictExpr); } } public List<ScalarOperator> getDictExpressions(int columnId) { if (!dictExpressions.containsKey(columnId)) { return Collections.emptyList(); } 
return dictExpressions.get(columnId); } public List<ScalarOperator> visitChildren(ScalarOperator operator, Void context) { List<ScalarOperator> children = Lists.newArrayList(); for (ScalarOperator child : operator.getChildren()) { children.add(child.accept(this, context)); } return children; } private ScalarOperator mergeWithArray(List<ScalarOperator> collectors, ScalarOperator scalarOperator) { if (collectors.stream().allMatch(CONSTANTS::equals)) { return CONSTANTS; } long variableExpr = collectors.stream().filter(VARIABLES::equals).count(); long dictCount = collectors.stream().filter(s -> !s.isConstant()).distinct().count(); if (dictCount == 1 && variableExpr == 0) { return collectors.stream().filter(s -> !s.isConstant()).findFirst().get(); } for (int i = 0; i < collectors.size(); i++) { saveDictExpr(collectors.get(i), scalarOperator.getChild(i)); } return VARIABLES; } private ScalarOperator forbidden(List<ScalarOperator> collectors, ScalarOperator scalarOperator) { if (collectors.stream().allMatch(CONSTANTS::equals)) { return CONSTANTS; } for (int i = 0; i < collectors.size(); i++) { saveDictExpr(collectors.get(i), scalarOperator.getChild(i)); } return VARIABLES; } private ScalarOperator merge(List<ScalarOperator> collectors, ScalarOperator scalarOperator) { if (collectors.stream().anyMatch(s -> s.getType().isArrayType())) { return forbidden(collectors, scalarOperator); } return mergeWithArray(collectors, scalarOperator); } @Override public ScalarOperator visit(ScalarOperator scalarOperator, Void context) { return forbidden(visitChildren(scalarOperator, context), scalarOperator); } @Override public ScalarOperator visitVariableReference(ColumnRefOperator variable, Void context) { if (allDictColumnRefs.contains(variable)) { return variable; } return VARIABLES; } @Override public ScalarOperator visitConstant(ConstantOperator literal, Void context) { return CONSTANTS; } @Override public ScalarOperator visitCall(CallOperator call, Void context) { if (FunctionSet.nonDeterministicFunctions.contains(call.getFnName())) { return VARIABLES; } if (FunctionSet.ARRAY_FILTER.equalsIgnoreCase(call.getFnName())) { List<ScalarOperator> result = visitChildren(call, context); return CONSTANTS.equals(result.get(1)) ? mergeWithArray(result, call) : forbidden(result, call); } if (FunctionSet.ARRAY_MIN.equalsIgnoreCase(call.getFnName()) || FunctionSet.ARRAY_MAX.equalsIgnoreCase(call.getFnName())) { ScalarOperator result = mergeWithArray(visitChildren(call, context), call); return !result.isConstant() ? call : result; } if (LOW_CARD_STRING_FUNCTIONS.contains(call.getFnName()) || LOW_CARD_ARRAY_FUNCTIONS.contains(call.getFnName()) || LOW_CARD_AGGREGATE_FUNCTIONS.contains(call.getFnName())) { return mergeWithArray(visitChildren(call, context), call); } return forbidden(visitChildren(call, context), call); } @Override public ScalarOperator visitCollectionElement(CollectionElementOperator collectionElementOp, Void context) { List<ScalarOperator> children = visitChildren(collectionElementOp, context); if (supportLowCardinality(collectionElementOp.getChild(0).getType())) { ScalarOperator result = mergeWithArray(children, collectionElementOp); return !result.isConstant() ? 
collectionElementOp : result; } return forbidden(children, collectionElementOp); } @Override public ScalarOperator visitBinaryPredicate(BinaryPredicateOperator predicate, Void context) { if (predicate.getBinaryType() == EQ_FOR_NULL) { return forbidden(visitChildren(predicate, context), predicate); } return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitCastOperator(CastOperator operator, Void context) { return merge(visitChildren(operator, context), operator); } @Override public ScalarOperator visitLikePredicateOperator(LikePredicateOperator predicate, Void context) { return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitCompoundPredicate(CompoundPredicateOperator predicate, Void context) { return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitInPredicate(InPredicateOperator predicate, Void context) { return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitIsNullPredicate(IsNullPredicateOperator predicate, Void context) { return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitCaseWhenOperator(CaseWhenOperator operator, Void context) { return merge(visitChildren(operator, context), operator); } } }
class DecodeCollector extends OptExpressionVisitor<DecodeInfo, DecodeInfo> { private static final Logger LOG = LogManager.getLogger(DecodeCollector.class); public static final Set<String> LOW_CARD_AGGREGATE_FUNCTIONS = Sets.newHashSet(FunctionSet.COUNT, FunctionSet.MULTI_DISTINCT_COUNT, FunctionSet.MAX, FunctionSet.MIN, FunctionSet.APPROX_COUNT_DISTINCT); public static final Set<String> LOW_CARD_STRING_FUNCTIONS = ImmutableSet.of(FunctionSet.APPEND_TRAILING_CHAR_IF_ABSENT, FunctionSet.CONCAT, FunctionSet.CONCAT_WS, FunctionSet.HEX, FunctionSet.LEFT, FunctionSet.LIKE, FunctionSet.LOWER, FunctionSet.LPAD, FunctionSet.LTRIM, FunctionSet.REGEXP_EXTRACT, FunctionSet.REGEXP_REPLACE, FunctionSet.REPEAT, FunctionSet.REPLACE, FunctionSet.REVERSE, FunctionSet.RIGHT, FunctionSet.RPAD, FunctionSet.RTRIM, FunctionSet.SPLIT_PART, FunctionSet.SUBSTR, FunctionSet.SUBSTRING, FunctionSet.SUBSTRING_INDEX, FunctionSet.TRIM, FunctionSet.UPPER, FunctionSet.IF); public static final Set<String> LOW_CARD_ARRAY_FUNCTIONS = ImmutableSet.of( FunctionSet.ARRAY_MIN, FunctionSet.ARRAY_MAX, FunctionSet.ARRAY_DISTINCT, FunctionSet.ARRAY_SORT, FunctionSet.REVERSE, FunctionSet.ARRAY_SLICE, FunctionSet.ARRAY_FILTER, FunctionSet.ARRAY_LENGTH, FunctionSet.CARDINALITY); private final SessionVariable sessionVariable; private final Map<Operator, DecodeInfo> allOperatorDecodeInfo = Maps.newIdentityHashMap(); private final Map<Integer, ColumnDict> globalDicts = Maps.newHashMap(); private final Map<Integer, List<ScalarOperator>> stringExpressions = Maps.newHashMap(); private final Map<Integer, List<CallOperator>> stringAggregateExpressions = Maps.newHashMap(); private final Map<Integer, ScalarOperator> stringRefToDefineExprMap = Maps.newHashMap(); private final Map<Integer, Integer> expressionStringRefCounter = Maps.newHashMap(); private final List<Integer> scanStringColumns = Lists.newArrayList(); public DecodeCollector(SessionVariable session) { this.sessionVariable = session; } public void collect(OptExpression root, DecodeContext context) { collectImpl(root, null); initContext(context); } private void initContext(DecodeContext context) { for (Integer cid : scanStringColumns) { if (expressionStringRefCounter.getOrDefault(cid, 0) > 1) { context.allStringColumns.add(cid); continue; } List<ScalarOperator> dictExprList = stringExpressions.getOrDefault(cid, Collections.emptyList()); long allExprNum = dictExprList.size(); long worthless = dictExprList.stream().filter(ScalarOperator::isColumnRef).count(); if (worthless == 0 && allExprNum != 0) { context.allStringColumns.add(cid); } else if (allExprNum > worthless && allExprNum >= worthless * 2) { context.allStringColumns.add(cid); } } for (Integer cid : stringRefToDefineExprMap.keySet()) { if (context.allStringColumns.contains(cid)) { continue; } if (!checkDependOnExpr(cid, context.allStringColumns)) { continue; } if (globalDicts.containsKey(cid) || expressionStringRefCounter.getOrDefault(cid, 0) != 0) { context.allStringColumns.add(cid); } } for (Integer cid : context.allStringColumns) { if (globalDicts.containsKey(cid)) { context.stringRefToDicts.put(cid, globalDicts.get(cid)); } if (stringRefToDefineExprMap.containsKey(cid)) { context.stringRefToDefineExprMap.put(cid, stringRefToDefineExprMap.get(cid)); } if (stringExpressions.containsKey(cid)) { context.stringExprsMap.put(cid, stringExpressions.get(cid)); } } for (Integer aggregateId : stringAggregateExpressions.keySet()) { List<CallOperator> aggregateExprs = stringAggregateExpressions.get(aggregateId); for (CallOperator agg : 
aggregateExprs) { if (agg.getColumnRefs().stream().map(ColumnRefOperator::getId) .anyMatch(context.allStringColumns::contains)) { context.stringAggregateExprs.addAll(aggregateExprs); context.allStringColumns.add(aggregateId); break; } } } ColumnRefSet alls = new ColumnRefSet(); context.allStringColumns.forEach(alls::union); for (Operator operator : allOperatorDecodeInfo.keySet()) { DecodeInfo info = allOperatorDecodeInfo.get(operator); info.outputStringColumns.intersect(alls); info.decodeStringColumns.intersect(alls); info.inputStringColumns.intersect(alls); if (!info.isEmpty()) { context.operatorDecodeInfo.put(operator, info); } } } private boolean checkDependOnExpr(int cid, Collection<Integer> checkList) { if (checkList.contains(cid)) { return true; } if (!stringRefToDefineExprMap.containsKey(cid)) { return false; } ScalarOperator define = stringRefToDefineExprMap.get(cid); for (ColumnRefOperator ref : define.getColumnRefs()) { if (ref.getId() == cid) { return false; } if (!checkDependOnExpr(ref.getId(), checkList)) { return false; } } return true; } private DecodeInfo collectImpl(OptExpression optExpression, OptExpression parent) { DecodeInfo context; if (optExpression.arity() == 1) { OptExpression child = optExpression.inputAt(0); context = collectImpl(child, optExpression); } else { context = new DecodeInfo(); for (int i = 0; i < optExpression.arity(); ++i) { OptExpression child = optExpression.inputAt(i); context.addChildInfo(collectImpl(child, optExpression)); } } context.parent = parent; DecodeInfo info = optExpression.getOp().accept(this, optExpression, context); if (info.isEmpty()) { return info; } info.decodeStringColumns.getStream().forEach(c -> { if (expressionStringRefCounter.getOrDefault(c, -1) == 0) { expressionStringRefCounter.remove(c); } }); info.inputStringColumns.getStream().forEach(c -> { if (expressionStringRefCounter.containsKey(c)) { expressionStringRefCounter.put(c, expressionStringRefCounter.get(c) + 1); } }); allOperatorDecodeInfo.put(optExpression.getOp(), info); collectPredicate(optExpression.getOp(), info); collectProjection(optExpression.getOp(), info); return info; } @Override public DecodeInfo visit(OptExpression optExpression, DecodeInfo context) { return context.createDecodeInfo(); } @Override public DecodeInfo visitPhysicalLimit(OptExpression optExpression, DecodeInfo context) { return context.createOutputInfo(); } @Override public DecodeInfo visitPhysicalTopN(OptExpression optExpression, DecodeInfo context) { return context.createOutputInfo(); } @Override public DecodeInfo visitPhysicalJoin(OptExpression optExpression, DecodeInfo context) { if (context.outputStringColumns.isEmpty()) { return DecodeInfo.EMPTY; } PhysicalJoinOperator join = optExpression.getOp().cast(); DecodeInfo result = context.createOutputInfo(); if (join.getOnPredicate() == null) { return result; } ColumnRefSet onColumns = join.getOnPredicate().getUsedColumns(); if (!result.inputStringColumns.containsAny(onColumns)) { return result; } result.outputStringColumns.clear(); result.inputStringColumns.getStream().forEach(c -> { if (onColumns.contains(c)) { result.decodeStringColumns.union(c); } else { result.outputStringColumns.union(c); } }); result.inputStringColumns.except(result.decodeStringColumns); return result; } @Override public DecodeInfo visitPhysicalHashAggregate(OptExpression optExpression, DecodeInfo context) { if (context.outputStringColumns.isEmpty()) { return DecodeInfo.EMPTY; } PhysicalHashAggregateOperator aggregate = optExpression.getOp().cast(); DecodeInfo info = 
context.createOutputInfo(); ColumnRefSet disableColumns = new ColumnRefSet(); for (ColumnRefOperator key : aggregate.getAggregations().keySet()) { CallOperator agg = aggregate.getAggregations().get(key); if (!LOW_CARD_AGGREGATE_FUNCTIONS.contains(agg.getFnName())) { disableColumns.union(agg.getUsedColumns()); disableColumns.union(key); continue; } if (agg.getChildren().size() != 1 || !agg.getChildren().get(0).isColumnRef()) { disableColumns.union(agg.getUsedColumns()); disableColumns.union(key); } } if (!disableColumns.isEmpty()) { info.decodeStringColumns.union(info.inputStringColumns); info.decodeStringColumns.intersect(disableColumns); info.inputStringColumns.except(info.decodeStringColumns); } info.outputStringColumns.clear(); for (ColumnRefOperator key : aggregate.getAggregations().keySet()) { if (disableColumns.contains(key)) { continue; } CallOperator value = aggregate.getAggregations().get(key); if (!info.inputStringColumns.containsAll(value.getUsedColumns())) { continue; } stringAggregateExpressions.computeIfAbsent(key.getId(), x -> Lists.newArrayList()).add(value); if (FunctionSet.MAX.equals(value.getFnName()) || FunctionSet.MIN.equals(value.getFnName())) { info.outputStringColumns.union(key.getId()); stringRefToDefineExprMap.putIfAbsent(key.getId(), value); expressionStringRefCounter.put(key.getId(), 1); } else if (aggregate.getType().isLocal() || aggregate.getType().isDistinctLocal()) { info.outputStringColumns.union(key.getId()); } } for (ColumnRefOperator groupBy : aggregate.getGroupBys()) { if (info.inputStringColumns.contains(groupBy) && !info.decodeStringColumns.contains(groupBy)) { info.outputStringColumns.union(groupBy); } } for (ColumnRefOperator partition : aggregate.getPartitionByColumns()) { if (info.inputStringColumns.contains(partition) && !info.decodeStringColumns.contains(partition)) { info.outputStringColumns.union(partition); } } return info; } @Override public DecodeInfo visitPhysicalDistribution(OptExpression optExpression, DecodeInfo context) { if (context.outputStringColumns.isEmpty()) { return DecodeInfo.EMPTY; } DecodeInfo result = context.createOutputInfo(); if (context.parent != null && context.parent.getOp() instanceof PhysicalJoinOperator) { return visitPhysicalJoin(context.parent, context); } return result; } @Override public DecodeInfo visitPhysicalOlapScan(OptExpression optExpression, DecodeInfo context) { PhysicalOlapScanOperator scan = optExpression.getOp().cast(); OlapTable table = (OlapTable) scan.getTable(); long version = table.getPartitions().stream().map(Partition::getVisibleVersionTime).max(Long::compareTo) .orElse(0L); if ((table.getKeysType().equals(KeysType.PRIMARY_KEYS))) { return DecodeInfo.EMPTY; } if (table.hasForbiddenGlobalDict()) { return DecodeInfo.EMPTY; } DecodeInfo info = new DecodeInfo(); for (ColumnRefOperator column : scan.getColRefToColumnMetaMap().keySet()) { if (!supportLowCardinality(column.getType())) { continue; } if (!sessionVariable.isEnableArrayLowCardinalityOptimize() && column.getType().isArrayType()) { continue; } if (!checkComplexTypeInvalid(scan, column)) { continue; } ColumnStatistic columnStatistic = GlobalStateMgr.getCurrentState().getStatisticStorage() .getColumnStatistic(table, column.getName()); if (!FeConstants.USE_MOCK_DICT_MANAGER && (columnStatistic.isUnknown() || columnStatistic.getDistinctValuesCount() > CacheDictManager.LOW_CARDINALITY_THRESHOLD)) { LOG.debug("{} isn't low cardinality string column", column.getName()); continue; } if (!IDictManager.getInstance().hasGlobalDict(table.getId(), 
column.getName(), version)) { LOG.debug("{} doesn't have global dict", column.getName()); continue; } Optional<ColumnDict> dict = IDictManager.getInstance().getGlobalDict(table.getId(), column.getName()); if (dict.isEmpty()) { continue; } info.outputStringColumns.union(column); info.inputStringColumns.union(column); stringRefToDefineExprMap.put(column.getId(), column); scanStringColumns.add(column.getId()); expressionStringRefCounter.put(column.getId(), 0); globalDicts.put(column.getId(), dict.get()); } if (info.outputStringColumns.isEmpty()) { return DecodeInfo.EMPTY; } return info; } private boolean checkComplexTypeInvalid(PhysicalOlapScanOperator scan, ColumnRefOperator column) { String colName = scan.getColRefToColumnMetaMap().get(column).getName(); for (ColumnAccessPath path : scan.getColumnAccessPaths()) { if (!StringUtils.equalsIgnoreCase(colName, path.getPath()) || path.getType() != TAccessPathType.ROOT) { continue; } if (path.getChildren().stream().allMatch(p -> p.getType() == TAccessPathType.OFFSET)) { return false; } } return true; } private void collectProjection(Operator operator, DecodeInfo info) { if (operator.getProjection() == null) { return; } ColumnRefSet decodeInput = info.outputStringColumns; info.outputStringColumns = new ColumnRefSet(); for (ColumnRefOperator key : operator.getProjection().getColumnRefMap().keySet()) { if (decodeInput.contains(key)) { info.outputStringColumns.union(key.getId()); continue; } DictExpressionCollector dictExpressionCollector = new DictExpressionCollector(decodeInput); ScalarOperator value = operator.getProjection().getColumnRefMap().get(key); dictExpressionCollector.collect(value); decodeInput.getStream().forEach(c -> { List<ScalarOperator> exprs = dictExpressionCollector.getDictExpressions(c); if (!exprs.isEmpty()) { stringExpressions.computeIfAbsent(c, l -> Lists.newArrayList()).addAll(exprs); } if (exprs.contains(value) && supportLowCardinality(value.getType())) { stringRefToDefineExprMap.put(key.getId(), value); expressionStringRefCounter.putIfAbsent(key.getId(), 0); info.outputStringColumns.union(key.getId()); } }); } } private static boolean supportLowCardinality(Type type) { return type.isVarchar() || (type.isArrayType() && ((ArrayType) type).getItemType().isVarchar()); } private static class DictExpressionCollector extends ScalarOperatorVisitor<ScalarOperator, Void> { private static final ScalarOperator CONSTANTS = ConstantOperator.TRUE; private static final ScalarOperator VARIABLES = ConstantOperator.FALSE; private final ColumnRefSet allDictColumnRefs; private final Map<Integer, List<ScalarOperator>> dictExpressions = Maps.newHashMap(); public DictExpressionCollector(ColumnRefSet allDictColumnRefs) { this.allDictColumnRefs = allDictColumnRefs; } public void collect(ScalarOperator scalarOperator) { ScalarOperator dictColumn = scalarOperator.accept(this, null); saveDictExpr(dictColumn, scalarOperator); } private void saveDictExpr(ScalarOperator dictColumn, ScalarOperator dictExpr) { if (dictColumn.isColumnRef()) { dictExpressions.computeIfAbsent(((ColumnRefOperator) dictColumn).getId(), x -> Lists.newArrayList()).add(dictExpr); } else if (!dictColumn.isConstant()) { List<ColumnRefOperator> used = dictColumn.getColumnRefs(); Preconditions.checkState(used.stream().distinct().count() == 1); this.dictExpressions.computeIfAbsent(used.get(0).getId(), x -> Lists.newArrayList()).add(dictExpr); } } public List<ScalarOperator> getDictExpressions(int columnId) { if (!dictExpressions.containsKey(columnId)) { return Collections.emptyList(); } 
return dictExpressions.get(columnId); } public List<ScalarOperator> visitChildren(ScalarOperator operator, Void context) { List<ScalarOperator> children = Lists.newArrayList(); for (ScalarOperator child : operator.getChildren()) { children.add(child.accept(this, context)); } return children; } private ScalarOperator mergeWithArray(List<ScalarOperator> collectors, ScalarOperator scalarOperator) { if (collectors.stream().allMatch(CONSTANTS::equals)) { return CONSTANTS; } long variableExpr = collectors.stream().filter(VARIABLES::equals).count(); long dictCount = collectors.stream().filter(s -> !s.isConstant()).distinct().count(); if (dictCount == 1 && variableExpr == 0) { return collectors.stream().filter(s -> !s.isConstant()).findFirst().get(); } for (int i = 0; i < collectors.size(); i++) { saveDictExpr(collectors.get(i), scalarOperator.getChild(i)); } return VARIABLES; } private ScalarOperator forbidden(List<ScalarOperator> collectors, ScalarOperator scalarOperator) { if (collectors.stream().allMatch(CONSTANTS::equals)) { return CONSTANTS; } for (int i = 0; i < collectors.size(); i++) { saveDictExpr(collectors.get(i), scalarOperator.getChild(i)); } return VARIABLES; } private ScalarOperator merge(List<ScalarOperator> collectors, ScalarOperator scalarOperator) { if (collectors.stream().anyMatch(s -> s.getType().isArrayType())) { return forbidden(collectors, scalarOperator); } return mergeWithArray(collectors, scalarOperator); } @Override public ScalarOperator visit(ScalarOperator scalarOperator, Void context) { return forbidden(visitChildren(scalarOperator, context), scalarOperator); } @Override public ScalarOperator visitVariableReference(ColumnRefOperator variable, Void context) { if (allDictColumnRefs.contains(variable)) { return variable; } return VARIABLES; } @Override public ScalarOperator visitConstant(ConstantOperator literal, Void context) { return CONSTANTS; } @Override public ScalarOperator visitCall(CallOperator call, Void context) { if (FunctionSet.nonDeterministicFunctions.contains(call.getFnName())) { return VARIABLES; } if (FunctionSet.ARRAY_FILTER.equalsIgnoreCase(call.getFnName())) { List<ScalarOperator> result = visitChildren(call, context); return CONSTANTS.equals(result.get(1)) ? mergeWithArray(result, call) : forbidden(result, call); } if (FunctionSet.ARRAY_MIN.equalsIgnoreCase(call.getFnName()) || FunctionSet.ARRAY_MAX.equalsIgnoreCase(call.getFnName())) { ScalarOperator result = mergeWithArray(visitChildren(call, context), call); return !result.isConstant() ? call : result; } if (LOW_CARD_STRING_FUNCTIONS.contains(call.getFnName()) || LOW_CARD_ARRAY_FUNCTIONS.contains(call.getFnName()) || LOW_CARD_AGGREGATE_FUNCTIONS.contains(call.getFnName())) { return mergeWithArray(visitChildren(call, context), call); } return forbidden(visitChildren(call, context), call); } @Override public ScalarOperator visitCollectionElement(CollectionElementOperator collectionElementOp, Void context) { List<ScalarOperator> children = visitChildren(collectionElementOp, context); if (supportLowCardinality(collectionElementOp.getChild(0).getType())) { ScalarOperator result = mergeWithArray(children, collectionElementOp); return !result.isConstant() ? 
collectionElementOp : result; } return forbidden(children, collectionElementOp); } @Override public ScalarOperator visitBinaryPredicate(BinaryPredicateOperator predicate, Void context) { if (predicate.getBinaryType() == EQ_FOR_NULL) { return forbidden(visitChildren(predicate, context), predicate); } return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitCastOperator(CastOperator operator, Void context) { return merge(visitChildren(operator, context), operator); } @Override public ScalarOperator visitLikePredicateOperator(LikePredicateOperator predicate, Void context) { return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitCompoundPredicate(CompoundPredicateOperator predicate, Void context) { return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitInPredicate(InPredicateOperator predicate, Void context) { return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitIsNullPredicate(IsNullPredicateOperator predicate, Void context) { return merge(visitChildren(predicate, context), predicate); } @Override public ScalarOperator visitCaseWhenOperator(CaseWhenOperator operator, Void context) { return merge(visitChildren(operator, context), operator); } } }
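The DecodeCollector context above drives a low-cardinality string optimization: scan columns backed by a global dictionary are rewritten so eligible expressions run on integer codes, with the string decode deferred as late as possible per operator. As a minimal, self-contained sketch of that general idea only (the threshold, names, and classes below are illustrative stand-ins, not the StarRocks ColumnDict/IDictManager APIs):

```java
import java.util.*;

// Minimal sketch of dictionary-encoded string processing, assuming an
// illustrative LOW_CARDINALITY_THRESHOLD of 256; the real optimizer
// consults column statistics and a managed global dictionary instead.
public class DictEncodeSketch {
    static final int LOW_CARDINALITY_THRESHOLD = 256;

    public static void main(String[] args) {
        List<String> column = List.of("red", "blue", "red", "green", "blue", "red");

        // Build a dictionary only if the column is low-cardinality.
        Map<String, Integer> dict = new LinkedHashMap<>();
        for (String v : column) {
            dict.computeIfAbsent(v, k -> dict.size());
        }
        if (dict.size() > LOW_CARDINALITY_THRESHOLD) {
            return; // fall back to plain string execution
        }

        // Encode once, then evaluate predicates on int codes.
        int[] codes = column.stream().mapToInt(dict::get).toArray();
        int redCode = dict.get("red");
        long matches = Arrays.stream(codes).filter(c -> c == redCode).count();

        // Decode only at the end; insertion order makes code == array index.
        String[] decode = dict.keySet().toArray(new String[0]);
        System.out.println(matches + " rows equal to " + decode[redCode]);
    }
}
```

Unlike this eager sketch, the collector above decides decode placement per operator via its outputStringColumns/decodeStringColumns/inputStringColumns sets, so strings are materialized only where an operator (e.g. a join's on-predicate) actually needs them.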
Nit
```suggestion
assertThat(
        ((AlterTableChangeOperation) operation)
                .getNewTable()
                .getUnresolvedSchema()
                .getPrimaryKey()
)
        .isNotPresent();
```
public void testAlterTableDropConstraint() throws Exception { prepareNonManagedTable(true); String expectedSummaryString = "ALTER TABLE cat1.db1.tb1\n DROP CONSTRAINT ct1"; Operation operation = parse("alter table tb1 drop constraint ct1"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()).isEqualTo(expectedSummaryString); assertThat( ((AlterTableChangeOperation) operation) .getNewTable() .getUnresolvedSchema() .getPrimaryKey() .isPresent()) .isEqualTo(false); operation = parse("alter table tb1 drop primary key"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()).isEqualTo(expectedSummaryString); assertThat( ((AlterTableChangeOperation) operation) .getNewTable() .getUnresolvedSchema() .getPrimaryKey() .isPresent()) .isEqualTo(false); }
.isEqualTo(false);
public void testAlterTableDropConstraint() throws Exception { prepareNonManagedTable(true); String expectedSummaryString = "ALTER TABLE cat1.db1.tb1\n DROP CONSTRAINT ct1"; Operation operation = parse("alter table tb1 drop constraint ct1"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()).isEqualTo(expectedSummaryString); assertThat( ((AlterTableChangeOperation) operation) .getNewTable() .getUnresolvedSchema() .getPrimaryKey()) .isNotPresent(); operation = parse("alter table tb1 drop primary key"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()).isEqualTo(expectedSummaryString); assertThat( ((AlterTableChangeOperation) operation) .getNewTable() .getUnresolvedSchema() .getPrimaryKey()) .isNotPresent(); }
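The comment in this record is a pure readability nit: AssertJ can assert on the Optional itself, so a failure reports the Optional's contents rather than a bare boolean mismatch. A minimal standalone illustration of the two styles (a plain java.util.Optional stands in for getUnresolvedSchema().getPrimaryKey(); assumes AssertJ 3.x, where isNotPresent() is available):

```java
import static org.assertj.core.api.Assertions.assertThat;

import java.util.Optional;

// Illustrative only: Optional.empty() stands in for the dropped primary key.
public class OptionalAssertSketch {
    public static void main(String[] args) {
        Optional<String> primaryKey = Optional.empty();

        // Before: asserts on a boolean, so a failure only says
        // "expected: false but was: true".
        assertThat(primaryKey.isPresent()).isEqualTo(false);

        // After: asserts on the Optional itself, so a failure prints
        // the unexpected value it contained.
        assertThat(primaryKey).isNotPresent();
    }
}
```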
class SqlToOperationConverterTest { private final boolean isStreamingMode = false; private final TableConfig tableConfig = TableConfig.getDefault(); private final Catalog catalog = new GenericInMemoryCatalog("MockCatalog", "default"); private final CatalogManager catalogManager = CatalogManagerMocks.preparedCatalogManager() .defaultCatalog("builtin", catalog) .config( Configuration.fromMap( Collections.singletonMap( ExecutionOptions.RUNTIME_MODE.key(), RuntimeExecutionMode.BATCH.name()))) .build(); private final PlannerMocks plannerMocks = PlannerMocks.newBuilder() .withBatchMode(true) .withTableConfig(tableConfig) .withCatalogManager(catalogManager) .withRootSchema( asRootSchema( new CatalogManagerCalciteSchema( catalogManager, isStreamingMode))) .build(); private final PlannerContext plannerContext = plannerMocks.getPlannerContext(); private final FunctionCatalog functionCatalog = plannerMocks.getFunctionCatalog(); private final Supplier<FlinkPlannerImpl> plannerSupplier = plannerContext::createFlinkPlanner; private final Parser parser = new ParserImpl( catalogManager, plannerSupplier, () -> plannerSupplier.get().parser(), plannerContext.getRexFactory()); @BeforeEach public void before() throws TableAlreadyExistException, DatabaseNotExistException { catalogManager.initSchemaResolver( isStreamingMode, ExpressionResolverMocks.basicResolver(catalogManager, functionCatalog, parser)); final ObjectPath path1 = new ObjectPath(catalogManager.getCurrentDatabase(), "t1"); final ObjectPath path2 = new ObjectPath(catalogManager.getCurrentDatabase(), "t2"); final TableSchema tableSchema = TableSchema.builder() .field("a", DataTypes.BIGINT()) .field("b", DataTypes.VARCHAR(Integer.MAX_VALUE)) .field("c", DataTypes.INT()) .field("d", DataTypes.VARCHAR(Integer.MAX_VALUE)) .build(); Map<String, String> options = new HashMap<>(); options.put("connector", "COLLECTION"); final CatalogTable catalogTable = new CatalogTableImpl(tableSchema, options, ""); catalog.createTable(path1, catalogTable, true); catalog.createTable(path2, catalogTable, true); } @AfterEach public void after() throws TableNotExistException { final ObjectPath path1 = new ObjectPath(catalogManager.getCurrentDatabase(), "t1"); final ObjectPath path2 = new ObjectPath(catalogManager.getCurrentDatabase(), "t2"); catalog.dropTable(path1, true); catalog.dropTable(path2, true); } @Test public void testUseCatalog() { final String sql = "USE CATALOG cat1"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(UseCatalogOperation.class); assertThat(((UseCatalogOperation) operation).getCatalogName()).isEqualTo("cat1"); assertThat(operation.asSummaryString()).isEqualTo("USE CATALOG cat1"); } @Test public void testUseDatabase() { final String sql1 = "USE db1"; Operation operation1 = parse(sql1); assertThat(operation1).isInstanceOf(UseDatabaseOperation.class); assertThat(((UseDatabaseOperation) operation1).getCatalogName()).isEqualTo("builtin"); assertThat(((UseDatabaseOperation) operation1).getDatabaseName()).isEqualTo("db1"); final String sql2 = "USE cat1.db1"; Operation operation2 = parse(sql2); assertThat(operation2).isInstanceOf(UseDatabaseOperation.class); assertThat(((UseDatabaseOperation) operation2).getCatalogName()).isEqualTo("cat1"); assertThat(((UseDatabaseOperation) operation2).getDatabaseName()).isEqualTo("db1"); } @Test public void testUseDatabaseWithException() { final String sql = "USE cat1.db1.tbl1"; assertThatThrownBy(() -> parse(sql)).isInstanceOf(ValidationException.class); } @Test public void testCreateDatabase() { 
final String[] createDatabaseSqls = new String[] { "create database db1", "create database if not exists cat1.db1", "create database cat1.db1 comment 'db1_comment'", "create database cat1.db1 comment 'db1_comment' with ('k1' = 'v1', 'K2' = 'V2')" }; final String[] expectedCatalogs = new String[] {"builtin", "cat1", "cat1", "cat1"}; final String expectedDatabase = "db1"; final String[] expectedComments = new String[] {null, null, "db1_comment", "db1_comment"}; final boolean[] expectedIgnoreIfExists = new boolean[] {false, true, false, false}; Map<String, String> properties = new HashMap<>(); properties.put("k1", "v1"); properties.put("K2", "V2"); final Map[] expectedProperties = new Map[] { new HashMap<String, String>(), new HashMap<String, String>(), new HashMap<String, String>(), new HashMap(properties) }; for (int i = 0; i < createDatabaseSqls.length; i++) { Operation operation = parse(createDatabaseSqls[i]); assertThat(operation).isInstanceOf(CreateDatabaseOperation.class); final CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation; assertThat(createDatabaseOperation.getCatalogName()).isEqualTo(expectedCatalogs[i]); assertThat(createDatabaseOperation.getDatabaseName()).isEqualTo(expectedDatabase); assertThat(createDatabaseOperation.getCatalogDatabase().getComment()) .isEqualTo(expectedComments[i]); assertThat(createDatabaseOperation.isIgnoreIfExists()) .isEqualTo(expectedIgnoreIfExists[i]); assertThat(createDatabaseOperation.getCatalogDatabase().getProperties()) .isEqualTo(expectedProperties[i]); } } @Test public void testDropDatabase() { final String[] dropDatabaseSqls = new String[] { "drop database db1", "drop database if exists db1", "drop database if exists cat1.db1 CASCADE", "drop database if exists cat1.db1 RESTRICT" }; final String[] expectedCatalogs = new String[] {"builtin", "builtin", "cat1", "cat1"}; final String expectedDatabase = "db1"; final boolean[] expectedIfExists = new boolean[] {false, true, true, true}; final boolean[] expectedIsCascades = new boolean[] {false, false, true, false}; for (int i = 0; i < dropDatabaseSqls.length; i++) { Operation operation = parse(dropDatabaseSqls[i]); assertThat(operation).isInstanceOf(DropDatabaseOperation.class); final DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation; assertThat(dropDatabaseOperation.getCatalogName()).isEqualTo(expectedCatalogs[i]); assertThat(dropDatabaseOperation.getDatabaseName()).isEqualTo(expectedDatabase); assertThat(dropDatabaseOperation.isIfExists()).isEqualTo(expectedIfExists[i]); assertThat(dropDatabaseOperation.isCascade()).isEqualTo(expectedIsCascades[i]); } } @Test public void testAlterDatabase() throws Exception { catalogManager.registerCatalog("cat1", new GenericInMemoryCatalog("default", "default")); catalogManager .getCatalog("cat1") .get() .createDatabase( "db1", new CatalogDatabaseImpl(new HashMap<>(), "db1_comment"), true); final String sql = "alter database cat1.db1 set ('k1'='v1', 'K2'='V2')"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(AlterDatabaseOperation.class); Map<String, String> properties = new HashMap<>(); properties.put("k1", "v1"); properties.put("K2", "V2"); AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation; assertThat(alterDatabaseOperation.getDatabaseName()).isEqualTo("db1"); assertThat(alterDatabaseOperation.getCatalogName()).isEqualTo("cat1"); assertThat(alterDatabaseOperation.getCatalogDatabase().getComment()) .isEqualTo("db1_comment"); 
assertThat(alterDatabaseOperation.getCatalogDatabase().getProperties()) .isEqualTo(properties); } @Test public void testLoadModule() { final String sql = "LOAD MODULE dummy WITH ('k1' = 'v1', 'k2' = 'v2')"; final String expectedModuleName = "dummy"; final Map<String, String> expectedOptions = new HashMap<>(); expectedOptions.put("k1", "v1"); expectedOptions.put("k2", "v2"); Operation operation = parse(sql); assertThat(operation).isInstanceOf(LoadModuleOperation.class); final LoadModuleOperation loadModuleOperation = (LoadModuleOperation) operation; assertThat(loadModuleOperation.getModuleName()).isEqualTo(expectedModuleName); assertThat(loadModuleOperation.getOptions()).isEqualTo(expectedOptions); } @Test public void testUnloadModule() { final String sql = "UNLOAD MODULE dummy"; final String expectedModuleName = "dummy"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(UnloadModuleOperation.class); final UnloadModuleOperation unloadModuleOperation = (UnloadModuleOperation) operation; assertThat(unloadModuleOperation.getModuleName()).isEqualTo(expectedModuleName); } @Test public void testUseOneModule() { final String sql = "USE MODULES dummy"; final List<String> expectedModuleNames = Collections.singletonList("dummy"); Operation operation = parse(sql); assertThat(operation).isInstanceOf(UseModulesOperation.class); final UseModulesOperation useModulesOperation = (UseModulesOperation) operation; assertThat(useModulesOperation.getModuleNames()).isEqualTo(expectedModuleNames); assertThat(useModulesOperation.asSummaryString()).isEqualTo("USE MODULES: [dummy]"); } @Test public void testUseMultipleModules() { final String sql = "USE MODULES x, y, z"; final List<String> expectedModuleNames = Arrays.asList("x", "y", "z"); Operation operation = parse(sql); assertThat(operation).isInstanceOf(UseModulesOperation.class); final UseModulesOperation useModulesOperation = (UseModulesOperation) operation; assertThat(useModulesOperation.getModuleNames()).isEqualTo(expectedModuleNames); assertThat(useModulesOperation.asSummaryString()).isEqualTo("USE MODULES: [x, y, z]"); } @Test public void testShowModules() { final String sql = "SHOW MODULES"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowModulesOperation.class); final ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation; assertThat(showModulesOperation.requireFull()).isFalse(); assertThat(showModulesOperation.asSummaryString()).isEqualTo("SHOW MODULES"); } @Test public void testShowTables() { final String sql = "SHOW TABLES from cat1.db1 not like 't%'"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowTablesOperation.class); ShowTablesOperation showTablesOperation = (ShowTablesOperation) operation; assertThat(showTablesOperation.getCatalogName()).isEqualTo("cat1"); assertThat(showTablesOperation.getDatabaseName()).isEqualTo("db1"); assertThat(showTablesOperation.getPreposition()).isEqualTo("FROM"); assertThat(showTablesOperation.isUseLike()).isTrue(); assertThat(showTablesOperation.isNotLike()).isTrue(); final String sql2 = "SHOW TABLES in db2"; showTablesOperation = (ShowTablesOperation) parse(sql2); assertThat(showTablesOperation.getCatalogName()).isEqualTo("builtin"); assertThat(showTablesOperation.getDatabaseName()).isEqualTo("db2"); assertThat(showTablesOperation.getPreposition()).isEqualTo("IN"); assertThat(showTablesOperation.isUseLike()).isFalse(); assertThat(showTablesOperation.isNotLike()).isFalse(); final String sql3 = "SHOW TABLES"; showTablesOperation 
= (ShowTablesOperation) parse(sql3); assertThat(showTablesOperation.getCatalogName()).isNull(); assertThat(showTablesOperation.getDatabaseName()).isNull(); assertThat(showTablesOperation.getPreposition()).isNull(); } @Test public void testShowFullModules() { final String sql = "SHOW FULL MODULES"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowModulesOperation.class); final ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation; assertThat(showModulesOperation.requireFull()).isTrue(); assertThat(showModulesOperation.asSummaryString()).isEqualTo("SHOW FULL MODULES"); } @Test public void testShowFunctions() { final String sql1 = "SHOW FUNCTIONS"; assertShowFunctions(sql1, sql1, FunctionScope.ALL); final String sql2 = "SHOW USER FUNCTIONS"; assertShowFunctions(sql2, sql2, FunctionScope.USER); } @Test public void testCreateTable() { final String sql = "CREATE TABLE tbl1 (\n" + " a bigint,\n" + " b varchar, \n" + " c int, \n" + " d varchar" + ")\n" + " PARTITIONED BY (a, d)\n" + " with (\n" + " 'connector' = 'kafka', \n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); assertThat(catalogTable.getPartitionKeys()).hasSameElementsAs(Arrays.asList("a", "d")); assertThat(catalogTable.getSchema().getFieldNames()) .isEqualTo(new String[] {"a", "b", "c", "d"}); assertThat(catalogTable.getSchema().getFieldDataTypes()) .isEqualTo( new DataType[] { DataTypes.BIGINT(), DataTypes.VARCHAR(Integer.MAX_VALUE), DataTypes.INT(), DataTypes.VARCHAR(Integer.MAX_VALUE) }); } @Test public void testCreateTableWithPrimaryKey() { final String sql = "CREATE TABLE tbl1 (\n" + " a bigint,\n" + " b varchar, \n" + " c int, \n" + " d varchar, \n" + " constraint ct1 primary key(a, b) not enforced\n" + ") with (\n" + " 'connector' = 'kafka', \n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); TableSchema tableSchema = catalogTable.getSchema(); assertThat( tableSchema .getPrimaryKey() .map(UniqueConstraint::asSummaryString) .orElse("fakeVal")) .isEqualTo("CONSTRAINT ct1 PRIMARY KEY (a, b)"); assertThat(tableSchema.getFieldNames()).isEqualTo(new String[] {"a", "b", "c", "d"}); assertThat(tableSchema.getFieldDataTypes()) .isEqualTo( new DataType[] { DataTypes.BIGINT().notNull(), DataTypes.STRING().notNull(), DataTypes.INT(), DataTypes.STRING() }); } @Test public void testPrimaryKeyOnGeneratedColumn() { final String sql = "CREATE TABLE tbl1 (\n" + " a bigint not null,\n" + " b varchar not null,\n" + " c as 2 * (a + 1),\n" + " constraint ct1 primary key (b, c) not enforced" + ") with (\n" + " 'connector' = 'kafka',\n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Could not create a PRIMARY KEY with column 'c' at line 5, column 34.\n" + "A PRIMARY KEY constraint must 
be declared on physical columns."); } @Test public void testPrimaryKeyNonExistentColumn() { final String sql = "CREATE TABLE tbl1 (\n" + " a bigint not null,\n" + " b varchar not null,\n" + " c as 2 * (a + 1),\n" + " constraint ct1 primary key (b, d) not enforced" + ") with (\n" + " 'connector' = 'kafka',\n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Primary key column 'd' is not defined in the schema at line 5, column 34"); } @Test public void testCreateTableWithMinusInOptionKey() { final String sql = "create table source_table(\n" + " a int,\n" + " b bigint,\n" + " c varchar\n" + ") with (\n" + " 'a-B-c-d124' = 'Ab',\n" + " 'a.b-c-d.e-f.g' = 'ada',\n" + " 'a.b-c-d.e-f1231.g' = 'ada',\n" + " 'a.b-c-d.*' = 'adad')\n"; final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); assertThat(node).isInstanceOf(SqlCreateTable.class); Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get(); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); Map<String, String> options = catalogTable.getOptions().entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); Map<String, String> sortedProperties = new TreeMap<>(options); final String expected = "{a-B-c-d124=Ab, " + "a.b-c-d.*=adad, " + "a.b-c-d.e-f.g=ada, " + "a.b-c-d.e-f1231.g=ada}"; assertThat(sortedProperties.toString()).isEqualTo(expected); } @Test public void testExplainWithSelect() { final String sql = "explain select * from t1"; checkExplainSql(sql); } @Test public void testExplainWithInsert() { final String sql = "explain insert into t2 select * from t1"; checkExplainSql(sql); } @Test public void testExplainWithUnion() { final String sql = "explain select * from t1 union select * from t2"; checkExplainSql(sql); } @Test public void testExplainWithExplainDetails() { String sql = "explain changelog_mode, estimated_cost, json_execution_plan select * from t1"; checkExplainSql(sql); } @Test public void testCreateTableWithWatermark() throws FunctionAlreadyExistException, DatabaseNotExistException { CatalogFunction cf = new CatalogFunctionImpl(JavaUserDefinedScalarFunctions.JavaFunc5.class.getName()); catalog.createFunction(ObjectPath.fromString("default.myfunc"), cf, true); final String sql = "create table source_table(\n" + " a int,\n" + " b bigint,\n" + " c timestamp(3),\n" + " watermark for `c` as myfunc(c, 1) - interval '5' second\n" + ") with (\n" + " 'connector.type' = 'kafka')\n"; final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); assertThat(node).isInstanceOf(SqlCreateTable.class); Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get(); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); Map<String, String> properties = catalogTable.toProperties(); Map<String, String> expected = new HashMap<>(); expected.put("schema.0.name", "a"); expected.put("schema.0.data-type", "INT"); expected.put("schema.1.name", "b"); expected.put("schema.1.data-type", 
"BIGINT"); expected.put("schema.2.name", "c"); expected.put("schema.2.data-type", "TIMESTAMP(3)"); expected.put("schema.watermark.0.rowtime", "c"); expected.put( "schema.watermark.0.strategy.expr", "`builtin`.`default`.`myfunc`(`c`, 1) - INTERVAL '5' SECOND"); expected.put("schema.watermark.0.strategy.data-type", "TIMESTAMP(3)"); expected.put("connector.type", "kafka"); assertThat(properties).isEqualTo(expected); } @Test public void testBasicCreateTableLike() { Map<String, String> sourceProperties = new HashMap<>(); sourceProperties.put("format.type", "json"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .build(), null, Collections.emptyList(), sourceProperties); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1 as `f1` - interval '5' second\n" + ")\n" + "PARTITIONED BY (a, f0)\n" + "with (\n" + " 'connector.type' = 'kafka'" + ")\n" + "like sourceTable"; Operation operation = parseAndConvert(sql); assertThat(operation) .is( new HamcrestCondition<>( isCreateTableOperation( withSchema( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .column("a", DataTypes.INT()) .watermark( "f1", "`f1` - INTERVAL '5' SECOND") .build()), withOptions( entry("connector.type", "kafka"), entry("format.type", "json")), partitionedBy("a", "f0")))); } @Test public void testCreateTableLikeWithFullPath() { Map<String, String> sourceProperties = new HashMap<>(); sourceProperties.put("connector.type", "kafka"); sourceProperties.put("format.type", "json"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .build(), null, Collections.emptyList(), sourceProperties); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table mytable like `builtin`.`default`.sourceTable"; Operation operation = parseAndConvert(sql); assertThat(operation) .is( new HamcrestCondition<>( isCreateTableOperation( withSchema( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .build()), withOptions( entry("connector.type", "kafka"), entry("format.type", "json"))))); } @Test public void testMergingCreateTableLike() { Map<String, String> sourceProperties = new HashMap<>(); sourceProperties.put("format.type", "json"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .columnByExpression("f2", "`f0` + 12345") .watermark("f1", "`f1` - interval '1' second") .build(), null, Arrays.asList("f0", "f1"), sourceProperties); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1 as `f1` - interval '5' second\n" + ")\n" + "PARTITIONED BY (a, f0)\n" + "with (\n" + " 'connector.type' = 'kafka'" + ")\n" + "like sourceTable (\n" + " EXCLUDING GENERATED\n" + " EXCLUDING PARTITIONS\n" + " OVERWRITING OPTIONS\n" + " OVERWRITING WATERMARKS" + ")"; Operation operation = parseAndConvert(sql); assertThat(operation) .is( new HamcrestCondition<>( isCreateTableOperation( withSchema( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) 
.column("f1", DataTypes.TIMESTAMP(3)) .column("a", DataTypes.INT()) .watermark( "f1", "`f1` - INTERVAL '5' SECOND") .build()), withOptions( entry("connector.type", "kafka"), entry("format.type", "json")), partitionedBy("a", "f0")))); } @Test public void testCreateTableInvalidPartition() { final String sql = "create table derivedTable(\n" + " a int\n" + ")\n" + "PARTITIONED BY (f3)"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Partition column 'f3' not defined in the table schema. Available columns: ['a']"); } @Test public void testCreateTableLikeInvalidPartition() { CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder().column("f0", DataTypes.INT().notNull()).build(), null, Collections.emptyList(), Collections.emptyMap()); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int\n" + ")\n" + "PARTITIONED BY (f3)\n" + "like sourceTable"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Partition column 'f3' not defined in the table schema. Available columns: ['f0', 'a']"); } @Test public void testCreateTableInvalidWatermark() { final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1 as `f1` - interval '5' second\n" + ")"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The rowtime attribute field 'f1' is not defined in the table schema," + " at line 3, column 17\n" + "Available fields: ['a']"); } @Test public void testCreateTableLikeInvalidWatermark() { CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder().column("f0", DataTypes.INT().notNull()).build(), null, Collections.emptyList(), Collections.emptyMap()); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1 as `f1` - interval '5' second\n" + ")\n" + "like sourceTable"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The rowtime attribute field 'f1' is not defined in the table schema," + " at line 3, column 17\n" + "Available fields: ['f0', 'a']"); } @Test public void testCreateTableLikeNestedWatermark() { CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column( "f1", DataTypes.ROW( DataTypes.FIELD("tmstmp", DataTypes.TIMESTAMP(3)))) .build(), null, Collections.emptyList(), Collections.emptyMap()); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1.t as f1.t - interval '5' second\n" + ")\n" + "like sourceTable"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The rowtime attribute field 'f1.t' is not defined in the table schema," + " at line 3, column 20\n" + "Nested field 't' was not found in a composite type:" + " ROW<`tmstmp` TIMESTAMP(3)>."); } @Test public void testSqlInsertWithStaticPartition() { final String sql = "insert into t1 partition(a=1) select b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation 
operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(SinkModifyOperation.class); SinkModifyOperation sinkModifyOperation = (SinkModifyOperation) operation; final Map<String, String> expectedStaticPartitions = new HashMap<>(); expectedStaticPartitions.put("a", "1"); assertThat(sinkModifyOperation.getStaticPartitions()).isEqualTo(expectedStaticPartitions); } @Test public void testSqlInsertWithDynamicTableOptions() { final String sql = "insert into t1 /*+ OPTIONS('k1'='v1', 'k2'='v2') */\n" + "select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(SinkModifyOperation.class); SinkModifyOperation sinkModifyOperation = (SinkModifyOperation) operation; Map<String, String> dynamicOptions = sinkModifyOperation.getDynamicOptions(); assertThat(dynamicOptions).isNotNull(); assertThat(dynamicOptions.size()).isEqualTo(2); assertThat(dynamicOptions.toString()).isEqualTo("{k1=v1, k2=v2}"); } @Test public void testDynamicTableWithInvalidOptions() { final String sql = "select * from t1 /*+ OPTIONS('opt1', 'opt2') */"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); assertThatThrownBy(() -> parse(sql, planner, parser)) .isInstanceOf(AssertionError.class) .hasMessageContaining( "Hint [OPTIONS] only support " + "non empty key value options"); } @Test public void testCreateTableWithFullDataTypes() { final List<TestItem> testItems = Arrays.asList( createTestItem("CHAR", DataTypes.CHAR(1)), createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()), createTestItem("CHAR NULL", DataTypes.CHAR(1)), createTestItem("CHAR(33)", DataTypes.CHAR(33)), createTestItem("VARCHAR", DataTypes.STRING()), createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)), createTestItem("STRING", DataTypes.STRING()), createTestItem("BOOLEAN", DataTypes.BOOLEAN()), createTestItem("BINARY", DataTypes.BINARY(1)), createTestItem("BINARY(33)", DataTypes.BINARY(33)), createTestItem("VARBINARY", DataTypes.BYTES()), createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)), createTestItem("BYTES", DataTypes.BYTES()), createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)), createTestItem("DEC", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("TINYINT", DataTypes.TINYINT()), createTestItem("SMALLINT", DataTypes.SMALLINT()), createTestItem("INTEGER", DataTypes.INT()), createTestItem("INT", DataTypes.INT()), createTestItem("BIGINT", DataTypes.BIGINT()), createTestItem("FLOAT", DataTypes.FLOAT()), createTestItem("DOUBLE", DataTypes.DOUBLE()), createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()), createTestItem("DATE", DataTypes.DATE()), createTestItem("TIME", DataTypes.TIME()), createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()), createTestItem("TIME(3)", DataTypes.TIME()), createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()), createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP WITHOUT TIME ZONE", 
DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)), createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)), createTestItem( "TIMESTAMP WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)), createTestItem( "TIMESTAMP(3) WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)), createTestItem( "ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))), createTestItem( "ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())), createTestItem( "INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem( "INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()), createTestItem( "MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())), createTestItem( "INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem( "INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()), createTestItem( "MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())), createTestItem( "ROW<f0 INT NOT NULL, f1 BOOLEAN>", DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem( "ROW(f0 INT NOT NULL, f1 BOOLEAN)", DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem( "ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem( "ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem("ROW<>", DataTypes.ROW()), createTestItem("ROW()", DataTypes.ROW()), createTestItem( "ROW<f0 INT NOT NULL 'This is a comment.'," + " f1 BOOLEAN 'This as well.'>", DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem( "ARRAY<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.ARRAY( DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem( "ROW<f0 INT, f1 BOOLEAN> MULTISET", DataTypes.MULTISET( DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem( "MULTISET<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.MULTISET( DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem( "ROW<f0 Row<f00 INT, f01 BOOLEAN>, " + "f1 INT ARRAY, " + "f2 BOOLEAN MULTISET>", DataTypes.ROW( DataTypes.FIELD( "f0", DataTypes.ROW( DataTypes.FIELD("f00", DataTypes.INT()), DataTypes.FIELD( "f01", DataTypes.BOOLEAN()))), DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())), DataTypes.FIELD( "f2", DataTypes.MULTISET(DataTypes.BOOLEAN()))))); StringBuilder buffer = new StringBuilder("create table t1(\n"); for (int i = 0; i < testItems.size(); i++) { buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr); if (i == testItems.size() - 1) { buffer.append(")"); } else { buffer.append(",\n"); } } final String sql = buffer.toString(); final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); assertThat(node).isInstanceOf(SqlCreateTable.class); Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get(); TableSchema schema = ((CreateTableOperation) 
operation).getCatalogTable().getSchema(); Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray(); assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes); } @Test public void testCreateTableWithComputedColumn() { final String sql = "CREATE TABLE tbl1 (\n" + " a int,\n" + " b varchar, \n" + " c as a - 1, \n" + " d as b || '$$', \n" + " e as my_udf1(a)," + " f as `default`.my_udf2(a) + 1," + " g as builtin.`default`.my_udf3(a) || ' + ")\n" + " with (\n" + " 'connector' = 'kafka', \n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; functionCatalog.registerTempCatalogScalarFunction( ObjectIdentifier.of("builtin", "default", "my_udf1"), Func0$.MODULE$); functionCatalog.registerTempCatalogScalarFunction( ObjectIdentifier.of("builtin", "default", "my_udf2"), Func1$.MODULE$); functionCatalog.registerTempCatalogScalarFunction( ObjectIdentifier.of("builtin", "default", "my_udf3"), Func8$.MODULE$); FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT)); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); assertThat(catalogTable.getSchema().getFieldNames()) .isEqualTo(new String[] {"a", "b", "c", "d", "e", "f", "g"}); assertThat(catalogTable.getSchema().getFieldDataTypes()) .isEqualTo( new DataType[] { DataTypes.INT(), DataTypes.STRING(), DataTypes.INT(), DataTypes.STRING(), DataTypes.INT().notNull(), DataTypes.INT(), DataTypes.STRING() }); String[] columnExpressions = catalogTable.getSchema().getTableColumns().stream() .filter(ComputedColumn.class::isInstance) .map(ComputedColumn.class::cast) .map(ComputedColumn::getExpression) .toArray(String[]::new); String[] expected = new String[] { "`a` - 1", "`b` || '$$'", "`builtin`.`default`.`my_udf1`(`a`)", "`builtin`.`default`.`my_udf2`(`a`) + 1", "`builtin`.`default`.`my_udf3`(`a`) || ' }; assertThat(columnExpressions).isEqualTo(expected); } @Test public void testCreateTableWithMetadataColumn() { final String sql = "CREATE TABLE tbl1 (\n" + " a INT,\n" + " b STRING,\n" + " c INT METADATA,\n" + " d INT METADATA FROM 'other.key',\n" + " e INT METADATA VIRTUAL\n" + ")\n" + " WITH (\n" + " 'connector' = 'kafka',\n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT)); assertThat(operation).isInstanceOf(CreateTableOperation.class); final CreateTableOperation op = (CreateTableOperation) operation; final TableSchema actualSchema = op.getCatalogTable().getSchema(); final TableSchema expectedSchema = TableSchema.builder() .add(TableColumn.physical("a", DataTypes.INT())) .add(TableColumn.physical("b", DataTypes.STRING())) .add(TableColumn.metadata("c", DataTypes.INT())) .add(TableColumn.metadata("d", DataTypes.INT(), "other.key")) .add(TableColumn.metadata("e", DataTypes.INT(), true)) .build(); assertThat(actualSchema).isEqualTo(expectedSchema); } @Test public void testCreateFunction() { String sql = "CREATE FUNCTION test_udf AS 'org.apache.fink.function.function1' " + "LANGUAGE JAVA USING JAR 'file: final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT)); assertThat(operation).isInstanceOf(CreateCatalogFunctionOperation.class); CatalogFunction 
actualFunction = ((CreateCatalogFunctionOperation) operation).getCatalogFunction(); assertThat(operation.asSummaryString()) .isEqualTo( "CREATE CATALOG FUNCTION: (catalogFunction: [Optional[This is a user-defined function]], " + "identifier: [`builtin`.`default`.`test_udf`], ignoreIfExists: [false], isTemporary: [false])"); CatalogFunction expected = new CatalogFunctionImpl( "org.apache.fink.function.function1", FunctionLanguage.JAVA, Collections.singletonList( new ResourceUri(ResourceType.JAR, "file: assertThat(actualFunction).isEqualTo(expected); sql = "CREATE TEMPORARY SYSTEM FUNCTION test_udf2 AS 'org.apache.fink.function.function2' " + "LANGUAGE SCALA USING JAR 'file: operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT)); assertThat(operation).isInstanceOf(CreateTempSystemFunctionOperation.class); assertThat(operation.asSummaryString()) .isEqualTo( "CREATE TEMPORARY SYSTEM FUNCTION: (functionName: [test_udf2], " + "catalogFunction: [CatalogFunctionImpl{className='org.apache.fink.function.function2', " + "functionLanguage='SCALA', " + "functionResource='[ResourceUri{resourceType=JAR, uri='file: + "ignoreIfExists: [false], functionLanguage: [SCALA])"); } @Test public void testAlterTable() throws Exception { prepareNonManagedTable(false); final String[] renameTableSqls = new String[] { "alter table cat1.db1.tb1 rename to tb2", "alter table db1.tb1 rename to tb2", "alter table tb1 rename to cat1.db1.tb2", }; final ObjectIdentifier expectedIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); final ObjectIdentifier expectedNewIdentifier = ObjectIdentifier.of("cat1", "db1", "tb2"); for (int i = 0; i < renameTableSqls.length; i++) { Operation operation = parse(renameTableSqls[i]); assertThat(operation).isInstanceOf(AlterTableRenameOperation.class); final AlterTableRenameOperation alterTableRenameOperation = (AlterTableRenameOperation) operation; assertThat(alterTableRenameOperation.getTableIdentifier()) .isEqualTo(expectedIdentifier); assertThat(alterTableRenameOperation.getNewTableIdentifier()) .isEqualTo(expectedNewIdentifier); } Operation operation = parse("alter table cat1.db1.tb1 set ('k1' = 'v1', 'K2' = 'V2')"); Map<String, String> expectedOptions = new HashMap<>(); expectedOptions.put("connector", "dummy"); expectedOptions.put("k", "v"); expectedOptions.put("k1", "v1"); expectedOptions.put("K2", "V2"); assertAlterTableOptions( operation, expectedIdentifier, expectedOptions, Arrays.asList(TableChange.set("k1", "v1"), TableChange.set("K2", "V2")), "ALTER TABLE cat1.db1.tb1\n SET 'k1' = 'v1',\n SET 'K2' = 'V2'"); operation = parse("alter table cat1.db1.tb1 reset ('k')"); assertAlterTableOptions( operation, expectedIdentifier, Collections.singletonMap("connector", "dummy"), Collections.singletonList(TableChange.reset("k")), "ALTER TABLE cat1.db1.tb1\n RESET 'k'"); assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ('connector')")) .isInstanceOf(ValidationException.class) .hasMessageContaining("ALTER TABLE RESET does not support changing 'connector'"); assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ()")) .isInstanceOf(ValidationException.class) .hasMessageContaining("ALTER TABLE RESET does not support empty key"); } @Test public void testAlterTableRenameColumn() throws Exception { prepareTable("tb1", false, false, true, 3); Operation operation = parse("alter table tb1 rename c to c1"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()) .isEqualTo("ALTER TABLE cat1.db1.tb1\n MODIFY 
`c` TO `c1`"); assertThat(((AlterTableChangeOperation) operation).getNewTable().getUnresolvedSchema()) .isEqualTo( Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c1", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .watermark("ts", "ts - interval '5' seconds") .primaryKeyNamed("ct1", "a", "b", "c1") .build()); operation = parse("alter table tb1 rename f to f1"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()) .isEqualTo("ALTER TABLE cat1.db1.tb1\n MODIFY `f` TO `f1`"); assertThat(((AlterTableChangeOperation) operation).getNewTable().getUnresolvedSchema()) .isEqualTo( Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f1", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .watermark("ts", "ts - interval '5' seconds") .primaryKeyNamed("ct1", "a", "b", "c") .build()); assertThatThrownBy(() -> parse("alter table tb1 rename a to a1")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `a` is referenced by computed column `d`."); assertThatThrownBy(() -> parse("alter table tb1 rename ts to ts1")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `ts` is referenced by watermark expression."); assertThatThrownBy(() -> parse("alter table tb1 rename e.f1 to e.f11")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f1 is not supported yet."); assertThatThrownBy(() -> parse("alter table tb1 rename c to a")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `a` already existed in table schema."); CatalogTable catalogTable2 = CatalogTable.of( Schema.newBuilder() .column("a", DataTypes.STRING().notNull()) .column("b", DataTypes.INT().notNull()) .column("e", DataTypes.STRING()) .columnByExpression("j", $("e").upperCase()) .columnByExpression("g", "TO_TIMESTAMP(e)") .primaryKey("a", "b") .build(), "tb2", Collections.singletonList("a"), Collections.emptyMap()); catalogManager .getCatalog("cat1") .get() .createTable(new ObjectPath("db1", "tb2"), catalogTable2, true); assertThatThrownBy(() -> parse("alter table `cat1`.`db1`.`tb2` rename e to e1")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Failed to execute ALTER TABLE statement.\nThe column `e` is referenced by computed column `g`, `j`."); assertThatThrownBy(() -> parse("alter table tb2 rename a to a1")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Failed to execute ALTER TABLE statement.\nThe column `a` is used as the partition keys."); } @Test public void testFailedToAlterTableDropColumn() throws Exception { prepareTable("tb1", false, false, true, 3); assertThatThrownBy(() -> parse("alter table tb1 drop 
x")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `x` does not exist in the base table."); assertThatThrownBy(() -> parse("alter table tb1 drop (g, x)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `x` does not exist in the base table."); assertThatThrownBy(() -> parse("alter table tb1 drop (g, c, g)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Duplicate column `g`."); assertThatThrownBy(() -> parse("alter table tb1 drop e.f2")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f2 is not supported yet."); assertThatThrownBy(() -> parse("alter table tb1 drop a")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `a` is referenced by computed column `d`."); assertThatThrownBy(() -> parse("alter table tb1 drop c")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `c` is used as the primary key."); assertThatThrownBy(() -> parse("alter table tb1 drop ts")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `ts` is referenced by watermark expression."); } @Test public void testAlterTableDropColumn() throws Exception { prepareNonManagedTable(false); Operation operation = parse("alter table tb1 drop c"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()).isEqualTo("ALTER TABLE cat1.db1.tb1\n DROP `c`"); assertThat( ((AlterTableChangeOperation) operation) .getNewTable().getUnresolvedSchema().getColumns().stream() .map(Schema.UnresolvedColumn::getName) .collect(Collectors.toList())) .doesNotContain("c"); operation = parse("alter table tb1 drop (f, e, b, d)"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " DROP `d`,\n" + " DROP `f`,\n" + " DROP `b`,\n" + " DROP `e`"); assertThat( ((AlterTableChangeOperation) operation) .getNewTable().getUnresolvedSchema().getColumns().stream() .map(Schema.UnresolvedColumn::getName) .collect(Collectors.toList())) .doesNotContain("f", "e", "b", "d"); } @Test public void testFailedToAlterTableDropConstraint() throws Exception { prepareNonManagedTable("tb1", 0); assertThatThrownBy(() -> parse("alter table tb1 drop primary key")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The base table does not define any primary key."); assertThatThrownBy(() -> parse("alter table tb1 drop constraint ct")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The base table does not define any primary key."); prepareNonManagedTable("tb2", 1); assertThatThrownBy(() -> parse("alter table tb2 drop constraint ct2")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table does not define a primary key constraint named 'ct2'. 
Available constraint name: ['ct1']."); } @Test @Test public void testFailedToAlterTableDropWatermark() throws Exception { prepareNonManagedTable("tb1", false); assertThatThrownBy(() -> parse("alter table tb1 drop watermark")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The base table does not define any watermark strategy."); } @Test public void testAlterTableDropWatermark() throws Exception { prepareNonManagedTable("tb1", true); Operation operation = parse("alter table tb1 drop watermark"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()) .isEqualTo("ALTER TABLE cat1.db1.tb1\n DROP WATERMARK"); assertThat( ((AlterTableChangeOperation) operation) .getNewTable() .getUnresolvedSchema() .getWatermarkSpecs()) .isEqualTo(Collections.emptyList()); } @Test public void testAlterTableCompactOnNonManagedTable() throws Exception { prepareNonManagedTable(false); assertThatThrownBy(() -> parse("alter table tb1 compact")) .isInstanceOf(ValidationException.class) .hasMessage( "ALTER TABLE COMPACT operation is not supported for non-managed table `cat1`.`db1`.`tb1`"); } @Test public void testAlterTableCompactOnManagedNonPartitionedTable() throws Exception { prepareManagedTable(false); assertThatThrownBy(() -> parse("alter table tb1 partition(dt = 'a') compact")) .isInstanceOf(ValidationException.class) .hasMessage( "Partition column 'dt' not defined in the table schema. Table `cat1`.`db1`.`tb1` is not partitioned."); assertThatThrownBy(() -> parse("alter table tb2 compact")) .isInstanceOf(ValidationException.class) .hasMessage("Table `cat1`.`db1`.`tb2` doesn't exist or is a temporary table."); checkAlterTableCompact(parse("alter table tb1 compact"), Collections.emptyMap()); } @Test public void testAlterTableCompactOnManagedPartitionedTable() throws Exception { prepareManagedTable(true); assertThatThrownBy(() -> parse("alter table tb1 partition (dt = 'a') compact")) .isInstanceOf(ValidationException.class) .hasMessage( "Partition column 'dt' not defined in the table schema. 
Available ordered partition columns: ['b', 'c']"); Map<String, String> staticPartitions = new HashMap<>(); staticPartitions.put("b", "0"); staticPartitions.put("c", "flink"); checkAlterTableCompact( parse("alter table tb1 partition (b = 0, c = 'flink') compact"), staticPartitions); staticPartitions = Collections.singletonMap("b", "0"); checkAlterTableCompact( parse("alter table tb1 partition (b = 0) compact"), staticPartitions); staticPartitions = Collections.singletonMap("c", "flink"); checkAlterTableCompact( parse("alter table tb1 partition (c = 'flink') compact"), staticPartitions); staticPartitions = Collections.emptyMap(); checkAlterTableCompact(parse("alter table tb1 compact"), staticPartitions); } @Test public void testFailedToAlterTableAddColumn() throws Exception { prepareNonManagedTable("tb1", 0); assertThatThrownBy(() -> parse("alter table tb1 add a bigint")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Try to add a column `a` which already exists in the table."); assertThatThrownBy(() -> parse("alter table tb1 add (x array<string>, x string)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Encounter duplicate column `x`."); assertThatThrownBy(() -> parse("alter table tb1 add x bigint after y")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Referenced column `y` by 'AFTER' does not exist in the table."); assertThatThrownBy(() -> parse("alter table tb1 add (x bigint after y, y string first)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Referenced column `y` by 'AFTER' does not exist in the table."); assertThatThrownBy(() -> parse("alter table tb1 add m as n + 2")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'm'."); assertThatThrownBy(() -> parse("alter table tb1 add (m as b * 2, n as m + 2)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'n'."); assertThatThrownBy(() -> parse("alter table tb1 add (m as 'hello' || b)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'm'."); assertThatThrownBy(() -> parse("alter table tb1 add (e.f3 string)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f3 is not supported yet."); assertThatThrownBy(() -> parse("alter table tb1 add (x string after e.f2)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type is not supported yet."); assertThatThrownBy(() -> parse("alter table tb1 add (e.f3 string after e.f1)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f3 is not supported yet."); } @Test public void testAlterTableAddColumn() throws Exception { prepareNonManagedTable("tb1", 0); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); Operation operation = parse("alter table tb1 add h double not null comment 'h is double not null'"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `h` DOUBLE NOT NULL COMMENT 'h is double not null' "); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("h", DataTypes.DOUBLE().notNull()) .withComment("h is double not null") .build()); operation = parse( "alter table tb1 add (\n" + " h as e.f2.f1 
first,\n" + " i as b*2 after b,\n" + " j int metadata from 'mk1' virtual comment 'comment_metadata' first,\n" + " k string primary key not enforced after h)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `h` ARRAY<FLOAT> AS `e`.`f2`.`f1` FIRST,\n" + " ADD `i` BIGINT NOT NULL AS `b` * 2 AFTER `b`,\n" + " ADD `j` INT METADATA FROM 'mk1' VIRTUAL COMMENT 'comment_metadata' FIRST,\n" + " ADD `k` STRING NOT NULL AFTER `h`,\n" + " ADD CONSTRAINT `PK_k` PRIMARY KEY (`k`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .columnByMetadata("j", DataTypes.INT(), "mk1", true) .withComment("comment_metadata") .columnByExpression("h", "`e`.`f2`.`f1`") .column("k", DataTypes.STRING().notNull()) .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .columnByExpression("i", new SqlCallExpression("`b` * 2")) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .primaryKey("k") .build()); operation = parse( "alter table tb1 add (\n" + " r row<r1 bigint, r2 string, r3 array<double> not null> not null comment 'add composite type',\n" + " m map<string not null, int not null>,\n" + " n as r.r1 * 2 after r,\n" + " tss as to_timestamp(r.r2) comment 'rowtime' after ts,\n" + " na as r.r3 after ts)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `r` ROW<`r1` BIGINT, `r2` STRING, `r3` ARRAY<DOUBLE> NOT NULL> NOT NULL COMMENT 'add composite type' ,\n" + " ADD `m` MAP<STRING NOT NULL, INT NOT NULL> ,\n" + " ADD `n` BIGINT AS `r`.`r1` * 2 AFTER `r`,\n" + " ADD `tss` TIMESTAMP(3) AS `to_timestamp`(`r`.`r2`) COMMENT 'rowtime' AFTER `ts`,\n" + " ADD `na` ARRAY<DOUBLE> NOT NULL AS `r`.`r3` AFTER `ts`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .columnByExpression("na", "`r`.`r3`") .columnByExpression("tss", "`to_timestamp`(`r`.`r2`)") .withComment("rowtime") .column( "r", DataTypes.ROW( DataTypes.FIELD("r1", DataTypes.BIGINT()), DataTypes.FIELD("r2", DataTypes.STRING()), DataTypes.FIELD( "r3", DataTypes.ARRAY(DataTypes.DOUBLE()) .notNull())) .notNull()) .withComment("add composite type") .columnByExpression("n", "`r`.`r1` * 2") .column( "m", DataTypes.MAP( DataTypes.STRING().notNull(), DataTypes.INT().notNull())) .build()); } @Test public void testFailedToAlterTableAddPk() throws Exception { prepareNonManagedTable("tb1", 1); assertThatThrownBy(() -> parse("alter table tb1 add primary key(c) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the primary key constraint [`a`]. " + "You might want to drop it before adding a new one."); assertThatThrownBy( () -> parse( "alter table tb1 add x string not null primary key not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the primary key constraint [`a`]. 
" + "You might want to drop it before adding a new one"); prepareNonManagedTable("tb2", 2); assertThatThrownBy(() -> parse("alter table tb2 add primary key(c) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the primary key constraint [`a`, `b`]. " + "You might want to drop it before adding a new one"); assertThatThrownBy( () -> parse( "alter table tb2 add x string not null primary key not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the primary key constraint [`a`, `b`]. " + "You might want to drop it before adding a new one"); prepareNonManagedTable("tb3", 0); assertThatThrownBy(() -> parse("alter table tb3 add primary key (x) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid primary key 'PK_x'. Column 'x' does not exist."); assertThatThrownBy(() -> parse("alter table tb3 add unique(b)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("UNIQUE constraint is not supported yet"); assertThatThrownBy(() -> parse("alter table tb3 add primary key(b)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Flink doesn't support ENFORCED mode for PRIMARY KEY constraint"); assertThatThrownBy( () -> parse( "alter table tb3 add (\n" + " x as upper(c),\n" + " primary key (d, x) not enforced)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'PK_d_x'. Column 'd' is not a physical column."); assertThatThrownBy(() -> parse("alter table tb3 add (primary key (g) not enforced)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'PK_g'. Column 'g' is not a physical column."); } @Test public void testAlterTableAddPrimaryKey() throws Exception { prepareNonManagedTable("tb1", 0); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); Operation operation = parse("alter table tb1 add constraint my_pk primary key (a, b) not enforced"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD CONSTRAINT `my_pk` PRIMARY KEY (`a`, `b`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .primaryKeyNamed("my_pk", "a", "b") .build()); operation = parse("alter table tb1 add x bigint not null primary key not enforced"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `x` BIGINT NOT NULL ,\n" + " ADD CONSTRAINT `PK_x` PRIMARY KEY (`x`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("x", DataTypes.BIGINT().notNull()) .primaryKey("x") .build()); operation = parse("alter table tb1 add x bigint primary key not enforced"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `x` BIGINT NOT NULL ,\n" + " ADD CONSTRAINT `PK_x` PRIMARY KEY (`x`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("x", DataTypes.BIGINT().notNull()) .primaryKey("x") .build()); operation = parse("alter table tb1 add constraint ct primary key(ts) not enforced"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD CONSTRAINT `ct` PRIMARY KEY (`ts`) NOT ENFORCED"); List<Schema.UnresolvedColumn> 
subColumns = originalSchema.getColumns().subList(0, originalSchema.getColumns().size() - 1); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromColumns(subColumns) .column("ts", DataTypes.TIMESTAMP(3).notNull()) .withComment("just a comment") .primaryKeyNamed("ct", "ts") .build()); } @Test public void testFailedToAlterTableAddWatermark() throws Exception { prepareNonManagedTable("tb1", false); assertThatThrownBy(() -> parse("alter table tb1 add watermark for x as x")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid column name 'x' for rowtime attribute in watermark declaration. " + "Available columns are: [a, b, c, d, e, f, g, ts]"); assertThatThrownBy(() -> parse("alter table tb1 add watermark for b as b")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid data type of time field for watermark definition. " + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p), " + "the supported precision 'p' is from 0 to 3, but the time field type is BIGINT NOT NULL"); assertThatThrownBy( () -> parse( "alter table tb1 add (x row<f0 string, f1 timestamp(3)>, watermark for x.f1 as x.f1)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Watermark strategy on nested column is not supported yet."); prepareNonManagedTable("tb2", true); assertThatThrownBy(() -> parse("alter table tb2 add watermark for ts as ts")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the watermark strategy " + "`ts` AS ts - interval '5' seconds. " + "You might want to drop it before adding a new one."); } @Test public void testAlterTableAddWatermark() throws Exception { prepareNonManagedTable("tb1", false); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); Operation operation = parse("alter table tb1 add watermark for ts as ts"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD WATERMARK FOR `ts`: TIMESTAMP(3) AS `ts`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder().fromSchema(originalSchema).watermark("ts", "`ts`").build()); operation = parse("alter table tb1 add (tss timestamp(3) not null, watermark for tss as tss)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `tss` TIMESTAMP(3) NOT NULL ,\n" + " ADD WATERMARK FOR `tss`: TIMESTAMP(3) NOT NULL AS `tss`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("tss", DataTypes.TIMESTAMP(3).notNull()) .watermark("tss", "`tss`") .build()); operation = parse( "alter table tb1 add (log_ts string not null,\n" + "tss as to_timestamp(log_ts),\n" + "watermark for tss as tss - interval '3' second)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `log_ts` STRING NOT NULL ,\n" + " ADD `tss` TIMESTAMP(3) AS `to_timestamp`(`log_ts`) ,\n" + " ADD WATERMARK FOR `tss`: TIMESTAMP(3) AS `tss` - INTERVAL '3' SECOND"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("log_ts", DataTypes.STRING().notNull()) .columnByExpression("tss", "`to_timestamp`(`log_ts`)") .watermark("tss", "`tss` - INTERVAL '3' SECOND") .build()); operation = parse( "alter table tb1 add (x row<f0 string, f1 timestamp(3) not null> not null, " + "y as x.f1, watermark for y as y - interval '1' 
day)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `x` ROW<`f0` STRING, `f1` TIMESTAMP(3) NOT NULL> NOT NULL ,\n" + " ADD `y` TIMESTAMP(3) NOT NULL AS `x`.`f1` ,\n" + " ADD WATERMARK FOR `y`: TIMESTAMP(3) NOT NULL AS `y` - INTERVAL '1' DAY"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column( "x", DataTypes.ROW(DataTypes.STRING(), DataTypes.TIMESTAMP(3).notNull()) .notNull()) .columnByExpression("y", "`x`.`f1`") .watermark("y", "`y` - INTERVAL '1' DAY") .build()); } @Test public void testFailedToAlterTableModifyColumn() throws Exception { prepareNonManagedTable("tb1", true); assertThatThrownBy(() -> parse("alter table tb1 modify (b int, b array<int not null>)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Encounter duplicate column `b`."); assertThatThrownBy(() -> parse("alter table tb1 modify x bigint")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Try to modify a column `x` which does not exist in the table."); assertThatThrownBy(() -> parse("alter table tb1 modify a bigint after x")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Referenced column `x` by 'AFTER' does not exist in the table."); assertThatThrownBy(() -> parse("alter table tb1 modify e array<int>")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'f'."); assertThatThrownBy(() -> parse("alter table tb1 modify a string")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'd'."); assertThatThrownBy(() -> parse("alter table tb1 modify b as a + 2")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'd'."); assertThatThrownBy(() -> parse("alter table tb1 modify (a timestamp(3), b multiset<int>)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'd'."); assertThatThrownBy(() -> parse("alter table tb1 modify ts int")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid data type of time field for watermark definition. " + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p), " + "the supported precision 'p' is from 0 to 3, but the time field type is INT"); prepareNonManagedTable("tb2", 1); assertThatThrownBy(() -> parse("alter table tb2 modify (d int, a as b + 2)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'ct1'. Column 'a' is not a physical column."); assertThatThrownBy(() -> parse("alter table tb2 modify (d string, a int metadata virtual)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'ct1'. 
Column 'a' is not a physical column."); assertThatThrownBy(() -> parse("alter table tb2 modify (e.f0 string)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f0 is not supported yet."); assertThatThrownBy(() -> parse("alter table tb2 modify (g string after e.f2)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type is not supported yet."); assertThatThrownBy(() -> parse("alter table tb2 modify (e.f0 string after e.f1)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f0 is not supported yet."); } @Test public void testAlterTableModifyColumn() throws Exception { prepareNonManagedTable("tb1", 2); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Operation operation = parse( "alter table tb1 modify b bigint not null comment 'move b to first and add comment' first"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " MODIFY `b` COMMENT 'move b to first and add comment',\n" + " MODIFY `b` FIRST"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .column("b", DataTypes.BIGINT().notNull()) .withComment("move b to first and add comment") .column("a", DataTypes.INT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .primaryKeyNamed("ct1", "a", "b") .build()); operation = parse("alter table tb1 modify ts timestamp(3) not null after e"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " MODIFY `ts` TIMESTAMP(3) NOT NULL,\n" + " MODIFY `ts` AFTER `e`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .column("ts", DataTypes.TIMESTAMP(3).notNull()) .withComment("just a comment") .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .primaryKeyNamed("ct1", "a", "b") .build()); operation = parse( "alter table tb1 modify (\n" + "d as a + 2 comment 'change d' after b,\n" + "c bigint first,\n" + "e string comment 'change e',\n" + "f as upper(e) comment 'change f' after ts,\n" + "g int not null comment 'change g',\n" + "constraint ct2 primary key(e) not enforced)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " MODIFY `d` INT NOT NULL AS `a` + 2 COMMENT 'change d' AFTER `b`,\n" + " MODIFY `c` BIGINT,\n" + " MODIFY `c` FIRST,\n" + " MODIFY `e` COMMENT 'change e',\n" + " MODIFY `e` STRING NOT NULL,\n" + " MODIFY `f` STRING NOT NULL AS UPPER(`e`) COMMENT 'change f' AFTER `ts`,\n" + " MODIFY `g` INT NOT NULL COMMENT 'change g' ,\n" + " MODIFY CONSTRAINT `ct2` PRIMARY KEY (`e`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .column("c", DataTypes.BIGINT()) .withComment("column comment") 
.column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .columnByExpression("d", "`a` + 2") .withComment("change d") .column("e", DataTypes.STRING().notNull()) .withComment("change e") .column("g", DataTypes.INT().notNull()) .withComment("change g") .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .columnByExpression("f", "UPPER(`e`)") .withComment("change f") .primaryKeyNamed("ct2", "e") .build()); prepareNonManagedTable("tb2", true); tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb2"); operation = parse( "alter table tb2 modify (ts int comment 'change ts',\n" + "f timestamp(3) not null,\n" + "e int metadata virtual,\n" + "watermark for f as f,\n" + "g multiset<int> not null comment 'change g' first)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb2\n" + " MODIFY `ts` COMMENT 'change ts',\n" + " MODIFY `ts` INT,\n" + " MODIFY `f` TIMESTAMP(3) NOT NULL ,\n" + " MODIFY `e` INT METADATA VIRTUAL ,\n" + " MODIFY `g` MULTISET<INT> NOT NULL COMMENT 'change g' FIRST,\n" + " MODIFY WATERMARK FOR `f`: TIMESTAMP(3) NOT NULL AS `f`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .column("g", DataTypes.MULTISET(DataTypes.INT()).notNull()) .withComment("change g") .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .columnByMetadata("e", DataTypes.INT(), null, true) .column("f", DataTypes.TIMESTAMP(3).notNull()) .column("ts", DataTypes.INT()) .withComment("change ts") .watermark("f", "`f`") .build()); } @Test public void testFailedToAlterTableModifyPk() throws Exception { prepareNonManagedTable("tb1", 0); assertThatThrownBy( () -> parse( "alter table tb1 modify constraint ct primary key (b) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table does not define any primary key constraint. You might want to add a new one."); prepareNonManagedTable("tb2", 1); assertThatThrownBy( () -> parse( "alter table tb2 modify constraint ct primary key (x) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid primary key 'ct'. Column 'x' does not exist."); assertThatThrownBy( () -> parse( "alter table tb2 modify constraint ct primary key (d) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'ct'. Column 'd' is not a physical column."); assertThatThrownBy( () -> parse( "alter table tb2 modify constraint ct primary key (g) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'ct'. 
Column 'g' is not a physical column."); } @Test public void testAlterTableModifyPk() throws Exception { prepareNonManagedTable("tb1", 1); Operation operation = parse("alter table tb1 modify constraint ct2 primary key (a, b) not enforced"); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); assertAlterTableSchema( operation, ObjectIdentifier.of("cat1", "db1", "tb1"), Schema.newBuilder() .fromColumns(originalSchema.getColumns()) .primaryKeyNamed("ct2", "a", "b") .build()); operation = parse("alter table tb1 modify primary key (c, a) not enforced"); assertAlterTableSchema( operation, ObjectIdentifier.of("cat1", "db1", "tb1"), Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .primaryKeyNamed("PK_c_a", "c", "a") .build()); } @Test public void testFailedToAlterTableModifyWatermark() throws Exception { prepareNonManagedTable("tb1", false); assertThatThrownBy( () -> parse( "alter table tb1 modify watermark for a as to_timestamp(a) - interval '1' minute")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table does not define any watermark. You might want to add a new one."); prepareNonManagedTable("tb2", true); assertThatThrownBy(() -> parse("alter table tb2 modify watermark for a as a")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid data type of time field for watermark definition. " + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p), the supported precision 'p' is from 0 to 3, " + "but the time field type is INT NOT NULL"); assertThatThrownBy( () -> parse( "alter table tb2 modify watermark for c as to_timestamp(c) - interval '1' day")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid data type of time field for watermark definition. 
" + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p), the supported precision 'p' is from 0 to 3, " + "but the time field type is STRING"); } @Test public void testAlterTableModifyWatermark() throws Exception { prepareNonManagedTable("tb1", true); Operation operation = parse("alter table tb1 modify watermark for ts as ts"); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); List<Schema.UnresolvedColumn> columns = originalSchema.getColumns(); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder().fromColumns(columns).watermark("ts", "`ts`").build()); operation = parse("alter table tb1 modify (g timestamp(3) not null, watermark for g as g)"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromColumns(columns.subList(0, columns.size() - 2)) .column("g", DataTypes.TIMESTAMP(3).notNull()) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .watermark("g", "`g`") .build()); } @Test public void testCreateViewWithMatchRecognize() { Map<String, String> prop = new HashMap<>(); prop.put("connector", "values"); prop.put("bounded", "true"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("id", DataTypes.INT().notNull()) .column("measurement", DataTypes.BIGINT().notNull()) .column( "ts", DataTypes.ROW( DataTypes.FIELD("tmstmp", DataTypes.TIMESTAMP(3)))) .build(), null, Collections.emptyList(), prop); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "events"), false); final String sql = "" + "CREATE TEMPORARY VIEW foo AS " + "SELECT * " + "FROM events MATCH_RECOGNIZE (" + " PARTITION BY id " + " ORDER BY ts ASC " + " MEASURES " + " next_step.measurement - this_step.measurement AS diff " + " AFTER MATCH SKIP TO NEXT ROW " + " PATTERN (this_step next_step)" + " DEFINE " + " this_step AS TRUE," + " next_step AS TRUE" + ")"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(CreateViewOperation.class); } @Test public void testCreateViewWithDynamicTableOptions() { Map<String, String> prop = new HashMap<>(); prop.put("connector", "values"); prop.put("bounded", "true"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT()) .column("f1", DataTypes.VARCHAR(20)) .build(), null, Collections.emptyList(), prop); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceA"), false); final String sql = "" + "create view test_view as\n" + "select *\n" + "from sourceA /*+ OPTIONS('changelog-mode'='I') */"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(CreateViewOperation.class); } @Test public void testBeginStatementSet() { final String sql = "BEGIN STATEMENT SET"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(BeginStatementSetOperation.class); final BeginStatementSetOperation beginStatementSetOperation = (BeginStatementSetOperation) operation; assertThat(beginStatementSetOperation.asSummaryString()).isEqualTo("BEGIN STATEMENT SET"); } @Test public void testEnd() { final String sql = "END"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(EndStatementSetOperation.class); final EndStatementSetOperation endStatementSetOperation = (EndStatementSetOperation) operation; assertThat(endStatementSetOperation.asSummaryString()).isEqualTo("END"); } @Test public void testSqlRichExplainWithSelect() { final String sql = 
"explain plan for select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(ExplainOperation.class); } @Test public void testSqlRichExplainWithInsert() { final String sql = "explain plan for insert into t1 select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(ExplainOperation.class); } @Test public void testSqlRichExplainWithStatementSet() { final String sql = "explain plan for statement set begin " + "insert into t1 select a, b, c, d from t2 where a > 1;" + "insert into t1 select a, b, c, d from t2 where a > 2;" + "end"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(ExplainOperation.class); } @Test public void testExplainDetailsWithSelect() { final String sql = "explain estimated_cost, changelog_mode select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); assertExplainDetails(parse(sql, planner, parser)); } @Test public void testExplainDetailsWithInsert() { final String sql = "explain estimated_cost, changelog_mode insert into t1 select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); assertExplainDetails(parse(sql, planner, parser)); } @Test public void testExplainDetailsWithStatementSet() { final String sql = "explain estimated_cost, changelog_mode statement set begin " + "insert into t1 select a, b, c, d from t2 where a > 1;" + "insert into t1 select a, b, c, d from t2 where a > 2;" + "end"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); assertExplainDetails(parse(sql, planner, parser)); } private void assertExplainDetails(Operation operation) { Set<String> expectedDetail = new HashSet<>(); expectedDetail.add(ExplainDetail.ESTIMATED_COST.toString()); expectedDetail.add(ExplainDetail.CHANGELOG_MODE.toString()); assertThat(operation) .asInstanceOf(type(ExplainOperation.class)) .satisfies( explain -> assertThat(explain.getExplainDetails()).isEqualTo(expectedDetail)); } @Test public void testSqlExecuteWithStatementSet() { final String sql = "execute statement set begin " + "insert into t1 select a, b, c, d from t2 where a > 1;" + "insert into t1 select a, b, c, d from t2 where a > 2;" + "end"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(StatementSetOperation.class); } @Test public void testSqlExecuteWithInsert() { final String sql = "execute insert into t1 select a, b, c, d from t2 where a > 1"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); 
assertThat(operation).isInstanceOf(SinkModifyOperation.class); } @Test public void testSqlExecuteWithSelect() { final String sql = "execute select a, b, c, d from t2 where a > 1"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(QueryOperation.class); } @Test public void testAddJar() { Arrays.asList( "./test.\njar", "file: "../test-jar.jar", "/root/test.jar", "test\\ jar.jar", "oss: .forEach( jarPath -> { AddJarOperation operation = (AddJarOperation) parser.parse(String.format("ADD JAR '%s'", jarPath)) .get(0); assertThat(operation.getPath()).isEqualTo(jarPath); }); } @Test public void testRemoveJar() { Arrays.asList( "./test.\njar", "file: "../test-jar.jar", "/root/test.jar", "test\\ jar.jar", "oss: .forEach( jarPath -> { RemoveJarOperation operation = (RemoveJarOperation) parser.parse(String.format("REMOVE JAR '%s'", jarPath)) .get(0); assertThat(operation.getPath()).isEqualTo(jarPath); }); } @Test public void testShowJars() { final String sql = "SHOW JARS"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowJarsOperation.class); final ShowJarsOperation showModulesOperation = (ShowJarsOperation) operation; assertThat(showModulesOperation.asSummaryString()).isEqualTo("SHOW JARS"); } @Test public void testSet() { Operation operation1 = parse("SET"); assertThat(operation1).isInstanceOf(SetOperation.class); SetOperation setOperation1 = (SetOperation) operation1; assertThat(setOperation1.getKey()).isNotPresent(); assertThat(setOperation1.getValue()).isNotPresent(); Operation operation2 = parse("SET 'test-key' = 'test-value'"); assertThat(operation2).isInstanceOf(SetOperation.class); SetOperation setOperation2 = (SetOperation) operation2; assertThat(setOperation2.getKey()).hasValue("test-key"); assertThat(setOperation2.getValue()).hasValue("test-value"); } @Test public void testReset() { Operation operation1 = parse("RESET"); assertThat(operation1).isInstanceOf(ResetOperation.class); assertThat(((ResetOperation) operation1).getKey()).isNotPresent(); Operation operation2 = parse("RESET 'test-key'"); assertThat(operation2).isInstanceOf(ResetOperation.class); assertThat(((ResetOperation) operation2).getKey()).isPresent(); assertThat(((ResetOperation) operation2).getKey()).hasValue("test-key"); } @ParameterizedTest @ValueSource(strings = {"SET", "SET;", "SET ;", "SET\t;", "SET\n;"}) public void testSetCommands(String command) { ExtendedParser extendedParser = new ExtendedParser(); assertThat(extendedParser.parse(command)).get().isInstanceOf(SetOperation.class); } @ParameterizedTest @ValueSource(strings = {"HELP", "HELP;", "HELP ;", "HELP\t;", "HELP\n;"}) public void testHelpCommands(String command) { ExtendedParser extendedParser = new ExtendedParser(); assertThat(extendedParser.parse(command)).get().isInstanceOf(HelpOperation.class); } @ParameterizedTest @ValueSource(strings = {"CLEAR", "CLEAR;", "CLEAR ;", "CLEAR\t;", "CLEAR\n;"}) public void testClearCommands(String command) { ExtendedParser extendedParser = new ExtendedParser(); assertThat(extendedParser.parse(command)).get().isInstanceOf(ClearOperation.class); } @ParameterizedTest @ValueSource( strings = { "QUIT", "QUIT;", "QUIT ;", "QUIT\t;", "QUIT\n;", "EXIT;", "EXIT ;", "EXIT\t;", "EXIT\n;", "EXIT ; " }) public void testQuitCommands(String command) { ExtendedParser extendedParser = new ExtendedParser(); 
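// Every QUIT/EXIT variant in the @ValueSource above, with or without trailing whitespace and semicolons, should be recognized by the ExtendedParser as a QuitOperation.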
assertThat(extendedParser.parse(command)).get().isInstanceOf(QuitOperation.class); } private static TestItem createTestItem(Object... args) { assertThat(args).hasSize(2); final String testExpr = (String) args[0]; TestItem testItem = TestItem.fromTestExpr(testExpr); if (args[1] instanceof String) { testItem.withExpectedError((String) args[1]); } else { testItem.withExpectedType(args[1]); } return testItem; } private void checkExplainSql(String sql) { FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); assertThat(node).isInstanceOf(SqlRichExplain.class); Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get(); assertThat(operation).isInstanceOf(ExplainOperation.class); } private void assertShowFunctions( String sql, String expectedSummary, FunctionScope expectedScope) { Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowFunctionsOperation.class); final ShowFunctionsOperation showFunctionsOperation = (ShowFunctionsOperation) operation; assertThat(showFunctionsOperation.getFunctionScope()).isEqualTo(expectedScope); assertThat(showFunctionsOperation.asSummaryString()).isEqualTo(expectedSummary); } private void assertAlterTableOptions( Operation operation, ObjectIdentifier expectedIdentifier, Map<String, String> expectedOptions, List<TableChange> expectedChanges, String expectedSummary) { assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); final AlterTableChangeOperation alterTableOptionsOperation = (AlterTableChangeOperation) operation; assertThat(alterTableOptionsOperation.getTableIdentifier()).isEqualTo(expectedIdentifier); assertThat(alterTableOptionsOperation.getNewTable().getOptions()) .isEqualTo(expectedOptions); assertThat(alterTableOptionsOperation.getTableChanges()).isEqualTo(expectedChanges); assertThat(alterTableOptionsOperation.asSummaryString()).isEqualTo(expectedSummary); } private void assertAlterTableSchema( Operation operation, ObjectIdentifier expectedIdentifier, Schema expectedSchema) { assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); final AlterTableChangeOperation alterTableChangeOperation = (AlterTableChangeOperation) operation; assertThat(alterTableChangeOperation.getTableIdentifier()).isEqualTo(expectedIdentifier); assertThat(alterTableChangeOperation.getNewTable().getUnresolvedSchema()) .isEqualTo(expectedSchema); } private Operation parse(String sql, FlinkPlannerImpl planner, CalciteParser parser) { SqlNode node = parser.parse(sql); return SqlToOperationConverter.convert(planner, catalogManager, node).get(); } private Operation parse(String sql) { FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); return SqlToOperationConverter.convert(planner, catalogManager, node).get(); } private void prepareNonManagedTable(boolean hasConstraint) throws Exception { prepareNonManagedTable("tb1", hasConstraint ?
1 : 0); } private void prepareNonManagedTable(String tableName, int numOfPkFields) throws Exception { prepareTable(tableName, false, false, false, numOfPkFields); } private void prepareNonManagedTable(String tableName, boolean hasWatermark) throws Exception { prepareTable(tableName, false, false, hasWatermark, 0); } private void prepareManagedTable(boolean hasPartition) throws Exception { TestManagedTableFactory.MANAGED_TABLES.put( ObjectIdentifier.of("cat1", "db1", "tb1"), new AtomicReference<>()); prepareTable("tb1", true, hasPartition, false, 0); } private void prepareTable( String tableName, boolean managedTable, boolean hasPartition, boolean hasWatermark, int numOfPkFields) throws Exception { Catalog catalog = new GenericInMemoryCatalog("default", "default"); if (!catalogManager.getCatalog("cat1").isPresent()) { catalogManager.registerCatalog("cat1", catalog); } catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true); Schema.Builder builder = Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment"); Map<String, String> options = new HashMap<>(); options.put("k", "v"); if (!managedTable) { options.put("connector", "dummy"); } if (numOfPkFields == 0) { } else if (numOfPkFields == 1) { builder.primaryKeyNamed("ct1", "a"); } else if (numOfPkFields == 2) { builder.primaryKeyNamed("ct1", "a", "b"); } else if (numOfPkFields == 3) { builder.primaryKeyNamed("ct1", "a", "b", "c"); } else { throw new IllegalArgumentException( String.format("Don't support to set pk with %s fields.", numOfPkFields)); } if (hasWatermark) { builder.watermark("ts", "ts - interval '5' seconds"); } CatalogTable catalogTable = CatalogTable.of( builder.build(), "a table", hasPartition ? 
Arrays.asList("b", "c") : Collections.emptyList(), Collections.unmodifiableMap(options)); catalogManager.setCurrentCatalog("cat1"); catalogManager.setCurrentDatabase("db1"); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", tableName); catalogManager.createTable(catalogTable, tableIdentifier, true); } private FlinkPlannerImpl getPlannerBySqlDialect(SqlDialect sqlDialect) { tableConfig.setSqlDialect(sqlDialect); return plannerContext.createFlinkPlanner(); } private CalciteParser getParserBySqlDialect(SqlDialect sqlDialect) { tableConfig.setSqlDialect(sqlDialect); return plannerContext.createCalciteParser(); } private void checkAlterTableCompact(Operation operation, Map<String, String> staticPartitions) { assertThat(operation).isInstanceOf(SinkModifyOperation.class); SinkModifyOperation modifyOperation = (SinkModifyOperation) operation; assertThat(modifyOperation.getStaticPartitions()) .containsExactlyInAnyOrderEntriesOf(staticPartitions); assertThat(modifyOperation.isOverwrite()).isFalse(); assertThat(modifyOperation.getDynamicOptions()) .containsEntry( TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE); ContextResolvedTable contextResolvedTable = modifyOperation.getContextResolvedTable(); assertThat(contextResolvedTable.getIdentifier()) .isEqualTo(ObjectIdentifier.of("cat1", "db1", "tb1")); assertThat(modifyOperation.getChild()).isInstanceOf(SourceQueryOperation.class); SourceQueryOperation child = (SourceQueryOperation) modifyOperation.getChild(); assertThat(child.getChildren()).isEmpty(); assertThat(child.getDynamicOptions()).containsEntry("k", "v"); assertThat(child.getDynamicOptions()) .containsEntry( TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE); } private static class TestItem { private final String testExpr; @Nullable private Object expectedType; @Nullable private String expectedError; private TestItem(String testExpr) { this.testExpr = testExpr; } static TestItem fromTestExpr(String testExpr) { return new TestItem(testExpr); } TestItem withExpectedType(Object expectedType) { this.expectedType = expectedType; return this; } TestItem withExpectedError(String expectedError) { this.expectedError = expectedError; return this; } @Override public String toString() { return this.testExpr; } } private Operation parseAndConvert(String sql) { final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); return SqlToOperationConverter.convert(planner, catalogManager, node).get(); } }
class SqlToOperationConverterTest { private final boolean isStreamingMode = false; private final TableConfig tableConfig = TableConfig.getDefault(); private final Catalog catalog = new GenericInMemoryCatalog("MockCatalog", "default"); private final CatalogManager catalogManager = CatalogManagerMocks.preparedCatalogManager() .defaultCatalog("builtin", catalog) .config( Configuration.fromMap( Collections.singletonMap( ExecutionOptions.RUNTIME_MODE.key(), RuntimeExecutionMode.BATCH.name()))) .build(); private final PlannerMocks plannerMocks = PlannerMocks.newBuilder() .withBatchMode(true) .withTableConfig(tableConfig) .withCatalogManager(catalogManager) .withRootSchema( asRootSchema( new CatalogManagerCalciteSchema( catalogManager, isStreamingMode))) .build(); private final PlannerContext plannerContext = plannerMocks.getPlannerContext(); private final FunctionCatalog functionCatalog = plannerMocks.getFunctionCatalog(); private final Supplier<FlinkPlannerImpl> plannerSupplier = plannerContext::createFlinkPlanner; private final Parser parser = new ParserImpl( catalogManager, plannerSupplier, () -> plannerSupplier.get().parser(), plannerContext.getRexFactory()); @BeforeEach public void before() throws TableAlreadyExistException, DatabaseNotExistException { catalogManager.initSchemaResolver( isStreamingMode, ExpressionResolverMocks.basicResolver(catalogManager, functionCatalog, parser)); final ObjectPath path1 = new ObjectPath(catalogManager.getCurrentDatabase(), "t1"); final ObjectPath path2 = new ObjectPath(catalogManager.getCurrentDatabase(), "t2"); final TableSchema tableSchema = TableSchema.builder() .field("a", DataTypes.BIGINT()) .field("b", DataTypes.VARCHAR(Integer.MAX_VALUE)) .field("c", DataTypes.INT()) .field("d", DataTypes.VARCHAR(Integer.MAX_VALUE)) .build(); Map<String, String> options = new HashMap<>(); options.put("connector", "COLLECTION"); final CatalogTable catalogTable = new CatalogTableImpl(tableSchema, options, ""); catalog.createTable(path1, catalogTable, true); catalog.createTable(path2, catalogTable, true); } @AfterEach public void after() throws TableNotExistException { final ObjectPath path1 = new ObjectPath(catalogManager.getCurrentDatabase(), "t1"); final ObjectPath path2 = new ObjectPath(catalogManager.getCurrentDatabase(), "t2"); catalog.dropTable(path1, true); catalog.dropTable(path2, true); } @Test public void testUseCatalog() { final String sql = "USE CATALOG cat1"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(UseCatalogOperation.class); assertThat(((UseCatalogOperation) operation).getCatalogName()).isEqualTo("cat1"); assertThat(operation.asSummaryString()).isEqualTo("USE CATALOG cat1"); } @Test public void testUseDatabase() { final String sql1 = "USE db1"; Operation operation1 = parse(sql1); assertThat(operation1).isInstanceOf(UseDatabaseOperation.class); assertThat(((UseDatabaseOperation) operation1).getCatalogName()).isEqualTo("builtin"); assertThat(((UseDatabaseOperation) operation1).getDatabaseName()).isEqualTo("db1"); final String sql2 = "USE cat1.db1"; Operation operation2 = parse(sql2); assertThat(operation2).isInstanceOf(UseDatabaseOperation.class); assertThat(((UseDatabaseOperation) operation2).getCatalogName()).isEqualTo("cat1"); assertThat(((UseDatabaseOperation) operation2).getDatabaseName()).isEqualTo("db1"); } @Test public void testUseDatabaseWithException() { final String sql = "USE cat1.db1.tbl1"; assertThatThrownBy(() -> parse(sql)).isInstanceOf(ValidationException.class); } @Test public void testCreateDatabase() { 
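// The four CREATE DATABASE variants below cover default vs. explicit catalog, IF NOT EXISTS, comments, and WITH properties.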
final String[] createDatabaseSqls = new String[] { "create database db1", "create database if not exists cat1.db1", "create database cat1.db1 comment 'db1_comment'", "create database cat1.db1 comment 'db1_comment' with ('k1' = 'v1', 'K2' = 'V2')" }; final String[] expectedCatalogs = new String[] {"builtin", "cat1", "cat1", "cat1"}; final String expectedDatabase = "db1"; final String[] expectedComments = new String[] {null, null, "db1_comment", "db1_comment"}; final boolean[] expectedIgnoreIfExists = new boolean[] {false, true, false, false}; Map<String, String> properties = new HashMap<>(); properties.put("k1", "v1"); properties.put("K2", "V2"); final Map[] expectedProperties = new Map[] { new HashMap<String, String>(), new HashMap<String, String>(), new HashMap<String, String>(), new HashMap(properties) }; for (int i = 0; i < createDatabaseSqls.length; i++) { Operation operation = parse(createDatabaseSqls[i]); assertThat(operation).isInstanceOf(CreateDatabaseOperation.class); final CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation; assertThat(createDatabaseOperation.getCatalogName()).isEqualTo(expectedCatalogs[i]); assertThat(createDatabaseOperation.getDatabaseName()).isEqualTo(expectedDatabase); assertThat(createDatabaseOperation.getCatalogDatabase().getComment()) .isEqualTo(expectedComments[i]); assertThat(createDatabaseOperation.isIgnoreIfExists()) .isEqualTo(expectedIgnoreIfExists[i]); assertThat(createDatabaseOperation.getCatalogDatabase().getProperties()) .isEqualTo(expectedProperties[i]); } } @Test public void testDropDatabase() { final String[] dropDatabaseSqls = new String[] { "drop database db1", "drop database if exists db1", "drop database if exists cat1.db1 CASCADE", "drop database if exists cat1.db1 RESTRICT" }; final String[] expectedCatalogs = new String[] {"builtin", "builtin", "cat1", "cat1"}; final String expectedDatabase = "db1"; final boolean[] expectedIfExists = new boolean[] {false, true, true, true}; final boolean[] expectedIsCascades = new boolean[] {false, false, true, false}; for (int i = 0; i < dropDatabaseSqls.length; i++) { Operation operation = parse(dropDatabaseSqls[i]); assertThat(operation).isInstanceOf(DropDatabaseOperation.class); final DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation; assertThat(dropDatabaseOperation.getCatalogName()).isEqualTo(expectedCatalogs[i]); assertThat(dropDatabaseOperation.getDatabaseName()).isEqualTo(expectedDatabase); assertThat(dropDatabaseOperation.isIfExists()).isEqualTo(expectedIfExists[i]); assertThat(dropDatabaseOperation.isCascade()).isEqualTo(expectedIsCascades[i]); } } @Test public void testAlterDatabase() throws Exception { catalogManager.registerCatalog("cat1", new GenericInMemoryCatalog("default", "default")); catalogManager .getCatalog("cat1") .get() .createDatabase( "db1", new CatalogDatabaseImpl(new HashMap<>(), "db1_comment"), true); final String sql = "alter database cat1.db1 set ('k1'='v1', 'K2'='V2')"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(AlterDatabaseOperation.class); Map<String, String> properties = new HashMap<>(); properties.put("k1", "v1"); properties.put("K2", "V2"); AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation; assertThat(alterDatabaseOperation.getDatabaseName()).isEqualTo("db1"); assertThat(alterDatabaseOperation.getCatalogName()).isEqualTo("cat1"); assertThat(alterDatabaseOperation.getCatalogDatabase().getComment()) .isEqualTo("db1_comment"); 
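// ALTER DATABASE ... SET applies the new properties while the original database comment is preserved.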
assertThat(alterDatabaseOperation.getCatalogDatabase().getProperties()) .isEqualTo(properties); } @Test public void testLoadModule() { final String sql = "LOAD MODULE dummy WITH ('k1' = 'v1', 'k2' = 'v2')"; final String expectedModuleName = "dummy"; final Map<String, String> expectedOptions = new HashMap<>(); expectedOptions.put("k1", "v1"); expectedOptions.put("k2", "v2"); Operation operation = parse(sql); assertThat(operation).isInstanceOf(LoadModuleOperation.class); final LoadModuleOperation loadModuleOperation = (LoadModuleOperation) operation; assertThat(loadModuleOperation.getModuleName()).isEqualTo(expectedModuleName); assertThat(loadModuleOperation.getOptions()).isEqualTo(expectedOptions); } @Test public void testUnloadModule() { final String sql = "UNLOAD MODULE dummy"; final String expectedModuleName = "dummy"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(UnloadModuleOperation.class); final UnloadModuleOperation unloadModuleOperation = (UnloadModuleOperation) operation; assertThat(unloadModuleOperation.getModuleName()).isEqualTo(expectedModuleName); } @Test public void testUseOneModule() { final String sql = "USE MODULES dummy"; final List<String> expectedModuleNames = Collections.singletonList("dummy"); Operation operation = parse(sql); assertThat(operation).isInstanceOf(UseModulesOperation.class); final UseModulesOperation useModulesOperation = (UseModulesOperation) operation; assertThat(useModulesOperation.getModuleNames()).isEqualTo(expectedModuleNames); assertThat(useModulesOperation.asSummaryString()).isEqualTo("USE MODULES: [dummy]"); } @Test public void testUseMultipleModules() { final String sql = "USE MODULES x, y, z"; final List<String> expectedModuleNames = Arrays.asList("x", "y", "z"); Operation operation = parse(sql); assertThat(operation).isInstanceOf(UseModulesOperation.class); final UseModulesOperation useModulesOperation = (UseModulesOperation) operation; assertThat(useModulesOperation.getModuleNames()).isEqualTo(expectedModuleNames); assertThat(useModulesOperation.asSummaryString()).isEqualTo("USE MODULES: [x, y, z]"); } @Test public void testShowModules() { final String sql = "SHOW MODULES"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowModulesOperation.class); final ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation; assertThat(showModulesOperation.requireFull()).isFalse(); assertThat(showModulesOperation.asSummaryString()).isEqualTo("SHOW MODULES"); } @Test public void testShowTables() { final String sql = "SHOW TABLES from cat1.db1 not like 't%'"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowTablesOperation.class); ShowTablesOperation showTablesOperation = (ShowTablesOperation) operation; assertThat(showTablesOperation.getCatalogName()).isEqualTo("cat1"); assertThat(showTablesOperation.getDatabaseName()).isEqualTo("db1"); assertThat(showTablesOperation.getPreposition()).isEqualTo("FROM"); assertThat(showTablesOperation.isUseLike()).isTrue(); assertThat(showTablesOperation.isNotLike()).isTrue(); final String sql2 = "SHOW TABLES in db2"; showTablesOperation = (ShowTablesOperation) parse(sql2); assertThat(showTablesOperation.getCatalogName()).isEqualTo("builtin"); assertThat(showTablesOperation.getDatabaseName()).isEqualTo("db2"); assertThat(showTablesOperation.getPreposition()).isEqualTo("IN"); assertThat(showTablesOperation.isUseLike()).isFalse(); assertThat(showTablesOperation.isNotLike()).isFalse(); final String sql3 = "SHOW TABLES"; showTablesOperation 
= (ShowTablesOperation) parse(sql3); assertThat(showTablesOperation.getCatalogName()).isNull(); assertThat(showTablesOperation.getDatabaseName()).isNull(); assertThat(showTablesOperation.getPreposition()).isNull(); } @Test public void testShowFullModules() { final String sql = "SHOW FULL MODULES"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowModulesOperation.class); final ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation; assertThat(showModulesOperation.requireFull()).isTrue(); assertThat(showModulesOperation.asSummaryString()).isEqualTo("SHOW FULL MODULES"); } @Test public void testShowFunctions() { final String sql1 = "SHOW FUNCTIONS"; assertShowFunctions(sql1, sql1, FunctionScope.ALL); final String sql2 = "SHOW USER FUNCTIONS"; assertShowFunctions(sql2, sql2, FunctionScope.USER); } @Test public void testCreateTable() { final String sql = "CREATE TABLE tbl1 (\n" + " a bigint,\n" + " b varchar, \n" + " c int, \n" + " d varchar" + ")\n" + " PARTITIONED BY (a, d)\n" + " with (\n" + " 'connector' = 'kafka', \n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); assertThat(catalogTable.getPartitionKeys()).hasSameElementsAs(Arrays.asList("a", "d")); assertThat(catalogTable.getSchema().getFieldNames()) .isEqualTo(new String[] {"a", "b", "c", "d"}); assertThat(catalogTable.getSchema().getFieldDataTypes()) .isEqualTo( new DataType[] { DataTypes.BIGINT(), DataTypes.VARCHAR(Integer.MAX_VALUE), DataTypes.INT(), DataTypes.VARCHAR(Integer.MAX_VALUE) }); } @Test public void testCreateTableWithPrimaryKey() { final String sql = "CREATE TABLE tbl1 (\n" + " a bigint,\n" + " b varchar, \n" + " c int, \n" + " d varchar, \n" + " constraint ct1 primary key(a, b) not enforced\n" + ") with (\n" + " 'connector' = 'kafka', \n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); TableSchema tableSchema = catalogTable.getSchema(); assertThat( tableSchema .getPrimaryKey() .map(UniqueConstraint::asSummaryString) .orElse("fakeVal")) .isEqualTo("CONSTRAINT ct1 PRIMARY KEY (a, b)"); assertThat(tableSchema.getFieldNames()).isEqualTo(new String[] {"a", "b", "c", "d"}); assertThat(tableSchema.getFieldDataTypes()) .isEqualTo( new DataType[] { DataTypes.BIGINT().notNull(), DataTypes.STRING().notNull(), DataTypes.INT(), DataTypes.STRING() }); } @Test public void testPrimaryKeyOnGeneratedColumn() { final String sql = "CREATE TABLE tbl1 (\n" + " a bigint not null,\n" + " b varchar not null,\n" + " c as 2 * (a + 1),\n" + " constraint ct1 primary key (b, c) not enforced" + ") with (\n" + " 'connector' = 'kafka',\n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Could not create a PRIMARY KEY with column 'c' at line 5, column 34.\n" + "A PRIMARY KEY constraint must 
be declared on physical columns."); } @Test public void testPrimaryKeyNonExistentColumn() { final String sql = "CREATE TABLE tbl1 (\n" + " a bigint not null,\n" + " b varchar not null,\n" + " c as 2 * (a + 1),\n" + " constraint ct1 primary key (b, d) not enforced" + ") with (\n" + " 'connector' = 'kafka',\n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Primary key column 'd' is not defined in the schema at line 5, column 34"); } @Test public void testCreateTableWithMinusInOptionKey() { final String sql = "create table source_table(\n" + " a int,\n" + " b bigint,\n" + " c varchar\n" + ") with (\n" + " 'a-B-c-d124' = 'Ab',\n" + " 'a.b-c-d.e-f.g' = 'ada',\n" + " 'a.b-c-d.e-f1231.g' = 'ada',\n" + " 'a.b-c-d.*' = 'adad')\n"; final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); assertThat(node).isInstanceOf(SqlCreateTable.class); Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get(); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); Map<String, String> options = catalogTable.getOptions().entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); Map<String, String> sortedProperties = new TreeMap<>(options); final String expected = "{a-B-c-d124=Ab, " + "a.b-c-d.*=adad, " + "a.b-c-d.e-f.g=ada, " + "a.b-c-d.e-f1231.g=ada}"; assertThat(sortedProperties.toString()).isEqualTo(expected); } @Test public void testExplainWithSelect() { final String sql = "explain select * from t1"; checkExplainSql(sql); } @Test public void testExplainWithInsert() { final String sql = "explain insert into t2 select * from t1"; checkExplainSql(sql); } @Test public void testExplainWithUnion() { final String sql = "explain select * from t1 union select * from t2"; checkExplainSql(sql); } @Test public void testExplainWithExplainDetails() { String sql = "explain changelog_mode, estimated_cost, json_execution_plan select * from t1"; checkExplainSql(sql); } @Test public void testCreateTableWithWatermark() throws FunctionAlreadyExistException, DatabaseNotExistException { CatalogFunction cf = new CatalogFunctionImpl(JavaUserDefinedScalarFunctions.JavaFunc5.class.getName()); catalog.createFunction(ObjectPath.fromString("default.myfunc"), cf, true); final String sql = "create table source_table(\n" + " a int,\n" + " b bigint,\n" + " c timestamp(3),\n" + " watermark for `c` as myfunc(c, 1) - interval '5' second\n" + ") with (\n" + " 'connector.type' = 'kafka')\n"; final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); assertThat(node).isInstanceOf(SqlCreateTable.class); Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get(); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); Map<String, String> properties = catalogTable.toProperties(); Map<String, String> expected = new HashMap<>(); expected.put("schema.0.name", "a"); expected.put("schema.0.data-type", "INT"); expected.put("schema.1.name", "b"); expected.put("schema.1.data-type", 
"BIGINT"); expected.put("schema.2.name", "c"); expected.put("schema.2.data-type", "TIMESTAMP(3)"); expected.put("schema.watermark.0.rowtime", "c"); expected.put( "schema.watermark.0.strategy.expr", "`builtin`.`default`.`myfunc`(`c`, 1) - INTERVAL '5' SECOND"); expected.put("schema.watermark.0.strategy.data-type", "TIMESTAMP(3)"); expected.put("connector.type", "kafka"); assertThat(properties).isEqualTo(expected); } @Test public void testBasicCreateTableLike() { Map<String, String> sourceProperties = new HashMap<>(); sourceProperties.put("format.type", "json"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .build(), null, Collections.emptyList(), sourceProperties); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1 as `f1` - interval '5' second\n" + ")\n" + "PARTITIONED BY (a, f0)\n" + "with (\n" + " 'connector.type' = 'kafka'" + ")\n" + "like sourceTable"; Operation operation = parseAndConvert(sql); assertThat(operation) .is( new HamcrestCondition<>( isCreateTableOperation( withSchema( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .column("a", DataTypes.INT()) .watermark( "f1", "`f1` - INTERVAL '5' SECOND") .build()), withOptions( entry("connector.type", "kafka"), entry("format.type", "json")), partitionedBy("a", "f0")))); } @Test public void testCreateTableLikeWithFullPath() { Map<String, String> sourceProperties = new HashMap<>(); sourceProperties.put("connector.type", "kafka"); sourceProperties.put("format.type", "json"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .build(), null, Collections.emptyList(), sourceProperties); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table mytable like `builtin`.`default`.sourceTable"; Operation operation = parseAndConvert(sql); assertThat(operation) .is( new HamcrestCondition<>( isCreateTableOperation( withSchema( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .build()), withOptions( entry("connector.type", "kafka"), entry("format.type", "json"))))); } @Test public void testMergingCreateTableLike() { Map<String, String> sourceProperties = new HashMap<>(); sourceProperties.put("format.type", "json"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column("f1", DataTypes.TIMESTAMP(3)) .columnByExpression("f2", "`f0` + 12345") .watermark("f1", "`f1` - interval '1' second") .build(), null, Arrays.asList("f0", "f1"), sourceProperties); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1 as `f1` - interval '5' second\n" + ")\n" + "PARTITIONED BY (a, f0)\n" + "with (\n" + " 'connector.type' = 'kafka'" + ")\n" + "like sourceTable (\n" + " EXCLUDING GENERATED\n" + " EXCLUDING PARTITIONS\n" + " OVERWRITING OPTIONS\n" + " OVERWRITING WATERMARKS" + ")"; Operation operation = parseAndConvert(sql); assertThat(operation) .is( new HamcrestCondition<>( isCreateTableOperation( withSchema( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) 
.column("f1", DataTypes.TIMESTAMP(3)) .column("a", DataTypes.INT()) .watermark( "f1", "`f1` - INTERVAL '5' SECOND") .build()), withOptions( entry("connector.type", "kafka"), entry("format.type", "json")), partitionedBy("a", "f0")))); } @Test public void testCreateTableInvalidPartition() { final String sql = "create table derivedTable(\n" + " a int\n" + ")\n" + "PARTITIONED BY (f3)"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Partition column 'f3' not defined in the table schema. Available columns: ['a']"); } @Test public void testCreateTableLikeInvalidPartition() { CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder().column("f0", DataTypes.INT().notNull()).build(), null, Collections.emptyList(), Collections.emptyMap()); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int\n" + ")\n" + "PARTITIONED BY (f3)\n" + "like sourceTable"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Partition column 'f3' not defined in the table schema. Available columns: ['f0', 'a']"); } @Test public void testCreateTableInvalidWatermark() { final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1 as `f1` - interval '5' second\n" + ")"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The rowtime attribute field 'f1' is not defined in the table schema," + " at line 3, column 17\n" + "Available fields: ['a']"); } @Test public void testCreateTableLikeInvalidWatermark() { CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder().column("f0", DataTypes.INT().notNull()).build(), null, Collections.emptyList(), Collections.emptyMap()); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1 as `f1` - interval '5' second\n" + ")\n" + "like sourceTable"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The rowtime attribute field 'f1' is not defined in the table schema," + " at line 3, column 17\n" + "Available fields: ['f0', 'a']"); } @Test public void testCreateTableLikeNestedWatermark() { CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT().notNull()) .column( "f1", DataTypes.ROW( DataTypes.FIELD("tmstmp", DataTypes.TIMESTAMP(3)))) .build(), null, Collections.emptyList(), Collections.emptyMap()); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false); final String sql = "create table derivedTable(\n" + " a int,\n" + " watermark for f1.t as f1.t - interval '5' second\n" + ")\n" + "like sourceTable"; assertThatThrownBy(() -> parseAndConvert(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The rowtime attribute field 'f1.t' is not defined in the table schema," + " at line 3, column 20\n" + "Nested field 't' was not found in a composite type:" + " ROW<`tmstmp` TIMESTAMP(3)>."); } @Test public void testSqlInsertWithStaticPartition() { final String sql = "insert into t1 partition(a=1) select b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation 
operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(SinkModifyOperation.class); SinkModifyOperation sinkModifyOperation = (SinkModifyOperation) operation; final Map<String, String> expectedStaticPartitions = new HashMap<>(); expectedStaticPartitions.put("a", "1"); assertThat(sinkModifyOperation.getStaticPartitions()).isEqualTo(expectedStaticPartitions); } @Test public void testSqlInsertWithDynamicTableOptions() { final String sql = "insert into t1 /*+ OPTIONS('k1'='v1', 'k2'='v2') */\n" + "select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(SinkModifyOperation.class); SinkModifyOperation sinkModifyOperation = (SinkModifyOperation) operation; Map<String, String> dynamicOptions = sinkModifyOperation.getDynamicOptions(); assertThat(dynamicOptions).isNotNull(); assertThat(dynamicOptions.size()).isEqualTo(2); assertThat(dynamicOptions.toString()).isEqualTo("{k1=v1, k2=v2}"); } @Test public void testDynamicTableWithInvalidOptions() { final String sql = "select * from t1 /*+ OPTIONS('opt1', 'opt2') */"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); assertThatThrownBy(() -> parse(sql, planner, parser)) .isInstanceOf(AssertionError.class) .hasMessageContaining( "Hint [OPTIONS] only support " + "non empty key value options"); } @Test public void testCreateTableWithFullDataTypes() { final List<TestItem> testItems = Arrays.asList( createTestItem("CHAR", DataTypes.CHAR(1)), createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()), createTestItem("CHAR NULL", DataTypes.CHAR(1)), createTestItem("CHAR(33)", DataTypes.CHAR(33)), createTestItem("VARCHAR", DataTypes.STRING()), createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)), createTestItem("STRING", DataTypes.STRING()), createTestItem("BOOLEAN", DataTypes.BOOLEAN()), createTestItem("BINARY", DataTypes.BINARY(1)), createTestItem("BINARY(33)", DataTypes.BINARY(33)), createTestItem("VARBINARY", DataTypes.BYTES()), createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)), createTestItem("BYTES", DataTypes.BYTES()), createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)), createTestItem("DEC", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("TINYINT", DataTypes.TINYINT()), createTestItem("SMALLINT", DataTypes.SMALLINT()), createTestItem("INTEGER", DataTypes.INT()), createTestItem("INT", DataTypes.INT()), createTestItem("BIGINT", DataTypes.BIGINT()), createTestItem("FLOAT", DataTypes.FLOAT()), createTestItem("DOUBLE", DataTypes.DOUBLE()), createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()), createTestItem("DATE", DataTypes.DATE()), createTestItem("TIME", DataTypes.TIME()), createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()), createTestItem("TIME(3)", DataTypes.TIME()), createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()), createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP WITHOUT TIME ZONE", 
DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)), createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)), createTestItem( "TIMESTAMP WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)), createTestItem( "TIMESTAMP(3) WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)), createTestItem( "ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))), createTestItem( "ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())), createTestItem( "INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem( "INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()), createTestItem( "MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())), createTestItem( "INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem( "INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()), createTestItem( "MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())), createTestItem( "ROW<f0 INT NOT NULL, f1 BOOLEAN>", DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem( "ROW(f0 INT NOT NULL, f1 BOOLEAN)", DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem( "ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem( "ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem("ROW<>", DataTypes.ROW()), createTestItem("ROW()", DataTypes.ROW()), createTestItem( "ROW<f0 INT NOT NULL 'This is a comment.'," + " f1 BOOLEAN 'This as well.'>", DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem( "ARRAY<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.ARRAY( DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem( "ROW<f0 INT, f1 BOOLEAN> MULTISET", DataTypes.MULTISET( DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem( "MULTISET<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.MULTISET( DataTypes.ROW( DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem( "ROW<f0 Row<f00 INT, f01 BOOLEAN>, " + "f1 INT ARRAY, " + "f2 BOOLEAN MULTISET>", DataTypes.ROW( DataTypes.FIELD( "f0", DataTypes.ROW( DataTypes.FIELD("f00", DataTypes.INT()), DataTypes.FIELD( "f01", DataTypes.BOOLEAN()))), DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())), DataTypes.FIELD( "f2", DataTypes.MULTISET(DataTypes.BOOLEAN()))))); StringBuilder buffer = new StringBuilder("create table t1(\n"); for (int i = 0; i < testItems.size(); i++) { buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr); if (i == testItems.size() - 1) { buffer.append(")"); } else { buffer.append(",\n"); } } final String sql = buffer.toString(); final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); assertThat(node).isInstanceOf(SqlCreateTable.class); Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get(); TableSchema schema = ((CreateTableOperation) 
operation).getCatalogTable().getSchema(); Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray(); assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes); } @Test public void testCreateTableWithComputedColumn() { final String sql = "CREATE TABLE tbl1 (\n" + " a int,\n" + " b varchar, \n" + " c as a - 1, \n" + " d as b || '$$', \n" + " e as my_udf1(a)," + " f as `default`.my_udf2(a) + 1," + " g as builtin.`default`.my_udf3(a) || ' + ")\n" + " with (\n" + " 'connector' = 'kafka', \n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; functionCatalog.registerTempCatalogScalarFunction( ObjectIdentifier.of("builtin", "default", "my_udf1"), Func0$.MODULE$); functionCatalog.registerTempCatalogScalarFunction( ObjectIdentifier.of("builtin", "default", "my_udf2"), Func1$.MODULE$); functionCatalog.registerTempCatalogScalarFunction( ObjectIdentifier.of("builtin", "default", "my_udf3"), Func8$.MODULE$); FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT)); assertThat(operation).isInstanceOf(CreateTableOperation.class); CreateTableOperation op = (CreateTableOperation) operation; CatalogTable catalogTable = op.getCatalogTable(); assertThat(catalogTable.getSchema().getFieldNames()) .isEqualTo(new String[] {"a", "b", "c", "d", "e", "f", "g"}); assertThat(catalogTable.getSchema().getFieldDataTypes()) .isEqualTo( new DataType[] { DataTypes.INT(), DataTypes.STRING(), DataTypes.INT(), DataTypes.STRING(), DataTypes.INT().notNull(), DataTypes.INT(), DataTypes.STRING() }); String[] columnExpressions = catalogTable.getSchema().getTableColumns().stream() .filter(ComputedColumn.class::isInstance) .map(ComputedColumn.class::cast) .map(ComputedColumn::getExpression) .toArray(String[]::new); String[] expected = new String[] { "`a` - 1", "`b` || '$$'", "`builtin`.`default`.`my_udf1`(`a`)", "`builtin`.`default`.`my_udf2`(`a`) + 1", "`builtin`.`default`.`my_udf3`(`a`) || ' }; assertThat(columnExpressions).isEqualTo(expected); } @Test public void testCreateTableWithMetadataColumn() { final String sql = "CREATE TABLE tbl1 (\n" + " a INT,\n" + " b STRING,\n" + " c INT METADATA,\n" + " d INT METADATA FROM 'other.key',\n" + " e INT METADATA VIRTUAL\n" + ")\n" + " WITH (\n" + " 'connector' = 'kafka',\n" + " 'kafka.topic' = 'log.test'\n" + ")\n"; final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT)); assertThat(operation).isInstanceOf(CreateTableOperation.class); final CreateTableOperation op = (CreateTableOperation) operation; final TableSchema actualSchema = op.getCatalogTable().getSchema(); final TableSchema expectedSchema = TableSchema.builder() .add(TableColumn.physical("a", DataTypes.INT())) .add(TableColumn.physical("b", DataTypes.STRING())) .add(TableColumn.metadata("c", DataTypes.INT())) .add(TableColumn.metadata("d", DataTypes.INT(), "other.key")) .add(TableColumn.metadata("e", DataTypes.INT(), true)) .build(); assertThat(actualSchema).isEqualTo(expectedSchema); } @Test public void testCreateFunction() { String sql = "CREATE FUNCTION test_udf AS 'org.apache.fink.function.function1' " + "LANGUAGE JAVA USING JAR 'file: final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT)); assertThat(operation).isInstanceOf(CreateCatalogFunctionOperation.class); CatalogFunction 
actualFunction = ((CreateCatalogFunctionOperation) operation).getCatalogFunction(); assertThat(operation.asSummaryString()) .isEqualTo( "CREATE CATALOG FUNCTION: (catalogFunction: [Optional[This is a user-defined function]], " + "identifier: [`builtin`.`default`.`test_udf`], ignoreIfExists: [false], isTemporary: [false])"); CatalogFunction expected = new CatalogFunctionImpl( "org.apache.fink.function.function1", FunctionLanguage.JAVA, Collections.singletonList( new ResourceUri(ResourceType.JAR, "file: assertThat(actualFunction).isEqualTo(expected); sql = "CREATE TEMPORARY SYSTEM FUNCTION test_udf2 AS 'org.apache.fink.function.function2' " + "LANGUAGE SCALA USING JAR 'file: operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT)); assertThat(operation).isInstanceOf(CreateTempSystemFunctionOperation.class); assertThat(operation.asSummaryString()) .isEqualTo( "CREATE TEMPORARY SYSTEM FUNCTION: (functionName: [test_udf2], " + "catalogFunction: [CatalogFunctionImpl{className='org.apache.fink.function.function2', " + "functionLanguage='SCALA', " + "functionResource='[ResourceUri{resourceType=JAR, uri='file: + "ignoreIfExists: [false], functionLanguage: [SCALA])"); } @Test public void testAlterTable() throws Exception { prepareNonManagedTable(false); final String[] renameTableSqls = new String[] { "alter table cat1.db1.tb1 rename to tb2", "alter table db1.tb1 rename to tb2", "alter table tb1 rename to cat1.db1.tb2", }; final ObjectIdentifier expectedIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); final ObjectIdentifier expectedNewIdentifier = ObjectIdentifier.of("cat1", "db1", "tb2"); for (int i = 0; i < renameTableSqls.length; i++) { Operation operation = parse(renameTableSqls[i]); assertThat(operation).isInstanceOf(AlterTableRenameOperation.class); final AlterTableRenameOperation alterTableRenameOperation = (AlterTableRenameOperation) operation; assertThat(alterTableRenameOperation.getTableIdentifier()) .isEqualTo(expectedIdentifier); assertThat(alterTableRenameOperation.getNewTableIdentifier()) .isEqualTo(expectedNewIdentifier); } Operation operation = parse("alter table cat1.db1.tb1 set ('k1' = 'v1', 'K2' = 'V2')"); Map<String, String> expectedOptions = new HashMap<>(); expectedOptions.put("connector", "dummy"); expectedOptions.put("k", "v"); expectedOptions.put("k1", "v1"); expectedOptions.put("K2", "V2"); assertAlterTableOptions( operation, expectedIdentifier, expectedOptions, Arrays.asList(TableChange.set("k1", "v1"), TableChange.set("K2", "V2")), "ALTER TABLE cat1.db1.tb1\n SET 'k1' = 'v1',\n SET 'K2' = 'V2'"); operation = parse("alter table cat1.db1.tb1 reset ('k')"); assertAlterTableOptions( operation, expectedIdentifier, Collections.singletonMap("connector", "dummy"), Collections.singletonList(TableChange.reset("k")), "ALTER TABLE cat1.db1.tb1\n RESET 'k'"); assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ('connector')")) .isInstanceOf(ValidationException.class) .hasMessageContaining("ALTER TABLE RESET does not support changing 'connector'"); assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ()")) .isInstanceOf(ValidationException.class) .hasMessageContaining("ALTER TABLE RESET does not support empty key"); } @Test public void testAlterTableRenameColumn() throws Exception { prepareTable("tb1", false, false, true, 3); Operation operation = parse("alter table tb1 rename c to c1"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()) .isEqualTo("ALTER TABLE cat1.db1.tb1\n MODIFY 
`c` TO `c1`"); assertThat(((AlterTableChangeOperation) operation).getNewTable().getUnresolvedSchema()) .isEqualTo( Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c1", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .watermark("ts", "ts - interval '5' seconds") .primaryKeyNamed("ct1", "a", "b", "c1") .build()); operation = parse("alter table tb1 rename f to f1"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()) .isEqualTo("ALTER TABLE cat1.db1.tb1\n MODIFY `f` TO `f1`"); assertThat(((AlterTableChangeOperation) operation).getNewTable().getUnresolvedSchema()) .isEqualTo( Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f1", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .watermark("ts", "ts - interval '5' seconds") .primaryKeyNamed("ct1", "a", "b", "c") .build()); assertThatThrownBy(() -> parse("alter table tb1 rename a to a1")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `a` is referenced by computed column `d`."); assertThatThrownBy(() -> parse("alter table tb1 rename ts to ts1")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `ts` is referenced by watermark expression."); assertThatThrownBy(() -> parse("alter table tb1 rename e.f1 to e.f11")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f1 is not supported yet."); assertThatThrownBy(() -> parse("alter table tb1 rename c to a")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `a` already existed in table schema."); CatalogTable catalogTable2 = CatalogTable.of( Schema.newBuilder() .column("a", DataTypes.STRING().notNull()) .column("b", DataTypes.INT().notNull()) .column("e", DataTypes.STRING()) .columnByExpression("j", $("e").upperCase()) .columnByExpression("g", "TO_TIMESTAMP(e)") .primaryKey("a", "b") .build(), "tb2", Collections.singletonList("a"), Collections.emptyMap()); catalogManager .getCatalog("cat1") .get() .createTable(new ObjectPath("db1", "tb2"), catalogTable2, true); assertThatThrownBy(() -> parse("alter table `cat1`.`db1`.`tb2` rename e to e1")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Failed to execute ALTER TABLE statement.\nThe column `e` is referenced by computed column `g`, `j`."); assertThatThrownBy(() -> parse("alter table tb2 rename a to a1")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Failed to execute ALTER TABLE statement.\nThe column `a` is used as the partition keys."); } @Test public void testFailedToAlterTableDropColumn() throws Exception { prepareTable("tb1", false, false, true, 3); assertThatThrownBy(() -> parse("alter table tb1 drop 
x")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `x` does not exist in the base table."); assertThatThrownBy(() -> parse("alter table tb1 drop (g, x)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `x` does not exist in the base table."); assertThatThrownBy(() -> parse("alter table tb1 drop (g, c, g)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Duplicate column `g`."); assertThatThrownBy(() -> parse("alter table tb1 drop e.f2")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f2 is not supported yet."); assertThatThrownBy(() -> parse("alter table tb1 drop a")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `a` is referenced by computed column `d`."); assertThatThrownBy(() -> parse("alter table tb1 drop c")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `c` is used as the primary key."); assertThatThrownBy(() -> parse("alter table tb1 drop ts")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The column `ts` is referenced by watermark expression."); } @Test public void testAlterTableDropColumn() throws Exception { prepareNonManagedTable(false); Operation operation = parse("alter table tb1 drop c"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()).isEqualTo("ALTER TABLE cat1.db1.tb1\n DROP `c`"); assertThat( ((AlterTableChangeOperation) operation) .getNewTable().getUnresolvedSchema().getColumns().stream() .map(Schema.UnresolvedColumn::getName) .collect(Collectors.toList())) .doesNotContain("c"); operation = parse("alter table tb1 drop (f, e, b, d)"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " DROP `d`,\n" + " DROP `f`,\n" + " DROP `b`,\n" + " DROP `e`"); assertThat( ((AlterTableChangeOperation) operation) .getNewTable().getUnresolvedSchema().getColumns().stream() .map(Schema.UnresolvedColumn::getName) .collect(Collectors.toList())) .doesNotContain("f", "e", "b", "d"); } @Test public void testFailedToAlterTableDropConstraint() throws Exception { prepareNonManagedTable("tb1", 0); assertThatThrownBy(() -> parse("alter table tb1 drop primary key")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The base table does not define any primary key."); assertThatThrownBy(() -> parse("alter table tb1 drop constraint ct")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The base table does not define any primary key."); prepareNonManagedTable("tb2", 1); assertThatThrownBy(() -> parse("alter table tb2 drop constraint ct2")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table does not define a primary key constraint named 'ct2'. 
Available constraint name: ['ct1']."); } @Test public void testFailedToAlterTableDropWatermark() throws Exception { prepareNonManagedTable("tb1", false); assertThatThrownBy(() -> parse("alter table tb1 drop watermark")) .isInstanceOf(ValidationException.class) .hasMessageContaining("The base table does not define any watermark strategy."); } @Test public void testAlterTableDropWatermark() throws Exception { prepareNonManagedTable("tb1", true); Operation operation = parse("alter table tb1 drop watermark"); assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); assertThat(operation.asSummaryString()) .isEqualTo("ALTER TABLE cat1.db1.tb1\n  DROP WATERMARK"); assertThat( ((AlterTableChangeOperation) operation) .getNewTable() .getUnresolvedSchema() .getWatermarkSpecs()) .isEmpty(); } @Test public void testAlterTableCompactOnNonManagedTable() throws Exception { prepareNonManagedTable(false); assertThatThrownBy(() -> parse("alter table tb1 compact")) .isInstanceOf(ValidationException.class) .hasMessage( "ALTER TABLE COMPACT operation is not supported for non-managed table `cat1`.`db1`.`tb1`"); } @Test public void testAlterTableCompactOnManagedNonPartitionedTable() throws Exception { prepareManagedTable(false); assertThatThrownBy(() -> parse("alter table tb1 partition(dt = 'a') compact")) .isInstanceOf(ValidationException.class) .hasMessage( "Partition column 'dt' not defined in the table schema. Table `cat1`.`db1`.`tb1` is not partitioned."); assertThatThrownBy(() -> parse("alter table tb2 compact")) .isInstanceOf(ValidationException.class) .hasMessage("Table `cat1`.`db1`.`tb2` doesn't exist or is a temporary table."); checkAlterTableCompact(parse("alter table tb1 compact"), Collections.emptyMap()); } @Test public void testAlterTableCompactOnManagedPartitionedTable() throws Exception { prepareManagedTable(true); assertThatThrownBy(() -> parse("alter table tb1 partition (dt = 'a') compact")) .isInstanceOf(ValidationException.class) .hasMessage( "Partition column 'dt' not defined in the table schema. 
Available ordered partition columns: ['b', 'c']"); Map<String, String> staticPartitions = new HashMap<>(); staticPartitions.put("b", "0"); staticPartitions.put("c", "flink"); checkAlterTableCompact( parse("alter table tb1 partition (b = 0, c = 'flink') compact"), staticPartitions); staticPartitions = Collections.singletonMap("b", "0"); checkAlterTableCompact( parse("alter table tb1 partition (b = 0) compact"), staticPartitions); staticPartitions = Collections.singletonMap("c", "flink"); checkAlterTableCompact( parse("alter table tb1 partition (c = 'flink') compact"), staticPartitions); staticPartitions = Collections.emptyMap(); checkAlterTableCompact(parse("alter table tb1 compact"), staticPartitions); } @Test public void testFailedToAlterTableAddColumn() throws Exception { prepareNonManagedTable("tb1", 0); assertThatThrownBy(() -> parse("alter table tb1 add a bigint")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Try to add a column `a` which already exists in the table."); assertThatThrownBy(() -> parse("alter table tb1 add (x array<string>, x string)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Encounter duplicate column `x`."); assertThatThrownBy(() -> parse("alter table tb1 add x bigint after y")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Referenced column `y` by 'AFTER' does not exist in the table."); assertThatThrownBy(() -> parse("alter table tb1 add (x bigint after y, y string first)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Referenced column `y` by 'AFTER' does not exist in the table."); assertThatThrownBy(() -> parse("alter table tb1 add m as n + 2")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'm'."); assertThatThrownBy(() -> parse("alter table tb1 add (m as b * 2, n as m + 2)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'n'."); assertThatThrownBy(() -> parse("alter table tb1 add (m as 'hello' || b)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'm'."); assertThatThrownBy(() -> parse("alter table tb1 add (e.f3 string)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f3 is not supported yet."); assertThatThrownBy(() -> parse("alter table tb1 add (x string after e.f2)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type is not supported yet."); assertThatThrownBy(() -> parse("alter table tb1 add (e.f3 string after e.f1)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f3 is not supported yet."); } @Test public void testAlterTableAddColumn() throws Exception { prepareNonManagedTable("tb1", 0); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); Operation operation = parse("alter table tb1 add h double not null comment 'h is double not null'"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `h` DOUBLE NOT NULL COMMENT 'h is double not null' "); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("h", DataTypes.DOUBLE().notNull()) .withComment("h is double not null") .build()); operation = parse( "alter table tb1 add (\n" + " h as e.f2.f1 
first,\n" + " i as b*2 after b,\n" + " j int metadata from 'mk1' virtual comment 'comment_metadata' first,\n" + " k string primary key not enforced after h)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `h` ARRAY<FLOAT> AS `e`.`f2`.`f1` FIRST,\n" + " ADD `i` BIGINT NOT NULL AS `b` * 2 AFTER `b`,\n" + " ADD `j` INT METADATA FROM 'mk1' VIRTUAL COMMENT 'comment_metadata' FIRST,\n" + " ADD `k` STRING NOT NULL AFTER `h`,\n" + " ADD CONSTRAINT `PK_k` PRIMARY KEY (`k`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .columnByMetadata("j", DataTypes.INT(), "mk1", true) .withComment("comment_metadata") .columnByExpression("h", "`e`.`f2`.`f1`") .column("k", DataTypes.STRING().notNull()) .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .columnByExpression("i", new SqlCallExpression("`b` * 2")) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .primaryKey("k") .build()); operation = parse( "alter table tb1 add (\n" + " r row<r1 bigint, r2 string, r3 array<double> not null> not null comment 'add composite type',\n" + " m map<string not null, int not null>,\n" + " n as r.r1 * 2 after r,\n" + " tss as to_timestamp(r.r2) comment 'rowtime' after ts,\n" + " na as r.r3 after ts)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `r` ROW<`r1` BIGINT, `r2` STRING, `r3` ARRAY<DOUBLE> NOT NULL> NOT NULL COMMENT 'add composite type' ,\n" + " ADD `m` MAP<STRING NOT NULL, INT NOT NULL> ,\n" + " ADD `n` BIGINT AS `r`.`r1` * 2 AFTER `r`,\n" + " ADD `tss` TIMESTAMP(3) AS `to_timestamp`(`r`.`r2`) COMMENT 'rowtime' AFTER `ts`,\n" + " ADD `na` ARRAY<DOUBLE> NOT NULL AS `r`.`r3` AFTER `ts`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .columnByExpression("na", "`r`.`r3`") .columnByExpression("tss", "`to_timestamp`(`r`.`r2`)") .withComment("rowtime") .column( "r", DataTypes.ROW( DataTypes.FIELD("r1", DataTypes.BIGINT()), DataTypes.FIELD("r2", DataTypes.STRING()), DataTypes.FIELD( "r3", DataTypes.ARRAY(DataTypes.DOUBLE()) .notNull())) .notNull()) .withComment("add composite type") .columnByExpression("n", "`r`.`r1` * 2") .column( "m", DataTypes.MAP( DataTypes.STRING().notNull(), DataTypes.INT().notNull())) .build()); } @Test public void testFailedToAlterTableAddPk() throws Exception { prepareNonManagedTable("tb1", 1); assertThatThrownBy(() -> parse("alter table tb1 add primary key(c) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the primary key constraint [`a`]. " + "You might want to drop it before adding a new one."); assertThatThrownBy( () -> parse( "alter table tb1 add x string not null primary key not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the primary key constraint [`a`]. 
" + "You might want to drop it before adding a new one"); prepareNonManagedTable("tb2", 2); assertThatThrownBy(() -> parse("alter table tb2 add primary key(c) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the primary key constraint [`a`, `b`]. " + "You might want to drop it before adding a new one"); assertThatThrownBy( () -> parse( "alter table tb2 add x string not null primary key not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the primary key constraint [`a`, `b`]. " + "You might want to drop it before adding a new one"); prepareNonManagedTable("tb3", 0); assertThatThrownBy(() -> parse("alter table tb3 add primary key (x) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid primary key 'PK_x'. Column 'x' does not exist."); assertThatThrownBy(() -> parse("alter table tb3 add unique(b)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("UNIQUE constraint is not supported yet"); assertThatThrownBy(() -> parse("alter table tb3 add primary key(b)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Flink doesn't support ENFORCED mode for PRIMARY KEY constraint"); assertThatThrownBy( () -> parse( "alter table tb3 add (\n" + " x as upper(c),\n" + " primary key (d, x) not enforced)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'PK_d_x'. Column 'd' is not a physical column."); assertThatThrownBy(() -> parse("alter table tb3 add (primary key (g) not enforced)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'PK_g'. Column 'g' is not a physical column."); } @Test public void testAlterTableAddPrimaryKey() throws Exception { prepareNonManagedTable("tb1", 0); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); Operation operation = parse("alter table tb1 add constraint my_pk primary key (a, b) not enforced"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD CONSTRAINT `my_pk` PRIMARY KEY (`a`, `b`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .primaryKeyNamed("my_pk", "a", "b") .build()); operation = parse("alter table tb1 add x bigint not null primary key not enforced"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `x` BIGINT NOT NULL ,\n" + " ADD CONSTRAINT `PK_x` PRIMARY KEY (`x`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("x", DataTypes.BIGINT().notNull()) .primaryKey("x") .build()); operation = parse("alter table tb1 add x bigint primary key not enforced"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `x` BIGINT NOT NULL ,\n" + " ADD CONSTRAINT `PK_x` PRIMARY KEY (`x`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("x", DataTypes.BIGINT().notNull()) .primaryKey("x") .build()); operation = parse("alter table tb1 add constraint ct primary key(ts) not enforced"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD CONSTRAINT `ct` PRIMARY KEY (`ts`) NOT ENFORCED"); List<Schema.UnresolvedColumn> 
subColumns = originalSchema.getColumns().subList(0, originalSchema.getColumns().size() - 1); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromColumns(subColumns) .column("ts", DataTypes.TIMESTAMP(3).notNull()) .withComment("just a comment") .primaryKeyNamed("ct", "ts") .build()); } @Test public void testFailedToAlterTableAddWatermark() throws Exception { prepareNonManagedTable("tb1", false); assertThatThrownBy(() -> parse("alter table tb1 add watermark for x as x")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid column name 'x' for rowtime attribute in watermark declaration. " + "Available columns are: [a, b, c, d, e, f, g, ts]"); assertThatThrownBy(() -> parse("alter table tb1 add watermark for b as b")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid data type of time field for watermark definition. " + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p), " + "the supported precision 'p' is from 0 to 3, but the time field type is BIGINT NOT NULL"); assertThatThrownBy( () -> parse( "alter table tb1 add (x row<f0 string, f1 timestamp(3)>, watermark for x.f1 as x.f1)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Watermark strategy on nested column is not supported yet."); prepareNonManagedTable("tb2", true); assertThatThrownBy(() -> parse("alter table tb2 add watermark for ts as ts")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table has already defined the watermark strategy " + "`ts` AS ts - interval '5' seconds. " + "You might want to drop it before adding a new one."); } @Test public void testAlterTableAddWatermark() throws Exception { prepareNonManagedTable("tb1", false); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); Operation operation = parse("alter table tb1 add watermark for ts as ts"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD WATERMARK FOR `ts`: TIMESTAMP(3) AS `ts`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder().fromSchema(originalSchema).watermark("ts", "`ts`").build()); operation = parse("alter table tb1 add (tss timestamp(3) not null, watermark for tss as tss)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `tss` TIMESTAMP(3) NOT NULL ,\n" + " ADD WATERMARK FOR `tss`: TIMESTAMP(3) NOT NULL AS `tss`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("tss", DataTypes.TIMESTAMP(3).notNull()) .watermark("tss", "`tss`") .build()); operation = parse( "alter table tb1 add (log_ts string not null,\n" + "tss as to_timestamp(log_ts),\n" + "watermark for tss as tss - interval '3' second)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `log_ts` STRING NOT NULL ,\n" + " ADD `tss` TIMESTAMP(3) AS `to_timestamp`(`log_ts`) ,\n" + " ADD WATERMARK FOR `tss`: TIMESTAMP(3) AS `tss` - INTERVAL '3' SECOND"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column("log_ts", DataTypes.STRING().notNull()) .columnByExpression("tss", "`to_timestamp`(`log_ts`)") .watermark("tss", "`tss` - INTERVAL '3' SECOND") .build()); operation = parse( "alter table tb1 add (x row<f0 string, f1 timestamp(3) not null> not null, " + "y as x.f1, watermark for y as y - interval '1' 
day)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " ADD `x` ROW<`f0` STRING, `f1` TIMESTAMP(3) NOT NULL> NOT NULL ,\n" + " ADD `y` TIMESTAMP(3) NOT NULL AS `x`.`f1` ,\n" + " ADD WATERMARK FOR `y`: TIMESTAMP(3) NOT NULL AS `y` - INTERVAL '1' DAY"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromSchema(originalSchema) .column( "x", DataTypes.ROW(DataTypes.STRING(), DataTypes.TIMESTAMP(3).notNull()) .notNull()) .columnByExpression("y", "`x`.`f1`") .watermark("y", "`y` - INTERVAL '1' DAY") .build()); } @Test public void testFailedToAlterTableModifyColumn() throws Exception { prepareNonManagedTable("tb1", true); assertThatThrownBy(() -> parse("alter table tb1 modify (b int, b array<int not null>)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Encounter duplicate column `b`."); assertThatThrownBy(() -> parse("alter table tb1 modify x bigint")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Try to modify a column `x` which does not exist in the table."); assertThatThrownBy(() -> parse("alter table tb1 modify a bigint after x")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Referenced column `x` by 'AFTER' does not exist in the table."); assertThatThrownBy(() -> parse("alter table tb1 modify e array<int>")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'f'."); assertThatThrownBy(() -> parse("alter table tb1 modify a string")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'd'."); assertThatThrownBy(() -> parse("alter table tb1 modify b as a + 2")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'd'."); assertThatThrownBy(() -> parse("alter table tb1 modify (a timestamp(3), b multiset<int>)")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid expression for computed column 'd'."); assertThatThrownBy(() -> parse("alter table tb1 modify ts int")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid data type of time field for watermark definition. " + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p), " + "the supported precision 'p' is from 0 to 3, but the time field type is INT"); prepareNonManagedTable("tb2", 1); assertThatThrownBy(() -> parse("alter table tb2 modify (d int, a as b + 2)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'ct1'. Column 'a' is not a physical column."); assertThatThrownBy(() -> parse("alter table tb2 modify (d string, a int metadata virtual)")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'ct1'. 
Column 'a' is not a physical column."); assertThatThrownBy(() -> parse("alter table tb2 modify (e.f0 string)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f0 is not supported yet."); assertThatThrownBy(() -> parse("alter table tb2 modify (g string after e.f2)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type is not supported yet."); assertThatThrownBy(() -> parse("alter table tb2 modify (e.f0 string after e.f1)")) .isInstanceOf(UnsupportedOperationException.class) .hasMessageContaining("Alter nested row type e.f0 is not supported yet."); } @Test public void testAlterTableModifyColumn() throws Exception { prepareNonManagedTable("tb1", 2); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Operation operation = parse( "alter table tb1 modify b bigint not null comment 'move b to first and add comment' first"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " MODIFY `b` COMMENT 'move b to first and add comment',\n" + " MODIFY `b` FIRST"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .column("b", DataTypes.BIGINT().notNull()) .withComment("move b to first and add comment") .column("a", DataTypes.INT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .primaryKeyNamed("ct1", "a", "b") .build()); operation = parse("alter table tb1 modify ts timestamp(3) not null after e"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " MODIFY `ts` TIMESTAMP(3) NOT NULL,\n" + " MODIFY `ts` AFTER `e`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .column("ts", DataTypes.TIMESTAMP(3).notNull()) .withComment("just a comment") .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .primaryKeyNamed("ct1", "a", "b") .build()); operation = parse( "alter table tb1 modify (\n" + "d as a + 2 comment 'change d' after b,\n" + "c bigint first,\n" + "e string comment 'change e',\n" + "f as upper(e) comment 'change f' after ts,\n" + "g int not null comment 'change g',\n" + "constraint ct2 primary key(e) not enforced)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb1\n" + " MODIFY `d` INT NOT NULL AS `a` + 2 COMMENT 'change d' AFTER `b`,\n" + " MODIFY `c` BIGINT,\n" + " MODIFY `c` FIRST,\n" + " MODIFY `e` COMMENT 'change e',\n" + " MODIFY `e` STRING NOT NULL,\n" + " MODIFY `f` STRING NOT NULL AS UPPER(`e`) COMMENT 'change f' AFTER `ts`,\n" + " MODIFY `g` INT NOT NULL COMMENT 'change g' ,\n" + " MODIFY CONSTRAINT `ct2` PRIMARY KEY (`e`) NOT ENFORCED"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .column("c", DataTypes.BIGINT()) .withComment("column comment") 
.column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .columnByExpression("d", "`a` + 2") .withComment("change d") .column("e", DataTypes.STRING().notNull()) .withComment("change e") .column("g", DataTypes.INT().notNull()) .withComment("change g") .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .columnByExpression("f", "UPPER(`e`)") .withComment("change f") .primaryKeyNamed("ct2", "e") .build()); prepareNonManagedTable("tb2", true); tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb2"); operation = parse( "alter table tb2 modify (ts int comment 'change ts',\n" + "f timestamp(3) not null,\n" + "e int metadata virtual,\n" + "watermark for f as f,\n" + "g multiset<int> not null comment 'change g' first)"); assertThat(operation.asSummaryString()) .isEqualTo( "ALTER TABLE cat1.db1.tb2\n" + " MODIFY `ts` COMMENT 'change ts',\n" + " MODIFY `ts` INT,\n" + " MODIFY `f` TIMESTAMP(3) NOT NULL ,\n" + " MODIFY `e` INT METADATA VIRTUAL ,\n" + " MODIFY `g` MULTISET<INT> NOT NULL COMMENT 'change g' FIRST,\n" + " MODIFY WATERMARK FOR `f`: TIMESTAMP(3) NOT NULL AS `f`"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .column("g", DataTypes.MULTISET(DataTypes.INT()).notNull()) .withComment("change g") .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .columnByMetadata("e", DataTypes.INT(), null, true) .column("f", DataTypes.TIMESTAMP(3).notNull()) .column("ts", DataTypes.INT()) .withComment("change ts") .watermark("f", "`f`") .build()); } @Test public void testFailedToAlterTableModifyPk() throws Exception { prepareNonManagedTable("tb1", 0); assertThatThrownBy( () -> parse( "alter table tb1 modify constraint ct primary key (b) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table does not define any primary key constraint. You might want to add a new one."); prepareNonManagedTable("tb2", 1); assertThatThrownBy( () -> parse( "alter table tb2 modify constraint ct primary key (x) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining("Invalid primary key 'ct'. Column 'x' does not exist."); assertThatThrownBy( () -> parse( "alter table tb2 modify constraint ct primary key (d) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'ct'. Column 'd' is not a physical column."); assertThatThrownBy( () -> parse( "alter table tb2 modify constraint ct primary key (g) not enforced")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid primary key 'ct'. 
Column 'g' is not a physical column."); } @Test public void testAlterTableModifyPk() throws Exception { prepareNonManagedTable("tb1", 1); Operation operation = parse("alter table tb1 modify constraint ct2 primary key (a, b) not enforced"); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); assertAlterTableSchema( operation, ObjectIdentifier.of("cat1", "db1", "tb1"), Schema.newBuilder() .fromColumns(originalSchema.getColumns()) .primaryKeyNamed("ct2", "a", "b") .build()); operation = parse("alter table tb1 modify primary key (c, a) not enforced"); assertAlterTableSchema( operation, ObjectIdentifier.of("cat1", "db1", "tb1"), Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .primaryKeyNamed("PK_c_a", "c", "a") .build()); } @Test public void testFailedToAlterTableModifyWatermark() throws Exception { prepareNonManagedTable("tb1", false); assertThatThrownBy( () -> parse( "alter table tb1 modify watermark for a as to_timestamp(a) - interval '1' minute")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "The base table does not define any watermark. You might want to add a new one."); prepareNonManagedTable("tb2", true); assertThatThrownBy(() -> parse("alter table tb2 modify watermark for a as a")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid data type of time field for watermark definition. " + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p), the supported precision 'p' is from 0 to 3, " + "but the time field type is INT NOT NULL"); assertThatThrownBy( () -> parse( "alter table tb2 modify watermark for c as to_timestamp(c) - interval '1' day")) .isInstanceOf(ValidationException.class) .hasMessageContaining( "Invalid data type of time field for watermark definition. 
" + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p), the supported precision 'p' is from 0 to 3, " + "but the time field type is STRING"); } @Test public void testAlterTableModifyWatermark() throws Exception { prepareNonManagedTable("tb1", true); Operation operation = parse("alter table tb1 modify watermark for ts as ts"); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1"); Schema originalSchema = catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema(); List<Schema.UnresolvedColumn> columns = originalSchema.getColumns(); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder().fromColumns(columns).watermark("ts", "`ts`").build()); operation = parse("alter table tb1 modify (g timestamp(3) not null, watermark for g as g)"); assertAlterTableSchema( operation, tableIdentifier, Schema.newBuilder() .fromColumns(columns.subList(0, columns.size() - 2)) .column("g", DataTypes.TIMESTAMP(3).notNull()) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment") .watermark("g", "`g`") .build()); } @Test public void testCreateViewWithMatchRecognize() { Map<String, String> prop = new HashMap<>(); prop.put("connector", "values"); prop.put("bounded", "true"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("id", DataTypes.INT().notNull()) .column("measurement", DataTypes.BIGINT().notNull()) .column( "ts", DataTypes.ROW( DataTypes.FIELD("tmstmp", DataTypes.TIMESTAMP(3)))) .build(), null, Collections.emptyList(), prop); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "events"), false); final String sql = "" + "CREATE TEMPORARY VIEW foo AS " + "SELECT * " + "FROM events MATCH_RECOGNIZE (" + " PARTITION BY id " + " ORDER BY ts ASC " + " MEASURES " + " next_step.measurement - this_step.measurement AS diff " + " AFTER MATCH SKIP TO NEXT ROW " + " PATTERN (this_step next_step)" + " DEFINE " + " this_step AS TRUE," + " next_step AS TRUE" + ")"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(CreateViewOperation.class); } @Test public void testCreateViewWithDynamicTableOptions() { Map<String, String> prop = new HashMap<>(); prop.put("connector", "values"); prop.put("bounded", "true"); CatalogTable catalogTable = CatalogTable.of( Schema.newBuilder() .column("f0", DataTypes.INT()) .column("f1", DataTypes.VARCHAR(20)) .build(), null, Collections.emptyList(), prop); catalogManager.createTable( catalogTable, ObjectIdentifier.of("builtin", "default", "sourceA"), false); final String sql = "" + "create view test_view as\n" + "select *\n" + "from sourceA /*+ OPTIONS('changelog-mode'='I') */"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(CreateViewOperation.class); } @Test public void testBeginStatementSet() { final String sql = "BEGIN STATEMENT SET"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(BeginStatementSetOperation.class); final BeginStatementSetOperation beginStatementSetOperation = (BeginStatementSetOperation) operation; assertThat(beginStatementSetOperation.asSummaryString()).isEqualTo("BEGIN STATEMENT SET"); } @Test public void testEnd() { final String sql = "END"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(EndStatementSetOperation.class); final EndStatementSetOperation endStatementSetOperation = (EndStatementSetOperation) operation; assertThat(endStatementSetOperation.asSummaryString()).isEqualTo("END"); } @Test public void testSqlRichExplainWithSelect() { final String sql = 
"explain plan for select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(ExplainOperation.class); } @Test public void testSqlRichExplainWithInsert() { final String sql = "explain plan for insert into t1 select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(ExplainOperation.class); } @Test public void testSqlRichExplainWithStatementSet() { final String sql = "explain plan for statement set begin " + "insert into t1 select a, b, c, d from t2 where a > 1;" + "insert into t1 select a, b, c, d from t2 where a > 2;" + "end"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(ExplainOperation.class); } @Test public void testExplainDetailsWithSelect() { final String sql = "explain estimated_cost, changelog_mode select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); assertExplainDetails(parse(sql, planner, parser)); } @Test public void testExplainDetailsWithInsert() { final String sql = "explain estimated_cost, changelog_mode insert into t1 select a, b, c, d from t2"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); assertExplainDetails(parse(sql, planner, parser)); } @Test public void testExplainDetailsWithStatementSet() { final String sql = "explain estimated_cost, changelog_mode statement set begin " + "insert into t1 select a, b, c, d from t2 where a > 1;" + "insert into t1 select a, b, c, d from t2 where a > 2;" + "end"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); assertExplainDetails(parse(sql, planner, parser)); } private void assertExplainDetails(Operation operation) { Set<String> expectedDetail = new HashSet<>(); expectedDetail.add(ExplainDetail.ESTIMATED_COST.toString()); expectedDetail.add(ExplainDetail.CHANGELOG_MODE.toString()); assertThat(operation) .asInstanceOf(type(ExplainOperation.class)) .satisfies( explain -> assertThat(explain.getExplainDetails()).isEqualTo(expectedDetail)); } @Test public void testSqlExecuteWithStatementSet() { final String sql = "execute statement set begin " + "insert into t1 select a, b, c, d from t2 where a > 1;" + "insert into t1 select a, b, c, d from t2 where a > 2;" + "end"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(StatementSetOperation.class); } @Test public void testSqlExecuteWithInsert() { final String sql = "execute insert into t1 select a, b, c, d from t2 where a > 1"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); 
assertThat(operation).isInstanceOf(SinkModifyOperation.class); } @Test public void testSqlExecuteWithSelect() { final String sql = "execute select a, b, c, d from t2 where a > 1"; FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); Operation operation = parse(sql, planner, parser); assertThat(operation).isInstanceOf(QueryOperation.class); } @Test public void testAddJar() { Arrays.asList( "./test.\njar", "file: "../test-jar.jar", "/root/test.jar", "test\\ jar.jar", "oss: .forEach( jarPath -> { AddJarOperation operation = (AddJarOperation) parser.parse(String.format("ADD JAR '%s'", jarPath)) .get(0); assertThat(operation.getPath()).isEqualTo(jarPath); }); } @Test public void testRemoveJar() { Arrays.asList( "./test.\njar", "file: "../test-jar.jar", "/root/test.jar", "test\\ jar.jar", "oss: .forEach( jarPath -> { RemoveJarOperation operation = (RemoveJarOperation) parser.parse(String.format("REMOVE JAR '%s'", jarPath)) .get(0); assertThat(operation.getPath()).isEqualTo(jarPath); }); } @Test public void testShowJars() { final String sql = "SHOW JARS"; Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowJarsOperation.class); final ShowJarsOperation showModulesOperation = (ShowJarsOperation) operation; assertThat(showModulesOperation.asSummaryString()).isEqualTo("SHOW JARS"); } @Test public void testSet() { Operation operation1 = parse("SET"); assertThat(operation1).isInstanceOf(SetOperation.class); SetOperation setOperation1 = (SetOperation) operation1; assertThat(setOperation1.getKey()).isNotPresent(); assertThat(setOperation1.getValue()).isNotPresent(); Operation operation2 = parse("SET 'test-key' = 'test-value'"); assertThat(operation2).isInstanceOf(SetOperation.class); SetOperation setOperation2 = (SetOperation) operation2; assertThat(setOperation2.getKey()).hasValue("test-key"); assertThat(setOperation2.getValue()).hasValue("test-value"); } @Test public void testReset() { Operation operation1 = parse("RESET"); assertThat(operation1).isInstanceOf(ResetOperation.class); assertThat(((ResetOperation) operation1).getKey()).isNotPresent(); Operation operation2 = parse("RESET 'test-key'"); assertThat(operation2).isInstanceOf(ResetOperation.class); assertThat(((ResetOperation) operation2).getKey()).isPresent(); assertThat(((ResetOperation) operation2).getKey()).hasValue("test-key"); } @ParameterizedTest @ValueSource(strings = {"SET", "SET;", "SET ;", "SET\t;", "SET\n;"}) public void testSetCommands(String command) { ExtendedParser extendedParser = new ExtendedParser(); assertThat(extendedParser.parse(command)).get().isInstanceOf(SetOperation.class); } @ParameterizedTest @ValueSource(strings = {"HELP", "HELP;", "HELP ;", "HELP\t;", "HELP\n;"}) public void testHelpCommands(String command) { ExtendedParser extendedParser = new ExtendedParser(); assertThat(extendedParser.parse(command)).get().isInstanceOf(HelpOperation.class); } @ParameterizedTest @ValueSource(strings = {"CLEAR", "CLEAR;", "CLEAR ;", "CLEAR\t;", "CLEAR\n;"}) public void testClearCommands(String command) { ExtendedParser extendedParser = new ExtendedParser(); assertThat(extendedParser.parse(command)).get().isInstanceOf(ClearOperation.class); } @ParameterizedTest @ValueSource( strings = { "QUIT;", "QUIT;", "QUIT ;", "QUIT\t;", "QUIT\n;", "EXIT;", "EXIT ;", "EXIT\t;", "EXIT\n;", "EXIT ; " }) public void testQuitCommands(String command) { ExtendedParser extendedParser = new ExtendedParser(); 
assertThat(extendedParser.parse(command)).get().isInstanceOf(QuitOperation.class); } private static TestItem createTestItem(Object... args) { assertThat(args).hasSize(2); final String testExpr = (String) args[0]; TestItem testItem = TestItem.fromTestExpr(testExpr); if (args[1] instanceof String) { testItem.withExpectedError((String) args[1]); } else { testItem.withExpectedType(args[1]); } return testItem; } private void checkExplainSql(String sql) { FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); assertThat(node).isInstanceOf(SqlRichExplain.class); Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get(); assertThat(operation).isInstanceOf(ExplainOperation.class); } private void assertShowFunctions( String sql, String expectedSummary, FunctionScope expectedScope) { Operation operation = parse(sql); assertThat(operation).isInstanceOf(ShowFunctionsOperation.class); final ShowFunctionsOperation showFunctionsOperation = (ShowFunctionsOperation) operation; assertThat(showFunctionsOperation.getFunctionScope()).isEqualTo(expectedScope); assertThat(showFunctionsOperation.asSummaryString()).isEqualTo(expectedSummary); } private void assertAlterTableOptions( Operation operation, ObjectIdentifier expectedIdentifier, Map<String, String> expectedOptions, List<TableChange> expectedChanges, String expectedSummary) { assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); final AlterTableChangeOperation alterTableOptionsOperation = (AlterTableChangeOperation) operation; assertThat(alterTableOptionsOperation.getTableIdentifier()).isEqualTo(expectedIdentifier); assertThat(alterTableOptionsOperation.getNewTable().getOptions()) .isEqualTo(expectedOptions); assertThat(expectedChanges).isEqualTo(alterTableOptionsOperation.getTableChanges()); assertThat(alterTableOptionsOperation.asSummaryString()).isEqualTo(expectedSummary); } private void assertAlterTableSchema( Operation operation, ObjectIdentifier expectedIdentifier, Schema expectedSchema) { assertThat(operation).isInstanceOf(AlterTableChangeOperation.class); final AlterTableChangeOperation alterTableChangeOperation = (AlterTableChangeOperation) operation; assertThat(alterTableChangeOperation.getTableIdentifier()).isEqualTo(expectedIdentifier); assertThat(alterTableChangeOperation.getNewTable().getUnresolvedSchema()) .isEqualTo(expectedSchema); } private Operation parse(String sql, FlinkPlannerImpl planner, CalciteParser parser) { SqlNode node = parser.parse(sql); return SqlToOperationConverter.convert(planner, catalogManager, node).get(); } private Operation parse(String sql) { FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); return SqlToOperationConverter.convert(planner, catalogManager, node).get(); } private void prepareNonManagedTable(boolean hasConstraint) throws Exception { prepareNonManagedTable("tb1", hasConstraint ? 
1 : 0); } private void prepareNonManagedTable(String tableName, int numOfPkFields) throws Exception { prepareTable(tableName, false, false, false, numOfPkFields); } private void prepareNonManagedTable(String tableName, boolean hasWatermark) throws Exception { prepareTable(tableName, false, false, hasWatermark, 0); } private void prepareManagedTable(boolean hasPartition) throws Exception { TestManagedTableFactory.MANAGED_TABLES.put( ObjectIdentifier.of("cat1", "db1", "tb1"), new AtomicReference<>()); prepareTable("tb1", true, hasPartition, false, 0); } private void prepareTable( String tableName, boolean managedTable, boolean hasPartition, boolean hasWatermark, int numOfPkFields) throws Exception { Catalog catalog = new GenericInMemoryCatalog("default", "default"); if (!catalogManager.getCatalog("cat1").isPresent()) { catalogManager.registerCatalog("cat1", catalog); } catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true); Schema.Builder builder = Schema.newBuilder() .column("a", DataTypes.INT().notNull()) .column("b", DataTypes.BIGINT().notNull()) .column("c", DataTypes.STRING().notNull()) .withComment("column comment") .columnByExpression("d", "a*(b+2 + a*b)") .column( "e", DataTypes.ROW( DataTypes.STRING(), DataTypes.INT(), DataTypes.ROW( DataTypes.DOUBLE(), DataTypes.ARRAY(DataTypes.FLOAT())))) .columnByExpression("f", "e.f1 + e.f2.f0") .columnByMetadata("g", DataTypes.STRING(), null, true) .column("ts", DataTypes.TIMESTAMP(3)) .withComment("just a comment"); Map<String, String> options = new HashMap<>(); options.put("k", "v"); if (!managedTable) { options.put("connector", "dummy"); } if (numOfPkFields == 0) { } else if (numOfPkFields == 1) { builder.primaryKeyNamed("ct1", "a"); } else if (numOfPkFields == 2) { builder.primaryKeyNamed("ct1", "a", "b"); } else if (numOfPkFields == 3) { builder.primaryKeyNamed("ct1", "a", "b", "c"); } else { throw new IllegalArgumentException( String.format("Don't support to set pk with %s fields.", numOfPkFields)); } if (hasWatermark) { builder.watermark("ts", "ts - interval '5' seconds"); } CatalogTable catalogTable = CatalogTable.of( builder.build(), "a table", hasPartition ? 
Arrays.asList("b", "c") : Collections.emptyList(), Collections.unmodifiableMap(options)); catalogManager.setCurrentCatalog("cat1"); catalogManager.setCurrentDatabase("db1"); ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", tableName); catalogManager.createTable(catalogTable, tableIdentifier, true); } private FlinkPlannerImpl getPlannerBySqlDialect(SqlDialect sqlDialect) { tableConfig.setSqlDialect(sqlDialect); return plannerContext.createFlinkPlanner(); } private CalciteParser getParserBySqlDialect(SqlDialect sqlDialect) { tableConfig.setSqlDialect(sqlDialect); return plannerContext.createCalciteParser(); } private void checkAlterTableCompact(Operation operation, Map<String, String> staticPartitions) { assertThat(operation).isInstanceOf(SinkModifyOperation.class); SinkModifyOperation modifyOperation = (SinkModifyOperation) operation; assertThat(modifyOperation.getStaticPartitions()) .containsExactlyInAnyOrderEntriesOf(staticPartitions); assertThat(modifyOperation.isOverwrite()).isFalse(); assertThat(modifyOperation.getDynamicOptions()) .containsEntry( TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE); ContextResolvedTable contextResolvedTable = modifyOperation.getContextResolvedTable(); assertThat(contextResolvedTable.getIdentifier()) .isEqualTo(ObjectIdentifier.of("cat1", "db1", "tb1")); assertThat(modifyOperation.getChild()).isInstanceOf(SourceQueryOperation.class); SourceQueryOperation child = (SourceQueryOperation) modifyOperation.getChild(); assertThat(child.getChildren()).isEmpty(); assertThat(child.getDynamicOptions()).containsEntry("k", "v"); assertThat(child.getDynamicOptions()) .containsEntry( TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE); } private static class TestItem { private final String testExpr; @Nullable private Object expectedType; @Nullable private String expectedError; private TestItem(String testExpr) { this.testExpr = testExpr; } static TestItem fromTestExpr(String testExpr) { return new TestItem(testExpr); } TestItem withExpectedType(Object expectedType) { this.expectedType = expectedType; return this; } TestItem withExpectedError(String expectedError) { this.expectedError = expectedError; return this; } @Override public String toString() { return this.testExpr; } } private Operation parseAndConvert(String sql) { final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT); final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT); SqlNode node = parser.parse(sql); return SqlToOperationConverter.convert(planner, catalogManager, node).get(); } }
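The record above exercises Flink's SQL-to-Operation conversion end to end. As a minimal sketch of the assertion pattern these tests follow (reusing the `prepareNonManagedTable`, `parse`, and `assertAlterTableSchema` helpers defined in the context; the test name and added column are hypothetical), a further test might look like:

```java
// Hypothetical test following the pattern shown in this record; helper names
// and the fixture schema come from the surrounding context.
@Test
public void testAlterTableAddSimpleColumn() throws Exception {
    prepareNonManagedTable("tb1", 0);
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    Schema originalSchema =
            catalogManager.getTable(tableIdentifier).get().getTable().getUnresolvedSchema();

    // Parsing yields an AlterTableChangeOperation whose new schema is the
    // original schema plus the added column, appended at the end.
    Operation operation = parse("alter table tb1 add x bigint comment 'new col'");
    assertAlterTableSchema(
            operation,
            tableIdentifier,
            Schema.newBuilder()
                    .fromSchema(originalSchema)
                    .column("x", DataTypes.BIGINT())
                    .withComment("new col")
                    .build());
}
```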
Could you also verify that increasing the bytes would similarly let something in?
public void testRecordTotalTimeMaxActiveThreadsUsed() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStart3 = new CountDownLatch(1); CountDownLatch stop = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, stop); Runnable m2 = createSleepProcessWorkFn(processStart2, stop); Runnable m3 = createSleepProcessWorkFn(processStart3, stop); assertEquals(0, executor.activeCount()); assertEquals(2, executor.maximumThreadCount()); executor.execute(m1, 1); processStart1.await(); assertEquals(1, executor.activeCount()); executor.execute(m2, 1); processStart2.await(); assertEquals(2, executor.activeCount()); executor.execute(m3, 1); assertFalse(processStart3.await(1000, TimeUnit.MILLISECONDS)); assertEquals(0l, executor.allThreadsActiveTime()); stop.countDown(); while (executor.activeCount() != 0) { } assertThat(executor.allThreadsActiveTime(), greaterThan(0l)); executor.shutdown(); }
assertFalse(processStart3.await(1000, TimeUnit.MILLISECONDS));
public void testRecordTotalTimeMaxActiveThreadsUsed() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStart3 = new CountDownLatch(1); CountDownLatch stop = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, stop); Runnable m2 = createSleepProcessWorkFn(processStart2, stop); Runnable m3 = createSleepProcessWorkFn(processStart3, stop); assertEquals(0, executor.activeCount()); assertEquals(2, executor.getMaximumPoolSize()); executor.execute(m1, 1); processStart1.await(); assertEquals(1, executor.activeCount()); executor.execute(m2, 1); processStart2.await(); assertEquals(2, executor.activeCount()); executor.execute(m3, 1); assertFalse(processStart3.await(1000, TimeUnit.MILLISECONDS)); assertEquals(0l, executor.allThreadsActiveTime()); stop.countDown(); while (executor.activeCount() != 0) { Thread.sleep(200); } assertThat(executor.allThreadsActiveTime(), greaterThan(0l)); executor.shutdown(); }
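The fixed body above replaces the busy spin with a sleep between polls. If this wait pattern recurs across tests, it could be factored into a small helper; a sketch, where the `waitUntilIdle` name is hypothetical and the body relies only on `activeCount()` as used in the record:

```java
// Hypothetical helper: poll until the executor reports no active work,
// sleeping between checks instead of spinning. Uses only
// BoundedQueueExecutor.activeCount(), which the tests above already call.
private static void waitUntilIdle(BoundedQueueExecutor executor) throws InterruptedException {
    while (executor.activeCount() != 0) {
        Thread.sleep(200);
    }
}
```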
class BoundedQueueExecutorTest { @Rule public transient Timeout globalTimeout = Timeout.seconds(300); private static final long MAXIMUM_BYTES_OUTSTANDING = 10000000; private static final int DEFAULT_MAX_THREADS = 2; private static final int DEFAULT_THREAD_EXPIRATION_SEC = 60; private BoundedQueueExecutor executor; private Runnable createSleepProcessWorkFn(CountDownLatch start, CountDownLatch stop) { Runnable runnable = () -> { start.countDown(); try { stop.await(); } catch (Exception e) { throw new RuntimeException(e); } }; return runnable; } @Before public void setUp() { this.executor = new BoundedQueueExecutor( DEFAULT_MAX_THREADS, DEFAULT_THREAD_EXPIRATION_SEC, TimeUnit.SECONDS, DEFAULT_MAX_THREADS + 100, MAXIMUM_BYTES_OUTSTANDING, new ThreadFactoryBuilder() .setNameFormat("DataflowWorkUnits-%d") .setDaemon(true) .build()); } @Test public void testScheduleWork() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStop1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStop2 = new CountDownLatch(1); CountDownLatch processStart3 = new CountDownLatch(1); CountDownLatch processStop3 = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, processStop1); Runnable m2 = createSleepProcessWorkFn(processStart2, processStop2); Runnable m3 = createSleepProcessWorkFn(processStart3, processStop3); executor.execute(m1, 1); assertTrue(processStart1.await(1000, TimeUnit.MILLISECONDS)); executor.execute(m2, 1); assertTrue(processStart2.await(1000, TimeUnit.MILLISECONDS)); executor.execute(m3, 1); assertFalse(processStart3.await(1000, TimeUnit.MILLISECONDS)); processStop1.countDown(); assertTrue(processStart3.await(1000, TimeUnit.MILLISECONDS)); processStop2.countDown(); processStop3.countDown(); executor.shutdown(); } @Test public void testOverrideMaximumThreadCount() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStart3 = new CountDownLatch(1); CountDownLatch stop = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, stop); Runnable m2 = createSleepProcessWorkFn(processStart2, stop); Runnable m3 = createSleepProcessWorkFn(processStart3, stop); assertEquals(0, executor.activeCount()); assertEquals(2, executor.maximumThreadCount()); executor.execute(m1, 1); processStart1.await(); assertEquals(1, executor.activeCount()); executor.execute(m2, 1); processStart2.await(); assertEquals(2, executor.activeCount()); executor.execute(m3, 1); assertFalse(processStart3.await(1000, TimeUnit.MILLISECONDS)); executor.setMaximumPoolSize(3, 103); assertEquals(3, executor.maximumThreadCount()); processStart3.await(); assertEquals(3, executor.activeCount()); stop.countDown(); executor.shutdown(); } @Test @Test public void testRecordTotalTimeMaxActiveThreadsUsedWhenMaximumThreadCountUpdated() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStart3 = new CountDownLatch(1); CountDownLatch stop = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, stop); Runnable m2 = createSleepProcessWorkFn(processStart2, stop); Runnable m3 = createSleepProcessWorkFn(processStart3, stop); assertEquals(0, executor.activeCount()); assertEquals(2, executor.maximumThreadCount()); executor.execute(m1, 1); processStart1.await(); assertEquals(1, executor.activeCount()); 
executor.execute(m2, 1); processStart2.await(); assertEquals(2, executor.activeCount()); executor.execute(m3, 1); assertFalse(processStart3.await(1000, TimeUnit.MILLISECONDS)); assertEquals(0l, executor.allThreadsActiveTime()); executor.setMaximumPoolSize(5, 105); stop.countDown(); while (executor.activeCount() != 0) { } assertThat(executor.allThreadsActiveTime(), greaterThan(0l)); executor.shutdown(); } @Test public void testRenderSummaryHtml() throws Exception { String expectedSummaryHtml = "Worker Threads: 0/2<br>/n" + "Maximum Threads: 2<br>/n" + "Active Threads: 0<br>/n" + "Work Queue Size: 0/102<br>/n" + "Work Queue Bytes: 0/10000000<br>/n"; assertEquals(expectedSummaryHtml, executor.summaryHtml()); } }
class BoundedQueueExecutorTest { @Rule public transient Timeout globalTimeout = Timeout.seconds(300); private static final long MAXIMUM_BYTES_OUTSTANDING = 10000000; private static final int DEFAULT_MAX_THREADS = 2; private static final int DEFAULT_THREAD_EXPIRATION_SEC = 60; private BoundedQueueExecutor executor; private Runnable createSleepProcessWorkFn(CountDownLatch start, CountDownLatch stop) { Runnable runnable = () -> { start.countDown(); try { stop.await(); } catch (Exception e) { throw new RuntimeException(e); } }; return runnable; } @Before public void setUp() { this.executor = new BoundedQueueExecutor( DEFAULT_MAX_THREADS, DEFAULT_THREAD_EXPIRATION_SEC, TimeUnit.SECONDS, DEFAULT_MAX_THREADS + 100, MAXIMUM_BYTES_OUTSTANDING, new ThreadFactoryBuilder() .setNameFormat("DataflowWorkUnits-%d") .setDaemon(true) .build()); } @Test public void testScheduleWorkWhenExceedMaximumPoolSize() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStop1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStop2 = new CountDownLatch(1); CountDownLatch processStart3 = new CountDownLatch(1); CountDownLatch processStop3 = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, processStop1); Runnable m2 = createSleepProcessWorkFn(processStart2, processStop2); Runnable m3 = createSleepProcessWorkFn(processStart3, processStop3); executor.execute(m1, 1); processStart1.await(); executor.execute(m2, 1); processStart2.await(); executor.execute(m3, 1); assertFalse(processStart3.await(1000, TimeUnit.MILLISECONDS)); assertFalse(executor.executorQueueIsEmpty()); processStop1.countDown(); processStart3.await(); assertTrue(executor.executorQueueIsEmpty()); processStop2.countDown(); processStop3.countDown(); executor.shutdown(); } @Test public void testScheduleWorkWhenExceedMaximumBytesOutstanding() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStop1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStop2 = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, processStop1); Runnable m2 = createSleepProcessWorkFn(processStart2, processStop2); executor.execute(m1, 10000000); processStart1.await(); Thread m2Runner = new Thread( () -> { executor.execute(m2, 1000); }); m2Runner.start(); assertFalse(processStart2.await(1000, TimeUnit.MILLISECONDS)); assertEquals(Thread.State.WAITING, m2Runner.getState()); assertTrue(executor.executorQueueIsEmpty()); processStop1.countDown(); processStart2.await(); assertEquals(Thread.State.TERMINATED, m2Runner.getState()); processStop2.countDown(); executor.shutdown(); } @Test public void testOverrideMaximumPoolSize() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStart3 = new CountDownLatch(1); CountDownLatch stop = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, stop); Runnable m2 = createSleepProcessWorkFn(processStart2, stop); Runnable m3 = createSleepProcessWorkFn(processStart3, stop); assertEquals(0, executor.activeCount()); assertEquals(2, executor.getMaximumPoolSize()); executor.execute(m1, 1); processStart1.await(); assertEquals(1, executor.activeCount()); executor.execute(m2, 1); processStart2.await(); assertEquals(2, executor.activeCount()); executor.execute(m3, 1); assertFalse(processStart3.await(1000, 
TimeUnit.MILLISECONDS)); executor.setMaximumPoolSize(3, 103); assertEquals(3, executor.getMaximumPoolSize()); processStart3.await(); assertEquals(3, executor.activeCount()); stop.countDown(); executor.shutdown(); } @Test @Test public void testRecordTotalTimeMaxActiveThreadsUsedWhenMaximumPoolSizeUpdated() throws Exception { CountDownLatch processStart1 = new CountDownLatch(1); CountDownLatch processStart2 = new CountDownLatch(1); CountDownLatch processStart3 = new CountDownLatch(1); CountDownLatch stop = new CountDownLatch(1); Runnable m1 = createSleepProcessWorkFn(processStart1, stop); Runnable m2 = createSleepProcessWorkFn(processStart2, stop); Runnable m3 = createSleepProcessWorkFn(processStart3, stop); assertEquals(0, executor.activeCount()); assertEquals(2, executor.getMaximumPoolSize()); executor.execute(m1, 1); processStart1.await(); assertEquals(1, executor.activeCount()); executor.execute(m2, 1); processStart2.await(); assertEquals(2, executor.activeCount()); executor.execute(m3, 1); assertFalse(processStart3.await(1000, TimeUnit.MILLISECONDS)); assertEquals(0l, executor.allThreadsActiveTime()); executor.setMaximumPoolSize(5, 105); stop.countDown(); while (executor.activeCount() != 0) { Thread.sleep(200); } assertThat(executor.allThreadsActiveTime(), greaterThan(0l)); executor.shutdown(); } @Test public void testRenderSummaryHtml() throws Exception { String expectedSummaryHtml = "Worker Threads: 0/2<br>/n" + "Active Threads: 0<br>/n" + "Work Queue Size: 0/102<br>/n" + "Work Queue Bytes: 0/10000000<br>/n"; assertEquals(expectedSummaryHtml, executor.summaryHtml()); } }
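The review comment on this record asks for the converse check as well: that raising the byte budget admits work that was previously blocked. No byte-budget setter appears in the API surface shown here, so a sketch would construct a second executor with a larger `maximumBytesOutstanding` (the constructor parameter from `setUp` above); everything else reuses patterns from this record, and the test name is hypothetical:

```java
// Sketch, assuming only the constructor and methods shown in this record.
// A 10,000,000-byte item saturates the default budget, so an executor built
// with a larger budget should admit a 1,000-byte follow-up immediately.
@Test
public void testLargerByteBudgetAdmitsWork() throws Exception {
    BoundedQueueExecutor bigBudget =
            new BoundedQueueExecutor(
                    DEFAULT_MAX_THREADS,
                    DEFAULT_THREAD_EXPIRATION_SEC,
                    TimeUnit.SECONDS,
                    DEFAULT_MAX_THREADS + 100,
                    MAXIMUM_BYTES_OUTSTANDING + 1000, // room for both items
                    new ThreadFactoryBuilder().setDaemon(true).build());
    CountDownLatch start1 = new CountDownLatch(1);
    CountDownLatch start2 = new CountDownLatch(1);
    CountDownLatch stop = new CountDownLatch(1);
    bigBudget.execute(createSleepProcessWorkFn(start1, stop), 10000000);
    start1.await();
    bigBudget.execute(createSleepProcessWorkFn(start2, stop), 1000);
    // With the enlarged budget the second item starts without blocking.
    assertTrue(start2.await(1000, TimeUnit.MILLISECONDS));
    stop.countDown();
    bigBudget.shutdown();
}
```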
```suggestion
boolean isEnableMemtableOnSinkNode = ((OlapTable) table).getTableProperty().getUseSchemaLightChange()
        ? this.enableMemTableOnSinkNode : false;
```
private void createLoadingTask(Database db, BrokerPendingTaskAttachment attachment) throws UserException { List<Table> tableList = db.getTablesOnIdOrderOrThrowException( Lists.newArrayList(fileGroupAggInfo.getAllTableIds())); List<LoadLoadingTask> newLoadingTasks = Lists.newArrayList(); this.jobProfile = new Profile("BrokerLoadJob " + id + ". " + label, true); ProgressManager progressManager = Env.getCurrentProgressManager(); progressManager.registerProgressSimple(String.valueOf(id)); MetaLockUtils.readLockTables(tableList); try { for (Map.Entry<FileGroupAggKey, List<BrokerFileGroup>> entry : fileGroupAggInfo.getAggKeyToFileGroups().entrySet()) { FileGroupAggKey aggKey = entry.getKey(); List<BrokerFileGroup> brokerFileGroups = entry.getValue(); long tableId = aggKey.getTableId(); OlapTable table = (OlapTable) db.getTableNullable(tableId); boolean isEnableMemtableOnSinkNode = !((OlapTable) table).getTableProperty().getUseSchemaLightChange() ? false : this.enableMemTableOnSinkNode; LoadLoadingTask task = new LoadLoadingTask(db, table, brokerDesc, brokerFileGroups, getDeadlineMs(), getExecMemLimit(), isStrictMode(), isPartialUpdate(), transactionId, this, getTimeZone(), getTimeout(), getLoadParallelism(), getSendBatchParallelism(), getMaxFilterRatio() <= 0, enableProfile ? jobProfile : null, isSingleTabletLoadPerSink(), useNewLoadScanNode(), getPriority(), isEnableMemtableOnSinkNode); UUID uuid = UUID.randomUUID(); TUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()); task.init(loadId, attachment.getFileStatusByTable(aggKey), attachment.getFileNumByTable(aggKey), getUserInfo()); idToTasks.put(task.getSignature(), task); newLoadingTasks.add(task); TransactionState txnState = Env.getCurrentGlobalTransactionMgr() .getTransactionState(dbId, transactionId); if (txnState == null) { throw new UserException("txn does not exist: " + transactionId); } txnState.addTableIndexes(table); if (isPartialUpdate()) { txnState.setSchemaForPartialUpdate(table); } } } finally { MetaLockUtils.readUnlockTables(tableList); } for (LoadTask loadTask : newLoadingTasks) { Env.getCurrentEnv().getLoadingLoadTaskScheduler().submit(loadTask); } }
? false : this.enableMemTableOnSinkNode;
private void createLoadingTask(Database db, BrokerPendingTaskAttachment attachment) throws UserException { List<Table> tableList = db.getTablesOnIdOrderOrThrowException( Lists.newArrayList(fileGroupAggInfo.getAllTableIds())); List<LoadLoadingTask> newLoadingTasks = Lists.newArrayList(); this.jobProfile = new Profile("BrokerLoadJob " + id + ". " + label, true); ProgressManager progressManager = Env.getCurrentProgressManager(); progressManager.registerProgressSimple(String.valueOf(id)); MetaLockUtils.readLockTables(tableList); try { for (Map.Entry<FileGroupAggKey, List<BrokerFileGroup>> entry : fileGroupAggInfo.getAggKeyToFileGroups().entrySet()) { FileGroupAggKey aggKey = entry.getKey(); List<BrokerFileGroup> brokerFileGroups = entry.getValue(); long tableId = aggKey.getTableId(); OlapTable table = (OlapTable) db.getTableNullable(tableId); boolean isEnableMemtableOnSinkNode = ((OlapTable) table).getTableProperty().getUseSchemaLightChange() ? this.enableMemTableOnSinkNode : false; LoadLoadingTask task = new LoadLoadingTask(db, table, brokerDesc, brokerFileGroups, getDeadlineMs(), getExecMemLimit(), isStrictMode(), isPartialUpdate(), transactionId, this, getTimeZone(), getTimeout(), getLoadParallelism(), getSendBatchParallelism(), getMaxFilterRatio() <= 0, enableProfile ? jobProfile : null, isSingleTabletLoadPerSink(), useNewLoadScanNode(), getPriority(), isEnableMemtableOnSinkNode); UUID uuid = UUID.randomUUID(); TUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()); task.init(loadId, attachment.getFileStatusByTable(aggKey), attachment.getFileNumByTable(aggKey), getUserInfo()); idToTasks.put(task.getSignature(), task); newLoadingTasks.add(task); TransactionState txnState = Env.getCurrentGlobalTransactionMgr() .getTransactionState(dbId, transactionId); if (txnState == null) { throw new UserException("txn does not exist: " + transactionId); } txnState.addTableIndexes(table); if (isPartialUpdate()) { txnState.setSchemaForPartialUpdate(table); } } } finally { MetaLockUtils.readUnlockTables(tableList); } for (LoadTask loadTask : newLoadingTasks) { Env.getCurrentEnv().getLoadingLoadTaskScheduler().submit(loadTask); } }
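The suggested rewrite puts the positive condition first. Since both operands are booleans, the ternary can be collapsed further; a sketch of the equivalent form, using the same fields and getters as the method above:

```java
// Equivalent to the suggested ternary `cond ? flag : false`: the result is
// true only when light schema change is enabled AND the session asked for
// memtable-on-sink.
boolean isEnableMemtableOnSinkNode =
        ((OlapTable) table).getTableProperty().getUseSchemaLightChange()
                && this.enableMemTableOnSinkNode;
```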
class BrokerLoadJob extends BulkLoadJob { private static final Logger LOG = LogManager.getLogger(BrokerLoadJob.class); private Profile jobProfile; private boolean enableProfile = false; private boolean enableMemTableOnSinkNode = false; public BrokerLoadJob() { super(EtlJobType.BROKER); } public BrokerLoadJob(long dbId, String label, BrokerDesc brokerDesc, OriginStatement originStmt, UserIdentity userInfo) throws MetaNotFoundException { super(EtlJobType.BROKER, dbId, label, originStmt, userInfo); this.brokerDesc = brokerDesc; if (ConnectContext.get() != null) { enableProfile = ConnectContext.get().getSessionVariable().enableProfile(); enableMemTableOnSinkNode = ConnectContext.get().getSessionVariable().enableMemtableOnSinkNode; } } @Override public void beginTxn() throws LabelAlreadyUsedException, BeginTransactionException, AnalysisException, DuplicatedRequestException, QuotaExceedException, MetaNotFoundException { transactionId = Env.getCurrentGlobalTransactionMgr() .beginTransaction(dbId, Lists.newArrayList(fileGroupAggInfo.getAllTableIds()), label, null, new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), TransactionState.LoadJobSourceType.BATCH_LOAD_JOB, id, getTimeout()); } @Override protected void unprotectedExecuteJob() { LoadTask task = new BrokerLoadPendingTask(this, fileGroupAggInfo.getAggKeyToFileGroups(), brokerDesc, getPriority()); idToTasks.put(task.getSignature(), task); Env.getCurrentEnv().getPendingLoadTaskScheduler().submit(task); } /** * Situation1: When attachment is instance of BrokerPendingTaskAttachment, * this method is called by broker pending task. * LoadLoadingTask will be created after BrokerPendingTask is finished. * Situation2: When attachment is instance of BrokerLoadingTaskAttachment, this method is called by LoadLoadingTask. * CommitTxn will be called after all of LoadingTasks are finished. 
* * @param attachment */ @Override public void onTaskFinished(TaskAttachment attachment) { if (attachment instanceof BrokerPendingTaskAttachment) { onPendingTaskFinished((BrokerPendingTaskAttachment) attachment); } else if (attachment instanceof BrokerLoadingTaskAttachment) { onLoadingTaskFinished((BrokerLoadingTaskAttachment) attachment); } } /** * step1: divide job into loading task * step2: init the plan of task * step3: submit tasks into loadingTaskExecutor * @param attachment BrokerPendingTaskAttachment */ private void onPendingTaskFinished(BrokerPendingTaskAttachment attachment) { writeLock(); try { if (isTxnDone()) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("state", state) .add("error_msg", "this task will be ignored when job is: " + state) .build()); return; } if (finishedTaskIds.contains(attachment.getTaskId())) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("task_id", attachment.getTaskId()) .add("error_msg", "this is a duplicated callback of pending task " + "when broker already has loading task") .build()); return; } finishedTaskIds.add(attachment.getTaskId()); } finally { writeUnlock(); } try { Database db = getDb(); createLoadingTask(db, attachment); } catch (UserException e) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("error_msg", "Failed to divide job into loading task.") .build(), e); cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_RUN_FAIL, e.getMessage()), true, true); return; } catch (RejectedExecutionException e) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("error_msg", "the task queue is full.") .build(), e); cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_RUN_FAIL, e.getMessage()), true, true); return; } loadStartTimestamp = System.currentTimeMillis(); } private void onLoadingTaskFinished(BrokerLoadingTaskAttachment attachment) { writeLock(); try { if (isTxnDone()) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("state", state) .add("error_msg", "this task will be ignored when job is: " + state) .build()); return; } if (finishedTaskIds.contains(attachment.getTaskId())) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("task_id", attachment.getTaskId()) .add("error_msg", "this is a duplicated callback of loading task").build()); return; } finishedTaskIds.add(attachment.getTaskId()); updateLoadingStatus(attachment); if (finishedTaskIds.size() != idToTasks.size()) { return; } } finally { writeUnlock(); } if (LOG.isDebugEnabled()) { LOG.debug(new LogBuilder(LogKey.LOAD_JOB, id) .add("commit_infos", Joiner.on(",").join(commitInfos)) .build()); } if (!checkDataQuality()) { cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_QUALITY_UNSATISFIED, DataQualityException.QUALITY_FAIL_MSG), true, true); return; } Database db = null; List<Table> tableList = null; try { db = getDb(); tableList = db.getTablesOnIdOrderOrThrowException(Lists.newArrayList(fileGroupAggInfo.getAllTableIds())); MetaLockUtils.writeLockTablesOrMetaException(tableList); } catch (MetaNotFoundException e) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("error_msg", "db has been deleted when job is loading") .build(), e); cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.LOAD_RUN_FAIL, e.getMessage()), true, true); return; } try { LOG.info(new LogBuilder(LogKey.LOAD_JOB, id) .add("txn_id", transactionId) .add("msg", "Load job try to commit txn") .build()); Env.getCurrentGlobalTransactionMgr().commitTransaction( dbId, tableList, transactionId, commitInfos, new
LoadJobFinalOperation(id, loadingStatus, progress, loadStartTimestamp, finishTimestamp, state, failMsg)); } catch (UserException e) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("error_msg", "Failed to commit txn with error:" + e.getMessage()) .build(), e); cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.LOAD_RUN_FAIL, e.getMessage()), true, true); } finally { MetaLockUtils.writeUnlockTables(tableList); } } private void writeProfile() { if (!enableProfile) { return; } jobProfile.update(createTimestamp, getSummaryInfo(true), true, Integer.valueOf(sessionVariables.getOrDefault(SessionVariable.PROFILE_LEVEL, "3")), null, false); } private Map<String, String> getSummaryInfo(boolean isFinished) { long currentTimestamp = System.currentTimeMillis(); SummaryBuilder builder = new SummaryBuilder(); builder.profileId(String.valueOf(id)); if (Version.DORIS_BUILD_VERSION_MAJOR == 0) { builder.dorisVersion(Version.DORIS_BUILD_SHORT_HASH); } else { builder.dorisVersion(Version.DORIS_BUILD_VERSION + "-" + Version.DORIS_BUILD_SHORT_HASH); } builder.taskType(ProfileType.LOAD.name()); builder.startTime(TimeUtils.longToTimeString(createTimestamp)); if (isFinished) { builder.endTime(TimeUtils.longToTimeString(currentTimestamp)); builder.totalTime(DebugUtil.getPrettyStringMs(currentTimestamp - createTimestamp)); } builder.taskState("FINISHED"); builder.user(getUserInfo() != null ? getUserInfo().getQualifiedUser() : "N/A"); builder.defaultDb(getDefaultDb()); builder.sqlStatement(getOriginStmt().originStmt); return builder.build(); } private String getDefaultDb() { Database database = Env.getCurrentEnv().getInternalCatalog().getDb(this.dbId).orElse(null); return database == null ? "N/A" : database.getFullName(); } private void updateLoadingStatus(BrokerLoadingTaskAttachment attachment) { loadingStatus.replaceCounter(DPP_ABNORMAL_ALL, increaseCounter(DPP_ABNORMAL_ALL, attachment.getCounter(DPP_ABNORMAL_ALL))); loadingStatus.replaceCounter(DPP_NORMAL_ALL, increaseCounter(DPP_NORMAL_ALL, attachment.getCounter(DPP_NORMAL_ALL))); loadingStatus.replaceCounter(UNSELECTED_ROWS, increaseCounter(UNSELECTED_ROWS, attachment.getCounter(UNSELECTED_ROWS))); if (attachment.getTrackingUrl() != null) { loadingStatus.setTrackingUrl(attachment.getTrackingUrl()); } commitInfos.addAll(attachment.getCommitInfoList()); errorTabletInfos.addAll(attachment.getErrorTabletInfos().stream().limit(Config.max_error_tablet_of_broker_load) .collect(Collectors.toList())); progress = (int) ((double) finishedTaskIds.size() / idToTasks.size() * 100); if (progress == 100) { progress = 99; } } @Override public void updateProgress(Long beId, TUniqueId loadId, TUniqueId fragmentId, long scannedRows, long scannedBytes, boolean isDone) { super.updateProgress(beId, loadId, fragmentId, scannedRows, scannedBytes, isDone); progress = (int) ((double) loadStatistic.getLoadBytes() / loadStatistic.totalFileSizeB * 100); if (progress >= 100) { progress = 99; } } private String increaseCounter(String key, String deltaValue) { long value = 0; if (loadingStatus.getCounters().containsKey(key)) { value = Long.valueOf(loadingStatus.getCounters().get(key)); } if (deltaValue != null) { value += Long.valueOf(deltaValue); } return String.valueOf(value); } @Override public void afterVisible(TransactionState txnState, boolean txnOperated) { super.afterVisible(txnState, txnOperated); writeProfile(); } @Override public String getResourceName() { StorageBackend.StorageType storageType = brokerDesc.getStorageType(); if (storageType == 
StorageBackend.StorageType.BROKER) { return brokerDesc.getName(); } else if (storageType == StorageBackend.StorageType.S3) { return Optional.ofNullable(brokerDesc.getProperties()) .map(o -> o.get(S3Properties.Env.ENDPOINT)) .orElse("s3_cluster"); } else { return storageType.name().toLowerCase().concat("_cluster"); } } }
class BrokerLoadJob extends BulkLoadJob { private static final Logger LOG = LogManager.getLogger(BrokerLoadJob.class); private Profile jobProfile; private boolean enableProfile = false; private boolean enableMemTableOnSinkNode = false; public BrokerLoadJob() { super(EtlJobType.BROKER); } public BrokerLoadJob(long dbId, String label, BrokerDesc brokerDesc, OriginStatement originStmt, UserIdentity userInfo) throws MetaNotFoundException { super(EtlJobType.BROKER, dbId, label, originStmt, userInfo); this.brokerDesc = brokerDesc; if (ConnectContext.get() != null) { enableProfile = ConnectContext.get().getSessionVariable().enableProfile(); enableMemTableOnSinkNode = ConnectContext.get().getSessionVariable().enableMemtableOnSinkNode; } } @Override public void beginTxn() throws LabelAlreadyUsedException, BeginTransactionException, AnalysisException, DuplicatedRequestException, QuotaExceedException, MetaNotFoundException { transactionId = Env.getCurrentGlobalTransactionMgr() .beginTransaction(dbId, Lists.newArrayList(fileGroupAggInfo.getAllTableIds()), label, null, new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), TransactionState.LoadJobSourceType.BATCH_LOAD_JOB, id, getTimeout()); } @Override protected void unprotectedExecuteJob() { LoadTask task = new BrokerLoadPendingTask(this, fileGroupAggInfo.getAggKeyToFileGroups(), brokerDesc, getPriority()); idToTasks.put(task.getSignature(), task); Env.getCurrentEnv().getPendingLoadTaskScheduler().submit(task); } /** * Situation1: When attachment is instance of BrokerPendingTaskAttachment, * this method is called by broker pending task. * LoadLoadingTask will be created after BrokerPendingTask is finished. * Situation2: When attachment is instance of BrokerLoadingTaskAttachment, this method is called by LoadLoadingTask. * CommitTxn will be called after all of LoadingTasks are finished. 
* * @param attachment */ @Override public void onTaskFinished(TaskAttachment attachment) { if (attachment instanceof BrokerPendingTaskAttachment) { onPendingTaskFinished((BrokerPendingTaskAttachment) attachment); } else if (attachment instanceof BrokerLoadingTaskAttachment) { onLoadingTaskFinished((BrokerLoadingTaskAttachment) attachment); } } /** * step1: divide job into loading task * step2: init the plan of task * step3: submit tasks into loadingTaskExecutor * @param attachment BrokerPendingTaskAttachment */ private void onPendingTaskFinished(BrokerPendingTaskAttachment attachment) { writeLock(); try { if (isTxnDone()) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("state", state) .add("error_msg", "this task will be ignored when job is: " + state) .build()); return; } if (finishedTaskIds.contains(attachment.getTaskId())) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("task_id", attachment.getTaskId()) .add("error_msg", "this is a duplicated callback of pending task " + "when broker already has loading task") .build()); return; } finishedTaskIds.add(attachment.getTaskId()); } finally { writeUnlock(); } try { Database db = getDb(); createLoadingTask(db, attachment); } catch (UserException e) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("error_msg", "Failed to divide job into loading task.") .build(), e); cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_RUN_FAIL, e.getMessage()), true, true); return; } catch (RejectedExecutionException e) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("error_msg", "the task queue is full.") .build(), e); cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_RUN_FAIL, e.getMessage()), true, true); return; } loadStartTimestamp = System.currentTimeMillis(); } private void onLoadingTaskFinished(BrokerLoadingTaskAttachment attachment) { writeLock(); try { if (isTxnDone()) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("state", state) .add("error_msg", "this task will be ignored when job is: " + state) .build()); return; } if (finishedTaskIds.contains(attachment.getTaskId())) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("task_id", attachment.getTaskId()) .add("error_msg", "this is a duplicated callback of loading task").build()); return; } finishedTaskIds.add(attachment.getTaskId()); updateLoadingStatus(attachment); if (finishedTaskIds.size() != idToTasks.size()) { return; } } finally { writeUnlock(); } if (LOG.isDebugEnabled()) { LOG.debug(new LogBuilder(LogKey.LOAD_JOB, id) .add("commit_infos", Joiner.on(",").join(commitInfos)) .build()); } if (!checkDataQuality()) { cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_QUALITY_UNSATISFIED, DataQualityException.QUALITY_FAIL_MSG), true, true); return; } Database db = null; List<Table> tableList = null; try { db = getDb(); tableList = db.getTablesOnIdOrderOrThrowException(Lists.newArrayList(fileGroupAggInfo.getAllTableIds())); MetaLockUtils.writeLockTablesOrMetaException(tableList); } catch (MetaNotFoundException e) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("error_msg", "db has been deleted when job is loading") .build(), e); cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.LOAD_RUN_FAIL, e.getMessage()), true, true); return; } try { LOG.info(new LogBuilder(LogKey.LOAD_JOB, id) .add("txn_id", transactionId) .add("msg", "Load job try to commit txn") .build()); Env.getCurrentGlobalTransactionMgr().commitTransaction( dbId, tableList, transactionId, commitInfos, new
LoadJobFinalOperation(id, loadingStatus, progress, loadStartTimestamp, finishTimestamp, state, failMsg)); } catch (UserException e) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("error_msg", "Failed to commit txn with error:" + e.getMessage()) .build(), e); cancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.LOAD_RUN_FAIL, e.getMessage()), true, true); } finally { MetaLockUtils.writeUnlockTables(tableList); } } private void writeProfile() { if (!enableProfile) { return; } jobProfile.update(createTimestamp, getSummaryInfo(true), true, Integer.valueOf(sessionVariables.getOrDefault(SessionVariable.PROFILE_LEVEL, "3")), null, false); } private Map<String, String> getSummaryInfo(boolean isFinished) { long currentTimestamp = System.currentTimeMillis(); SummaryBuilder builder = new SummaryBuilder(); builder.profileId(String.valueOf(id)); if (Version.DORIS_BUILD_VERSION_MAJOR == 0) { builder.dorisVersion(Version.DORIS_BUILD_SHORT_HASH); } else { builder.dorisVersion(Version.DORIS_BUILD_VERSION + "-" + Version.DORIS_BUILD_SHORT_HASH); } builder.taskType(ProfileType.LOAD.name()); builder.startTime(TimeUtils.longToTimeString(createTimestamp)); if (isFinished) { builder.endTime(TimeUtils.longToTimeString(currentTimestamp)); builder.totalTime(DebugUtil.getPrettyStringMs(currentTimestamp - createTimestamp)); } builder.taskState("FINISHED"); builder.user(getUserInfo() != null ? getUserInfo().getQualifiedUser() : "N/A"); builder.defaultDb(getDefaultDb()); builder.sqlStatement(getOriginStmt().originStmt); return builder.build(); } private String getDefaultDb() { Database database = Env.getCurrentEnv().getInternalCatalog().getDb(this.dbId).orElse(null); return database == null ? "N/A" : database.getFullName(); } private void updateLoadingStatus(BrokerLoadingTaskAttachment attachment) { loadingStatus.replaceCounter(DPP_ABNORMAL_ALL, increaseCounter(DPP_ABNORMAL_ALL, attachment.getCounter(DPP_ABNORMAL_ALL))); loadingStatus.replaceCounter(DPP_NORMAL_ALL, increaseCounter(DPP_NORMAL_ALL, attachment.getCounter(DPP_NORMAL_ALL))); loadingStatus.replaceCounter(UNSELECTED_ROWS, increaseCounter(UNSELECTED_ROWS, attachment.getCounter(UNSELECTED_ROWS))); if (attachment.getTrackingUrl() != null) { loadingStatus.setTrackingUrl(attachment.getTrackingUrl()); } commitInfos.addAll(attachment.getCommitInfoList()); errorTabletInfos.addAll(attachment.getErrorTabletInfos().stream().limit(Config.max_error_tablet_of_broker_load) .collect(Collectors.toList())); progress = (int) ((double) finishedTaskIds.size() / idToTasks.size() * 100); if (progress == 100) { progress = 99; } } @Override public void updateProgress(Long beId, TUniqueId loadId, TUniqueId fragmentId, long scannedRows, long scannedBytes, boolean isDone) { super.updateProgress(beId, loadId, fragmentId, scannedRows, scannedBytes, isDone); progress = (int) ((double) loadStatistic.getLoadBytes() / loadStatistic.totalFileSizeB * 100); if (progress >= 100) { progress = 99; } } private String increaseCounter(String key, String deltaValue) { long value = 0; if (loadingStatus.getCounters().containsKey(key)) { value = Long.valueOf(loadingStatus.getCounters().get(key)); } if (deltaValue != null) { value += Long.valueOf(deltaValue); } return String.valueOf(value); } @Override public void afterVisible(TransactionState txnState, boolean txnOperated) { super.afterVisible(txnState, txnOperated); writeProfile(); } @Override public String getResourceName() { StorageBackend.StorageType storageType = brokerDesc.getStorageType(); if (storageType == 
StorageBackend.StorageType.BROKER) { return brokerDesc.getName(); } else if (storageType == StorageBackend.StorageType.S3) { return Optional.ofNullable(brokerDesc.getProperties()) .map(o -> o.get(S3Properties.Env.ENDPOINT)) .orElse("s3_cluster"); } else { return storageType.name().toLowerCase().concat("_cluster"); } } }
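The createLoadingTask body in this row leans on a lock-all / work / unlock-in-finally discipline: MetaLockUtils.readLockTables before the per-file-group loop and readUnlockTables in the finally block, so a thrown UserException cannot leak table locks. Below is a minimal, self-contained Java sketch of that pattern, using plain java.util.concurrent read-write locks as stand-ins for Doris's Table and MetaLockUtils (none of the names in the sketch are the real API):

import java.util.List;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Hypothetical stand-in for a catalog table guarded by a read-write lock.
class GuardedTable {
    final ReadWriteLock lock = new ReentrantReadWriteLock();
}

class LockAllThenWork {
    // Read-lock every table (callers should pass them in a stable id order,
    // as getTablesOnIdOrderOrThrowException does), run the work, and release
    // in a finally block so an exception cannot leave locks held.
    static void withTablesReadLocked(List<GuardedTable> tables, Runnable work) {
        for (GuardedTable t : tables) {
            t.lock.readLock().lock();
        }
        try {
            work.run();
        } finally {
            for (int i = tables.size() - 1; i >= 0; i--) {
                tables.get(i).lock.readLock().unlock();
            }
        }
    }
}

The stable id order matters most for the write-lock path used later in the job (writeLockTablesOrMetaException): acquiring locks in one consistent order is what keeps two concurrent jobs from deadlocking on each other's tables.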
I think you should use the TCCL (thread context class loader) directly here.
private static InputStream getTikaConfigStream(TikaConfiguration config, String tikaXmlConfiguration) { InputStream is; if (config.tikaConfigPath.isPresent()) { is = TikaRecorder.class.getResourceAsStream(config.tikaConfigPath.get()); if (is == null) { is = Thread.currentThread().getContextClassLoader().getResourceAsStream(config.tikaConfigPath.get()); } if (is == null) { final String errorMessage = "tika-config.xml can not be found at " + config.tikaConfigPath.get(); throw new TikaParseException(errorMessage); } } else { is = new ByteArrayInputStream(tikaXmlConfiguration.getBytes(StandardCharsets.UTF_8)); } return is; }
is = Thread.currentThread().getContextClassLoader().getResourceAsStream(config.tikaConfigPath.get());
private static InputStream getTikaConfigStream(TikaConfiguration config, String tikaXmlConfiguration) { InputStream is; if (config.tikaConfigPath.isPresent()) { is = Thread.currentThread().getContextClassLoader().getResourceAsStream(config.tikaConfigPath.get()); if (is == null) { final String errorMessage = "tika-config.xml can not be found at " + config.tikaConfigPath.get(); throw new TikaParseException(errorMessage); } } else { is = new ByteArrayInputStream(tikaXmlConfiguration.getBytes(StandardCharsets.UTF_8)); } return is; }
class TikaRecorder { public void initTikaParser(BeanContainer container, TikaConfiguration config, String tikaXmlConfiguration) { TikaParser parser = initializeParser(config, tikaXmlConfiguration); TikaParserProducer producer = container.instance(TikaParserProducer.class); producer.initialize(parser); } private TikaParser initializeParser(TikaConfiguration config, String tikaXmlConfiguration) { TikaConfig tikaConfig; try (InputStream stream = getTikaConfigStream(config, tikaXmlConfiguration)) { tikaConfig = new TikaConfig(stream); } catch (Exception ex) { final String errorMessage = "Invalid tika-config.xml"; throw new TikaParseException(errorMessage, ex); } Parser nativeParser = new AutoDetectParser(tikaConfig); if (!config.appendEmbeddedContent) { nativeParser = new RecursiveParserWrapper(nativeParser, true); } return new TikaParser(nativeParser, config.appendEmbeddedContent); } }
class TikaRecorder { public void initTikaParser(BeanContainer container, TikaConfiguration config, String tikaXmlConfiguration) { TikaParser parser = initializeParser(config, tikaXmlConfiguration); TikaParserProducer producer = container.instance(TikaParserProducer.class); producer.initialize(parser); } private TikaParser initializeParser(TikaConfiguration config, String tikaXmlConfiguration) { TikaConfig tikaConfig; try (InputStream stream = getTikaConfigStream(config, tikaXmlConfiguration)) { tikaConfig = new TikaConfig(stream); } catch (Exception ex) { final String errorMessage = "Invalid tika-config.xml"; throw new TikaParseException(errorMessage, ex); } Parser nativeParser = new AutoDetectParser(tikaConfig); if (!config.appendEmbeddedContent) { nativeParser = new RecursiveParserWrapper(nativeParser, true); } return new TikaParser(nativeParser, config.appendEmbeddedContent); } }
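The before/after pair above is the reviewer's suggestion applied verbatim: drop the initial TikaRecorder.class.getResourceAsStream attempt and ask the thread context class loader directly. A small hedged sketch of TCCL-based resource lookup follows; the helper name, path normalization, and exception are illustrative, not the Quarkus/Tika API:

import java.io.InputStream;

class ResourceLoading {
    // Resolve a classpath resource through the thread context class loader.
    // Note the path convention: ClassLoader#getResourceAsStream takes a path
    // relative to the classpath root with no leading slash, whereas
    // Class#getResourceAsStream treats a slash-less path as package-relative.
    static InputStream open(String path) {
        String normalized = path.startsWith("/") ? path.substring(1) : path;
        ClassLoader tccl = Thread.currentThread().getContextClassLoader();
        InputStream is = tccl != null ? tccl.getResourceAsStream(normalized) : null;
        if (is == null) {
            throw new IllegalStateException("resource not found: " + path);
        }
        return is;
    }
}

In container and framework environments the TCCL is the loader that actually sees application resources (deployment class loaders, hot reload), which is why it is preferred over the defining class loader of a framework class such as TikaRecorder.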
Resolving this; it should be handled depending on the fix for https://github.com/ballerina-platform/ballerina-lang/issues/37097
public void visit(BLangObjectConstructorExpression objectCtorExpression, AnalyzerData data) { BLangClassDefinition classNode = objectCtorExpression.classNode; classNode.oceEnvData.capturedClosureEnv = data.env; BLangClassDefinition originalClass = classNode.oceEnvData.originalClass; if (originalClass.cloneRef != null && !objectCtorExpression.defined) { classNode = (BLangClassDefinition) originalClass.cloneRef; symbolEnter.defineClassDefinition(classNode, data.env); objectCtorExpression.defined = true; } BObjectType objectType; if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) { objectType = (BObjectType) objectCtorExpression.classNode.getBType(); BType effectiveType = Types.getEffectiveType(Types.getReferredType(objectCtorExpression.expectedType)); if (effectiveType.tag == TypeTags.OBJECT) { BObjectType expObjType = (BObjectType) Types.getReferredType(effectiveType); objectType.typeIdSet = expObjType.typeIdSet; } else if (effectiveType.tag != TypeTags.NONE) { if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) { dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR, objectCtorExpression.expectedType); data.resultType = symTable.semanticError; return; } } } BLangTypeInit cIExpr = objectCtorExpression.typeInit; BType actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, data.env); if (actualType == symTable.semanticError) { data.resultType = symTable.semanticError; return; } BObjectType actualObjectType = (BObjectType) actualType; List<BLangType> typeRefs = classNode.typeRefs; SymbolEnv typeDefEnv = SymbolEnv.createObjectConstructorObjectEnv(classNode, data.env); classNode.oceEnvData.typeInit = objectCtorExpression.typeInit; dlog.unmute(); if (Symbols.isFlagOn(data.expType.flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, false, data); } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, true, data); } else { semanticAnalyzer.analyzeNode(classNode, typeDefEnv); } dlog.unmute(); markConstructedObjectIsolatedness(actualObjectType); if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation, data); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); } else { if (!isValidInitInvocation(cIExpr, (BObjectType) actualType, data)) { return; } } if (cIExpr.initInvocation.getBType() == null) { cIExpr.initInvocation.setBType(symTable.nilType); } BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType(), data); data.resultType = types.checkType(cIExpr, actualTypeInitType, data.expType); }
BType effectiveType = Types.getEffectiveType(Types.getReferredType(objectCtorExpression.expectedType));
public void visit(BLangObjectConstructorExpression objectCtorExpression, AnalyzerData data) { BLangClassDefinition classNode = objectCtorExpression.classNode; classNode.oceEnvData.capturedClosureEnv = data.env; BLangClassDefinition originalClass = classNode.oceEnvData.originalClass; if (originalClass.cloneRef != null && !objectCtorExpression.defined) { classNode = (BLangClassDefinition) originalClass.cloneRef; symbolEnter.defineClassDefinition(classNode, data.env); objectCtorExpression.defined = true; } BObjectType objectType; if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) { objectType = (BObjectType) objectCtorExpression.classNode.getBType(); BType effectiveType = Types.getEffectiveType(Types.getReferredType(objectCtorExpression.expectedType)); if (effectiveType.tag == TypeTags.OBJECT) { BObjectType expObjType = (BObjectType) Types.getReferredType(effectiveType); objectType.typeIdSet = expObjType.typeIdSet; } else if (effectiveType.tag != TypeTags.NONE) { if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) { dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR, objectCtorExpression.expectedType); data.resultType = symTable.semanticError; return; } } } BLangTypeInit cIExpr = objectCtorExpression.typeInit; BType actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, data.env); if (actualType == symTable.semanticError) { data.resultType = symTable.semanticError; return; } BObjectType actualObjectType = (BObjectType) actualType; List<BLangType> typeRefs = classNode.typeRefs; SymbolEnv typeDefEnv = SymbolEnv.createObjectConstructorObjectEnv(classNode, data.env); classNode.oceEnvData.typeInit = objectCtorExpression.typeInit; dlog.unmute(); if (Symbols.isFlagOn(data.expType.flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, false, data); } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, true, data); } else { semanticAnalyzer.analyzeNode(classNode, typeDefEnv); } dlog.unmute(); markConstructedObjectIsolatedness(actualObjectType); if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation, data); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); } else { if (!isValidInitInvocation(cIExpr, (BObjectType) actualType, data)) { return; } } if (cIExpr.initInvocation.getBType() == null) { cIExpr.initInvocation.setBType(symTable.nilType); } BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType(), data); data.resultType = types.checkType(cIExpr, actualTypeInitType, data.expType); }
class InferredTupleDetails { List<BType> fixedMemberTypes = new ArrayList<>(); List<BType> restMemberTypes = new ArrayList<>(); }
class InferredTupleDetails { List<BType> fixedMemberTypes = new ArrayList<>(); List<BType> restMemberTypes = new ArrayList<>(); }
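The target line in this row unwraps the expected type twice before any tag check: Types.getReferredType peels off a named type reference, Types.getEffectiveType peels off a read-only intersection, and only the result's tag is compared against TypeTags.OBJECT. Here is a toy Java model of that unwrap-before-dispatch idea, with invented miniature type classes (nothing below is the Ballerina compiler's real hierarchy):

// Tag-based dispatch is only sound on a fully unwrapped type: a type may be
// hidden behind a named reference or an intersection wrapper.
abstract class MiniType { abstract String tag(); }

class ObjectT extends MiniType { String tag() { return "OBJECT"; } }

class RefT extends MiniType {
    final MiniType referred;
    RefT(MiniType referred) { this.referred = referred; }
    String tag() { return "TYPEREFDESC"; }
}

class IntersectionT extends MiniType {
    final MiniType effective;
    IntersectionT(MiniType effective) { this.effective = effective; }
    String tag() { return "INTERSECTION"; }
}

class Unwrap {
    // Keep peeling wrappers until a concrete type is reached; the compiler
    // code above applies one unwrap of each kind, this loops for generality.
    static MiniType effective(MiniType t) {
        while (t instanceof RefT || t instanceof IntersectionT) {
            t = (t instanceof RefT) ? ((RefT) t).referred : ((IntersectionT) t).effective;
        }
        return t;
    }

    public static void main(String[] args) {
        MiniType wrapped = new RefT(new IntersectionT(new ObjectT()));
        System.out.println(Unwrap.effective(wrapped).tag()); // OBJECT
    }
}

Skipping either unwrap would make the tag read TYPEREFDESC or INTERSECTION, so the OBJECT branch that copies the typeIdSet would silently not run.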
Isn't this similar to L4017-L4035? Shall we check if we can extract this logic out to a method?
public void visit(BLangBinaryExpr binaryExpr) { if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) { BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType); BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType); if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) { resultType = symTable.semanticError; return; } resultType = BUnionType.create(null, lhsResultType, rhsResultType); return; } checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr); SymbolEnv rhsExprEnv; BType lhsType; if (binaryExpr.lhsExpr instanceof BLangNumericLiteral) { switch (binaryExpr.opKind) { case ADD: case SUB: case MUL: case DIV: if (binaryExpr.expectedType.tag != TypeTags.NONE && binaryExpr.expectedType.tag != TypeTags.ANY) { lhsType = checkExpectedTypeCompatibility(binaryExpr.lhsExpr, binaryExpr.expectedType, env); } else { lhsType = checkExpr(binaryExpr.lhsExpr, env);; } break; default: lhsType = checkExpr(binaryExpr.lhsExpr, env); } } else { lhsType = checkExpr(binaryExpr.lhsExpr, env); } if (binaryExpr.opKind == OperatorKind.AND) { rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true); } else if (binaryExpr.opKind == OperatorKind.OR) { rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env); } else { rhsExprEnv = env; } BType rhsType; if (binaryExpr.rhsExpr instanceof BLangNumericLiteral) { switch (binaryExpr.opKind) { case ADD: case SUB: case MUL: case DIV: if (binaryExpr.expectedType.tag != TypeTags.NONE && binaryExpr.expectedType.tag != TypeTags.ANY) { rhsType = checkExpectedTypeCompatibility(binaryExpr.rhsExpr, binaryExpr.expectedType, rhsExprEnv); } else { rhsType = checkExpectedTypeCompatibility(binaryExpr.rhsExpr, lhsType, rhsExprEnv); } break; default: rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv); } } else { rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv); } BType actualType = symTable.semanticError; switch (binaryExpr.opKind) { case ADD: BType leftConstituent = getXMLConstituents(lhsType); BType rightConstituent = getXMLConstituents(rhsType); if (leftConstituent != null && rightConstituent != null) { actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null); break; } default: if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) { BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType, binaryExpr); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind, lhsType, rhsType); } else { if ((binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL) && (couldHoldTableValues(lhsType, new ArrayList<>()) && couldHoldTableValues(rhsType, new ArrayList<>()))) { dlog.error(binaryExpr.pos, DiagnosticErrorCode.EQUALITY_NOT_YET_SUPPORTED, TABLE_TNAME); } binaryExpr.opSymbol = (BOperatorSymbol) 
opSymbol; actualType = opSymbol.type.getReturnType(); } } } resultType = types.checkType(binaryExpr, actualType, expType); }
rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
public void visit(BLangBinaryExpr binaryExpr) { if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) { BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType); BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType); if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) { resultType = symTable.semanticError; return; } resultType = BUnionType.create(null, lhsResultType, rhsResultType); return; } checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr); SymbolEnv rhsExprEnv; BType lhsType; if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) { lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr); } else { lhsType = checkExpr(binaryExpr.lhsExpr, env); } if (binaryExpr.opKind == OperatorKind.AND) { rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true); } else if (binaryExpr.opKind == OperatorKind.OR) { rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env); } else { rhsExprEnv = env; } BType rhsType; if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) { rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr); } else { rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv); } BType actualType = symTable.semanticError; switch (binaryExpr.opKind) { case ADD: BType leftConstituent = getXMLConstituents(lhsType); BType rightConstituent = getXMLConstituents(rhsType); if (leftConstituent != null && rightConstituent != null) { actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null); break; } default: if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) { BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType, binaryExpr); } if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType); } if (opSymbol == symTable.notFoundSymbol) { dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind, lhsType, rhsType); } else { if ((binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL) && (couldHoldTableValues(lhsType, new ArrayList<>()) && couldHoldTableValues(rhsType, new ArrayList<>()))) { dlog.error(binaryExpr.pos, DiagnosticErrorCode.EQUALITY_NOT_YET_SUPPORTED, TABLE_TNAME); } binaryExpr.opSymbol = (BOperatorSymbol) opSymbol; actualType = opSymbol.type.getReturnType(); } } } resultType = types.checkType(binaryExpr, actualType, expType); }
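The revised method above answers the reviewer by collapsing the two near-identical numeric-literal branches (one for lhsExpr, one for rhsExpr) into a single helper, checkAndGetType, invoked once per operand. A generic, hedged Java illustration of that extract-method move follows; all names and the toy inference are invented for the example:

class ExtractMethodSketch {
    // One helper replaces two duplicated if/else blocks: when the expected
    // type is a widening numeric type, check against it; otherwise fall back
    // to the operand's own inferred type.
    static String typeOfOperand(String operand, String expectedType) {
        if (expectedType.equals("float") || expectedType.equals("decimal")) {
            return expectedType;
        }
        return operand.contains(".") ? "float" : "int";
    }

    public static void main(String[] args) {
        String expected = "decimal";
        System.out.println(typeOfOperand("1", expected));   // decimal
        System.out.println(typeOfOperand("2.5", expected)); // decimal
    }
}

Beyond deduplication, the extraction also tightened the condition: the before-version special-cased four operator kinds per operand and excluded NONE/ANY expected types, while the revised code keys directly off the expected type being FLOAT or DECIMAL, which is the actual property the widening depends on.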
class TypeChecker extends BLangNodeVisitor { private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>(); private static Set<String> listLengthModifierFunctions = new HashSet<>(); private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>(); private static final String TABLE_TNAME = "table"; private static final String LIST_LANG_LIB = "lang.array"; private static final String MAP_LANG_LIB = "lang.map"; private static final String TABLE_LANG_LIB = "lang.table"; private static final String VALUE_LANG_LIB = "lang.value"; private static final String XML_LANG_LIB = "lang.xml"; private static final String FUNCTION_NAME_PUSH = "push"; private static final String FUNCTION_NAME_POP = "pop"; private static final String FUNCTION_NAME_SHIFT = "shift"; private static final String FUNCTION_NAME_UNSHIFT = "unshift"; private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType"; private Names names; private SymbolTable symTable; private SymbolEnter symbolEnter; private SymbolResolver symResolver; private NodeCloner nodeCloner; private Types types; private BLangDiagnosticLog dlog; private SymbolEnv env; private boolean isTypeChecked; private TypeNarrower typeNarrower; private TypeParamAnalyzer typeParamAnalyzer; private BLangAnonymousModelHelper anonymousModelHelper; private SemanticAnalyzer semanticAnalyzer; private Unifier unifier; private boolean nonErrorLoggingCheck = false; private int letCount = 0; private Stack<SymbolEnv> queryEnvs, prevEnvs; private Stack<BLangSelectClause> selectClauses; private BLangMissingNodesHelper missingNodesHelper; /** * Expected types or inherited types. */ private BType expType; private BType resultType; private DiagnosticCode diagCode; static { listLengthModifierFunctions.add(FUNCTION_NAME_PUSH); listLengthModifierFunctions.add(FUNCTION_NAME_POP); listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT); listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT); modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeAll"); add("setLength"); add("reverse"); add("sort"); add("pop"); add("push"); add("shift"); add("unshift"); }}); modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{ add("put"); add("add"); add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{ add("mergeJson"); }}); modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{ add("setName"); add("setChildren"); add("strip"); }}); } public static TypeChecker getInstance(CompilerContext context) { TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY); if (typeChecker == null) { typeChecker = new TypeChecker(context); } return typeChecker; } public TypeChecker(CompilerContext context) { context.put(TYPE_CHECKER_KEY, this); this.names = Names.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.typeNarrower = TypeNarrower.getInstance(context); this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.semanticAnalyzer = SemanticAnalyzer.getInstance(context); 
this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context); this.selectClauses = new Stack<>(); this.queryEnvs = new Stack<>(); this.prevEnvs = new Stack<>(); this.unifier = new Unifier(); } public BType checkExpr(BLangExpression expr, SymbolEnv env) { return checkExpr(expr, env, symTable.noType); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) { return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) { if (expr.typeChecked) { return expr.getBType(); } if (expType.tag == TypeTags.INTERSECTION) { expType = ((BIntersectionType) expType).effectiveType; } SymbolEnv prevEnv = this.env; BType preExpType = this.expType; DiagnosticCode preDiagCode = this.diagCode; this.env = env; this.diagCode = diagCode; this.expType = expType; this.isTypeChecked = true; expr.expectedType = expType; expr.accept(this); if (resultType.tag == TypeTags.INTERSECTION) { resultType = ((BIntersectionType) resultType).effectiveType; } expr.setBType(resultType); expr.typeChecked = isTypeChecked; this.env = prevEnv; this.expType = preExpType; this.diagCode = preDiagCode; validateAndSetExprExpectedType(expr); return resultType; } private void validateAndSetExprExpectedType(BLangExpression expr) { if (resultType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null && expr.expectedType.tag == TypeTags.MAP && expr.getBType().tag == TypeTags.RECORD) { return; } expr.expectedType = resultType; } public void visit(BLangLiteral literalExpr) { BType literalType = setLiteralValueAndGetType(literalExpr, expType); if (literalType == symTable.semanticError || literalExpr.isFiniteContext) { return; } resultType = types.checkType(literalExpr, literalType, expType); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { checkXMLNamespacePrefixes(xmlElementAccess.filters); checkExpr(xmlElementAccess.expr, env, symTable.xmlType); resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { checkXMLNamespacePrefixes(xmlNavigation.filters); if (xmlNavigation.childIndex != null) { checkExpr(xmlNavigation.childIndex, env, symTable.intType); } BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType); if (exprType.tag == TypeTags.UNION) { dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS, xmlNavigation.expr.getBType()); } BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN ? 
symTable.xmlType : symTable.xmlElementSeqType; types.checkType(xmlNavigation, actualType, expType); if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { resultType = symTable.xmlType; } else { resultType = symTable.xmlElementSeqType; } } private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) { for (BLangXMLElementFilter filter : filters) { if (!filter.namespace.isEmpty()) { Name nsName = names.fromString(filter.namespace); BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName); filter.namespaceSymbol = nsSymbol; if (nsSymbol == symTable.notFoundSymbol) { dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName); } } } } private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) { BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag); Object literalValue = literalExpr.value; if (literalType.tag == TypeTags.INT || literalType.tag == TypeTags.BYTE) { if (expType.tag == TypeTags.FLOAT) { literalType = symTable.floatType; literalExpr.value = ((Long) literalValue).doubleValue(); } else if (expType.tag == TypeTags.DECIMAL && !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) { literalType = symTable.decimalType; literalExpr.value = String.valueOf(literalValue); } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) { literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue); if (literalType == symTable.semanticError) { return symTable.semanticError; } } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, 
symTable.unsigned32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); BType intSubType = null; boolean intOrIntCompatibleTypeFound = false; for (BType memType : memberTypes) { if ((memType.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(memType.tag)) || memType.tag == TypeTags.BYTE) { intSubType = memType; } else if (memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) { intOrIntCompatibleTypeFound = true; } } if (intOrIntCompatibleTypeFound) { return setLiteralValueAndGetType(literalExpr, symTable.intType); } if (intSubType != null) { return setLiteralValueAndGetType(literalExpr, intSubType); } BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) { return setLiteralValueAndGetType(literalExpr, symTable.byteType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) { return setLiteralValueAndGetType(literalExpr, symTable.floatType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } } } else if (literalType.tag == TypeTags.FLOAT) { String literal = String.valueOf(literalValue); String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal); boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal); if (expType.tag == TypeTags.DECIMAL) { if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) { dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.floatType); resultType = symTable.semanticError; return resultType; } literalType = symTable.decimalType; literalExpr.value = numericLiteral; } else if (expType.tag == TypeTags.FLOAT) { literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral)); } else if (expType.tag 
== TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (!isDiscriminatedFloat && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType); if (unionMember != symTable.noType) { return unionMember; } } } else if (literalType.tag == TypeTags.DECIMAL) { return decimalLiteral(literalValue, literalExpr, expType); } else if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) { if (expType.tag == TypeTags.CHAR_STRING) { return symTable.charStringType; } if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); for (BType memType : memberTypes) { if (TypeTags.isStringTypeTag(memType.tag)) { return setLiteralValueAndGetType(literalExpr, memType); } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) { return setLiteralValueAndGetType(literalExpr, symTable.charStringType); } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType, literalExpr)) { setLiteralValueForFiniteType(literalExpr, symTable.charStringType); return literalType; } } } boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else { if (this.expType.tag == TypeTags.FINITE) { boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else if (this.expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) this.expType; boolean foundMember = unionType.getMemberTypes() .stream() .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr)); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } } if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) { literalType = new BArrayType(symTable.byteType); } return literalType; } private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) { Set<BType> memberTypes = expType.getMemberTypes(); if (memberTypes.stream() .anyMatch(memType -> memType.tag == desiredType.tag || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) { return setLiteralValueAndGetType(literalExpr, desiredType); } BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType); if (finiteType != 
symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } return symTable.noType; } private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType, int targetMemberTypeTag) { for (BLangExpression valueExpr : finiteType.getValueSpace()) { if (valueExpr.getBType().tag == targetMemberTypeTag && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) { return true; } } return false; } private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) { String literal = String.valueOf(literalValue); if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) { dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.decimalType); resultType = symTable.semanticError; return resultType; } if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType); if (unionMember != symTable.noType) { return unionMember; } } literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal); resultType = symTable.decimalType; return symTable.decimalType; } private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) { types.setImplicitCastExpr(literalExpr, type, this.expType); this.resultType = type; literalExpr.isFiniteContext = true; } private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) { List<BFiniteType> finiteTypeMembers = unionType.getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(memFiniteType -> (BFiniteType) memFiniteType) .collect(Collectors.toList()); if (finiteTypeMembers.isEmpty()) { return symTable.semanticError; } int tag = matchType.tag; Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>(); for (BFiniteType finiteType : finiteTypeMembers) { Set<BLangExpression> set = new HashSet<>(); for (BLangExpression expression : finiteType.getValueSpace()) { if (expression.getBType().tag == tag) { set.add(expression); } } matchedValueSpace.addAll(set); } if (matchedValueSpace.isEmpty()) { return symTable.semanticError; } return new BFiniteType(null, matchedValueSpace); } private BType getIntLiteralType(Location location, BType expType, BType literalType, Object literalValue) { switch (expType.tag) { case TypeTags.INT: return symTable.intType; case TypeTags.BYTE: if (types.isByteLiteralValue((Long) literalValue)) { return symTable.byteType; } break; case TypeTags.SIGNED32_INT: if (types.isSigned32LiteralValue((Long) literalValue)) { return symTable.signed32IntType; } break; case TypeTags.SIGNED16_INT: if (types.isSigned16LiteralValue((Long) literalValue)) { return symTable.signed16IntType; } break; case TypeTags.SIGNED8_INT: if (types.isSigned8LiteralValue((Long) literalValue)) { return symTable.signed8IntType; } break; case TypeTags.UNSIGNED32_INT: if (types.isUnsigned32LiteralValue((Long) literalValue)) { return symTable.unsigned32IntType; } break; case TypeTags.UNSIGNED16_INT: if 
(types.isUnsigned16LiteralValue((Long) literalValue)) { return symTable.unsigned16IntType; } break; case TypeTags.UNSIGNED8_INT: if (types.isUnsigned8LiteralValue((Long) literalValue)) { return symTable.unsigned8IntType; } break; default: } dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, literalType); resultType = symTable.semanticError; return resultType; } @Override public void visit(BLangListConstructorExpr listConstructor) { if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.READONLY) { BType inferredType = getInferredTupleType(listConstructor, expType); resultType = inferredType == symTable.semanticError ? symTable.semanticError : types.checkType(listConstructor, inferredType, expType); return; } resultType = checkListConstructorCompatibility(expType, listConstructor); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) { List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { resultType = symTable.semanticError; return; } } if (tableConstructorExpr.recordLiteralList.size() == 0) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE); resultType = symTable.semanticError; return; } BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr); BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { recordLiteral.setBType(inherentMemberType); } if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) { resultType = symTable.semanticError; return; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } resultType = tableType; return; } BType applicableExpType = expType.tag == TypeTags.INTERSECTION ? 
((BIntersectionType) expType).effectiveType : expType; if (applicableExpType.tag == TypeTags.TABLE) { List<BType> memTypes = new ArrayList<>(); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { BLangRecordLiteral clonedExpr = recordLiteral; if (this.nonErrorLoggingCheck) { clonedExpr.cloneAttempt++; clonedExpr = nodeCloner.clone(recordLiteral); } BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint); if (recordType == symTable.semanticError) { resultType = symTable.semanticError; return; } memTypes.add(recordType); } if (((BTableType) applicableExpType).constraint.tag == TypeTags.MAP && ((BTableType) applicableExpType).isTypeInlineDefined) { validateMapConstraintTable(tableConstructorExpr, applicableExpType); return; } if (!(validateTableType((BTableType) applicableExpType, tableConstructorExpr.recordLiteralList) && validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) { resultType = symTable.semanticError; return; } BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType), null); if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) { tableType.flags |= Flags.READONLY; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } BTableType expectedTableType = (BTableType) applicableExpType; if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) { tableType.fieldNameList = expectedTableType.fieldNameList; } resultType = tableType; } else if (applicableExpType.tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); List<BType> matchingTypes = new ArrayList<>(); BUnionType expectedType = (BUnionType) applicableExpType; for (BType memType : expectedType.getMemberTypes()) { dlog.resetErrorCount(); BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; clonedTableExpr = nodeCloner.clone(tableConstructorExpr); } BType resultType = checkExpr(clonedTableExpr, env, memType); if (resultType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(matchingTypes, resultType)) { matchingTypes.add(resultType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (matchingTypes.isEmpty()) { BLangTableConstructorExpr exprToLog = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; exprToLog = nodeCloner.clone(tableConstructorExpr); } dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getInferredTableType(exprToLog)); } else if (matchingTypes.size() != 1) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); } else { resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0)); return; } resultType = symTable.semanticError; } else { resultType = symTable.semanticError; } } private BType getInferredTableType(BLangTableConstructorExpr exprToLog) { List<BType> memTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } return new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, exprToLog), null); } private boolean checkKeySpecifier(BLangTableConstructorExpr 
tableConstructorExpr, BTableType tableType) { if (tableConstructorExpr.tableKeySpecifier != null) { if (!(validateTableKeyValue(getTableKeyNameList(tableConstructorExpr.tableKeySpecifier), tableConstructorExpr.recordLiteralList))) { resultType = symTable.semanticError; return true; } tableType.fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); } return false; } private BType inferTableMemberType(List<BType> memTypes, BType expType) { if (memTypes.isEmpty()) { return ((BTableType) expType).constraint; } LinkedHashSet<BType> result = new LinkedHashSet<>(); result.add(memTypes.get(0)); BUnionType unionType = BUnionType.create(null, result); for (int i = 1; i < memTypes.size(); i++) { BType source = memTypes.get(i); if (!types.isAssignable(source, unionType)) { result.add(source); unionType = BUnionType.create(null, result); } } if (unionType.getMemberTypes().size() == 1) { return memTypes.get(0); } return unionType; } private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) { BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier; List<String> keySpecifierFieldNames = new ArrayList<>(); Set<BField> allFieldSet = new LinkedHashSet<>(); for (BType memType : memTypes) { allFieldSet.addAll(((BRecordType) memType).fields.values()); } Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet); for (BType memType : memTypes) { commonFieldSet.retainAll(((BRecordType) memType).fields.values()); } List<String> requiredFieldNames = new ArrayList<>(); if (keySpecifier != null) { for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) { requiredFieldNames.add(((BLangIdentifier) identifierNode).value); keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value); } } List<String> fieldNames = new ArrayList<>(); for (BField field : allFieldSet) { String fieldName = field.name.value; if (fieldNames.contains(fieldName)) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY, fieldName); return symTable.semanticError; } fieldNames.add(fieldName); boolean isOptional = true; for (BField commonField : commonFieldSet) { if (commonField.name.value.equals(fieldName)) { isOptional = false; requiredFieldNames.add(commonField.name.value); } } if (isOptional) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL)); } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) | Flags.asMask(EnumSet.of(Flag.READONLY)); } else if (requiredFieldNames.contains(fieldName)) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)); } } return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos); } private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, Location pos) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL); for (BField field : allFieldSet) { recordSymbol.scope.define(field.name, field.symbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = allFieldSet.stream().collect(getFieldCollector()); recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
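/* Register the inferred row record type as a type definition in the enclosing package so that later compiler phases can resolve it like any user-defined record. */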
TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); recordType.sealed = true; recordType.restFieldType = symTable.noType; return recordType; } private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() { BinaryOperator<BField> mergeFunc = (u, v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; return Collectors.toMap(field -> field.name.value, Function.identity(), mergeFunc, LinkedHashMap::new); } private boolean validateTableType(BTableType tableType, List<BLangRecordLiteral> recordLiterals) { BType constraint = tableType.constraint; if (tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType)) { dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint); resultType = symTable.semanticError; return false; } List<String> fieldNameList = tableType.fieldNameList; if (fieldNameList != null) { boolean isKeySpecifierValidated = !tableType.isTypeInlineDefined || validateKeySpecifier(fieldNameList, constraint.tag != TypeTags.INTERSECTION ? constraint : ((BIntersectionType) constraint).effectiveType, tableType.keyPos); return (isKeySpecifierValidated && validateTableKeyValue(fieldNameList, recordLiterals)); } return true; } private boolean validateTableKeyValue(List<String> keySpecifierFieldNames, List<BLangRecordLiteral> recordLiterals) { for (String fieldName : keySpecifierFieldNames) { for (BLangRecordLiteral recordLiteral : recordLiterals) { BLangRecordKeyValueField recordKeyValueField = getRecordKeyValueField(recordLiteral, fieldName); if (recordKeyValueField != null && isConstExpression(recordKeyValueField.getValue())) { continue; } dlog.error(recordLiteral.pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName); resultType = symTable.semanticError; return false; } } return true; } private boolean isConstExpression(BLangExpression expression) { switch(expression.getKind()) { case LITERAL: case NUMERIC_LITERAL: case STRING_TEMPLATE_LITERAL: case XML_ELEMENT_LITERAL: case XML_TEXT_LITERAL: case LIST_CONSTRUCTOR_EXPR: case TABLE_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: case TYPE_CONVERSION_EXPR: case UNARY_EXPR: case BINARY_EXPR: case TYPE_TEST_EXPR: case TERNARY_EXPR: return true; case SIMPLE_VARIABLE_REF: return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; case GROUP_EXPR: return isConstExpression(((BLangGroupExpr) expression).expression); default: return false; } } private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral, String fieldName) { for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) { BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField; if (fieldName.equals(recordKeyValueField.key.toString())) { return recordKeyValueField; } } return null; } public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint, Location pos) { for (String fieldName : fieldNameList) { BField field = types.getTableConstraintField(constraint, fieldName); if (field == null) { dlog.error(pos, DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { 
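/* A key specifier field must be a required field of the row type: an optional field cannot guarantee a key value for every row. */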
dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName); resultType = symTable.semanticError; return false; } if (!types.isAssignable(field.type, symTable.anydataType)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint); resultType = symTable.semanticError; return false; } } return true; } private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) { BType constraintType = tableType.constraint; if (tableConstructorExpr.tableKeySpecifier != null) { List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); if (tableType.fieldNameList == null && !validateKeySpecifier(fieldNameList, constraintType.tag != TypeTags.INTERSECTION ? constraintType : ((BIntersectionType) constraintType).effectiveType, tableConstructorExpr.tableKeySpecifier.pos)) { return false; } if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH, tableType.fieldNameList.toString(), fieldNameList.toString()); resultType = symTable.semanticError; return false; } } BType keyTypeConstraint = tableType.keyTypeConstraint; if (keyTypeConstraint != null) { List<BType> memberTypes = new ArrayList<>(); if (keyTypeConstraint.tag == TypeTags.TUPLE) { for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) { memberTypes.add((BType) type); } } else { memberTypes.add(keyTypeConstraint); } if (tableConstructorExpr.tableKeySpecifier == null && keyTypeConstraint.tag == TypeTags.NEVER) { return true; } if (tableConstructorExpr.tableKeySpecifier == null || tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT, memberTypes.size(), tableConstructorExpr.tableKeySpecifier == null ? 0 : tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size()); resultType = symTable.semanticError; return false; } List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier. 
fieldNameIdentifierList; int index = 0; for (IdentifierNode identifier : fieldNameIdentifierList) { BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value); if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT, fieldNameIdentifierList.toString(), memberTypes.toString()); resultType = symTable.semanticError; return false; } index++; } } return true; } public void validateMapConstraintTable(BLangTableConstructorExpr tableConstructorExpr, BType expType) { if (((BTableType) expType).fieldNameList != null || ((BTableType) expType).keyTypeConstraint != null) { dlog.error(((BTableType) expType).keyPos, DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT); resultType = symTable.semanticError; return; } if (tableConstructorExpr != null && tableConstructorExpr.tableKeySpecifier != null) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT); resultType = symTable.semanticError; return; } if (tableConstructorExpr != null && !(validateTableType((BTableType) expType, tableConstructorExpr.recordLiteralList))) { resultType = symTable.semanticError; return; } resultType = expType; } private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) { List<String> fieldNamesList = new ArrayList<>(); for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) { fieldNamesList.add(((BLangIdentifier) identifier).value); } return fieldNamesList; } private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) { if (fieldNames == null) { return symTable.semanticError; } List<BType> memTypes = new ArrayList<>(); for (String fieldName : fieldNames) { BField tableConstraintField = types.getTableConstraintField(constraintType, fieldName); if (tableConstraintField == null) { return symTable.semanticError; } BType fieldType = tableConstraintField.type; memTypes.add(fieldType); } if (memTypes.size() == 1) { return memTypes.get(0); } return new BTupleType(memTypes); } private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.nonErrorLoggingCheck = true; this.dlog.mute(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } dlog.resetErrorCount(); BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (compatibleTypes.isEmpty()) { BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = 
nodeCloner.clone(listConstructor); } BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType); if (!erroredExpType && inferredTupleType != symTable.semanticError) { dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType); } return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor); } if (tag == TypeTags.INTERSECTION) { return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor); } BType possibleType = getListConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.ARRAY: return checkArrayType(listConstructor, (BArrayType) possibleType); case TypeTags.TUPLE: return checkTupleType(listConstructor, (BTupleType) possibleType); case TypeTags.READONLY: return checkReadOnlyListType(listConstructor); case TypeTags.TYPEDESC: List<BType> results = new ArrayList<>(); listConstructor.isTypedescExpr = true; for (int i = 0; i < listConstructor.exprs.size(); i++) { results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType)); } List<BType> actualTypes = new ArrayList<>(); for (int i = 0; i < listConstructor.exprs.size(); i++) { final BLangExpression expr = listConstructor.exprs.get(i); if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) { actualTypes.add(((BLangTypedescExpr) expr).resolvedType); } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { actualTypes.add(((BLangSimpleVarRef) expr).symbol.type); } else { actualTypes.add(results.get(i)); } } if (actualTypes.size() == 1) { listConstructor.typedescType = actualTypes.get(0); } else { listConstructor.typedescType = new BTupleType(actualTypes); } return new BTypedescType(listConstructor.typedescType, null); } BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.clone(listConstructor); } if (bType == symTable.semanticError) { getInferredTupleType(exprToLog, symTable.semanticError); } else { dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType, getInferredTupleType(exprToLog, symTable.noType)); } return symTable.semanticError; } private BType getListConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.ARRAY: case TypeTags.TUPLE: case TypeTags.READONLY: case TypeTags.TYPEDESC: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
symTable.arrayType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) { BType eType = arrayType.eType; if (arrayType.state == BArrayState.INFERRED) { arrayType.size = listConstructor.exprs.size(); arrayType.state = BArrayState.CLOSED; } else if ((arrayType.state != BArrayState.OPEN) && (arrayType.size != listConstructor.exprs.size())) { if (arrayType.size < listConstructor.exprs.size()) { dlog.error(listConstructor.pos, DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, listConstructor.exprs.size()); return symTable.semanticError; } if (!types.hasFillerValue(eType)) { dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType); return symTable.semanticError; } } boolean errored = false; for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(eType, expr) && !errored) { errored = true; } } return errored ? symTable.semanticError : arrayType; } private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) { List<BLangExpression> exprs = listConstructor.exprs; List<BType> memberTypes = tupleType.tupleTypes; BType restType = tupleType.restType; int listExprSize = exprs.size(); int memberTypeSize = memberTypes.size(); if (listExprSize < memberTypeSize) { for (int i = listExprSize; i < memberTypeSize; i++) { if (!types.hasFillerValue(memberTypes.get(i))) { dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } } } else if (listExprSize > memberTypeSize && restType == null) { dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } boolean errored = false; int nonRestCountToCheck = listExprSize < memberTypeSize ? listExprSize : memberTypeSize; for (int i = 0; i < nonRestCountToCheck; i++) { if (exprIncompatible(memberTypes.get(i), exprs.get(i)) && !errored) { errored = true; } } for (int i = nonRestCountToCheck; i < exprs.size(); i++) { if (exprIncompatible(restType, exprs.get(i)) && !errored) { errored = true; } } return errored ? 
symTable.semanticError : tupleType; } private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return types.checkType(listConstructor, inferredType, symTable.readonlyType); } for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(symTable.readonlyType, expr)) { return symTable.semanticError; } } return symTable.readonlyType; } private boolean exprIncompatible(BType eType, BLangExpression expr) { if (expr.typeChecked) { return expr.getBType() == symTable.semanticError; } BLangExpression exprToCheck = expr; if (this.nonErrorLoggingCheck) { expr.cloneAttempt++; exprToCheck = nodeCloner.clone(expr); } return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError; } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) { return checkExprList(exprs, env, symTable.noType); } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) { List<BType> types = new ArrayList<>(); SymbolEnv prevEnv = this.env; BType preExpType = this.expType; this.env = env; this.expType = expType; for (BLangExpression e : exprs) { checkExpr(e, this.env, expType); types.add(resultType); } this.env = prevEnv; this.expType = preExpType; return types; } private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) { List<BType> memTypes = checkExprList(listConstructor.exprs, env, expType); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } BTupleType tupleType = new BTupleType(memTypes); if (expType.tag != TypeTags.READONLY) { return tupleType; } tupleType.flags |= Flags.READONLY; return tupleType; } public void visit(BLangRecordLiteral recordLiteral) { int expTypeTag = expType.tag; if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.READONLY) { expType = defineInferredRecordType(recordLiteral, expType); } else if (expTypeTag == TypeTags.OBJECT) { dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType); resultType = symTable.semanticError; return; } resultType = getEffectiveMappingType(recordLiteral, checkMappingConstructorCompatibility(expType, recordLiteral)); } private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) { if (applicableMappingType == symTable.semanticError || (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags, Flags.READONLY))) { return applicableMappingType; } Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>(); LinkedHashMap<String, BField> applicableTypeFields = applicableMappingType.tag == TypeTags.RECORD ? 
((BRecordType) applicableMappingType).fields : new LinkedHashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { continue; } String name; if (field.isKeyValueField()) { BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field; if (!keyValueField.readonly) { continue; } BLangExpression keyExpr = keyValueField.key.expr; if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { name = ((BLangSimpleVarRef) keyExpr).variableName.value; } else { name = (String) ((BLangLiteral) keyExpr).value; } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; if (!varNameField.readonly) { continue; } name = varNameField.variableName.value; } if (applicableTypeFields.containsKey(name) && Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) { continue; } readOnlyFields.put(name, field); } if (readOnlyFields.isEmpty()) { return applicableMappingType; } PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); LinkedHashMap<String, BField> newFields = new LinkedHashMap<>(); for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) { RecordLiteralNode.RecordField field = readOnlyEntry.getValue(); String key = readOnlyEntry.getKey(); Name fieldName = names.fromString(key); BType readOnlyFieldType; if (field.isKeyValueField()) { readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType(); } else { readOnlyFieldType = ((BLangRecordVarNameField) field).getBType(); } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(Flag.REQUIRED); add(Flag.READONLY); }}), fieldName, pkgID, readOnlyFieldType, recordSymbol, ((BLangNode) field).pos, VIRTUAL); newFields.put(key, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags); if (applicableMappingType.tag == TypeTags.MAP) { recordType.sealed = false; recordType.restFieldType = ((BMapType) applicableMappingType).constraint; } else { BRecordType applicableRecordType = (BRecordType) applicableMappingType; boolean allReadOnlyFields = true; for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) { String fieldName = origEntry.getKey(); BField field = origEntry.getValue(); if (readOnlyFields.containsKey(fieldName)) { continue; } BVarSymbol origFieldSymbol = field.symbol; long origFieldFlags = origFieldSymbol.flags; if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) { allReadOnlyFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID, origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL); newFields.put(fieldName, new BField(field.name, null, fieldSymbol)); recordSymbol.scope.define(field.name, fieldSymbol); } recordType.sealed = applicableRecordType.sealed; recordType.restFieldType = applicableRecordType.restFieldType; if (recordType.sealed && allReadOnlyFields) { recordType.flags |= Flags.READONLY; recordType.tsymbol.flags |= Flags.READONLY; } } recordType.fields = newFields; recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, 
names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); if (applicableMappingType.tag == TypeTags.MAP) { recordLiteral.expectedType = applicableMappingType; } return recordType; } private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } dlog.resetErrorCount(); BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType, mappingConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (compatibleTypes.isEmpty()) { if (!erroredExpType) { reportIncompatibleMappingConstructorError(mappingConstructor, bType); } validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor); } if (tag == TypeTags.INTERSECTION) { return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor); } BType possibleType = getMappingConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.MAP: return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType : symTable.semanticError; case TypeTags.RECORD: boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType); boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType, mappingConstructor.fields, mappingConstructor.pos); return isSpecifiedFieldsValid && hasAllRequiredFields ? 
possibleType : symTable.semanticError; case TypeTags.READONLY: return checkReadOnlyMappingType(mappingConstructor); } reportIncompatibleMappingConstructorError(mappingConstructor, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return checkMappingConstructorCompatibility(inferredType, mappingConstructor); } for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BLangExpression exprToCheck; if (field.isKeyValueField()) { exprToCheck = ((BLangRecordKeyValueField) field).valueExpr; } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } else { exprToCheck = (BLangRecordVarNameField) field; } if (exprIncompatible(symTable.readonlyType, exprToCheck)) { return symTable.semanticError; } } return symTable.readonlyType; } private BType getMappingConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.MAP: case TypeTags.RECORD: case TypeTags.READONLY: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
symTable.mapType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private boolean isMappingConstructorCompatibleType(BType type) { return type.tag == TypeTags.RECORD || type.tag == TypeTags.MAP; } private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) { if (expType == symTable.semanticError) { return; } if (expType.tag != TypeTags.UNION) { dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType); return; } BUnionType unionType = (BUnionType) expType; BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]); if (memberTypes.length == 2) { BRecordType recType = null; if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[0]; } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[1]; } if (recType != null) { validateSpecifiedFields(mappingConstructorExpr, recType); validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos); return; } } for (BType bType : memberTypes) { if (isMappingConstructorCompatibleType(bType)) { dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR, unionType); return; } } dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType); } private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) { boolean isFieldsValid = true; for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BType checkedType = checkMappingField(field, possibleType); if (isFieldsValid && checkedType == symTable.semanticError) { isFieldsValid = false; } } return isFieldsValid; } private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields, Location pos) { HashSet<String> specFieldNames = getFieldNames(specifiedFields); boolean hasAllRequiredFields = true; for (BField field : type.fields.values()) { String fieldName = field.name.value; if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED) && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) { dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name); if (hasAllRequiredFields) { hasAllRequiredFields = false; } } } return hasAllRequiredFields; } private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) { HashSet<String> fieldNames = new HashSet<>(); for (RecordLiteralNode.RecordField specifiedField : specifiedFields) { if (specifiedField.isKeyValueField()) { String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField); if (name == null) { continue; } fieldNames.add(name); } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField)); } else { fieldNames.addAll(getSpreadOpFieldRequiredFieldNames( (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField)); } } return fieldNames; } private String getKeyValueFieldName(BLangRecordKeyValueField field) { BLangRecordKey key = field.key; if (key.computedKey) { return null; } BLangExpression keyExpr = key.expr; if 
(keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return ((BLangSimpleVarRef) keyExpr).variableName.value; } else if (keyExpr.getKind() == NodeKind.LITERAL) { return (String) ((BLangLiteral) keyExpr).value; } return null; } private String getVarNameFieldName(BLangRecordVarNameField field) { return field.variableName.value; } private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) { BType spreadType = checkExpr(field.expr, env); if (spreadType.tag != TypeTags.RECORD) { return Collections.emptyList(); } List<String> fieldNames = new ArrayList<>(); for (BField bField : ((BRecordType) spreadType).getFields().values()) { if (!Symbols.isOptional(bField.symbol)) { fieldNames.add(bField.name.value); } } return fieldNames; } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { if (workerFlushExpr.workerIdentifier != null) { String workerName = workerFlushExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName); } else { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName)); if (symbol != symTable.notFoundSymbol) { workerFlushExpr.workerSymbol = symbol; } } } BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(workerFlushExpr, actualType, expType); } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier)); if (symTable.notFoundSymbol.equals(symbol)) { syncSendExpr.workerType = symTable.semanticError; } else { syncSendExpr.workerType = symbol.type; syncSendExpr.workerSymbol = symbol; } syncSendExpr.env = this.env; checkExpr(syncSendExpr.expr, this.env); if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) { this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND, syncSendExpr.expr.getBType()); } String workerName = syncSendExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName); } syncSendExpr.expectedType = expType; resultType = expType == symTable.noType ? 
symTable.nilType : expType; } @Override public void visit(BLangWorkerReceive workerReceiveExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier)); workerReceiveExpr.env = this.env; if (symTable.notFoundSymbol.equals(symbol)) { workerReceiveExpr.workerType = symTable.semanticError; } else { workerReceiveExpr.workerType = symbol.type; workerReceiveExpr.workerSymbol = symbol; } if (symTable.noType == this.expType) { this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION); } workerReceiveExpr.setBType(this.expType); resultType = this.expType; } private boolean workerExists(SymbolEnv env, String workerName) { if (workerName.equals(DEFAULT_WORKER_NAME)) { return true; } BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName)); return symbol != this.symTable.notFoundSymbol && symbol.type.tag == TypeTags.FUTURE && ((BFutureType) symbol.type).workerDerivative; } @Override public void visit(BLangConstRef constRef) { constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env, names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName)); types.setImplicitCastExpr(constRef, constRef.getBType(), expType); resultType = constRef.getBType(); } public void visit(BLangSimpleVarRef varRefExpr) { BType actualType = symTable.semanticError; Name varName = names.fromIdNode(varRefExpr.variableName); if (varName == Names.IGNORE) { if (varRefExpr.isLValue) { varRefExpr.setBType(this.symTable.anyType); } else { varRefExpr.setBType(this.symTable.semanticError); dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDERSCORE_NOT_ALLOWED); } varRefExpr.symbol = new BVarSymbol(0, true, varName, env.enclPkg.symbol.pkgID, varRefExpr.getBType(), env.scope.owner, varRefExpr.pos, VIRTUAL); resultType = varRefExpr.getBType(); return; } Name compUnitName = getCurrentCompUnit(varRefExpr); varRefExpr.pkgSymbol = symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName); if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) { varRefExpr.symbol = symTable.notFoundSymbol; dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias); } if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { actualType = symTable.stringType; } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env, names.fromIdNode(varRefExpr.pkgAlias), varName); if (symbol == symTable.notFoundSymbol && env.enclType != null) { Name objFuncName = names.fromString(Symbols .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value)); symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName, env.enclType.getBType().tsymbol); } if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) { BVarSymbol varSym = (BVarSymbol) symbol; checkSelfReferences(varRefExpr.pos, env, varSym); varRefExpr.symbol = varSym; actualType = varSym.type; markAndRegisterClosureVariable(symbol, varRefExpr.pos, env); } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) { actualType = symbol.type.tag == TypeTags.TYPEDESC ? 
symbol.type : new BTypedescType(symbol.type, null); varRefExpr.symbol = symbol; } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) symbol; varRefExpr.symbol = constSymbol; BType symbolType = symbol.type; if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE || (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream() .anyMatch(memType -> memType.tag == TypeTags.FINITE && types.isAssignable(symbolType, memType)))) { actualType = symbolType; } else { actualType = constSymbol.literalType; } if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) { actualType = symTable.semanticError; dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE); } } else { varRefExpr.symbol = symbol; logUndefinedSymbolError(varRefExpr.pos, varName.value); } } if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE); return; } resultType = types.checkType(varRefExpr, actualType, expType); } @Override public void visit(BLangRecordVarRef varRefExpr) { LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName), env.enclPkg.symbol.pkgID, null, env.scope.owner, varRefExpr.pos, SOURCE); symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env); boolean unresolvedReference = false; for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference; bLangVarReference.isLValue = true; checkExpr(recordRefField.variableReference, env); if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol || !isValidVariableReference(recordRefField.variableReference)) { unresolvedReference = true; continue; } BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol; BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos, new BVarSymbol(0, names.fromIdNode(recordRefField.variableName), env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol, varRefExpr.pos, SOURCE)); fields.put(field.name.value, field); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { checkExpr(restParam, env); unresolvedReference = !isValidVariableReference(restParam); } if (unresolvedReference) { resultType = symTable.semanticError; return; } BRecordType bRecordType = new BRecordType(recordSymbol); bRecordType.fields = fields; recordSymbol.type = bRecordType; varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos, SOURCE); if (restParam == null) { bRecordType.sealed = true; bRecordType.restFieldType = symTable.noType; } else if (restParam.getBType() == symTable.semanticError) { bRecordType.restFieldType = symTable.mapType; } else { BType restFieldType; if (restParam.getBType().tag == TypeTags.RECORD) { restFieldType = ((BRecordType) restParam.getBType()).restFieldType; } else if (restParam.getBType().tag == TypeTags.MAP) { restFieldType = ((BMapType) restParam.getBType()).constraint; } else { restFieldType = restParam.getBType(); } bRecordType.restFieldType = restFieldType; } resultType = bRecordType; } @Override 
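/* Type-checks an error binding pattern used as a variable reference, validating the message, cause, detail, and rest bindings against the shape of the error type. */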
public void visit(BLangErrorVarRef varRefExpr) { if (varRefExpr.typeNode != null) { BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env); varRefExpr.setBType(bType); checkIndirectErrorVarRef(varRefExpr); resultType = bType; return; } if (varRefExpr.message != null) { varRefExpr.message.isLValue = true; checkExpr(varRefExpr.message, env); if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) { dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, varRefExpr.message.getBType()); } } if (varRefExpr.cause != null) { varRefExpr.cause.isLValue = true; checkExpr(varRefExpr.cause, env); if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) { dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType, varRefExpr.cause.getBType()); } } boolean unresolvedReference = false; for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { BLangVariableReference refItem = (BLangVariableReference) detailItem.expr; refItem.isLValue = true; checkExpr(refItem, env); if (!isValidVariableReference(refItem)) { unresolvedReference = true; continue; } if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN, refItem); unresolvedReference = true; continue; } if (refItem.symbol == null) { unresolvedReference = true; } } if (varRefExpr.restVar != null) { varRefExpr.restVar.isLValue = true; if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { checkExpr(varRefExpr.restVar, env); unresolvedReference = unresolvedReference || varRefExpr.restVar.symbol == null || !isValidVariableReference(varRefExpr.restVar); } } if (unresolvedReference) { resultType = symTable.semanticError; return; } BType errorRefRestFieldType; if (varRefExpr.restVar == null) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { errorRefRestFieldType = varRefExpr.restVar.getBType(); } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) { errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint; } else { dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, varRefExpr.restVar.getBType(), symTable.detailType); resultType = symTable.semanticError; return; } BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly ? 
symTable.errorType.detailType : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC); resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType); } private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) { for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { checkExpr(detailItem.expr, env); checkExpr(detailItem, env, detailItem.expr.getBType()); } if (varRefExpr.restVar != null) { checkExpr(varRefExpr.restVar, env); } if (varRefExpr.message != null) { varRefExpr.message.isLValue = true; checkExpr(varRefExpr.message, env); } if (varRefExpr.cause != null) { varRefExpr.cause.isLValue = true; checkExpr(varRefExpr.cause, env); } } @Override public void visit(BLangTupleVarRef varRefExpr) { List<BType> results = new ArrayList<>(); for (int i = 0; i < varRefExpr.expressions.size(); i++) { ((BLangVariableReference) varRefExpr.expressions.get(i)).isLValue = true; results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType)); } BTupleType actualType = new BTupleType(results); if (varRefExpr.restParam != null) { BLangExpression restExpr = (BLangExpression) varRefExpr.restParam; ((BLangVariableReference) restExpr).isLValue = true; BType checkedType = checkExpr(restExpr, env, symTable.noType); if (checkedType.tag != TypeTags.ARRAY) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType); resultType = symTable.semanticError; return; } actualType.restType = ((BArrayType) checkedType).eType; } resultType = types.checkType(varRefExpr, actualType, expType); } /** * This method will recursively check if a multidimensional array has at least one open sealed dimension. * * @param arrayType array to check if open sealed * @return true if at least one dimension is open sealed */ public boolean isArrayOpenSealedType(BArrayType arrayType) { if (arrayType.state == BArrayState.INFERRED) { return true; } if (arrayType.eType.tag == TypeTags.ARRAY) { return isArrayOpenSealedType((BArrayType) arrayType.eType); } return false; } /** * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the * enclosing invokable node) which is needed to look up closure variables. The variable lookup will start from the * enclosing invokable node's environment, which is outside the scope of the lambda function.
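* For example, when a closure defined inside a lambda refers to a variable of the enclosing function, that variable has to be resolved against the function's environment rather than the lambda's own scope.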
*/ private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) { return env.enclEnv; } if (env.enclInvokable != null && env.enclInvokable == encInvokable) { return findEnclosingInvokableEnv(env.enclEnv, encInvokable); } return env; } private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) { return env.enclEnv; } if (env.enclType != null && env.enclType == recordTypeNode) { return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode); } return env; } private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) { return params.stream().anyMatch(param -> (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag)); } public void visit(BLangFieldBasedAccess fieldAccessExpr) { markLeafNode(fieldAccessExpr); BLangExpression containerExpression = fieldAccessExpr.expr; if (containerExpression instanceof BLangValueExpression) { ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue; ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue = fieldAccessExpr.isCompoundAssignmentLValue; } BType varRefType = types.getTypeWithEffectiveIntersectionTypes(getTypeOfExprInFieldAccess(containerExpression)); if (fieldAccessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess && !isXmlAccess(fieldAccessExpr)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION); resultType = symTable.semanticError; return; } BType actualType; if (fieldAccessExpr.optionalFieldAccess) { if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS); resultType = symTable.semanticError; return; } actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); } else { actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); if (actualType != symTable.semanticError && (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) { if (isAllReadonlyTypes(varRefType)) { if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return; } } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) && isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, fieldAccessExpr.field.value, varRefType); resultType = symTable.semanticError; return; } } } resultType = types.checkType(fieldAccessExpr, actualType, this.expType); } private boolean isAllReadonlyTypes(BType type) { if (type.tag != TypeTags.UNION) { return Symbols.isFlagOn(type.flags, Flags.READONLY); } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isAllReadonlyTypes(memberType)) { return false; } } return true; } private boolean isInitializationInInit(BType type) { BObjectType objectType = 
(BObjectType) type; BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) objectType.tsymbol; BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc; return env.enclInvokable != null && initializerFunc != null && env.enclInvokable.symbol == initializerFunc.symbol; } private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) { if (type.tag == TypeTags.RECORD) { if (Symbols.isFlagOn(type.flags, Flags.READONLY)) { return true; } BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (!field.name.value.equals(fieldName)) { continue; } return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY); } return recordType.sealed; } boolean allInvalidUpdates = true; for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) { allInvalidUpdates = false; } } return allInvalidUpdates; } private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) { BLangExpression expr = fieldAccessExpr.expr; BType exprType = expr.getBType(); if (exprType.tag == TypeTags.XML || exprType.tag == TypeTags.XML_ELEMENT) { return true; } if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType((BLangFieldBasedAccess) expr) && exprType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) exprType).getMemberTypes(); return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType); } return false; } public void visit(BLangIndexBasedAccess indexBasedAccessExpr) { markLeafNode(indexBasedAccessExpr); BLangExpression containerExpression = indexBasedAccessExpr.expr; if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, ((BLangTypedescExpr) containerExpression).typeNode); resultType = symTable.semanticError; return; } if (containerExpression instanceof BLangValueExpression) { ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue; ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue = indexBasedAccessExpr.isCompoundAssignmentLValue; } boolean isStringValue = containerExpression.getBType() != null && containerExpression.getBType().tag == TypeTags.STRING; if (!isStringValue) { checkExpr(containerExpression, this.env, symTable.noType); } if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY && containerExpression.getBType().tag != TypeTags.TABLE) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED, containerExpression.getBType()); resultType = symTable.semanticError; return; } BType actualType = checkIndexAccessExpr(indexBasedAccessExpr); BType exprType = containerExpression.getBType(); BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; if (actualType != symTable.semanticError && (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) { if (isAllReadonlyTypes(exprType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, exprType); resultType = symTable.semanticError; return; } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) && isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, getConstFieldName(indexExpr), exprType); resultType = 
symTable.semanticError; return; } } if (indexBasedAccessExpr.isLValue) { indexBasedAccessExpr.originalType = actualType; indexBasedAccessExpr.setBType(actualType); resultType = actualType; return; } this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType); } public void visit(BLangInvocation iExpr) { if (iExpr.expr == null) { checkFunctionInvocationExpr(iExpr); return; } if (invalidModuleAliasUsage(iExpr)) { return; } checkExpr(iExpr.expr, this.env, symTable.noType); BType varRefType = iExpr.expr.getBType(); switch (varRefType.tag) { case TypeTags.OBJECT: checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType); break; case TypeTags.RECORD: checkFieldFunctionPointer(iExpr, this.env); break; case TypeTags.NONE: dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name); break; case TypeTags.SEMANTIC_ERROR: break; default: checkInLangLib(iExpr, varRefType); } } public void visit(BLangErrorConstructorExpr errorConstructorExpr) { BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef; if (userProvidedTypeRef != null) { symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR); } validateErrorConstructorPositionalArgs(errorConstructorExpr); List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr); List<BType> errorDetailTypes = new ArrayList<>(); for (BType expandedCandidate : expandedCandidates) { BType detailType = ((BErrorType) expandedCandidate).detailType; errorDetailTypes.add(detailType); } BType detailCandidate; if (errorDetailTypes.size() == 1) { detailCandidate = errorDetailTypes.get(0); } else { detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes)); } BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr); BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env); int index = errorDetailTypes.indexOf(inferredDetailType); BType selectedCandidate = index < 0 ? 
symTable.semanticError : expandedCandidates.get(index); if (selectedCandidate != symTable.semanticError && (userProvidedTypeRef == null || userProvidedTypeRef.getBType() == selectedCandidate)) { checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType); resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); return; } if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) { dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType); } BErrorType errorType; if (userProvidedTypeRef != null && userProvidedTypeRef.getBType().tag == TypeTags.ERROR) { errorType = (BErrorType) userProvidedTypeRef.getBType(); } else if (expandedCandidates.size() == 1) { errorType = (BErrorType) expandedCandidates.get(0); } else { errorType = symTable.errorType; } List<BLangNamedArgsExpression> namedArgs = checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType); BType detailType = errorType.detailType; if (detailType.tag == TypeTags.MAP) { BType errorDetailTypeConstraint = ((BMapType) detailType).constraint; for (BLangNamedArgsExpression namedArgExpr: namedArgs) { if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) { dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType()); } } } else if (detailType.tag == TypeTags.RECORD) { BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType; LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream() .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED) .map(f -> f.name.value) .collect(Collectors.toCollection(LinkedList::new)); LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields; for (BLangNamedArgsExpression namedArg : namedArgs) { BField field = targetFields.get(namedArg.name.value); Location pos = namedArg.pos; if (field == null) { if (targetErrorDetailRec.sealed) { dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC, namedArg.name, targetErrorDetailRec); } else if (targetFields.isEmpty() && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) { dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE, namedArg.name, targetErrorDetailRec); } } else { missingRequiredFields.remove(namedArg.name.value); if (!types.isAssignable(namedArg.expr.getBType(), field.type)) { dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArg.name, field.type, namedArg.expr.getBType()); } } } for (String requiredField : missingRequiredFields) { dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField); } } if (userProvidedTypeRef != null) { errorConstructorExpr.setBType(userProvidedTypeRef.getBType()); } else { errorConstructorExpr.setBType(errorType); } resultType = errorConstructorExpr.getBType(); } private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) { if (errorConstructorExpr.positionalArgs.isEmpty()) { return; } checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType); int positionalArgCount = errorConstructorExpr.positionalArgs.size(); if (positionalArgCount > 1) { checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType); } } private BType checkExprSilent(BLangRecordLiteral recordLiteral, BType expType, SymbolEnv env) { 
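/* Speculative type check: mute the diagnostic log and restore the previous error count afterwards, so that a failed attempt leaves no user-visible diagnostics behind. */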
boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); BType type = checkExpr(recordLiteral, env, expType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } return type; } private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) { BLangRecordKeyValueField field = (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue(); field.valueExpr = (BLangExpression) namedArg.getExpression(); BLangLiteral expr = new BLangLiteral(); expr.value = namedArg.getName().value; expr.setBType(symTable.stringType); field.key = new BLangRecordKey(expr); recordLiteral.fields.add(field); } return recordLiteral; } private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) { BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef; if (errorTypeRef == null) { if (expType.tag == TypeTags.ERROR) { return List.of(expType); } else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) { return expandExpectedErrorTypes(expType); } } else { if (errorTypeRef.getBType().tag != TypeTags.ERROR) { if (errorTypeRef.getBType().tag != TypeTags.SEMANTIC_ERROR) { dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef); } } else { return List.of(errorTypeRef.getBType()); } } return List.of(symTable.errorType); } private List<BType> expandExpectedErrorTypes(BType candidateType) { List<BType> expandedCandidates = new ArrayList<>(); if (candidateType.tag == TypeTags.UNION) { for (BType memberType : ((BUnionType) candidateType).getMemberTypes()) { if (types.isAssignable(memberType, symTable.errorType)) { if (memberType.tag == TypeTags.INTERSECTION) { expandedCandidates.add(((BIntersectionType) memberType).effectiveType); } else { expandedCandidates.add(memberType); } } } } else if (types.isAssignable(candidateType, symTable.errorType)) { if (candidateType.tag == TypeTags.INTERSECTION) { expandedCandidates.add(((BIntersectionType) candidateType).effectiveType); } else { expandedCandidates.add(candidateType); } } return expandedCandidates; } public void visit(BLangInvocation.BLangActionInvocation aInv) { if (aInv.expr == null) { checkFunctionInvocationExpr(aInv); return; } if (invalidModuleAliasUsage(aInv)) { return; } checkExpr(aInv.expr, this.env, symTable.noType); BLangExpression varRef = aInv.expr; switch (varRef.getBType().tag) { case TypeTags.OBJECT: checkActionInvocation(aInv, (BObjectType) varRef.getBType()); break; case TypeTags.RECORD: checkFieldFunctionPointer(aInv, this.env); break; case TypeTags.NONE: dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name); resultType = symTable.semanticError; break; case TypeTags.SEMANTIC_ERROR: default: dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType()); resultType = symTable.semanticError; break; } } private boolean invalidModuleAliasUsage(BLangInvocation invocation) { Name pkgAlias = names.fromIdNode(invocation.pkgAlias); if (pkgAlias != Names.EMPTY) { dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE); return true; } return false; } public void visit(BLangLetExpression 
letExpression) { BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())), new Name(String.format("$let_symbol_%d$", letCount++)), env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner, letExpression.pos); letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol); for (BLangLetVariable letVariable : letExpression.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env); } BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType); types.checkType(letExpression, exprType, this.expType); } private void checkInLangLib(BLangInvocation iExpr, BType varRefType) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value, iExpr.expr.getBType()); resultType = symTable.semanticError; return; } if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) { return; } checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType); } private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType, BSymbol langLibMethodSymbol) { if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) { return false; } String packageId = langLibMethodSymbol.pkgID.name.value; if (!modifierFunctions.containsKey(packageId)) { return false; } String funcName = langLibMethodSymbol.name.value; if (!modifierFunctions.get(packageId).contains(funcName)) { return false; } if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) { return false; } if (funcName.equals("strip") && TypeTags.isXMLTypeTag(varRefType.tag)) { return false; } dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return true; } private boolean isFixedLengthList(BType type) { switch(type.tag) { case TypeTags.ARRAY: return (((BArrayType) type).state != BArrayState.OPEN); case TypeTags.TUPLE: return (((BTupleType) type).restType == null); case TypeTags.UNION: BUnionType unionType = (BUnionType) type; for (BType member : unionType.getMemberTypes()) { if (!isFixedLengthList(member)) { return false; } } return true; default: return false; } } private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) { String invocationName = iExpr.name.getValue(); if (!listLengthModifierFunctions.contains(invocationName)) { return; } if (isFixedLengthList(varRefType)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName, varRefType); resultType = symTable.semanticError; return; } if (isShiftOnIncompatibleTuples(varRefType, invocationName)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName, varRefType); resultType = symTable.semanticError; return; } } private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) { if ((varRefType.tag == TypeTags.TUPLE) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0) && hasDifferentTypeThanRest((BTupleType) varRefType)) { return true; } if ((varRefType.tag == TypeTags.UNION) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)) { BUnionType unionVarRef = (BUnionType) varRefType; boolean allMemberAreFixedShapeTuples = true; for (BType member : unionVarRef.getMemberTypes()) { if (member.tag != TypeTags.TUPLE) { allMemberAreFixedShapeTuples = false; break; } if 
(!hasDifferentTypeThanRest((BTupleType) member)) { allMemberAreFixedShapeTuples = false; break; } } return allMemberAreFixedShapeTuples; } return false; } private boolean hasDifferentTypeThanRest(BTupleType tupleType) { if (tupleType.restType == null) { return false; } for (BType member : tupleType.getTupleTypes()) { if (!types.isSameType(tupleType.restType, member)) { return true; } } return false; } private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) { BType type = checkExpr(iExpr.expr, env); BLangIdentifier invocationIdentifier = iExpr.name; if (type == symTable.semanticError) { return false; } BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier), type.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD, invocationIdentifier, type); return false; } if (fieldSymbol.kind != SymbolKind.FUNCTION) { checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD, fieldSymbol.type); return false; } iExpr.symbol = fieldSymbol; iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType); checkInvocationParamAndReturnType(iExpr); iExpr.functionPointerInvocation = true; return true; } private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos, DiagnosticErrorCode errCode, Object... diagMsgArgs) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(pos, errCode, diagMsgArgs); resultType = symTable.semanticError; } else { checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol); } } @Override public void visit(BLangObjectConstructorExpression objectCtorExpression) { if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) { BObjectType objectType = (BObjectType) objectCtorExpression.classNode.getBType(); if (objectCtorExpression.expectedType.tag == TypeTags.OBJECT) { BObjectType expObjType = (BObjectType) objectCtorExpression.expectedType; objectType.typeIdSet = expObjType.typeIdSet; } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) { if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) { dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR, objectCtorExpression.expectedType); resultType = symTable.semanticError; return; } } } visit(objectCtorExpression.typeInit); } private boolean isDefiniteObjectType(BType type, Set<BTypeIdSet> typeIdSets) { if (type.tag != TypeTags.OBJECT && type.tag != TypeTags.UNION) { return false; } Set<BType> visitedTypes = new HashSet<>(); if (!collectObjectTypeIds(type, typeIdSets, visitedTypes)) { return false; } return typeIdSets.size() <= 1; } private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) { if (type.tag == TypeTags.OBJECT) { var objectType = (BObjectType) type; typeIdSets.add(objectType.typeIdSet); return true; } if (type.tag == TypeTags.UNION) { if (!visitedTypes.add(type)) { return true; } for (BType member : ((BUnionType) type).getMemberTypes()) { if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) { return false; } } return true; } return false; } private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) { Set<BTypeIdSet> typeIdSets = new HashSet<>(); if (!isDefiniteObjectType(type, typeIdSets)) { return false; } if 
(typeIdSets.isEmpty()) { objectType.typeIdSet = BTypeIdSet.emptySet(); return true; } var typeIdIterator = typeIdSets.iterator(); if (typeIdIterator.hasNext()) { BTypeIdSet typeIdSet = typeIdIterator.next(); objectType.typeIdSet = typeIdSet; return true; } return true; } public void visit(BLangTypeInit cIExpr) { if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType); resultType = symTable.semanticError; return; } BType actualType; if (cIExpr.userDefinedType != null) { actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env); } else { actualType = expType; } if (actualType == symTable.semanticError) { resultType = symTable.semanticError; return; } if (actualType.tag == TypeTags.INTERSECTION) { actualType = ((BIntersectionType) actualType).effectiveType; } switch (actualType.tag) { case TypeTags.OBJECT: BObjectType actualObjectType = (BObjectType) actualType; if (isObjectConstructorExpr(cIExpr, actualObjectType)) { BLangClassDefinition classDefForConstructor = getClassDefinitionForObjectConstructorExpr(cIExpr, env); List<BLangType> typeRefs = classDefForConstructor.typeRefs; SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol); if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv, false); } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv, true); } else { semanticAnalyzer.analyzeNode(classDefForConstructor, pkgEnv); } markConstructedObjectIsolatedness(actualObjectType); } if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return; } if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); } else { if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) { return; } } break; case TypeTags.STREAM: if (cIExpr.initInvocation.argExprs.size() > 1) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation); resultType = symTable.semanticError; return; } BStreamType actualStreamType = (BStreamType) actualType; if (actualStreamType.completionType != null) { BType completionType = actualStreamType.completionType; if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) { dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString()); resultType = symTable.semanticError; return; } } if (!cIExpr.initInvocation.argExprs.isEmpty()) { BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0); BType constructType = checkExpr(iteratorExpr, env, symTable.noType); BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType); if (constructType.tag != TypeTags.OBJECT) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR, expectedNextReturnType, constructType); resultType = 
symTable.semanticError; return; } BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType, BLangCompilerConstants.CLOSE_FUNC); if (closeFunc != null) { BType closeableIteratorType = symTable.langQueryModuleSymbol.scope .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type; if (!types.isAssignable(constructType, closeableIteratorType)) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } } else { BType iteratorType = symTable.langQueryModuleSymbol.scope .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type; if (!types.isAssignable(constructType, iteratorType)) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } } BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType); if (nextReturnType != null) { types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } else { dlog.error(constructType.tsymbol.getPosition(), DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType); } } if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType, actualType); resultType = symTable.semanticError; return; } resultType = actualType; return; case TypeTags.UNION: List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType); BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType); cIExpr.initInvocation.setBType(symTable.nilType); if (matchedType.tag == TypeTags.OBJECT) { if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); actualType = matchedType; break; } else { if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) { return; } } } types.checkType(cIExpr, matchedType, expType); cIExpr.setBType(matchedType); resultType = matchedType; return; default: dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType); resultType = symTable.semanticError; return; } if (cIExpr.initInvocation.getBType() == null) { cIExpr.initInvocation.setBType(symTable.nilType); } BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType()); resultType = types.checkType(cIExpr, actualTypeInitType, expType); } private BUnionType createNextReturnType(Location pos, BStreamType streamType) { BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS); recordType.restFieldType = symTable.noType; recordType.sealed = true; Name fieldName = Names.VALUE; BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC, fieldName, env.enclPkg.packageID, streamType.constraint, env.scope.owner, pos, VIRTUAL)); field.type = streamType.constraint; recordType.fields.put(field.name.value, field); recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, recordType, env.scope.owner, pos, VIRTUAL); recordType.tsymbol.scope = new Scope(env.scope.owner); recordType.tsymbol.scope.define(fieldName, field.symbol); 
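/* The expected return type of the iterator's next() method is assembled below as the union record {| T value; |} | C | (), where T is the stream's constraint type and C its completion type. */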
LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>(); retTypeMembers.add(recordType); retTypeMembers.addAll(types.getAllTypes(streamType.completionType)); retTypeMembers.add(symTable.nilType); BUnionType unionType = BUnionType.create(null); unionType.addAll(retTypeMembers); unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL); return unionType; } private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) { if (!cIExpr.initInvocation.argExprs.isEmpty() && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) { dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.name.value); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return false; } return true; } private BType getObjectConstructorReturnType(BType objType, BType initRetType) { if (initRetType.tag == TypeTags.UNION) { LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>(); retTypeMembers.add(objType); retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes()); retTypeMembers.remove(symTable.nilType); BUnionType unionType = BUnionType.create(null, retTypeMembers); unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, unionType, env.scope.owner, symTable.builtinPos, VIRTUAL); return unionType; } else if (initRetType.tag == TypeTags.NIL) { return objType; } return symTable.semanticError; } private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) { int objectCount = 0; for (BType memberType : lhsUnionType.getMemberTypes()) { int tag = memberType.tag; if (tag == TypeTags.OBJECT) { objectCount++; continue; } if (tag != TypeTags.INTERSECTION) { continue; } if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) { objectCount++; } } boolean containsSingleObject = objectCount == 1; List<BType> matchingLhsMemberTypes = new ArrayList<>(); for (BType memberType : lhsUnionType.getMemberTypes()) { if (memberType.tag != TypeTags.OBJECT) { continue; } if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, lhsUnionType.tsymbol); } if (containsSingleObject) { return Collections.singletonList(memberType); } BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc; if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) { matchingLhsMemberTypes.add(memberType); } } return matchingLhsMemberTypes; } private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) { if (matchingLhsMembers.isEmpty()) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } else if (matchingLhsMembers.size() == 1) { return matchingLhsMembers.get(0).tsymbol.type; } else { dlog.error(cIExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } } private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) { invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType)); if (function == null) { return invocationArguments.isEmpty(); } if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) { return true; } 
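/* Match the supplied arguments against the initializer's parameters: positional arguments consume parameters in order (overflow goes to the rest parameter, if any), named arguments are matched by name, and the candidate is accepted only if every non-defaultable parameter has been consumed. */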
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); List<BLangExpression> positionalArgs = new ArrayList<>(); for (BLangExpression argument : invocationArguments) { if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) { namedArgs.add((BLangNamedArgsExpression) argument); } else { positionalArgs.add(argument); } } List<BVarSymbol> requiredParams = function.symbol.params.stream() .filter(param -> !param.isDefaultable) .collect(Collectors.toList()); if (requiredParams.size() > invocationArguments.size()) { return false; } List<BVarSymbol> defaultableParams = function.symbol.params.stream() .filter(param -> param.isDefaultable) .collect(Collectors.toList()); int givenRequiredParamCount = 0; for (int i = 0; i < positionalArgs.size(); i++) { if (function.symbol.params.size() > i) { givenRequiredParamCount++; BVarSymbol functionParam = function.symbol.params.get(i); if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); continue; } if (function.symbol.restParam != null) { BType restParamType = ((BArrayType) function.symbol.restParam.type).eType; if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) { return false; } continue; } return false; } for (BLangNamedArgsExpression namedArg : namedArgs) { boolean foundNamedArg = false; List<BVarSymbol> params = function.symbol.params; for (int i = givenRequiredParamCount; i < params.size(); i++) { BVarSymbol functionParam = params.get(i); if (!namedArg.name.value.equals(functionParam.name.value)) { continue; } foundNamedArg = true; BType namedArgExprType = checkExpr(namedArg.expr, env); if (!types.isAssignable(functionParam.type, namedArgExprType)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); } if (!foundNamedArg) { return false; } } return requiredParams.size() <= 0; } public void visit(BLangWaitForAllExpr waitForAllExpr) { switch (expType.tag) { case TypeTags.RECORD: checkTypesForRecords(waitForAllExpr); break; case TypeTags.MAP: checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint); LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypesForMap.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypesForMap.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap); resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol); break; case TypeTags.NONE: case TypeTags.ANY: checkTypesForMap(waitForAllExpr, expType); LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypes.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintType = BUnionType.create(null, memberTypes); resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol); break; default: dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos)); resultType = symTable.semanticError; break; } waitForAllExpr.setBType(resultType); if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType); } } private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr, Location pos) { BRecordType retType = new 
BRecordType(null, Flags.ANONYMOUS); List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs; for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BLangIdentifier fieldName; if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { fieldName = keyVal.key; } else { fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName; } BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName)); BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type; BField field = new BField(names.fromIdNode(keyVal.key), null, new BVarSymbol(0, names.fromIdNode(keyVal.key), env.enclPkg.packageID, fieldType, null, keyVal.pos, VIRTUAL)); retType.fields.put(field.name.value, field); } retType.restFieldType = symTable.noType; retType.sealed = true; retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null, pos, VIRTUAL); return retType; } private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) { LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BType bType = keyVal.keyExpr != null ? keyVal.keyExpr.getBType() : keyVal.valueExpr.getBType(); if (bType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) bType).constraint); } else { memberTypes.add(bType); } } return memberTypes; } private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) { List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs = waitForAllExpr.keyValuePairs; keyValuePairs.forEach(keyVal -> checkWaitKeyValExpr(keyVal, expType)); } private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) { List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs(); Map<String, BField> lhsFields = ((BRecordType) expType).fields; if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) { dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitExpr, waitExpr.pos)); resultType = symTable.semanticError; return; } for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) { String key = keyVal.key.value; if (!lhsFields.containsKey(key)) { if (((BRecordType) expType).sealed) { dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType); resultType = symTable.semanticError; } else { BType restFieldType = ((BRecordType) expType).restFieldType; checkWaitKeyValExpr(keyVal, restFieldType); } } else { checkWaitKeyValExpr(keyVal, lhsFields.get(key).type); } } checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos); if (symTable.semanticError != resultType) { resultType = expType; } } private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs, Location pos) { type.fields.values().forEach(field -> { boolean hasField = keyValPairs.stream().anyMatch(keyVal -> field.name.value.equals(keyVal.key.value)); if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name); } }); } private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) { BLangExpression expr; if (keyVal.keyExpr != null) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode (((BLangSimpleVarRef) keyVal.keyExpr).variableName)); 
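/* Each member of a wait-for-all expression (illustrative Ballerina, not from this source: wait {a: f1, b: f2}) must denote a future, so the member expression is checked against future<T> for the expected field type T. */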
keyVal.keyExpr.setBType(symbol.type); expr = keyVal.keyExpr; } else { expr = keyVal.valueExpr; } BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null); checkExpr(expr, env, futureType); setEventualTypeForExpression(expr, type); } private void setEventualTypeForExpression(BLangExpression expression, BType currentExpectedType) { if (expression == null) { return; } if (isSimpleWorkerReference(expression)) { return; } BFutureType futureType = (BFutureType) expression.expectedType; BType currentType = futureType.constraint; if (types.containsErrorType(currentType)) { return; } BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType); if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) && !types.isAssignable(eventualType, currentExpectedType)) { dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); } futureType.constraint = eventualType; } private void setEventualTypeForWaitExpression(BLangExpression expression, Location pos) { if ((resultType == symTable.semanticError) || (types.containsErrorType(resultType))) { return; } if (isSimpleWorkerReference(expression)) { return; } BType currentExpectedType = ((BFutureType) expType).constraint; BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType); if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) { resultType = eventualType; return; } if (!types.isAssignable(eventualType, currentExpectedType)) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); resultType = symTable.semanticError; return; } if (resultType.tag == TypeTags.FUTURE) { ((BFutureType) resultType).constraint = eventualType; } else { resultType = eventualType; } } private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) { if ((resultType == symTable.semanticError) || (expression.getKind() != NodeKind.BINARY_EXPR) || (types.containsErrorType(resultType))) { return; } if (!isReferencingNonWorker((BLangBinaryExpr) expression)) { return; } BType currentExpectedType = ((BFutureType) expType).constraint; BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType); if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) { resultType = eventualType; return; } if (!types.isAssignable(eventualType, currentExpectedType)) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); resultType = symTable.semanticError; return; } if (resultType.tag == TypeTags.FUTURE) { ((BFutureType) resultType).constraint = eventualType; } else { resultType = eventualType; } } private boolean isSimpleWorkerReference(BLangExpression expression) { if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression; BSymbol varRefSymbol = simpleVarRef.symbol; if (varRefSymbol == null) { return false; } return workerExists(env, simpleVarRef.variableName.value); } private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) { BLangExpression lhsExpr = binaryExpr.lhsExpr; BLangExpression rhsExpr = binaryExpr.rhsExpr; if (isReferencingNonWorker(lhsExpr)) { return true; } return isReferencingNonWorker(rhsExpr); }
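/* Eventual typing for wait: unless the waited expression is a simple reference to a worker, waiting may also surface an error, so the effective type widens from T to T|error (illustrative Ballerina, not from this source: future<int> f = start compute(); int|error r = wait f;). */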
private boolean isReferencingNonWorker(BLangExpression expression) { if (expression.getKind() == NodeKind.BINARY_EXPR) { return isReferencingNonWorker((BLangBinaryExpr) expression); } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression; BSymbol varRefSymbol = simpleVarRef.symbol; String varRefSymbolName = varRefSymbol.getName().value; if (workerExists(env, varRefSymbolName)) { return false; } } return true; } public void visit(BLangTernaryExpr ternaryExpr) { BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType); SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env); BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType); SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env); BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType); if (condExprType == symTable.semanticError || thenType == symTable.semanticError || elseType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isAssignable(elseType, thenType)) { resultType = thenType; } else if (types.isAssignable(thenType, elseType)) { resultType = elseType; } else { dlog.error(ternaryExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, thenType, elseType); resultType = symTable.semanticError; } } else { resultType = expType; } } public void visit(BLangWaitExpr waitExpr) { expType = new BFutureType(TypeTags.FUTURE, expType, null); checkExpr(waitExpr.getExpression(), env, expType); if (resultType.tag == TypeTags.UNION) { LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>()); if (memberTypes.size() == 1) { resultType = memberTypes.toArray(new BType[0])[0]; } else { resultType = BUnionType.create(null, memberTypes); } } else if (resultType != symTable.semanticError) { resultType = ((BFutureType) resultType).constraint; } BLangExpression waitFutureExpression = waitExpr.getExpression(); if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) { setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos); } else { setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos); } waitExpr.setBType(resultType); if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint); } } private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) { for (BType memberType : unionType.getMemberTypes()) { if (memberType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) memberType).constraint); } else { memberTypes.add(memberType); } } return memberTypes; } @Override public void visit(BLangTrapExpr trapExpr) { boolean firstVisit = trapExpr.expr.getBType() == null; BType actualType; BType exprType = checkExpr(trapExpr.expr, env, expType); boolean definedWithVar = expType == symTable.noType; if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = trapExpr.getBType(); exprType = trapExpr.expr.getBType(); } } if (expType == symTable.semanticError || exprType == symTable.semanticError) { actualType = symTable.semanticError; } else { LinkedHashSet<BType> resultTypes = new LinkedHashSet<>(); if (exprType.tag == TypeTags.UNION) { resultTypes.addAll(((BUnionType) exprType).getMemberTypes()); } else { 
resultTypes.add(exprType); } resultTypes.add(symTable.errorType); actualType = BUnionType.create(null, resultTypes); } resultType = types.checkType(trapExpr, actualType, expType); if (definedWithVar && resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType); } } private BType checkExpectedTypeCompatibility(BLangExpression expr, BType expectedType, SymbolEnv env) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); expr.cloneAttempt++; BType exprCompatibleType = checkExpr(nodeCloner.clone(expr), env, expectedType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (errorCount == 0 && exprCompatibleType != symTable.semanticError) { return checkExpr(expr, env, expectedType); } else { return checkExpr(expr, env); } } private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) { while (env != null && env.node != node) { env = env.enclEnv; } return env != null && env.enclEnv != null ? env.enclEnv.createClone() : new SymbolEnv(node, null); } private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) { SymbolEnv clone = env.createClone(); while (clone != null && clone.node != node) { clone = clone.enclEnv; } if (clone != null) { clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv)); } else { clone = new SymbolEnv(node, null); } return clone; } private BLangNode getLastInputNodeFromEnv(SymbolEnv env) { while (env != null && (env.node.getKind() != NodeKind.FROM && env.node.getKind() != NodeKind.JOIN)) { env = env.enclEnv; } return env != null ? 
env.node : null; } public void visit(BLangTransactionalExpr transactionalExpr) { resultType = types.checkType(transactionalExpr, symTable.booleanType, expType); } public void visit(BLangCommitExpr commitExpr) { BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(commitExpr, actualType, expType); } private BType getXMLConstituents(BType type) { BType constituent = null; if (type.tag == TypeTags.XML) { constituent = ((BXMLType) type).constraint; } else if (TypeTags.isXMLNonSequenceType(type.tag)) { constituent = type; } return constituent; } private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) { if (expType.tag != TypeTags.DECIMAL) { return; } switch (binaryExpr.opKind) { case ADD: case SUB: case MUL: case DIV: checkExpr(binaryExpr.lhsExpr, env, expType); checkExpr(binaryExpr.rhsExpr, env, expType); break; default: break; } } public void visit(BLangElvisExpr elvisExpr) { BType lhsType = checkExpr(elvisExpr.lhsExpr, env); BType actualType = symTable.semanticError; if (lhsType != symTable.semanticError) { if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) { BUnionType unionType = (BUnionType) lhsType; LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream() .filter(type -> type.tag != TypeTags.NIL) .collect(Collectors.toCollection(LinkedHashSet::new)); if (memberTypes.size() == 1) { actualType = memberTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, memberTypes); } } else { dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS, lhsType); } } BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType); BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isSameType(rhsReturnType, lhsReturnType)) { resultType = lhsReturnType; } else { dlog.error(elvisExpr.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType); resultType = symTable.semanticError; } } else { resultType = expType; } } @Override public void visit(BLangGroupExpr groupExpr) { resultType = checkExpr(groupExpr.expression, env, expType); } public void visit(BLangTypedescExpr accessExpr) { if (accessExpr.resolvedType == null) { accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env); } int resolveTypeTag = accessExpr.resolvedType.tag; final BType actualType; if (resolveTypeTag != TypeTags.TYPEDESC && resolveTypeTag != TypeTags.NONE) { actualType = new BTypedescType(accessExpr.resolvedType, null); } else { actualType = accessExpr.resolvedType; } resultType = types.checkType(accessExpr, actualType, expType); } public void visit(BLangUnaryExpr unaryExpr) { BType exprType; BType actualType = symTable.semanticError; if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = exprType; } } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = new BTypedescType(exprType, null); } } else { boolean decimalNegation = OperatorKind.SUB.equals(unaryExpr.operator) && expType.tag == TypeTags.DECIMAL; boolean isAdd = OperatorKind.ADD.equals(unaryExpr.operator); exprType 
= (decimalNegation || isAdd) ? checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType); if (symbol == symTable.notFoundSymbol) { dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES, unaryExpr.operator, exprType); } else { unaryExpr.opSymbol = (BOperatorSymbol) symbol; actualType = symbol.type.getReturnType(); } } } resultType = types.checkType(unaryExpr, actualType, expType); } public void visit(BLangTypeConversionExpr conversionExpr) { BType actualType = symTable.semanticError; for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) { annAttachment.attachPoints.add(AttachPoint.Point.TYPE); semanticAnalyzer.analyzeNode(annAttachment, this.env); } BLangExpression expr = conversionExpr.expr; if (conversionExpr.typeNode == null) { if (!conversionExpr.annAttachments.isEmpty()) { resultType = checkExpr(expr, env, this.expType); } return; } BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos, symResolver.resolveTypeNode(conversionExpr.typeNode, env)); conversionExpr.targetType = targetType; boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); expr.cloneAttempt++; BType exprCompatibleType = checkExpr(nodeCloner.clone(expr), env, targetType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) { checkExpr(expr, env, targetType); } else { checkExpr(expr, env, symTable.noType); } BType exprType = expr.getBType(); if (types.isTypeCastable(expr, exprType, targetType, this.env)) { actualType = targetType; } else if (exprType != symTable.semanticError && exprType != symTable.noType) { dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType); } resultType = types.checkType(conversionExpr, actualType, this.expType); } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType()); bLangLambdaFunction.capturedClosureEnv = env.createClone(); if (!this.nonErrorLoggingCheck) { env.enclPkg.lambdaFunctions.add(bLangLambdaFunction); } resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { BType expectedType = expType; if (expectedType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expectedType; BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE) .collect(Collectors.collectingAndThen(Collectors.toList(), list -> { if (list.size() != 1) { return null; } return list.get(0); } )); if (invokableType != null) { expectedType = invokableType; } } if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) { dlog.error(bLangArrowFunction.pos, DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS); resultType = symTable.semanticError; return; } BInvokableType expectedInvocation = (BInvokableType) expectedType; populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes); 
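/* An arrow function takes both its parameter types and its return type from the expected invokable type; e.g. (illustrative Ballerina, not from this source) function (int) returns int inc = x => x + 1; gives x the type int and checks the body against int. */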
bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType)); if (expectedInvocation.retType.tag == TypeTags.NONE) { expectedInvocation.retType = bLangArrowFunction.body.expr.getBType(); } resultType = bLangArrowFunction.funcType = expectedInvocation; } public void visit(BLangXMLQName bLangXMLQName) { String prefix = bLangXMLQName.prefix.value; resultType = types.checkType(bLangXMLQName, symTable.stringType, expType); if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty() && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix); bLangXMLQName.setBType(symTable.semanticError); return; } if (bLangXMLQName.prefix.value.isEmpty()) { return; } BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix)); if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { return; } if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { logUndefinedSymbolError(bLangXMLQName.pos, prefix); bLangXMLQName.setBType(symTable.semanticError); return; } if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) { xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value, (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos); } if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) { resultType = symTable.semanticError; return; } bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol; bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI; } private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix, BPackageSymbol pkgSymbol, Location pos) { BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env, names.fromString(localname), SymTag.CONSTANT); if (constSymbol == symTable.notFoundSymbol) { if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) { dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname); } return null; } BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol; if (constantSymbol.literalType.tag != TypeTags.STRING) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType); return null; } String constVal = (String) constantSymbol.value.value; int s = constVal.indexOf('{'); int e = constVal.lastIndexOf('}'); if (e > s + 1) { pkgSymbol.isUsed = true; String nsURI = constVal.substring(s + 1, e); String local = constVal.substring(e); return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos, SOURCE); } dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname); return null; } public void visit(BLangXMLAttribute bLangXMLAttribute) { SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env); BLangXMLQName name = (BLangXMLQName) bLangXMLAttribute.name; checkExpr(name, xmlAttributeEnv, symTable.stringType); if (name.prefix.value.isEmpty()) { name.namespaceURI = null; } checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType); symbolEnter.defineNode(bLangXMLAttribute, env); } public 
void visit(BLangXMLElementLiteral bLangXMLElementLiteral) { SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env); Set<String> usedPrefixes = new HashSet<>(); BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix; if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) { usedPrefixes.add(elemNamePrefix.value); } for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) { if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) { BLangXMLQuotedString value = attribute.value; if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) { dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION); } checkExpr(attribute, xmlElementEnv, symTable.noType); } BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix; if (prefix != null && !prefix.value.isEmpty()) { usedPrefixes.add(prefix.value); } } bLangXMLElementLiteral.attributes.forEach(attribute -> { if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) { checkExpr(attribute, xmlElementEnv, symTable.noType); } }); Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv); Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX); if (namespaces.containsKey(defaultNs)) { bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs); } for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) { if (usedPrefixes.contains(nsEntry.getKey().value)) { bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue()); } } validateTags(bLangXMLElementLiteral, xmlElementEnv); bLangXMLElementLiteral.modifiedChildren = concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType, this.expType); if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) { markChildrenAsImmutable(bLangXMLElementLiteral); } } private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) { BLangXMLQName attrName = (BLangXMLQName) attribute.name; return (attrName.prefix.value.isEmpty() && attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) || attrName.prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE); } public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) { if (childXMLExpressions.getKind() == NodeKind.XML_ELEMENT_LITERAL) { return symTable.xmlElementType; } if (childXMLExpressions.getKind() == NodeKind.XML_TEXT_LITERAL) { return symTable.xmlTextType; } if (childXMLExpressions.getKind() == NodeKind.XML_PI_LITERAL) { return symTable.xmlPIType; } return symTable.xmlCommentType; } public void muteErrorLog() { this.nonErrorLoggingCheck = true; this.dlog.mute(); } public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) { this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } } public BType getXMLSequenceType(BType xmlSubType) { switch (xmlSubType.tag) { case TypeTags.XML_ELEMENT: return new BXMLType(symTable.xmlElementType, null); case TypeTags.XML_COMMENT: return new BXMLType(symTable.xmlCommentType, null); case TypeTags.XML_PI: return new BXMLType(symTable.xmlPIType, null); default: return symTable.xmlTextType; } } 
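/* An XML sequence literal is typed from its items: if every item has the same XML kind it is typed via getXMLSequenceType above (e.g. xml<xml:Element>, or xml:Text for text-only content), while mixed content widens to xml. */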
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT && expType != symTable.noType) { dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, "XML Sequence"); resultType = symTable.semanticError; return; } List<BType> xmlTypesInSequence = new ArrayList<>(); for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) { resultType = checkExpr(expressionItem, env, expType); if (!xmlTypesInSequence.contains(resultType)) { xmlTypesInSequence.add(resultType); } } if (expType.tag == TypeTags.XML || expType == symTable.noType) { if (xmlTypesInSequence.size() == 1) { resultType = getXMLSequenceType(xmlTypesInSequence.get(0)); return; } resultType = symTable.xmlType; return; } if (expType.tag == TypeTags.XML_TEXT) { resultType = symTable.xmlTextType; return; } for (BType item : ((BUnionType) expType).getMemberTypes()) { if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) { dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.xmlType); resultType = symTable.semanticError; return; } } resultType = symTable.xmlType; } public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) { List<BLangExpression> literalValues = bLangXMLTextLiteral.textFragments; checkStringTemplateExprs(literalValues); BLangExpression xmlExpression = literalValues.get(0); if (literalValues.size() == 1 && xmlExpression.getKind() == NodeKind.LITERAL && ((String) ((BLangLiteral) xmlExpression).value).isEmpty()) { resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlNeverType, expType); return; } resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType); } public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) { checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType, this.expType); } public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) { checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType); checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType, this.expType); } public void visit(BLangXMLQuotedString bLangXMLQuotedString) { checkStringTemplateExprs(bLangXMLQuotedString.textFragments); resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType); } public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { dlog.error(xmlAttributeAccessExpr.pos, DiagnosticErrorCode.DEPRECATED_XML_ATTRIBUTE_ACCESS); resultType = symTable.semanticError; } public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { checkStringTemplateExprs(stringTemplateLiteral.exprs); resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType); if (type == symTable.semanticError) { resultType = type; return; } BObjectType literalType = (BObjectType) type; BType stringsType = literalType.fields.get("strings").type; if 
(evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS, rawTemplateLiteral.pos)) { type = symTable.semanticError; } BType insertionsType = literalType.fields.get("insertions").type; if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS, rawTemplateLiteral.pos)) { type = symTable.semanticError; } resultType = type; } private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) { if (expType == symTable.noType || containsAnyType(expType)) { return symTable.rawTemplateType; } BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos); BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE); if (type == symTable.semanticError) { return type; } if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type); return symTable.semanticError; } BObjectType litObjType = (BObjectType) type; BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol; if (litObjType.fields.size() > 2) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType); type = symTable.semanticError; } if (!objTSymbol.attachedFuncs.isEmpty()) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType); type = symTable.semanticError; } return type; } private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType, DiagnosticCode code, Location pos) { BType listType = fieldType.tag != TypeTags.INTERSECTION ? fieldType : ((BIntersectionType) fieldType).effectiveType; boolean errored = false; if (listType.tag == TypeTags.ARRAY) { BArrayType arrayType = (BArrayType) listType; if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) { dlog.error(pos, code, arrayType.size, exprs.size()); return false; } for (BLangExpression expr : exprs) { errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored; } } else if (listType.tag == TypeTags.TUPLE) { BTupleType tupleType = (BTupleType) listType; final int size = exprs.size(); final int requiredItems = tupleType.tupleTypes.size(); if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) { dlog.error(pos, code, requiredItems, size); return false; } int i; List<BType> memberTypes = tupleType.tupleTypes; for (i = 0; i < requiredItems; i++) { errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored; } if (size > requiredItems) { for (; i < size; i++) { errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored; } } } else { throw new IllegalStateException("Expected a list type, but found: " + listType); } return errored; } private boolean containsAnyType(BType type) { if (type == symTable.anyType) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().contains(symTable.anyType); } return false; } private BType getCompatibleRawTemplateType(BType expType, Location pos) { if (expType.tag != TypeTags.UNION) { return expType; } BUnionType unionType = (BUnionType) expType; List<BType> compatibleTypes = new ArrayList<>(); for (BType type : unionType.getMemberTypes()) { if (types.isAssignable(type, symTable.rawTemplateType)) { compatibleTypes.add(type); } } if (compatibleTypes.size() == 0) { return 
expType; } if (compatibleTypes.size() > 1) { dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType, expType); return symTable.semanticError; } return compatibleTypes.get(0); } @Override public void visit(BLangIntRangeExpression intRangeExpression) { checkExpr(intRangeExpression.startExpr, env, symTable.intType); checkExpr(intRangeExpression.endExpr, env, symTable.intType); resultType = new BArrayType(symTable.intType); } @Override public void visit(BLangRestArgsExpression bLangRestArgExpression) { resultType = checkExpr(bLangRestArgExpression.expr, env, expType); } @Override public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) { if (expType.tag != TypeTags.TYPEDESC) { dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc); resultType = symTable.semanticError; return; } resultType = expType; } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType); bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType()); } @Override public void visit(BLangMatchExpression bLangMatchExpression) { SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env); checkExpr(bLangMatchExpression.expr, matchExprEnv); bLangMatchExpression.patternClauses.forEach(pattern -> { if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) { symbolEnter.defineNode(pattern.variable, matchExprEnv); } checkExpr(pattern.expr, matchExprEnv, expType); pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv)); }); LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression); BType actualType; if (matchExprTypes.contains(symTable.semanticError)) { actualType = symTable.semanticError; } else if (matchExprTypes.size() == 1) { actualType = matchExprTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, matchExprTypes); } resultType = types.checkType(bLangMatchExpression, actualType, expType); } @Override public void visit(BLangCheckedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangQueryExpr queryExpr) { if (prevEnvs.empty()) { prevEnvs.push(env); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek()); selectClauses.push(queryExpr.getSelectClause()); List<BLangNode> clauses = queryExpr.getQueryClauses(); BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection(); clauses.forEach(clause -> clause.accept(this)); BType actualType = resolveQueryType(queryEnvs.peek(), selectClauses.peek().expression, collectionNode.getBType(), expType, queryExpr); actualType = (actualType == symTable.semanticError) ? 
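/* A query type that already resolved to a semantic error is propagated as-is; otherwise it is checked against the expected type before the table-specific validation below. */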
actualType : types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); selectClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); if (actualType.tag == TypeTags.TABLE) { BTableType tableType = (BTableType) actualType; tableType.constraintPos = queryExpr.pos; tableType.isTypeInlineDefined = true; if (!validateTableType(tableType, null)) { resultType = symTable.semanticError; return; } } resultType = actualType; } private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType, BType targetType, BLangQueryExpr queryExpr) { List<BType> resultTypes = types.getAllTypes(targetType).stream() .filter(t -> !types.isAssignable(t, symTable.errorType)) .filter(t -> !types.isAssignable(t, symTable.nilType)) .collect(Collectors.toList()); if (resultTypes.isEmpty()) { resultTypes.add(symTable.noType); } BType actualType = symTable.semanticError; List<BType> selectTypes = new ArrayList<>(); List<BType> resolvedTypes = new ArrayList<>(); BType selectType, resolvedType; for (BType type : resultTypes) { switch (type.tag) { case TypeTags.ARRAY: selectType = checkExpr(selectExp, env, ((BArrayType) type).eType); resolvedType = new BArrayType(selectType); break; case TypeTags.TABLE: selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint, true, true)); resolvedType = symTable.tableType; break; case TypeTags.STREAM: selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint, true, true)); resolvedType = symTable.streamType; break; case TypeTags.STRING: case TypeTags.XML: selectType = checkExpr(selectExp, env, type); resolvedType = selectType; break; case TypeTags.NONE: default: selectType = checkExpr(selectExp, env, type); resolvedType = getNonContextualQueryType(selectType, collectionType); break; } if (selectType != symTable.semanticError) { if (resolvedType.tag == TypeTags.STREAM) { queryExpr.isStream = true; } if (resolvedType.tag == TypeTags.TABLE) { queryExpr.isTable = true; } selectTypes.add(selectType); resolvedTypes.add(resolvedType); } } if (selectTypes.size() == 1) { BType errorType = getErrorType(collectionType); selectType = selectTypes.get(0); if (queryExpr.isStream) { return new BStreamType(TypeTags.STREAM, selectType, errorType, null); } else if (queryExpr.isTable) { actualType = getQueryTableType(queryExpr, selectType); } else { actualType = resolvedTypes.get(0); } if (errorType != null && errorType.tag != TypeTags.NIL) { return BUnionType.create(null, actualType, errorType); } else { return actualType; } } else if (selectTypes.size() > 1) { dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes); return actualType; } else { return actualType; } } private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) { final BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null); if (!queryExpr.fieldNameIdentifierList.isEmpty()) { tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream() .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList()); return BUnionType.create(null, tableType, symTable.errorType); } return tableType; } private BType getErrorType(BType collectionType) { if (collectionType.tag == TypeTags.SEMANTIC_ERROR) { return null; } BType returnType = null, errorType = null; switch (collectionType.tag) { case TypeTags.STREAM: errorType = ((BStreamType) collectionType).completionType; break; case TypeTags.OBJECT: returnType = types.getVarTypeFromIterableObject((BObjectType) 
collectionType); break; default: BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); if (itrSymbol == this.symTable.notFoundSymbol) { return null; } BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol; returnType = types.getResultTypeOfNextInvocation((BObjectType) invokableSymbol.retType); } if (returnType != null) { List<BType> errorTypes = types.getAllTypes(returnType).stream() .filter(t -> types.isAssignable(t, symTable.errorType)) .collect(Collectors.toList()); if (!errorTypes.isEmpty()) { if (errorTypes.size() == 1) { errorType = errorTypes.get(0); } else { errorType = BUnionType.create(null, errorTypes.toArray(new BType[0])); } } } return errorType; } private BType getNonContextualQueryType(BType staticType, BType basicType) { BType resultType; switch (basicType.tag) { case TypeTags.TABLE: resultType = symTable.tableType; break; case TypeTags.STREAM: resultType = symTable.streamType; break; case TypeTags.XML: resultType = new BXMLType(staticType, null); break; case TypeTags.STRING: resultType = symTable.stringType; break; default: resultType = new BArrayType(staticType); break; } return resultType; } @Override public void visit(BLangQueryAction queryAction) { if (prevEnvs.empty()) { prevEnvs.push(env); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek()); selectClauses.push(null); BLangDoClause doClause = queryAction.getDoClause(); List<BLangNode> clauses = queryAction.getQueryClauses(); clauses.forEach(clause -> clause.accept(this)); semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek())); BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); selectClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); } @Override public void visit(BLangFromClause fromClause) { SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop()); fromClause.env = fromEnv; queryEnvs.push(fromEnv); checkExpr(fromClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(fromClause); handleInputClauseVariables(fromClause, queryEnvs.peek()); } @Override public void visit(BLangJoinClause joinClause) { SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop()); joinClause.env = joinEnv; queryEnvs.push(joinEnv); checkExpr(joinClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(joinClause); handleInputClauseVariables(joinClause, queryEnvs.peek()); if (joinClause.onClause != null) { ((BLangOnClause) joinClause.onClause).accept(this); } } @Override public void visit(BLangLetClause letClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop()); letClause.env = letEnv; queryEnvs.push(letEnv); for (BLangLetVariable letVariable : letClause.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv); } } @Override public void visit(BLangWhereClause whereClause) { whereClause.env = handleFilterClauses(whereClause.expression); } @Override public void visit(BLangSelectClause selectClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop()); selectClause.env = letEnv; queryEnvs.push(letEnv); } @Override public void visit(BLangDoClause doClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop()); doClause.env = 
letEnv; queryEnvs.push(letEnv); } @Override public void visit(BLangOnConflictClause onConflictClause) { BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType); if (!types.isAssignable(exprType, symTable.errorType)) { dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, symTable.errorType, exprType); } } @Override public void visit(BLangLimitClause limitClause) { BType exprType = checkExpr(limitClause.expression, queryEnvs.peek()); if (!types.isAssignable(exprType, symTable.intType)) { dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType, exprType); } } @Override public void visit(BLangOnClause onClause) { BType lhsType, rhsType; BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek()); onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode); lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv); onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode); rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek()); if (!types.isAssignable(lhsType, rhsType)) { dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType); } } @Override public void visit(BLangOrderByClause orderByClause) { orderByClause.env = queryEnvs.peek(); for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) { BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), orderByClause.env); if (!types.isOrderedType(exprType, false)) { dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED); } } } @Override public void visit(BLangDo doNode) { if (doNode.onFailClause != null) { doNode.onFailClause.accept(this); } } public void visit(BLangOnFailClause onFailClause) { onFailClause.body.stmts.forEach(stmt -> stmt.accept(this)); } private SymbolEnv handleFilterClauses (BLangExpression filterExpression) { checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType); BType actualType = filterExpression.getBType(); if (TypeTags.TUPLE == actualType.tag) { dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.booleanType, actualType); } SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, selectClauses.peek(), queryEnvs.pop()); queryEnvs.push(filterEnv); return filterEnv; } private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) { if (bLangInputClause.variableDefinitionNode == null) { return; } BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable(); if (bLangInputClause.isDeclaredWithVar) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv); if (types.isAssignable(bLangInputClause.varType, typeNodeType)) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } if (typeNodeType != symTable.semanticError) { dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bLangInputClause.varType, typeNodeType); } semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) { String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? 
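/* 'check' and 'checkpanic' share this analysis; operatorType feeds the diagnostics and the lax field-access handling further down. */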
"check" : "checkpanic"; BLangExpression exprWithCheckingKeyword = checkedExpr.expr; boolean firstVisit = exprWithCheckingKeyword.getBType() == null; BType checkExprCandidateType; if (expType == symTable.noType) { checkExprCandidateType = symTable.noType; } else { checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType); } boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); checkedExpr.expr.cloneAttempt++; BLangExpression clone = nodeCloner.clone(checkedExpr.expr); BType rhsType = checkExpr(clone, env, checkExprCandidateType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } BType errorType = getNonDefaultErrorErrorComponentsOrDefaultError(rhsType); BType typeOfExprWithCheckingKeyword; if (expType == symTable.noType) { typeOfExprWithCheckingKeyword = symTable.noType; } else { typeOfExprWithCheckingKeyword = BUnionType.create(null, expType, errorType); } if (exprWithCheckingKeyword.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && operatorType.equals("check") && types.isUnionOfSimpleBasicTypes(expType)) { BType varRefType = getTypeOfExprInFieldAccess(((BLangFieldBasedAccess) exprWithCheckingKeyword).expr); if (types.isLax(varRefType)) { ArrayList<BLangExpression> argExprs = new ArrayList<>(); BType typedescType = new BTypedescType(expType, null); BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = expType; typedescExpr.setBType(typedescType); argExprs.add(typedescExpr); BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE, argExprs, exprWithCheckingKeyword, checkedExpr.pos); invocation.symbol = symResolver.lookupLangLibMethod(typeOfExprWithCheckingKeyword, names.fromString(invocation.name.value)); invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); checkedExpr.expr = invocation; } } BType exprType = checkExpr(checkedExpr.expr, env, typeOfExprWithCheckingKeyword); if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = checkedExpr.getBType(); exprType = checkedExpr.expr.getBType(); } } boolean isErrorType = types.isAssignable(exprType, symTable.errorType); if (exprType.tag != TypeTags.UNION && !isErrorType) { if (exprType.tag == TypeTags.READONLY) { checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{ add(symTable.errorType); }}; resultType = symTable.anyAndReadonly; return; } else if (exprType != symTable.semanticError) { dlog.error(checkedExpr.expr.pos, DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); } checkedExpr.setBType(symTable.semanticError); return; } List<BType> errorTypes = new ArrayList<>(); List<BType> nonErrorTypes = new ArrayList<>(); if (!isErrorType) { for (BType memberType : ((BUnionType) exprType).getMemberTypes()) { if (memberType.tag == TypeTags.READONLY) { errorTypes.add(symTable.errorType); nonErrorTypes.add(symTable.anyAndReadonly); continue; } if (types.isAssignable(memberType, symTable.errorType)) { errorTypes.add(memberType); continue; } nonErrorTypes.add(memberType); } } else { errorTypes.add(exprType); } checkedExpr.equivalentErrorTypeList = errorTypes; if (errorTypes.isEmpty()) { dlog.error(checkedExpr.expr.pos, 
DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); checkedExpr.setBType(symTable.semanticError); return; } BType actualType; if (nonErrorTypes.size() == 0) { actualType = symTable.neverType; } else if (nonErrorTypes.size() == 1) { actualType = nonErrorTypes.get(0); } else { actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes)); } if (actualType.tag == TypeTags.NEVER) { dlog.error(checkedExpr.pos, DiagnosticErrorCode.NEVER_TYPE_NOT_ALLOWED_WITH_CHECKED_EXPR, operatorType); } resultType = types.checkType(checkedExpr, actualType, expType); } private BType getNonDefaultErrorErrorComponentsOrDefaultError(BType rhsType) { List<BType> errorTypes = new ArrayList<>(); for (BType t : types.getAllTypes(rhsType)) { if (!types.isSameType(t, symTable.errorType) && types.isAssignable(t, symTable.errorType)) { errorTypes.add(t); } } if (!errorTypes.isEmpty()) { if (errorTypes.size() == 1) { return errorTypes.get(0); } else { return BUnionType.create(null, errorTypes.toArray(new BType[0])); } } return symTable.errorType; } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { resultType = serviceConstructorExpr.serviceNode.symbol.type; } @Override public void visit(BLangTypeTestExpr typeTestExpr) { typeTestExpr.typeNode.setBType(symResolver.resolveTypeNode(typeTestExpr.typeNode, env)); checkExpr(typeTestExpr.expr, env); resultType = types.checkType(typeTestExpr, symTable.booleanType, expType); } public void visit(BLangAnnotAccessExpr annotAccessExpr) { checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc); BType actualType = symTable.semanticError; BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env, names.fromString(annotAccessExpr.pkgAlias.getValue()), names.fromString(annotAccessExpr.annotationName.getValue())); if (symbol == this.symTable.notFoundSymbol) { this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION, annotAccessExpr.annotationName.getValue()); } else { annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol; BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? 
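/* An annotation without an attached type yields 'true' on access; the result type is always unioned with nil. */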
symTable.trueType : ((BAnnotationSymbol) symbol).attachedType.type; actualType = BUnionType.create(null, annotType, symTable.nilType); } this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType); } private boolean isValidVariableReference(BLangExpression varRef) { switch (varRef.getKind()) { case SIMPLE_VARIABLE_REF: case RECORD_VARIABLE_REF: case TUPLE_VARIABLE_REF: case ERROR_VARIABLE_REF: case FIELD_BASED_ACCESS_EXPR: case INDEX_BASED_ACCESS_EXPR: case XML_ATTRIBUTE_ACCESS_EXPR: return true; default: dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType()); return false; } } private BType getEffectiveReadOnlyType(Location pos, BType origTargetType) { if (origTargetType == symTable.readonlyType) { if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } if (origTargetType.tag != TypeTags.UNION) { return origTargetType; } boolean hasReadOnlyType = false; LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>(); for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) { if (memberType == symTable.readonlyType) { hasReadOnlyType = true; continue; } nonReadOnlyTypes.add(memberType); } if (!hasReadOnlyType) { return origTargetType; } if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes); nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>())); return nonReadOnlyUnion; } private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) { SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env); bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv)); return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType); } private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) { if (paramTypes.size() != bLangArrowFunction.params.size()) { dlog.error(bLangArrowFunction.pos, DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH, paramTypes.size(), bLangArrowFunction.params.size()); resultType = symTable.semanticError; bLangArrowFunction.params.forEach(param -> param.setBType(symTable.semanticError)); return; } for (int i = 0; i < bLangArrowFunction.params.size(); i++) { BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i); BType bType = paramTypes.get(i); BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); valueTypeNode.setTypeKind(bType.getKind()); valueTypeNode.pos = symTable.builtinPos; paramIdentifier.setTypeNode(valueTypeNode); paramIdentifier.setBType(bType); } } private void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) { if (env.enclVarSym == varSymbol) { dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name); } } public List<BType> getListWithErrorTypes(int count) { List<BType> list = new ArrayList<>(count); for (int i = 0; i < count; i++) { list.add(symTable.semanticError); } return list; } private void 
checkFunctionInvocationExpr(BLangInvocation iExpr) { Name funcName = names.fromIdNode(iExpr.name); Name pkgAlias = names.fromIdNode(iExpr.pkgAlias); BSymbol funcSymbol = symTable.notFoundSymbol; BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr)); if (pkgSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias); } else { if (funcSymbol == symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { funcSymbol = symbol; } if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) && (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) { funcSymbol = symbol; } } if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) { BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol; } } if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) { if (!missingNodesHelper.isMissingNode(funcName)) { dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName); } iExpr.argExprs.forEach(arg -> checkExpr(arg, env)); resultType = symTable.semanticError; return; } if (isFunctionPointer(funcSymbol)) { iExpr.functionPointerInvocation = true; markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID); if (langLibPackageID) { this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); } iExpr.symbol = funcSymbol; checkInvocationParamAndReturnType(iExpr); if (langLibPackageID && !iExpr.argExprs.isEmpty()) { checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol); } } protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) { BLangInvokableNode encInvokable = env.enclInvokable; if (symbol.closure || ((symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE && env.node.getKind() != NodeKind.ARROW_EXPR)) { return; } if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) && !isFunctionArgument(symbol, encInvokable.requiredParams)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.node.getKind() == NodeKind.ARROW_EXPR && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol) { resolvedSymbol.closure = true; ((BLangArrowFunction) env.node).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) { 
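// Variables referenced within a record type node (e.g. in field default value expressions) are looked up in the scope enclosing the record and, if found, captured as closure variables of the enclosing invokable.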
SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } BLangNode node = env.node; SymbolEnv cEnv = env; while (node != null && node.getKind() != NodeKind.FUNCTION) { if (node.getKind() == NodeKind.ON_FAIL) { BLangOnFailClause onFailClause = (BLangOnFailClause) node; SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !resolvedSymbol.closure) { onFailClause.possibleClosureSymbols.add(resolvedSymbol); } break; } else { SymbolEnv enclEnv = cEnv.enclEnv; if (enclEnv == null) { break; } cEnv = enclEnv; node = cEnv.node; } } } private boolean isNotFunction(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) { return false; } if (isFunctionPointer(funcSymbol)) { return false; } return true; } private boolean isFunctionPointer(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) { return false; } return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE && funcSymbol.kind == SymbolKind.FUNCTION && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE; } private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr, BType expectedType) { List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) { BType target = getErrorCtorNamedArgTargetType(namedArgsExpression, expectedType); BLangNamedArgsExpression clone = nodeCloner.clone(namedArgsExpression); BType type = checkExpr(clone, env, target); if (type == symTable.semanticError) { checkExpr(namedArgsExpression, env); } else { checkExpr(namedArgsExpression, env, target); } namedArgs.add(namedArgsExpression); } return namedArgs; } private BType getErrorCtorNamedArgTargetType(BLangNamedArgsExpression namedArgsExpression, BType expectedType) { if (expectedType == symTable.semanticError) { return symTable.semanticError; } if (expectedType.tag == TypeTags.MAP) { return ((BMapType) expectedType).constraint; } if (expectedType.tag != TypeTags.RECORD) { return symTable.semanticError; } BRecordType recordType = (BRecordType) expectedType; BField targetField = recordType.fields.get(namedArgsExpression.name.value); if (targetField != null) { return targetField.type; } if (!recordType.sealed && !recordType.fields.isEmpty()) { dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name, recordType); } return recordType.sealed ? 
symTable.noType : recordType.restFieldType; } private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) { if (objectType.getKind() == TypeKind.SERVICE && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION); return; } Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value)); BSymbol funcSymbol = symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol); if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) { if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value, objectType); resultType = symTable.semanticError; return; } } else { iExpr.symbol = funcSymbol; } if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } checkInvocationParamAndReturnType(iExpr); } private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) { BLangValueExpression varRef = (BLangValueExpression) aInv.expr; if (((varRef.symbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT) && !aInv.async) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType()); this.resultType = symTable.semanticError; aInv.symbol = symTable.notFoundSymbol; return; } BVarSymbol epSymbol = (BVarSymbol) varRef.symbol; Name remoteMethodQName = names .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value)); Name actionName = names.fromIdNode(aInv.name); BSymbol remoteFuncSymbol = symResolver .lookupMemberSymbol(aInv.pos, epSymbol.type.tsymbol.scope, env, remoteMethodQName, SymTag.FUNCTION); if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) { dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType); resultType = symTable.semanticError; return; } if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName); this.resultType = symTable.semanticError; return; } if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && Symbols.isFlagOn(expType.flags, Flags.CLIENT) && types.isNeverTypeOrStructureTypeWithARequiredNeverMember ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL); } aInv.symbol = remoteFuncSymbol; checkInvocationParamAndReturnType(aInv); } private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) { return getLangLibMethod(iExpr, bType) != symTable.notFoundSymbol; } private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) { Name funcName = names.fromString(iExpr.name.value); BSymbol funcSymbol = 
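/* Lang-lib methods are resolved by receiver type; on a successful lookup the receiver expression is prepended as the first argument below. */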
symResolver.lookupLangLibMethod(bType, funcName); if (funcSymbol == symTable.notFoundSymbol) { return symTable.notFoundSymbol; } iExpr.symbol = funcSymbol; iExpr.langLibInvocation = true; SymbolEnv enclEnv = this.env; this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); iExpr.argExprs.add(0, iExpr.expr); checkInvocationParamAndReturnType(iExpr); this.env = enclEnv; return funcSymbol; } private void checkInvocationParamAndReturnType(BLangInvocation iExpr) { BType actualType = checkInvocationParam(iExpr); resultType = types.checkType(iExpr, actualType, this.expType); } private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams, Set<String> requiredParamNames) { if (openIncRecordParams.size() != 1) { return null; } LinkedHashMap<String, BField> fields = ((BRecordType) openIncRecordParams.get(0).type).fields; for (String paramName : requiredParamNames) { if (!fields.containsKey(paramName)) { return null; } } return openIncRecordParams.get(0); } private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol, List<BVarSymbol> incRecordParams) { Set<String> requiredParamNames = new HashSet<>(); List<BVarSymbol> openIncRecordParams = new ArrayList<>(); for (BVarSymbol paramSymbol : invokableSymbol.params) { if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) && paramSymbol.type.getKind() == TypeKind.RECORD) { boolean recordWithDisallowFieldsOnly = true; LinkedHashMap<String, BField> fields = ((BRecordType) paramSymbol.type).fields; for (String fieldName : fields.keySet()) { BField field = fields.get(fieldName); if (field.symbol.type.tag != TypeTags.NEVER) { recordWithDisallowFieldsOnly = false; incRecordParams.add(field.symbol); requiredParamNames.add(fieldName); } } if (recordWithDisallowFieldsOnly && ((BRecordType) paramSymbol.type).restFieldType != symTable.noType) { openIncRecordParams.add(paramSymbol); } } else { requiredParamNames.add(paramSymbol.name.value); } } return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames); } private BType checkInvocationParam(BLangInvocation iExpr) { if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE); return symTable.semanticError; } if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type); return symTable.noType; } BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol); List<BType> paramTypes = ((BInvokableType) invokableSymbol.type).getParameterTypes(); List<BVarSymbol> incRecordParams = new ArrayList<>(); BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol, incRecordParams); int parameterCountForPositionalArgs = paramTypes.size(); int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size(); iExpr.requiredArgs = new ArrayList<>(); for (BVarSymbol symbol : invokableSymbol.params) { if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) || symbol.type.tag != TypeTags.RECORD) { continue; } LinkedHashMap<String, BField> fields = ((BRecordType) symbol.type).fields; if (fields.isEmpty()) { continue; } for (String field : fields.keySet()) { if (fields.get(field).type.tag != TypeTags.NEVER) { parameterCountForNamedArgs = parameterCountForNamedArgs - 1; break; } } } int i = 0; BLangExpression vararg = null; boolean foundNamedArg = false; for 
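/* Partition the provided arguments: named args and in-range positional args become required args, a spread expression becomes the vararg, and excess positional args go to restArgs. */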
(BLangExpression expr : iExpr.argExprs) { switch (expr.getKind()) { case NAMED_ARGS_EXPR: foundNamedArg = true; if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) { iExpr.requiredArgs.add(expr); } else { dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); } i++; break; case REST_ARGS_EXPR: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG); continue; } vararg = expr; break; default: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG); } if (i < parameterCountForPositionalArgs) { iExpr.requiredArgs.add(expr); } else { iExpr.restArgs.add(expr); } i++; break; } } return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams, incRecordParamAllowAdditionalFields); } private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg, List<BVarSymbol> incRecordParams, BVarSymbol incRecordParamAllowAdditionalFields) { BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol; BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type; BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol; List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params); List<BLangExpression> nonRestArgs = iExpr.requiredArgs; List<BVarSymbol> valueProvidedParams = new ArrayList<>(); List<BVarSymbol> requiredParams = new ArrayList<>(); List<BVarSymbol> requiredIncRecordParams = new ArrayList<>(); for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.isDefaultable) { continue; } requiredParams.add(nonRestParam); } for (BVarSymbol incRecordParam : incRecordParams) { if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) { requiredIncRecordParams.add(incRecordParam); } } int i = 0; for (; i < nonRestArgs.size(); i++) { BLangExpression arg = nonRestArgs.get(i); if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) { BType expectedType = paramTypes.get(i); types.checkType(arg.pos, arg.getBType(), expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); types.setImplicitCastExpr(arg, arg.getBType(), expectedType); } if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) { if (i < nonRestParams.size()) { BVarSymbol param = nonRestParams.get(i); checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation); valueProvidedParams.add(param); requiredParams.remove(param); continue; } break; } if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) { BLangIdentifier argName = ((NamedArgNode) arg).getName(); BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr, nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields); if (varSym == null) { dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName); break; } requiredParams.remove(varSym); requiredIncRecordParams.remove(varSym); if (valueProvidedParams.contains(varSym)) { dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value); continue; } checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation); valueProvidedParams.add(varSym); } } BVarSymbol restParam = invokableTypeSymbol.restParam; boolean errored = false; if (!requiredParams.isEmpty() && vararg == null) { for (BVarSymbol requiredParam : requiredParams) { if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) { dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, 
requiredParam.name, iExpr.name.value); errored = true; } } } if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) { for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) { for (BVarSymbol requiredParam : requiredParams) { if (requiredParam.type == requiredIncRecordParam.owner.type) { dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredIncRecordParam.name, iExpr.name.value); errored = true; } } } } if (restParam == null && (!iExpr.restArgs.isEmpty() || (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) { dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); errored = true; } if (errored) { return symTable.semanticError; } BType listTypeRestArg = restParam == null ? null : restParam.type; BRecordType mappingTypeRestArg = null; if (vararg != null && nonRestArgs.size() < nonRestParams.size()) { PackageID pkgID = env.enclPkg.symbol.pkgID; List<BType> tupleMemberTypes = new ArrayList<>(); BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL); mappingTypeRestArg = new BRecordType(recordSymbol); LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); BType tupleRestType = null; BVarSymbol fieldSymbol; for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) { BType paramType = paramTypes.get(j); BVarSymbol nonRestParam = nonRestParams.get(j); Name paramName = nonRestParam.name; tupleMemberTypes.add(paramType); boolean required = requiredParams.contains(nonRestParam); fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(required ? Flag.REQUIRED : Flag.OPTIONAL); }}), paramName, pkgID, paramType, recordSymbol, null, VIRTUAL); fields.put(paramName.value, new BField(paramName, null, fieldSymbol)); } if (listTypeRestArg != null) { if (listTypeRestArg.tag == TypeTags.ARRAY) { tupleRestType = ((BArrayType) listTypeRestArg).eType; } else if (listTypeRestArg.tag == TypeTags.TUPLE) { BTupleType restTupleType = (BTupleType) listTypeRestArg; tupleMemberTypes.addAll(restTupleType.tupleTypes); if (restTupleType.restType != null) { tupleRestType = restTupleType.restType; } } } BTupleType tupleType = new BTupleType(tupleMemberTypes); tupleType.restType = tupleRestType; listTypeRestArg = tupleType; mappingTypeRestArg.sealed = true; mappingTypeRestArg.restFieldType = symTable.noType; mappingTypeRestArg.fields = fields; recordSymbol.type = mappingTypeRestArg; mappingTypeRestArg.tsymbol = recordSymbol; } if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) { dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); return symTable.semanticError; } BType restType = null; if (vararg != null && !iExpr.restArgs.isEmpty()) { BType elementType = ((BArrayType) listTypeRestArg).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); } checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation); iExpr.restArgs.add(vararg); restType = this.resultType; } else if (vararg != null) { iExpr.restArgs.add(vararg); if (mappingTypeRestArg != null) { LinkedHashSet<BType> restTypes = new LinkedHashSet<>(); restTypes.add(listTypeRestArg); restTypes.add(mappingTypeRestArg); BType actualType = BUnionType.create(null, restTypes); checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation); } else { checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation); } restType = this.resultType; } else if 
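/* Rest arguments without a spread: each one is checked against the rest parameter's element type (array case) or the corresponding member/rest type (tuple case). */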
(!iExpr.restArgs.isEmpty()) { if (listTypeRestArg.tag == TypeTags.ARRAY) { BType elementType = ((BArrayType) listTypeRestArg).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); if (restType != symTable.semanticError && this.resultType == symTable.semanticError) { restType = this.resultType; } } } else { BTupleType tupleType = (BTupleType) listTypeRestArg; List<BType> tupleMemberTypes = tupleType.tupleTypes; BType tupleRestType = tupleType.restType; int tupleMemCount = tupleMemberTypes.size(); for (int j = 0; j < iExpr.restArgs.size(); j++) { BLangExpression restArg = iExpr.restArgs.get(j); BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType; checkTypeParamExpr(restArg, this.env, memType, true); if (restType != symTable.semanticError && this.resultType == symTable.semanticError) { restType = this.resultType; } } } } BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType()); if (restType != symTable.semanticError && Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) && Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) { retType = unifier.build(retType, expType, iExpr, types, symTable, dlog); } boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID); String sortFuncName = "sort"; if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) { checkArrayLibSortFuncArgs(iExpr); } if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) { return this.generateFutureType(invokableSymbol, retType); } else { return retType; } } private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) { if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).getBType()); } if (iExpr.argExprs.size() != 3) { return; } BLangExpression keyFunction = iExpr.argExprs.get(2); BType keyFunctionType = keyFunction.getBType(); if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (keyFunctionType.tag == TypeTags.NIL) { if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).getBType()); } return; } Location pos; BType returnType; if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { pos = keyFunction.pos; returnType = keyFunction.getBType().getReturnType(); } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) { BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction); pos = arrowFunction.body.expr.pos; returnType = arrowFunction.body.expr.getBType(); if (returnType.tag == TypeTags.SEMANTIC_ERROR) { return; } } else { BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction; pos = keyLambdaFunction.function.pos; returnType = keyLambdaFunction.function.getBType().getReturnType(); } if (!types.isOrderedType(returnType, false)) { dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType); } } private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr, List<BVarSymbol> nonRestParams, List<BVarSymbol> incRecordParams, BVarSymbol incRecordParamAllowAdditionalFields) { for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.getName().value.equals(argName.value)) { return nonRestParam; } } for (BVarSymbol incRecordParam : incRecordParams) { if 
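/* Fields of included record parameters are addressable by name as named arguments too. */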
(incRecordParam.getName().value.equals(argName.value)) { return incRecordParam; } } if (incRecordParamAllowAdditionalFields != null) { BRecordType incRecordType = (BRecordType) incRecordParamAllowAdditionalFields.type; checkExpr(expr, env, incRecordType.restFieldType); if (!incRecordType.fields.containsKey(argName.value)) { return new BVarSymbol(0, names.fromIdNode(argName), null, symTable.noType, null, argName.pos, VIRTUAL); } } return null; } private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) { boolean isWorkerStart = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX); return new BFutureType(TypeTags.FUTURE, retType, null, isWorkerStart); } private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral); } private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { if (typeParamAnalyzer.notRequireTypeParams(env)) { checkExpr(arg, env, expectedType); return; } if (requireTypeInference(arg, inferTypeForNumericLiteral)) { BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env); BType inferredType = checkExpr(arg, env, expType); typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType); return; } checkExpr(arg, env, expectedType); typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType); } private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) { switch (expr.getKind()) { case GROUP_EXPR: return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral); case ARROW_EXPR: case LIST_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: return true; case NUMERIC_LITERAL: return inferTypeForNumericLiteral; default: return false; } } private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) { BType fieldType = symTable.semanticError; boolean keyValueField = field.isKeyValueField(); boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP; boolean readOnlyConstructorField = false; String fieldName = null; Location pos = null; BLangExpression valueExpr = null; if (keyValueField) { valueExpr = ((BLangRecordKeyValueField) field).valueExpr; } else if (!spreadOpField) { valueExpr = (BLangRecordVarNameField) field; } switch (mappingType.tag) { case TypeTags.RECORD: if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; fieldType = checkRecordLiteralKeyExpr(key.expr, key.computedKey, (BRecordType) mappingType); readOnlyConstructorField = keyValField.readonly; pos = key.expr.pos; fieldName = getKeyValueFieldName(keyValField); } else if (spreadOpField) { BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; checkExpr(spreadExpr, this.env); BType spreadExprType = spreadExpr.getBType(); if (spreadExprType.tag == TypeTags.MAP) { return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint, getAllFieldType((BRecordType) mappingType), DiagnosticErrorCode.INCOMPATIBLE_TYPES); } if (spreadExprType.tag != TypeTags.RECORD) { dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadExprType); return symTable.semanticError; } boolean errored = false; for (BField bField : ((BRecordType) 
spreadExprType).fields.values()) { BType specFieldType = bField.type; BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, this.env, bField.name, (BRecordType) mappingType); if (expectedFieldType != symTable.semanticError && !types.isAssignable(specFieldType, expectedFieldType)) { dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD, expectedFieldType, bField.name, specFieldType); if (!errored) { errored = true; } } } return errored ? symTable.semanticError : symTable.noType; } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; fieldType = checkRecordLiteralKeyExpr(varNameField, false, (BRecordType) mappingType); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } break; case TypeTags.MAP: if (spreadOpField) { BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; BType spreadOpType = checkExpr(spreadExp, this.env); BType spreadOpMemberType; switch (spreadOpType.tag) { case TypeTags.RECORD: List<BType> types = new ArrayList<>(); BRecordType recordType = (BRecordType) spreadOpType; for (BField recField : recordType.fields.values()) { types.add(recField.type); } if (!recordType.sealed) { types.add(recordType.restFieldType); } spreadOpMemberType = getRepresentativeBroadType(types); break; case TypeTags.MAP: spreadOpMemberType = ((BMapType) spreadOpType).constraint; break; default: dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadOpType); return symTable.semanticError; } return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } boolean validMapKey; if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey); readOnlyConstructorField = keyValField.readonly; pos = key.pos; fieldName = getKeyValueFieldName(keyValField); } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } fieldType = validMapKey ? 
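/* For map constructors every valid key yields the map's constraint type as the field type; an invalid key makes it a semantic error. */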
((BMapType) mappingType).constraint : symTable.semanticError; break; } if (readOnlyConstructorField) { if (types.isSelectivelyImmutableType(fieldType)) { fieldType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) fieldType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } else if (!types.isInherentlyImmutableType(fieldType)) { dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType); fieldType = symTable.semanticError; } } if (spreadOpField) { valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } BLangExpression exprToCheck = valueExpr; if (this.nonErrorLoggingCheck) { valueExpr.cloneAttempt++; exprToCheck = nodeCloner.clone(valueExpr); } else { ((BLangNode) field).setBType(fieldType); } return checkExpr(exprToCheck, this.env, fieldType); } private BType checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, BRecordType recordType) { Name fieldName; if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (recordType.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(recordType.restFieldType); } return BUnionType.create(null, fieldTypes); } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr; fieldName = names.fromIdNode(varRef.variableName); } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) { fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value); } else { dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY); return symTable.semanticError; } return checkRecordLiteralKeyByName(keyExpr.pos, this.env, fieldName, recordType); } private BType checkRecordLiteralKeyByName(Location location, SymbolEnv env, Name key, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(location, env, key, recordType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { return fieldSymbol.type; } if (recordType.sealed) { dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key, recordType.tsymbol.type.getKind().typeName(), recordType); return symTable.semanticError; } return recordType.restFieldType; } private BType getAllFieldType(BRecordType recordType) { LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BField field : recordType.fields.values()) { possibleTypes.add(field.type); } BType restFieldType = recordType.restFieldType; if (restFieldType != null && restFieldType != symTable.noType) { possibleTypes.add(restFieldType); } return BUnionType.create(null, possibleTypes); } private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) { if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.getBType() == symTable.semanticError) { return false; } return true; } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF || (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING)) { return true; } dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY); return false; } private BType addNilForNillableAccessType(BType actualType) { if (actualType.isNullable()) { return actualType; } return 
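/* Widen the non-nilable access result with nil. */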
BUnionType.create(null, actualType, symTable.nilType); } private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { return symTable.semanticError; } if (recordType.sealed) { return symTable.semanticError; } return recordType.restFieldType; } private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess, Name fieldName, BObjectType objectType) { BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos, this.env, fieldName, objectType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, fieldName.value)); fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { dlog.error(bLangFieldBasedAccess.field.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol); return symTable.semanticError; } if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) && !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) { fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol); fieldSymbol.flags &= ~Flags.ISOLATED; fieldSymbol.type.flags &= ~Flags.ISOLATED; } bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkTupleFieldType(BType tupleType, int indexValue) { BTupleType bTupleType = (BTupleType) tupleType; if (bTupleType.tupleTypes.size() <= indexValue && bTupleType.restType != null) { return bTupleType.restType; } else if (indexValue < 0 || bTupleType.tupleTypes.size() <= indexValue) { return symTable.semanticError; } return bTupleType.tupleTypes.get(indexValue); } private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) { BLangExpression startTagName = bLangXMLElementLiteral.startTagName; checkExpr(startTagName, xmlElementEnv, symTable.stringType); BLangExpression endTagName = bLangXMLElementLiteral.endTagName; if (endTagName == null) { return; } checkExpr(endTagName, xmlElementEnv, symTable.stringType); if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME && startTagName.equals(endTagName)) { return; } if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) { return; } 
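// Reaching here, the tag names are either two different static qualified names or a static/dynamic mix, so the start and end tags are reported as mismatched.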
dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH); } private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) { for (BLangExpression expr : exprs) { checkExpr(expr, env); BType type = expr.getBType(); if (type == symTable.semanticError) { continue; } if (!types.isNonNilSimpleBasicTypeOrString(type)) { dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType), type); } } } /** * Concatenate the consecutive text type nodes, and get the reduced set of children. * * @param exprs Child nodes * @param xmlElementEnv Symbol environment of the enclosing XML element * @return Reduced set of children */ private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) { List<BLangExpression> newChildren = new ArrayList<>(); List<BLangExpression> tempConcatExpressions = new ArrayList<>(); for (BLangExpression expr : exprs) { BType exprType; if (expr.getKind() == NodeKind.QUERY_EXPR) { exprType = checkExpr(expr, xmlElementEnv, expType); } else { exprType = checkExpr(expr, xmlElementEnv); } if (TypeTags.isXMLTypeTag(exprType.tag)) { if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); tempConcatExpressions = new ArrayList<>(); } newChildren.add(expr); continue; } BType type = expr.getBType(); if (type.tag >= TypeTags.JSON) { if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) { dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType, symTable.xmlType), type); } continue; } tempConcatExpressions.add(expr); } if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); } return newChildren; } private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.textFragments = exprs; xmlTextLiteral.pos = exprs.get(0).pos; xmlTextLiteral.setBType(symTable.xmlType); return xmlTextLiteral; } private BType getTypeOfExprInFieldAccess(BLangExpression expr) { checkExpr(expr, this.env, symTable.noType); return expr.getBType(); } private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) { accessExpr.originalType = actualType; BUnionType unionType = BUnionType.create(null, actualType); if (returnsNull(accessExpr)) { unionType.add(symTable.nilType); } BType parentType = accessExpr.expr.getBType(); if (accessExpr.errorSafeNavigation && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) { unionType.add(symTable.errorType); } if (unionType.getMemberTypes().size() == 1) { return unionType.getMemberTypes().toArray(new BType[0])[0]; } return unionType; } private boolean returnsNull(BLangAccessExpression accessExpr) { BType parentType = accessExpr.expr.getBType(); if (parentType.isNullable() && parentType.tag != TypeTags.JSON) { return true; } if (parentType.tag != TypeTags.MAP) { return false; } if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR && accessExpr.expr.getBType().tag == TypeTags.MAP) { BType constraintType = ((BMapType) accessExpr.expr.getBType()).constraint; return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag
!= TypeTags.JSON; } return false; } private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.OBJECT) { return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memType); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return symTable.semanticError; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType == symTable.semanticError) { return fieldType; } return BUnionType.create(null, fieldType, symTable.nilType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } 
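/* Collect the field type resolved for this union member; members that lack the field were flagged above so that nil is added to the overall field type after the loop. */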
fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.isEmpty()) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType; } private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) { actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) { actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); if (actualType != symTable.semanticError) { fieldAccessExpr.originalType = actualType; return actualType; } if (!fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_NON_REQUIRED_FIELD, varRefType, fieldName); return actualType; } actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType); } } else if (types.isLax(varRefType)) { if (fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT, varRefType); return symTable.semanticError; } if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } BType laxFieldAccessType = getLaxFieldAccessType(varRefType); actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.originalType = laxFieldAccessType; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE); } actualType = symTable.xmlType; fieldAccessExpr.originalType = actualType; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType); } return actualType; } private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) { BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldAccess = fieldAccessExpr; String nsPrefix = nsPrefixedFieldAccess.nsPrefix.value; BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix)); if (nsSymbol == symTable.notFoundSymbol) { dlog.error(nsPrefixedFieldAccess.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, 
nsPrefixedFieldAccess.nsPrefix); } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst( nsPrefixedFieldAccess.field.value, nsPrefixedFieldAccess.nsPrefix.value, (BPackageSymbol) nsSymbol, fieldAccessExpr.pos); } else { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) nsSymbol; } } private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) { return fieldBasedAccess.originalType != null && types.isLax(fieldBasedAccess.originalType); } private BType getLaxFieldAccessType(BType exprType) { switch (exprType.tag) { case TypeTags.JSON: return symTable.jsonType; case TypeTags.XML: case TypeTags.XML_ELEMENT: return symTable.stringType; case TypeTags.MAP: return ((BMapType) exprType).constraint; case TypeTags.UNION: BUnionType unionType = (BUnionType) exprType; if (types.isSameType(symTable.jsonType, unionType)) { return symTable.jsonType; } LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType))); return memberTypes.size() == 1 ? memberTypes.iterator().next() : BUnionType.create(null, memberTypes); } return symTable.semanticError; } private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; boolean nillableExprType = false; BType effectiveType = varRefType; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); } } if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) { actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName); if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD, varRefType, fieldName); } fieldAccessExpr.nilSafeNavigation = nillableExprType; fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType : types.getTypeWithoutNil(actualType); } else if (types.isLax(effectiveType)) { BType laxFieldAccessType = getLaxFieldAccessType(effectiveType); actualType = accessCouldResultInError(effectiveType) ? BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); actualType = accessCouldResultInError(effectiveType) ? 
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType); } if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private boolean accessCouldResultInError(BType type) { if (type.tag == TypeTags.JSON) { return true; } if (type.tag == TypeTags.MAP) { return false; } if (type.tag == TypeTags.XML) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream().anyMatch(this::accessCouldResultInError); } else { return false; } } private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) { BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType()); boolean nillableExprType = false; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } if (nillableExprType) { varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); if (!types.isSubTypeOfMapping(varRefType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } } } } BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; BType actualType = symTable.semanticError; if (types.isSubTypeOfMapping(varRefType)) { checkExpr(indexExpr, this.env, symTable.stringType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType); if (actualType == symTable.semanticError) { if (indexExpr.getBType().tag == TypeTags.STRING && isConst(indexExpr)) { String fieldName = getConstFieldName(indexExpr); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD, fieldName, indexBasedAccessExpr.expr.getBType()); return actualType; } dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType()); return actualType; } indexBasedAccessExpr.nilSafeNavigation = nillableExprType; indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? 
actualType : types.getTypeWithoutNil(actualType); } else if (types.isSubTypeOfList(varRefType)) { checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType); indexBasedAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { if (indexExpr.getBType().tag == TypeTags.INT && isConst(indexExpr)) { dlog.error(indexBasedAccessExpr.indexExpr.pos, DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr)); return actualType; } dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType()); return actualType; } } else if (types.isAssignable(varRefType, symTable.stringType)) { if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } indexBasedAccessExpr.originalType = symTable.stringType; actualType = symTable.stringType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (indexBasedAccessExpr.isLValue) { indexExpr.setBType(symTable.semanticError); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE); return actualType; } BType type = checkExpr(indexExpr, this.env, symTable.intType); if (type == symTable.semanticError) { return type; } indexBasedAccessExpr.originalType = varRefType; actualType = varRefType; } else if (varRefType.tag == TypeTags.TABLE) { if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS, varRefType); return symTable.semanticError; } BTableType tableType = (BTableType) indexBasedAccessExpr.expr.getBType(); BType keyTypeConstraint = tableType.keyTypeConstraint; if (tableType.keyTypeConstraint == null) { keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.getBType()). 
fieldNameList, ((BTableType) indexBasedAccessExpr.expr.getBType()).constraint); if (keyTypeConstraint == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE, indexBasedAccessExpr.expr); return symTable.semanticError; } } if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) { checkExpr(indexExpr, this.env, keyTypeConstraint); if (indexExpr.getBType() == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } else { List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr) indexBasedAccessExpr.indexExpr).multiKeyIndexExprs; List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes; if (keyConstraintTypes.size() != multiKeyExpressionList.size()) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } for (int i = 0; i < multiKeyExpressionList.size(); i++) { BLangExpression keyExpr = multiKeyExpressionList.get(i); checkExpr(keyExpr, this.env, keyConstraintTypes.get(i)); if (keyExpr.getBType() == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } } if (expType.tag != TypeTags.NONE) { BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType); if (resultType == symTable.semanticError) { return symTable.semanticError; } } BType constraint = tableType.constraint; actualType = addNilForNillableAccessType(constraint); indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType : types.getTypeWithoutNil(actualType); } else if (varRefType == symTable.semanticError) { indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError); return symTable.semanticError; } else { indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } if (nillableExprType && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private Long getConstIndex(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value : (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private String getConstFieldName(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value : (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType, BArrayType arrayType) { BType actualType = symTable.semanticError; switch (indexExprType.tag) { case TypeTags.INT: BLangExpression indexExpr = indexBasedAccess.indexExpr; if (!isConst(indexExpr) || arrayType.state == BArrayState.OPEN) { actualType = arrayType.eType; break; } actualType = getConstIndex(indexExpr) >= arrayType.size ? 
symTable.semanticError : arrayType.eType; break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) indexExprType; boolean validIndexExists = false; for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); if (indexValue >= 0 && (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) { validIndexExists = true; break; } } if (!validIndexExists) { return symTable.semanticError; } actualType = arrayType.eType; break; case TypeTags.UNION: List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(matchedType -> (BFiniteType) matchedType) .collect(Collectors.toList()); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType); if (elementType == symTable.semanticError) { return symTable.semanticError; } actualType = arrayType.eType; } return actualType; } private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.ARRAY) { return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type); } if (type.tag == TypeTags.TUPLE) { return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType()); } LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.INT: if (isConst(indexExpr)) { actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue()); } else { BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType(); LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>()); actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); } break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); BType fieldType = checkTupleFieldType(tuple, indexValue); if (fieldType.tag != TypeTags.SEMANTIC_ERROR) { possibleTypes.add(fieldType); } } if (possibleTypes.size() == 0) { return symTable.semanticError; } actualType = possibleTypes.size() == 1 ? 
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) { tupleType.tupleTypes .forEach(memberType -> { if (memberType.tag == TypeTags.UNION) { collectMemberTypes((BUnionType) memberType, memberTypes); } else { memberTypes.add(memberType); } }); return memberTypes; } private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.MAP) { BType constraint = ((BMapType) type).constraint; return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint); } if (type.tag == TypeTags.RECORD) { return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType()); } BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? 
addNilForNillableAccessType(fieldType) : fieldType; } private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.STRING: if (isConst(indexExpr)) { String fieldName = IdentifierUtils.escapeSpecialCharacters(getConstFieldName(indexExpr)); actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType != symTable.semanticError) { return actualType; } actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { return actualType; } if (actualType == symTable.neverType) { return actualType; } return addNilForNillableAccessType(actualType); } if (accessExpr.isLValue) { return actualType; } return addNilForNillableAccessType(actualType); } LinkedHashSet<BType> fieldTypes = record.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (record.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(record.restFieldType); } if (fieldTypes.stream().noneMatch(BType::isNullable)) { fieldTypes.add(symTable.nilType); } actualType = BUnionType.create(null, fieldTypes); break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { String fieldName = (String) ((BLangLiteral) finiteMember).value; BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); } if (fieldType != symTable.semanticError) { fieldType = addNilForNillableAccessType(fieldType); } } if (fieldType.tag == TypeTags.SEMANTIC_ERROR) { continue; } possibleTypes.add(fieldType); } if (possibleTypes.isEmpty()) { return symTable.semanticError; } if (possibleTypes.stream().noneMatch(BType::isNullable)) { possibleTypes.add(symTable.nilType); } actualType = possibleTypes.size() == 1 ? 
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private List<BType> getTypesList(BType type) { if (type.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) type; return new ArrayList<>(unionType.getMemberTypes()); } else { return Lists.of(type); } } private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) { List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType()); LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>(); for (BType type : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { BType patternExprType = pattern.expr.getBType(); matchExprTypes.addAll(getTypesList(patternExprType)); if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) { return new LinkedHashSet<BType>() { { add(symTable.semanticError); } }; } assignable = this.types.isAssignable(type, pattern.variable.getBType()); if (assignable) { break; } } if (!assignable) { matchExprTypes.add(type); } } return matchExprTypes; } private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) { if (encounteredTypes.contains(type)) { return false; } encounteredTypes.add(type); switch (type.tag) { case TypeTags.UNION: for (BType bType1 : ((BUnionType) type).getMemberTypes()) { if (couldHoldTableValues(bType1, encounteredTypes)) { return true; } } return false; case TypeTags.MAP: return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes); case TypeTags.RECORD: BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (couldHoldTableValues(field.type, encounteredTypes)) { return true; } } return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes); case TypeTags.ARRAY: return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes); case TypeTags.TUPLE: for (BType bType : ((BTupleType) type).getTupleTypes()) { if (couldHoldTableValues(bType, encounteredTypes)) { return true; } } return false; } return false; } private boolean isConst(BLangExpression expression) { if 
(ConstantAnalyzer.isValidConstantExpressionNode(expression)) { return true; } if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; } private Name getCurrentCompUnit(BLangNode node) { return names.fromString(node.pos.lineRange().filePath()); } private BType getRepresentativeBroadType(List<BType> inferredTypeList) { for (int i = 0; i < inferredTypeList.size(); i++) { BType type = inferredTypeList.get(i); if (type.tag == TypeTags.SEMANTIC_ERROR) { return type; } for (int j = i + 1; j < inferredTypeList.size(); j++) { BType otherType = inferredTypeList.get(j); if (otherType.tag == TypeTags.SEMANTIC_ERROR) { return otherType; } if (types.isAssignable(otherType, type)) { inferredTypeList.remove(j); j -= 1; continue; } if (types.isAssignable(type, otherType)) { inferredTypeList.remove(i); i -= 1; break; } } } if (inferredTypeList.size() == 1) { return inferredTypeList.get(0); } return BUnionType.create(null, inferredTypeList.toArray(new BType[0])); } private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>(); List<BType> restFieldTypes = new ArrayList<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValue.key; BLangExpression expression = keyValue.valueExpr; BLangExpression keyExpr = key.expr; if (key.computedKey) { checkExpr(keyExpr, env, symTable.stringType); BType exprType = checkExpr(expression, env, expType); if (isUniqueType(restFieldTypes, exprType)) { restFieldTypes.add(exprType); } } else { addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr), keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) : checkExpr(expression, env, expType), true, keyValue.readonly); } } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType); int typeTag = type.tag; if (typeTag == TypeTags.MAP) { BType constraintType = ((BMapType) type).constraint; if (isUniqueType(restFieldTypes, constraintType)) { restFieldTypes.add(constraintType); } } if (type.tag != TypeTags.RECORD) { continue; } BRecordType recordType = (BRecordType) type; for (BField recField : recordType.fields.values()) { addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type, !Symbols.isOptional(recField.symbol), false); } if (!recordType.sealed) { BType restFieldType = recordType.restFieldType; if (isUniqueType(restFieldTypes, restFieldType)) { restFieldTypes.add(restFieldType); } } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField), varNameField.readonly ? 
checkExpr(varNameField, env, symTable.readonlyType) : checkExpr(varNameField, env, expType), true, varNameField.readonly); } } LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); boolean allReadOnlyNonRestFields = true; for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) { FieldInfo fieldInfo = entry.getValue(); List<BType> types = fieldInfo.types; if (types.contains(symTable.semanticError)) { return symTable.semanticError; } String key = entry.getKey(); Name fieldName = names.fromString(key); BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0])); Set<Flag> flags = new HashSet<>(); if (fieldInfo.required) { flags.add(Flag.REQUIRED); } else { flags.add(Flag.OPTIONAL); } if (fieldInfo.readonly) { flags.add(Flag.READONLY); } else if (allReadOnlyNonRestFields) { allReadOnlyNonRestFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol, symTable.builtinPos, VIRTUAL); fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = fields; if (restFieldTypes.contains(symTable.semanticError)) { return symTable.semanticError; } if (restFieldTypes.isEmpty()) { recordType.sealed = true; recordType.restFieldType = symTable.noType; } else if (restFieldTypes.size() == 1) { recordType.restFieldType = restFieldTypes.get(0); } else { recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0])); } recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) { recordType.flags |= Flags.READONLY; recordSymbol.flags |= Flags.READONLY; } BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); return recordType; } private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location, SymbolOrigin origin) { BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)), pkgID, null, env.scope.owner, location, origin); BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null); BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol( Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false, symTable.builtinPos, VIRTUAL); initFuncSymbol.retType = symTable.nilType; recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, location); recordSymbol.scope = new Scope(recordSymbol); recordSymbol.scope.define( names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value), recordSymbol.initializerFunc.symbol); return recordSymbol; } private String getKeyName(BLangExpression key) { return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? 
((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value; } private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString, BType exprType, boolean required, boolean readonly) { if (!nonRestFieldTypes.containsKey(keyString)) { nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required, readonly)); return; } FieldInfo fieldInfo = nonRestFieldTypes.get(keyString); List<BType> typeList = fieldInfo.types; if (isUniqueType(typeList, exprType)) { typeList.add(exprType); } if (required && !fieldInfo.required) { fieldInfo.required = true; } } private boolean isUniqueType(List<BType> typeList, BType type) { boolean isRecord = type.tag == TypeTags.RECORD; for (BType bType : typeList) { if (isRecord) { if (type == bType) { return false; } } else if (types.isSameType(type, bType)) { return false; } } return true; } private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType, BType expType) { if (expType == symTable.semanticError) { return expType; } boolean unionExpType = expType.tag == TypeTags.UNION; if (expType == mutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) { return mutableXmlSubType; } BXMLSubType immutableXmlSubType = (BXMLSubType) ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable, anonymousModelHelper, names); if (expType == immutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) { return immutableXmlSubType; } if (!unionExpType) { dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } List<BType> compatibleTypes = new ArrayList<>(); for (BType memberType : ((BUnionType) expType).getMemberTypes()) { if (compatibleTypes.contains(memberType)) { continue; } if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) { compatibleTypes.add(memberType); continue; } if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) { compatibleTypes.add(mutableXmlSubType); continue; } if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) { compatibleTypes.add(immutableXmlSubType); } } if (compatibleTypes.isEmpty()) { dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } if (compatibleTypes.size() == 1) { return compatibleTypes.get(0); } dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) { for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) { BType childType = modifiedChild.getBType(); if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) { continue; } modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types, (SelectivelyImmutableReferenceType) childType, env, symTable, anonymousModelHelper, names)); if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) { markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild); } } } private void logUndefinedSymbolError(Location pos, String name) { if (!missingNodesHelper.isMissingNode(name)) { dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name); } } private void 
markTypeAsIsolated(BType actualType) { actualType.flags |= Flags.ISOLATED; actualType.tsymbol.flags |= Flags.ISOLATED; } private boolean isObjectConstructorExpr(BLangTypeInit cIExpr, BType actualType) { return cIExpr.getType() != null && Symbols.isFlagOn(actualType.tsymbol.flags, Flags.ANONYMOUS); } private BLangClassDefinition getClassDefinitionForObjectConstructorExpr(BLangTypeInit cIExpr, SymbolEnv env) { List<BLangClassDefinition> classDefinitions = env.enclPkg.classDefinitions; BLangUserDefinedType userDefinedType = (BLangUserDefinedType) cIExpr.getType(); BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(userDefinedType.pos, env, names.fromIdNode(userDefinedType.pkgAlias), names.fromIdNode(userDefinedType.typeName)); for (BLangClassDefinition classDefinition : classDefinitions) { if (classDefinition.symbol == symbol) { return classDefinition; } } return null; } private void handleObjectConstrExprForReadOnly(BLangTypeInit cIExpr, BObjectType actualObjectType, BLangClassDefinition classDefForConstructor, SymbolEnv env, boolean logErrors) { boolean hasNeverReadOnlyField = false; for (BField field : actualObjectType.fields.values()) { BType fieldType = field.type; if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) { semanticAnalyzer.analyzeNode(classDefForConstructor, env); hasNeverReadOnlyField = true; if (!logErrors) { return; } dlog.error(field.pos, DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE, fieldType); } } if (hasNeverReadOnlyField) { return; } classDefForConstructor.flagSet.add(Flag.READONLY); actualObjectType.flags |= Flags.READONLY; actualObjectType.tsymbol.flags |= Flags.READONLY; ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types, anonymousModelHelper, symTable, names, cIExpr.pos); semanticAnalyzer.analyzeNode(classDefForConstructor, env); } private void markConstructedObjectIsolatedness(BObjectType actualObjectType) { if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) { markTypeAsIsolated(actualObjectType); return; } for (BField field : actualObjectType.fields.values()) { if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) || !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) { return; } } markTypeAsIsolated(actualObjectType); } private void markLeafNode(BLangAccessExpression accessExpression) { BLangNode parent = accessExpression.parent; if (parent == null) { accessExpression.leafNode = true; return; } NodeKind kind = parent.getKind(); while (kind == NodeKind.GROUP_EXPR) { parent = parent.parent; if (parent == null) { accessExpression.leafNode = true; break; } kind = parent.getKind(); } if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) { accessExpression.leafNode = true; } } private static class FieldInfo { List<BType> types; boolean required; boolean readonly; private FieldInfo(List<BType> types, boolean required, boolean readonly) { this.types = types; this.required = required; this.readonly = readonly; } } }
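A minimal, self-contained sketch (hypothetical names; not part of the compiler sources above) of the aggregation contract that FieldInfo supports in defineInferredRecordType: each mapping-constructor key accumulates the distinct types contributed to it, and the key is marked required as soon as any contribution is a required field.

import java.util.*;

// Hypothetical illustration of the per-key aggregation performed while inferring
// a record type from a mapping constructor; all names here are invented.
class InferredFieldAggregationSketch {
    static final class FieldInfo {
        final List<String> types = new ArrayList<>(); // distinct types seen for this key
        boolean required;
        FieldInfo(String firstType, boolean required) {
            types.add(firstType);
            this.required = required;
        }
    }

    // Mirrors the shape of addToNonRestFieldTypes(): the first sighting creates the
    // entry; later sightings add unseen types and can only strengthen 'required'.
    static void add(Map<String, FieldInfo> fields, String key, String type, boolean required) {
        FieldInfo info = fields.get(key);
        if (info == null) {
            fields.put(key, new FieldInfo(type, required));
            return;
        }
        if (!info.types.contains(type)) { // keep each contributed type once (approximates isUniqueType())
            info.types.add(type);
        }
        if (required) {
            info.required = true;
        }
    }

    public static void main(String[] args) {
        Map<String, FieldInfo> fields = new LinkedHashMap<>();
        add(fields, "id", "int", true);     // e.g. written directly as a key-value field
        add(fields, "id", "string", false); // e.g. contributed again via a spread field
        FieldInfo id = fields.get("id");
        System.out.println(id.types + " required=" + id.required); // [int, string] required=true
    }
}

The real code additionally tracks a readonly flag per key and unions the collected types when materialising the record field; the sketch keeps only the parts needed to show the uniqueness and required-strengthening behaviour.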
class TypeChecker extends BLangNodeVisitor { private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>(); private static Set<String> listLengthModifierFunctions = new HashSet<>(); private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>(); private static final String TABLE_TNAME = "table"; private static final String LIST_LANG_LIB = "lang.array"; private static final String MAP_LANG_LIB = "lang.map"; private static final String TABLE_LANG_LIB = "lang.table"; private static final String VALUE_LANG_LIB = "lang.value"; private static final String XML_LANG_LIB = "lang.xml"; private static final String FUNCTION_NAME_PUSH = "push"; private static final String FUNCTION_NAME_POP = "pop"; private static final String FUNCTION_NAME_SHIFT = "shift"; private static final String FUNCTION_NAME_UNSHIFT = "unshift"; private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType"; private Names names; private SymbolTable symTable; private SymbolEnter symbolEnter; private SymbolResolver symResolver; private NodeCloner nodeCloner; private Types types; private BLangDiagnosticLog dlog; private SymbolEnv env; private boolean isTypeChecked; private TypeNarrower typeNarrower; private TypeParamAnalyzer typeParamAnalyzer; private BLangAnonymousModelHelper anonymousModelHelper; private SemanticAnalyzer semanticAnalyzer; private Unifier unifier; private boolean nonErrorLoggingCheck = false; private int letCount = 0; private Stack<SymbolEnv> queryEnvs, prevEnvs; private Stack<BLangSelectClause> selectClauses; private boolean checkWithinQueryExpr = false; private BLangMissingNodesHelper missingNodesHelper; private boolean breakToParallelQueryEnv = false; /** * Expected types or inherited types. */ private BType expType; private BType resultType; private DiagnosticCode diagCode; static { listLengthModifierFunctions.add(FUNCTION_NAME_PUSH); listLengthModifierFunctions.add(FUNCTION_NAME_POP); listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT); listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT); modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeAll"); add("setLength"); add("reverse"); add("sort"); add("pop"); add("push"); add("shift"); add("unshift"); }}); modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{ add("put"); add("add"); add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{ add("mergeJson"); }}); modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{ add("setName"); add("setChildren"); add("strip"); }}); } public static TypeChecker getInstance(CompilerContext context) { TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY); if (typeChecker == null) { typeChecker = new TypeChecker(context); } return typeChecker; } public TypeChecker(CompilerContext context) { context.put(TYPE_CHECKER_KEY, this); this.names = Names.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.typeNarrower = TypeNarrower.getInstance(context); this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context); this.anonymousModelHelper = 
BLangAnonymousModelHelper.getInstance(context); this.semanticAnalyzer = SemanticAnalyzer.getInstance(context); this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context); this.selectClauses = new Stack<>(); this.queryEnvs = new Stack<>(); this.prevEnvs = new Stack<>(); this.unifier = new Unifier(); } public BType checkExpr(BLangExpression expr, SymbolEnv env) { return checkExpr(expr, env, symTable.noType); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) { return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) { if (expr.typeChecked) { return expr.getBType(); } if (expType.tag == TypeTags.INTERSECTION) { expType = ((BIntersectionType) expType).effectiveType; } SymbolEnv prevEnv = this.env; BType preExpType = this.expType; DiagnosticCode preDiagCode = this.diagCode; this.env = env; this.diagCode = diagCode; this.expType = expType; this.isTypeChecked = true; expr.expectedType = expType; expr.accept(this); if (resultType.tag == TypeTags.INTERSECTION) { resultType = ((BIntersectionType) resultType).effectiveType; } expr.setTypeCheckedType(resultType); expr.typeChecked = isTypeChecked; this.env = prevEnv; this.expType = preExpType; this.diagCode = preDiagCode; validateAndSetExprExpectedType(expr); return resultType; } private void validateAndSetExprExpectedType(BLangExpression expr) { if (resultType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null && expr.expectedType.tag == TypeTags.MAP && expr.getBType().tag == TypeTags.RECORD) { return; } expr.expectedType = resultType; } public void visit(BLangLiteral literalExpr) { BType literalType = setLiteralValueAndGetType(literalExpr, expType); if (literalType == symTable.semanticError || literalExpr.isFiniteContext) { return; } resultType = types.checkType(literalExpr, literalType, expType); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { checkXMLNamespacePrefixes(xmlElementAccess.filters); checkExpr(xmlElementAccess.expr, env, symTable.xmlType); resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { checkXMLNamespacePrefixes(xmlNavigation.filters); if (xmlNavigation.childIndex != null) { checkExpr(xmlNavigation.childIndex, env, symTable.intType); } BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType); if (exprType.tag == TypeTags.UNION) { dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS, xmlNavigation.expr.getBType()); } BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN ? 
symTable.xmlType : symTable.xmlElementSeqType; types.checkType(xmlNavigation, actualType, expType); if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { resultType = symTable.xmlType; } else { resultType = symTable.xmlElementSeqType; } } private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) { for (BLangXMLElementFilter filter : filters) { if (!filter.namespace.isEmpty()) { Name nsName = names.fromString(filter.namespace); BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName); filter.namespaceSymbol = nsSymbol; if (nsSymbol == symTable.notFoundSymbol) { dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName); } } } } private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) { BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag); Object literalValue = literalExpr.value; if (literalType.tag == TypeTags.INT || literalType.tag == TypeTags.BYTE) { if (expType.tag == TypeTags.FLOAT) { literalType = symTable.floatType; literalExpr.value = ((Long) literalValue).doubleValue(); } else if (expType.tag == TypeTags.DECIMAL && !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) { literalType = symTable.decimalType; literalExpr.value = String.valueOf(literalValue); } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) { literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue); if (literalType == symTable.semanticError) { return symTable.semanticError; } } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, 
symTable.unsigned32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); BType intSubType = null; boolean intOrIntCompatibleTypeFound = false; for (BType memType : memberTypes) { if ((memType.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(memType.tag)) || memType.tag == TypeTags.BYTE) { intSubType = memType; } else if (memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) { intOrIntCompatibleTypeFound = true; } } if (intOrIntCompatibleTypeFound) { return setLiteralValueAndGetType(literalExpr, symTable.intType); } if (intSubType != null) { return setLiteralValueAndGetType(literalExpr, intSubType); } BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) { return setLiteralValueAndGetType(literalExpr, symTable.byteType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) { return setLiteralValueAndGetType(literalExpr, symTable.floatType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } } } else if (literalType.tag == TypeTags.FLOAT) { String literal = String.valueOf(literalValue); String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal); boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal); if (expType.tag == TypeTags.DECIMAL) { if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) { dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.floatType); resultType = symTable.semanticError; return resultType; } literalType = symTable.decimalType; literalExpr.value = numericLiteral; } else if (expType.tag == TypeTags.FLOAT) { literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral)); } else if (expType.tag 
== TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (!isDiscriminatedFloat && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType); if (unionMember != symTable.noType) { return unionMember; } } } else if (literalType.tag == TypeTags.DECIMAL) { return decimalLiteral(literalValue, literalExpr, expType); } else if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) { if (expType.tag == TypeTags.CHAR_STRING) { return symTable.charStringType; } if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); for (BType memType : memberTypes) { if (TypeTags.isStringTypeTag(memType.tag)) { return setLiteralValueAndGetType(literalExpr, memType); } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) { return setLiteralValueAndGetType(literalExpr, symTable.charStringType); } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType, literalExpr)) { setLiteralValueForFiniteType(literalExpr, symTable.charStringType); return literalType; } } } boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else { if (this.expType.tag == TypeTags.FINITE) { boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else if (this.expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) this.expType; boolean foundMember = unionType.getMemberTypes() .stream() .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr)); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } } if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) { literalType = new BArrayType(symTable.byteType); } return literalType; } private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) { Set<BType> memberTypes = expType.getMemberTypes(); if (memberTypes.stream() .anyMatch(memType -> memType.tag == desiredType.tag || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) { return setLiteralValueAndGetType(literalExpr, desiredType); } BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType); if (finiteType != 
symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } return symTable.noType; } private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType, int targetMemberTypeTag) { for (BLangExpression valueExpr : finiteType.getValueSpace()) { if (valueExpr.getBType().tag == targetMemberTypeTag && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) { return true; } } return false; } private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) { String literal = String.valueOf(literalValue); if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) { dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.decimalType); resultType = symTable.semanticError; return resultType; } if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType); if (unionMember != symTable.noType) { return unionMember; } } literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal); resultType = symTable.decimalType; return symTable.decimalType; } private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) { types.setImplicitCastExpr(literalExpr, type, this.expType); this.resultType = type; literalExpr.isFiniteContext = true; } private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) { List<BFiniteType> finiteTypeMembers = unionType.getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(memFiniteType -> (BFiniteType) memFiniteType) .collect(Collectors.toList()); if (finiteTypeMembers.isEmpty()) { return symTable.semanticError; } int tag = matchType.tag; Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>(); for (BFiniteType finiteType : finiteTypeMembers) { Set<BLangExpression> set = new HashSet<>(); for (BLangExpression expression : finiteType.getValueSpace()) { if (expression.getBType().tag == tag) { set.add(expression); } } matchedValueSpace.addAll(set); } if (matchedValueSpace.isEmpty()) { return symTable.semanticError; } return new BFiniteType(null, matchedValueSpace); } private BType getIntLiteralType(Location location, BType expType, BType literalType, Object literalValue) { switch (expType.tag) { case TypeTags.INT: return symTable.intType; case TypeTags.BYTE: if (types.isByteLiteralValue((Long) literalValue)) { return symTable.byteType; } break; case TypeTags.SIGNED32_INT: if (types.isSigned32LiteralValue((Long) literalValue)) { return symTable.signed32IntType; } break; case TypeTags.SIGNED16_INT: if (types.isSigned16LiteralValue((Long) literalValue)) { return symTable.signed16IntType; } break; case TypeTags.SIGNED8_INT: if (types.isSigned8LiteralValue((Long) literalValue)) { return symTable.signed8IntType; } break; case TypeTags.UNSIGNED32_INT: if (types.isUnsigned32LiteralValue((Long) literalValue)) { return symTable.unsigned32IntType; } break; case TypeTags.UNSIGNED16_INT: if 
(types.isUnsigned16LiteralValue((Long) literalValue)) { return symTable.unsigned16IntType; } break; case TypeTags.UNSIGNED8_INT: if (types.isUnsigned8LiteralValue((Long) literalValue)) { return symTable.unsigned8IntType; } break; default: } dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, literalType); resultType = symTable.semanticError; return resultType; } @Override public void visit(BLangListConstructorExpr listConstructor) { if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.READONLY) { BType inferredType = getInferredTupleType(listConstructor, expType); resultType = inferredType == symTable.semanticError ? symTable.semanticError : types.checkType(listConstructor, inferredType, expType); return; } resultType = checkListConstructorCompatibility(expType, listConstructor); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) { List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { resultType = symTable.semanticError; return; } } if (tableConstructorExpr.recordLiteralList.size() == 0) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE); resultType = symTable.semanticError; return; } BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr); BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { recordLiteral.setBType(inherentMemberType); } if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) { resultType = symTable.semanticError; return; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } resultType = tableType; return; } BType applicableExpType = expType.tag == TypeTags.INTERSECTION ? 
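// when the expected type is an intersection (e.g. table<T> & readonly), continue against its effective type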
((BIntersectionType) expType).effectiveType : expType; if (applicableExpType.tag == TypeTags.TABLE) { List<BType> memTypes = new ArrayList<>(); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { BLangRecordLiteral clonedExpr = recordLiteral; if (this.nonErrorLoggingCheck) { clonedExpr.cloneAttempt++; clonedExpr = nodeCloner.cloneNode(recordLiteral); } BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint); if (recordType == symTable.semanticError) { resultType = symTable.semanticError; return; } memTypes.add(recordType); } if (((BTableType) applicableExpType).constraint.tag == TypeTags.MAP && ((BTableType) applicableExpType).isTypeInlineDefined) { validateMapConstraintTable(tableConstructorExpr, applicableExpType); return; } if (!(validateTableType((BTableType) applicableExpType, tableConstructorExpr.recordLiteralList) && validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) { resultType = symTable.semanticError; return; } BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType), null); if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) { tableType.flags |= Flags.READONLY; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } BTableType expectedTableType = (BTableType) applicableExpType; if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) { tableType.fieldNameList = expectedTableType.fieldNameList; } resultType = tableType; } else if (applicableExpType.tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); List<BType> matchingTypes = new ArrayList<>(); BUnionType expectedType = (BUnionType) applicableExpType; for (BType memType : expectedType.getMemberTypes()) { dlog.resetErrorCount(); BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr); } BType resultType = checkExpr(clonedTableExpr, env, memType); if (resultType != symTable.semanticError && dlog.errorCount() == 0 && isUniqueType(matchingTypes, resultType)) { matchingTypes.add(resultType); } } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (matchingTypes.isEmpty()) { BLangTableConstructorExpr exprToLog = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; exprToLog = nodeCloner.cloneNode(tableConstructorExpr); } dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getInferredTableType(exprToLog)); } else if (matchingTypes.size() != 1) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); } else { resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0)); return; } resultType = symTable.semanticError; } else { resultType = symTable.semanticError; } } private BType getInferredTableType(BLangTableConstructorExpr exprToLog) { List<BType> memTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } return new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, exprToLog), null); } private boolean 
checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) {
        if (tableConstructorExpr.tableKeySpecifier != null) {
            if (!(validateTableKeyValue(getTableKeyNameList(tableConstructorExpr.tableKeySpecifier),
                    tableConstructorExpr.recordLiteralList))) {
                resultType = symTable.semanticError;
                return true;
            }
            tableType.fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
        }
        return false;
    }

    private BType inferTableMemberType(List<BType> memTypes, BType expType) {
        if (memTypes.isEmpty()) {
            return ((BTableType) expType).constraint;
        }
        LinkedHashSet<BType> result = new LinkedHashSet<>();
        result.add(memTypes.get(0));
        BUnionType unionType = BUnionType.create(null, result);
        for (int i = 1; i < memTypes.size(); i++) {
            BType source = memTypes.get(i);
            if (!types.isAssignable(source, unionType)) {
                result.add(source);
                unionType = BUnionType.create(null, result);
            }
        }
        if (unionType.getMemberTypes().size() == 1) {
            return memTypes.get(0);
        }
        return unionType;
    }

    private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) {
        BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
        List<String> keySpecifierFieldNames = new ArrayList<>();
        Set<BField> allFieldSet = new LinkedHashSet<>();
        for (BType memType : memTypes) {
            allFieldSet.addAll(((BRecordType) memType).fields.values());
        }
        Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet);
        for (BType memType : memTypes) {
            commonFieldSet.retainAll(((BRecordType) memType).fields.values());
        }
        List<String> requiredFieldNames = new ArrayList<>();
        if (keySpecifier != null) {
            for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) {
                requiredFieldNames.add(((BLangIdentifier) identifierNode).value);
                keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value);
            }
        }
        List<String> fieldNames = new ArrayList<>();
        for (BField field : allFieldSet) {
            String fieldName = field.name.value;
            if (fieldNames.contains(fieldName)) {
                dlog.error(tableConstructorExpr.pos,
                        DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY, fieldName);
                return symTable.semanticError;
            }
            fieldNames.add(fieldName);
            boolean isOptional = true;
            for (BField commonField : commonFieldSet) {
                if (commonField.name.value.equals(fieldName)) {
                    isOptional = false;
                    requiredFieldNames.add(commonField.name.value);
                }
            }
            if (isOptional) {
                field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL));
            } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) {
                field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) | Flags.asMask(EnumSet.of(Flag.READONLY));
            } else if (requiredFieldNames.contains(fieldName)) {
                field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED));
            }
        }
        return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos);
    }

    private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, Location pos) {
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);
        for (BField field : allFieldSet) {
            recordSymbol.scope.define(field.name, field.symbol);
        }
        BRecordType recordType = new BRecordType(recordSymbol);
        recordType.fields = allFieldSet.stream().collect(getFieldCollector());
        recordSymbol.type = recordType;
        recordType.tsymbol = recordSymbol;
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                pos);
        recordTypeNode.initFunction =
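        // generate an init function and register a type definition so the inferred record constraint behaves like a declared record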
TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); recordType.sealed = true; recordType.restFieldType = symTable.noType; return recordType; } private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() { BinaryOperator<BField> mergeFunc = (u, v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; return Collectors.toMap(field -> field.name.value, Function.identity(), mergeFunc, LinkedHashMap::new); } private boolean validateTableType(BTableType tableType, List<BLangRecordLiteral> recordLiterals) { BType constraint = tableType.constraint; if (tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType)) { dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint); resultType = symTable.semanticError; return false; } List<String> fieldNameList = tableType.fieldNameList; if (fieldNameList != null) { boolean isKeySpecifierValidated = !tableType.isTypeInlineDefined || validateKeySpecifier(fieldNameList, constraint.tag != TypeTags.INTERSECTION ? constraint : ((BIntersectionType) constraint).effectiveType, tableType.keyPos); return (isKeySpecifierValidated && validateTableKeyValue(fieldNameList, recordLiterals)); } return true; } private boolean validateTableKeyValue(List<String> keySpecifierFieldNames, List<BLangRecordLiteral> recordLiterals) { for (String fieldName : keySpecifierFieldNames) { for (BLangRecordLiteral recordLiteral : recordLiterals) { BLangRecordKeyValueField recordKeyValueField = getRecordKeyValueField(recordLiteral, fieldName); if (recordKeyValueField != null && isConstExpression(recordKeyValueField.getValue())) { continue; } dlog.error(recordLiteral.pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName); resultType = symTable.semanticError; return false; } } return true; } private boolean isConstExpression(BLangExpression expression) { switch(expression.getKind()) { case LITERAL: case NUMERIC_LITERAL: case STRING_TEMPLATE_LITERAL: case XML_ELEMENT_LITERAL: case XML_TEXT_LITERAL: case LIST_CONSTRUCTOR_EXPR: case TABLE_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: case TYPE_CONVERSION_EXPR: case UNARY_EXPR: case BINARY_EXPR: case TYPE_TEST_EXPR: case TERNARY_EXPR: return true; case SIMPLE_VARIABLE_REF: return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; case GROUP_EXPR: return isConstExpression(((BLangGroupExpr) expression).expression); default: return false; } } private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral, String fieldName) { for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) { BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField; if (fieldName.equals(recordKeyValueField.key.toString())) { return recordKeyValueField; } } return null; } public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint, Location pos) { for (String fieldName : fieldNameList) { BField field = types.getTableConstraintField(constraint, fieldName); if (field == null) { dlog.error(pos, DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName); resultType = 
symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName); resultType = symTable.semanticError; return false; } if (!types.isAssignable(field.type, symTable.anydataType)) { dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint); resultType = symTable.semanticError; return false; } } return true; } private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) { BType constraintType = tableType.constraint; if (tableConstructorExpr.tableKeySpecifier != null) { List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); if (tableType.fieldNameList == null && !validateKeySpecifier(fieldNameList, constraintType.tag != TypeTags.INTERSECTION ? constraintType : ((BIntersectionType) constraintType).effectiveType, tableConstructorExpr.tableKeySpecifier.pos)) { return false; } if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH, tableType.fieldNameList.toString(), fieldNameList.toString()); resultType = symTable.semanticError; return false; } } BType keyTypeConstraint = tableType.keyTypeConstraint; if (keyTypeConstraint != null) { List<BType> memberTypes = new ArrayList<>(); if (keyTypeConstraint.tag == TypeTags.TUPLE) { for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) { memberTypes.add((BType) type); } } else { memberTypes.add(keyTypeConstraint); } if (tableConstructorExpr.tableKeySpecifier == null && keyTypeConstraint.tag == TypeTags.NEVER) { return true; } if (tableConstructorExpr.tableKeySpecifier == null || tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) { dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT, memberTypes.size(), tableConstructorExpr.tableKeySpecifier == null ? 0 : tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size()); resultType = symTable.semanticError; return false; } List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier. 
fieldNameIdentifierList;
            int index = 0;
            for (IdentifierNode identifier : fieldNameIdentifierList) {
                BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
                if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) {
                    dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                            DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
                            fieldNameIdentifierList.toString(), memberTypes.toString());
                    resultType = symTable.semanticError;
                    return false;
                }
                index++;
            }
        }
        return true;
    }

    public void validateMapConstraintTable(BLangTableConstructorExpr tableConstructorExpr, BType expType) {
        if (((BTableType) expType).fieldNameList != null || ((BTableType) expType).keyTypeConstraint != null) {
            dlog.error(((BTableType) expType).keyPos,
                    DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
            resultType = symTable.semanticError;
            return;
        }
        if (tableConstructorExpr != null && tableConstructorExpr.tableKeySpecifier != null) {
            dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                    DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
            resultType = symTable.semanticError;
            return;
        }
        if (tableConstructorExpr != null &&
                !(validateTableType((BTableType) expType, tableConstructorExpr.recordLiteralList))) {
            resultType = symTable.semanticError;
            return;
        }
        resultType = expType;
    }

    private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
        List<String> fieldNamesList = new ArrayList<>();
        for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
            fieldNamesList.add(((BLangIdentifier) identifier).value);
        }
        return fieldNamesList;
    }

    private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
        if (fieldNames == null) {
            return symTable.semanticError;
        }
        List<BType> memTypes = new ArrayList<>();
        for (String fieldName : fieldNames) {
            BField tableConstraintField = types.getTableConstraintField(constraintType, fieldName);
            if (tableConstraintField == null) {
                return symTable.semanticError;
            }
            BType fieldType = tableConstraintField.type;
            memTypes.add(fieldType);
        }
        if (memTypes.size() == 1) {
            return memTypes.get(0);
        }
        return new BTupleType(memTypes);
    }

    private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) {
        int tag = bType.tag;
        if (tag == TypeTags.UNION) {
            boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
            int errorCount = this.dlog.errorCount();
            this.nonErrorLoggingCheck = true;
            this.dlog.mute();
            List<BType> compatibleTypes = new ArrayList<>();
            boolean erroredExpType = false;
            for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
                if (memberType == symTable.semanticError) {
                    if (!erroredExpType) {
                        erroredExpType = true;
                    }
                    continue;
                }
                BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType);
                if (listCompatibleMemType == symTable.semanticError) {
                    continue;
                }
                dlog.resetErrorCount();
                BType memCompatibilityType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor);
                if (memCompatibilityType != symTable.semanticError && dlog.errorCount() == 0 &&
                        isUniqueType(compatibleTypes, memCompatibilityType)) {
                    compatibleTypes.add(memCompatibilityType);
                }
            }
            this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
            this.dlog.setErrorCount(errorCount);
            if (!prevNonErrorLoggingCheck) {
                this.dlog.unmute();
            }
            if (compatibleTypes.isEmpty()) {
                BLangListConstructorExpr exprToLog = listConstructor;
                if (this.nonErrorLoggingCheck) {
                    listConstructor.cloneAttempt++;
                    exprToLog =
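                    // when diagnostics were muted above, re-check a clone so the reported error is computed on a node that has not already been type-checked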
nodeCloner.cloneNode(listConstructor); } BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType); if (!erroredExpType && inferredTupleType != symTable.semanticError) { dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType); } return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor); } if (tag == TypeTags.INTERSECTION) { return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor); } BType possibleType = getListConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.ARRAY: return checkArrayType(listConstructor, (BArrayType) possibleType); case TypeTags.TUPLE: return checkTupleType(listConstructor, (BTupleType) possibleType); case TypeTags.READONLY: return checkReadOnlyListType(listConstructor); case TypeTags.TYPEDESC: List<BType> results = new ArrayList<>(); listConstructor.isTypedescExpr = true; for (int i = 0; i < listConstructor.exprs.size(); i++) { results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType)); } List<BType> actualTypes = new ArrayList<>(); for (int i = 0; i < listConstructor.exprs.size(); i++) { final BLangExpression expr = listConstructor.exprs.get(i); if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) { actualTypes.add(((BLangTypedescExpr) expr).resolvedType); } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { actualTypes.add(((BLangSimpleVarRef) expr).symbol.type); } else { actualTypes.add(results.get(i)); } } if (actualTypes.size() == 1) { listConstructor.typedescType = actualTypes.get(0); } else { listConstructor.typedescType = new BTupleType(actualTypes); } return new BTypedescType(listConstructor.typedescType, null); } BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.cloneNode(listConstructor); } if (bType == symTable.semanticError) { getInferredTupleType(exprToLog, symTable.semanticError); } else { dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType, getInferredTupleType(exprToLog, symTable.noType)); } return symTable.semanticError; } private BType getListConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.ARRAY: case TypeTags.TUPLE: case TypeTags.READONLY: case TypeTags.TYPEDESC: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
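// `any` admits any list: use the predefined any[] type, or its immutable clone when the expected type is readonly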
symTable.arrayType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) { BType eType = arrayType.eType; if (arrayType.state == BArrayState.INFERRED) { arrayType.size = listConstructor.exprs.size(); arrayType.state = BArrayState.CLOSED; } else if ((arrayType.state != BArrayState.OPEN) && (arrayType.size != listConstructor.exprs.size())) { if (arrayType.size < listConstructor.exprs.size()) { dlog.error(listConstructor.pos, DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, listConstructor.exprs.size()); return symTable.semanticError; } if (!types.hasFillerValue(eType)) { dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType); return symTable.semanticError; } } boolean errored = false; for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(eType, expr) && !errored) { errored = true; } } return errored ? symTable.semanticError : arrayType; } private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) { List<BLangExpression> exprs = listConstructor.exprs; List<BType> memberTypes = tupleType.tupleTypes; BType restType = tupleType.restType; int listExprSize = exprs.size(); int memberTypeSize = memberTypes.size(); if (listExprSize < memberTypeSize) { for (int i = listExprSize; i < memberTypeSize; i++) { if (!types.hasFillerValue(memberTypes.get(i))) { dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } } } else if (listExprSize > memberTypeSize && restType == null) { dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } boolean errored = false; int nonRestCountToCheck = listExprSize < memberTypeSize ? listExprSize : memberTypeSize; for (int i = 0; i < nonRestCountToCheck; i++) { if (exprIncompatible(memberTypes.get(i), exprs.get(i)) && !errored) { errored = true; } } for (int i = nonRestCountToCheck; i < exprs.size(); i++) { if (exprIncompatible(restType, exprs.get(i)) && !errored) { errored = true; } } return errored ? 
symTable.semanticError : tupleType; } private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return types.checkType(listConstructor, inferredType, symTable.readonlyType); } for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(symTable.readonlyType, expr)) { return symTable.semanticError; } } return symTable.readonlyType; } private boolean exprIncompatible(BType eType, BLangExpression expr) { if (expr.typeChecked) { return expr.getBType() == symTable.semanticError; } BLangExpression exprToCheck = expr; if (this.nonErrorLoggingCheck) { expr.cloneAttempt++; exprToCheck = nodeCloner.cloneNode(expr); } return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError; } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) { return checkExprList(exprs, env, symTable.noType); } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) { List<BType> types = new ArrayList<>(); SymbolEnv prevEnv = this.env; BType preExpType = this.expType; this.env = env; this.expType = expType; for (BLangExpression e : exprs) { checkExpr(e, this.env, expType); types.add(resultType); } this.env = prevEnv; this.expType = preExpType; return types; } private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) { List<BType> memTypes = checkExprList(listConstructor.exprs, env, expType); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } BTupleType tupleType = new BTupleType(memTypes); if (expType.tag != TypeTags.READONLY) { return tupleType; } tupleType.flags |= Flags.READONLY; return tupleType; } public void visit(BLangRecordLiteral recordLiteral) { int expTypeTag = expType.tag; if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.READONLY) { expType = defineInferredRecordType(recordLiteral, expType); } else if (expTypeTag == TypeTags.OBJECT) { dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType); resultType = symTable.semanticError; return; } resultType = getEffectiveMappingType(recordLiteral, checkMappingConstructorCompatibility(expType, recordLiteral)); } private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) { if (applicableMappingType == symTable.semanticError || (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags, Flags.READONLY))) { return applicableMappingType; } Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>(); LinkedHashMap<String, BField> applicableTypeFields = applicableMappingType.tag == TypeTags.RECORD ? 
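// only a record expected type declares fields (and per-field readonly-ness); a map expected type contributes none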
((BRecordType) applicableMappingType).fields : new LinkedHashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { continue; } String name; if (field.isKeyValueField()) { BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field; if (!keyValueField.readonly) { continue; } BLangExpression keyExpr = keyValueField.key.expr; if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { name = ((BLangSimpleVarRef) keyExpr).variableName.value; } else { name = (String) ((BLangLiteral) keyExpr).value; } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; if (!varNameField.readonly) { continue; } name = varNameField.variableName.value; } if (applicableTypeFields.containsKey(name) && Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) { continue; } readOnlyFields.put(name, field); } if (readOnlyFields.isEmpty()) { return applicableMappingType; } PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); LinkedHashMap<String, BField> newFields = new LinkedHashMap<>(); for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) { RecordLiteralNode.RecordField field = readOnlyEntry.getValue(); String key = readOnlyEntry.getKey(); Name fieldName = names.fromString(key); BType readOnlyFieldType; if (field.isKeyValueField()) { readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType(); } else { readOnlyFieldType = ((BLangRecordVarNameField) field).getBType(); } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(Flag.REQUIRED); add(Flag.READONLY); }}), fieldName, pkgID, readOnlyFieldType, recordSymbol, ((BLangNode) field).pos, VIRTUAL); newFields.put(key, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags); if (applicableMappingType.tag == TypeTags.MAP) { recordType.sealed = false; recordType.restFieldType = ((BMapType) applicableMappingType).constraint; } else { BRecordType applicableRecordType = (BRecordType) applicableMappingType; boolean allReadOnlyFields = true; for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) { String fieldName = origEntry.getKey(); BField field = origEntry.getValue(); if (readOnlyFields.containsKey(fieldName)) { continue; } BVarSymbol origFieldSymbol = field.symbol; long origFieldFlags = origFieldSymbol.flags; if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) { allReadOnlyFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID, origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL); newFields.put(fieldName, new BField(field.name, null, fieldSymbol)); recordSymbol.scope.define(field.name, fieldSymbol); } recordType.sealed = applicableRecordType.sealed; recordType.restFieldType = applicableRecordType.restFieldType; if (recordType.sealed && allReadOnlyFields) { recordType.flags |= Flags.READONLY; recordType.tsymbol.flags |= Flags.READONLY; } } recordType.fields = newFields; recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, 
names, symTable);
        TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
        if (applicableMappingType.tag == TypeTags.MAP) {
            recordLiteral.expectedType = applicableMappingType;
        }
        return recordType;
    }

    private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) {
        int tag = bType.tag;
        if (tag == TypeTags.UNION) {
            boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
            this.nonErrorLoggingCheck = true;
            int errorCount = this.dlog.errorCount();
            this.dlog.mute();
            List<BType> compatibleTypes = new ArrayList<>();
            boolean erroredExpType = false;
            for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
                if (memberType == symTable.semanticError) {
                    if (!erroredExpType) {
                        erroredExpType = true;
                    }
                    continue;
                }
                BType mappingCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType);
                if (mappingCompatibleMemType == symTable.semanticError) {
                    continue;
                }
                dlog.resetErrorCount();
                BType memCompatibilityType = checkMappingConstructorCompatibility(mappingCompatibleMemType,
                        mappingConstructor);
                if (memCompatibilityType != symTable.semanticError && dlog.errorCount() == 0 &&
                        isUniqueType(compatibleTypes, memCompatibilityType)) {
                    compatibleTypes.add(memCompatibilityType);
                }
            }
            this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
            dlog.setErrorCount(errorCount);
            if (!prevNonErrorLoggingCheck) {
                this.dlog.unmute();
            }
            if (compatibleTypes.isEmpty()) {
                if (!erroredExpType) {
                    reportIncompatibleMappingConstructorError(mappingConstructor, bType);
                }
                validateSpecifiedFields(mappingConstructor, symTable.semanticError);
                return symTable.semanticError;
            } else if (compatibleTypes.size() != 1) {
                dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType);
                validateSpecifiedFields(mappingConstructor, symTable.semanticError);
                return symTable.semanticError;
            }
            return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor);
        }
        if (tag == TypeTags.INTERSECTION) {
            return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType,
                    mappingConstructor);
        }
        BType possibleType = getMappingConstructorCompatibleNonUnionType(bType);
        switch (possibleType.tag) {
            case TypeTags.MAP:
                return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType :
                        symTable.semanticError;
            case TypeTags.RECORD:
                boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType);
                boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType,
                        mappingConstructor.fields, mappingConstructor.pos);
                return isSpecifiedFieldsValid && hasAllRequiredFields ?
possibleType : symTable.semanticError; case TypeTags.READONLY: return checkReadOnlyMappingType(mappingConstructor); } reportIncompatibleMappingConstructorError(mappingConstructor, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return checkMappingConstructorCompatibility(inferredType, mappingConstructor); } for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BLangExpression exprToCheck; if (field.isKeyValueField()) { exprToCheck = ((BLangRecordKeyValueField) field).valueExpr; } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } else { exprToCheck = (BLangRecordVarNameField) field; } if (exprIncompatible(symTable.readonlyType, exprToCheck)) { return symTable.semanticError; } } return symTable.readonlyType; } private BType getMappingConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.MAP: case TypeTags.RECORD: case TypeTags.READONLY: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
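// `any` admits any mapping: use the predefined map type, or its immutable clone when the expected type is readonly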
symTable.mapType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private boolean isMappingConstructorCompatibleType(BType type) { return type.tag == TypeTags.RECORD || type.tag == TypeTags.MAP; } private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) { if (expType == symTable.semanticError) { return; } if (expType.tag != TypeTags.UNION) { dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType); return; } BUnionType unionType = (BUnionType) expType; BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]); if (memberTypes.length == 2) { BRecordType recType = null; if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[0]; } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[1]; } if (recType != null) { validateSpecifiedFields(mappingConstructorExpr, recType); validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos); return; } } for (BType bType : memberTypes) { if (isMappingConstructorCompatibleType(bType)) { dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR, unionType); return; } } dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType); } private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) { boolean isFieldsValid = true; for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BType checkedType = checkMappingField(field, possibleType); if (isFieldsValid && checkedType == symTable.semanticError) { isFieldsValid = false; } } return isFieldsValid; } private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields, Location pos) { HashSet<String> specFieldNames = getFieldNames(specifiedFields); boolean hasAllRequiredFields = true; for (BField field : type.fields.values()) { String fieldName = field.name.value; if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED) && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) { dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name); if (hasAllRequiredFields) { hasAllRequiredFields = false; } } } return hasAllRequiredFields; } private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) { HashSet<String> fieldNames = new HashSet<>(); for (RecordLiteralNode.RecordField specifiedField : specifiedFields) { if (specifiedField.isKeyValueField()) { String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField); if (name == null) { continue; } fieldNames.add(name); } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField)); } else { fieldNames.addAll(getSpreadOpFieldRequiredFieldNames( (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField)); } } return fieldNames; } private String getKeyValueFieldName(BLangRecordKeyValueField field) { BLangRecordKey key = field.key; if (key.computedKey) { return null; } BLangExpression keyExpr = key.expr; if 
(keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return ((BLangSimpleVarRef) keyExpr).variableName.value; } else if (keyExpr.getKind() == NodeKind.LITERAL) { return (String) ((BLangLiteral) keyExpr).value; } return null; } private String getVarNameFieldName(BLangRecordVarNameField field) { return field.variableName.value; } private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) { BType spreadType = checkExpr(field.expr, env); if (spreadType.tag != TypeTags.RECORD) { return Collections.emptyList(); } List<String> fieldNames = new ArrayList<>(); for (BField bField : ((BRecordType) spreadType).getFields().values()) { if (!Symbols.isOptional(bField.symbol)) { fieldNames.add(bField.name.value); } } return fieldNames; } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { if (workerFlushExpr.workerIdentifier != null) { String workerName = workerFlushExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName); } else { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName)); if (symbol != symTable.notFoundSymbol) { workerFlushExpr.workerSymbol = symbol; } } } BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(workerFlushExpr, actualType, expType); } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier)); if (symTable.notFoundSymbol.equals(symbol)) { syncSendExpr.workerType = symTable.semanticError; } else { syncSendExpr.workerType = symbol.type; syncSendExpr.workerSymbol = symbol; } syncSendExpr.env = this.env; checkExpr(syncSendExpr.expr, this.env); if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) { this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND, syncSendExpr.expr.getBType()); } String workerName = syncSendExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName); } syncSendExpr.expectedType = expType; resultType = expType == symTable.noType ? 
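// a sync send with no expected type (used as a statement) types as nil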
symTable.nilType : expType; } @Override public void visit(BLangWorkerReceive workerReceiveExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier)); workerReceiveExpr.env = this.env; if (symTable.notFoundSymbol.equals(symbol)) { workerReceiveExpr.workerType = symTable.semanticError; } else { workerReceiveExpr.workerType = symbol.type; workerReceiveExpr.workerSymbol = symbol; } if (symTable.noType == this.expType) { this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION); } workerReceiveExpr.setBType(this.expType); resultType = this.expType; } private boolean workerExists(SymbolEnv env, String workerName) { if (workerName.equals(DEFAULT_WORKER_NAME)) { return true; } BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName)); return symbol != this.symTable.notFoundSymbol && symbol.type.tag == TypeTags.FUTURE && ((BFutureType) symbol.type).workerDerivative; } @Override public void visit(BLangConstRef constRef) { constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env, names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName)); types.setImplicitCastExpr(constRef, constRef.getBType(), expType); resultType = constRef.getBType(); } public void visit(BLangSimpleVarRef varRefExpr) { BType actualType = symTable.semanticError; Name varName = names.fromIdNode(varRefExpr.variableName); if (varName == Names.IGNORE) { if (varRefExpr.isLValue) { varRefExpr.setBType(this.symTable.anyType); } else { varRefExpr.setBType(this.symTable.semanticError); dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDERSCORE_NOT_ALLOWED); } varRefExpr.symbol = new BVarSymbol(0, true, varName, env.enclPkg.symbol.pkgID, varRefExpr.getBType(), env.scope.owner, varRefExpr.pos, VIRTUAL); resultType = varRefExpr.getBType(); return; } Name compUnitName = getCurrentCompUnit(varRefExpr); varRefExpr.pkgSymbol = symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName); if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) { varRefExpr.symbol = symTable.notFoundSymbol; dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias); } if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { actualType = symTable.stringType; } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env, names.fromIdNode(varRefExpr.pkgAlias), varName); if (symbol == symTable.notFoundSymbol && env.enclType != null) { Name objFuncName = names.fromString(Symbols .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value)); symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName, env.enclType.getBType().tsymbol); } if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) { BVarSymbol varSym = (BVarSymbol) symbol; checkSelfReferences(varRefExpr.pos, env, varSym); varRefExpr.symbol = varSym; actualType = varSym.type; markAndRegisterClosureVariable(symbol, varRefExpr.pos, env); } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) { actualType = symbol.type.tag == TypeTags.TYPEDESC ? 
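// a reference to a type name yields a typedesc value; wrap the type unless it is already a typedesc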
symbol.type : new BTypedescType(symbol.type, null); varRefExpr.symbol = symbol; } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) symbol; varRefExpr.symbol = constSymbol; BType symbolType = symbol.type; if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE || (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream() .anyMatch(memType -> memType.tag == TypeTags.FINITE && types.isAssignable(symbolType, memType)))) { actualType = symbolType; } else { actualType = constSymbol.literalType; } if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) { actualType = symTable.semanticError; dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE); } } else { varRefExpr.symbol = symbol; logUndefinedSymbolError(varRefExpr.pos, varName.value); } } if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE); return; } resultType = types.checkType(varRefExpr, actualType, expType); } @Override public void visit(BLangRecordVarRef varRefExpr) { LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName), env.enclPkg.symbol.pkgID, null, env.scope.owner, varRefExpr.pos, SOURCE); symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env); boolean unresolvedReference = false; for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference; bLangVarReference.isLValue = true; checkExpr(recordRefField.variableReference, env); if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol || !isValidVariableReference(recordRefField.variableReference)) { unresolvedReference = true; continue; } BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol; BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos, new BVarSymbol(0, names.fromIdNode(recordRefField.variableName), env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol, varRefExpr.pos, SOURCE)); fields.put(field.name.value, field); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { checkExpr(restParam, env); unresolvedReference = !isValidVariableReference(restParam); } if (unresolvedReference) { resultType = symTable.semanticError; return; } BRecordType bRecordType = new BRecordType(recordSymbol); bRecordType.fields = fields; recordSymbol.type = bRecordType; varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos, SOURCE); if (restParam == null) { bRecordType.sealed = true; bRecordType.restFieldType = symTable.noType; } else if (restParam.getBType() == symTable.semanticError) { bRecordType.restFieldType = symTable.mapType; } else { BType restFieldType; if (restParam.getBType().tag == TypeTags.RECORD) { restFieldType = ((BRecordType) restParam.getBType()).restFieldType; } else if (restParam.getBType().tag == TypeTags.MAP) { restFieldType = ((BMapType) restParam.getBType()).constraint; } else { restFieldType = restParam.getBType(); } bRecordType.restFieldType = restFieldType; } resultType = bRecordType; } @Override 
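// Checks an error binding pattern used as an assignment target: message, cause, named detail fields, and an optional ...rest reference.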
public void visit(BLangErrorVarRef varRefExpr) { if (varRefExpr.typeNode != null) { BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env); varRefExpr.setBType(bType); checkIndirectErrorVarRef(varRefExpr); resultType = bType; return; } if (varRefExpr.message != null) { varRefExpr.message.isLValue = true; checkExpr(varRefExpr.message, env); if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) { dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, varRefExpr.message.getBType()); } } if (varRefExpr.cause != null) { varRefExpr.cause.isLValue = true; checkExpr(varRefExpr.cause, env); if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) { dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType, varRefExpr.cause.getBType()); } } boolean unresolvedReference = false; for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { BLangVariableReference refItem = (BLangVariableReference) detailItem.expr; refItem.isLValue = true; checkExpr(refItem, env); if (!isValidVariableReference(refItem)) { unresolvedReference = true; continue; } if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN, refItem); unresolvedReference = true; continue; } if (refItem.symbol == null) { unresolvedReference = true; } } if (varRefExpr.restVar != null) { varRefExpr.restVar.isLValue = true; if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { checkExpr(varRefExpr.restVar, env); unresolvedReference = unresolvedReference || varRefExpr.restVar.symbol == null || !isValidVariableReference(varRefExpr.restVar); } } if (unresolvedReference) { resultType = symTable.semanticError; return; } BType errorRefRestFieldType; if (varRefExpr.restVar == null) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { errorRefRestFieldType = varRefExpr.restVar.getBType(); } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) { errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint; } else { dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, varRefExpr.restVar.getBType(), symTable.detailType); resultType = symTable.semanticError; return; } BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly ? 
symTable.errorType.detailType : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC); resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType); } private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) { for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { checkExpr(detailItem.expr, env); checkExpr(detailItem, env, detailItem.expr.getBType()); } if (varRefExpr.restVar != null) { checkExpr(varRefExpr.restVar, env); } if (varRefExpr.message != null) { varRefExpr.message.isLValue = true; checkExpr(varRefExpr.message, env); } if (varRefExpr.cause != null) { varRefExpr.cause.isLValue = true; checkExpr(varRefExpr.cause, env); } } @Override public void visit(BLangTupleVarRef varRefExpr) { List<BType> results = new ArrayList<>(); for (int i = 0; i < varRefExpr.expressions.size(); i++) { ((BLangVariableReference) varRefExpr.expressions.get(i)).isLValue = true; results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType)); } BTupleType actualType = new BTupleType(results); if (varRefExpr.restParam != null) { BLangExpression restExpr = (BLangExpression) varRefExpr.restParam; ((BLangVariableReference) restExpr).isLValue = true; BType checkedType = checkExpr(restExpr, env, symTable.noType); if (checkedType.tag != TypeTags.ARRAY) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType); resultType = symTable.semanticError; return; } actualType.restType = ((BArrayType) checkedType).eType; } resultType = types.checkType(varRefExpr, actualType, expType); } /** * This method will recursively check if a multidimensional array has at least one open sealed dimension. * * @param arrayType array to check if open sealed * @return true if at least one dimension is open sealed */ public boolean isArrayOpenSealedType(BArrayType arrayType) { if (arrayType.state == BArrayState.INFERRED) { return true; } if (arrayType.eType.tag == TypeTags.ARRAY) { return isArrayOpenSealedType((BArrayType) arrayType.eType); } return false; } /** * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the * enclosing invokable node) which is needed to look up closure variables. The variable lookup will start from the * enclosing invokable node's environment, which is outside the scope of a lambda function. 
*/ private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) { return env.enclEnv; } if (env.enclInvokable != null && env.enclInvokable == encInvokable) { return findEnclosingInvokableEnv(env.enclEnv, encInvokable); } return env; } private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) { return env.enclEnv; } if (env.enclType != null && env.enclType == recordTypeNode) { return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode); } return env; } private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) { return params.stream().anyMatch(param -> (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag)); } public void visit(BLangFieldBasedAccess fieldAccessExpr) { markLeafNode(fieldAccessExpr); BLangExpression containerExpression = fieldAccessExpr.expr; if (containerExpression instanceof BLangValueExpression) { ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue; ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue = fieldAccessExpr.isCompoundAssignmentLValue; } BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env)); if (fieldAccessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess && !isXmlAccess(fieldAccessExpr)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION); resultType = symTable.semanticError; return; } BType actualType; if (fieldAccessExpr.optionalFieldAccess) { if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS); resultType = symTable.semanticError; return; } actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); } else { actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); if (actualType != symTable.semanticError && (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) { if (isAllReadonlyTypes(varRefType)) { if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return; } } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) && isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, fieldAccessExpr.field.value, varRefType); resultType = symTable.semanticError; return; } } } resultType = types.checkType(fieldAccessExpr, actualType, this.expType); } private boolean isAllReadonlyTypes(BType type) { if (type.tag != TypeTags.UNION) { return Symbols.isFlagOn(type.flags, Flags.READONLY); } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isAllReadonlyTypes(memberType)) { return false; } } return true; } private boolean isInitializationInInit(BType type) { BObjectType objectType = (BObjectType) 
type; BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) objectType.tsymbol; BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc; return env.enclInvokable != null && initializerFunc != null && env.enclInvokable.symbol == initializerFunc.symbol; } private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) { if (type.tag == TypeTags.RECORD) { if (Symbols.isFlagOn(type.flags, Flags.READONLY)) { return true; } BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (!field.name.value.equals(fieldName)) { continue; } return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY); } return recordType.sealed; } boolean allInvalidUpdates = true; for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) { allInvalidUpdates = false; } } return allInvalidUpdates; } private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) { BLangExpression expr = fieldAccessExpr.expr; BType exprType = expr.getBType(); if (exprType.tag == TypeTags.XML || exprType.tag == TypeTags.XML_ELEMENT) { return true; } if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType((BLangFieldBasedAccess) expr) && exprType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) exprType).getMemberTypes(); return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType); } return false; } public void visit(BLangIndexBasedAccess indexBasedAccessExpr) { markLeafNode(indexBasedAccessExpr); BLangExpression containerExpression = indexBasedAccessExpr.expr; if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, ((BLangTypedescExpr) containerExpression).typeNode); resultType = symTable.semanticError; return; } if (containerExpression instanceof BLangValueExpression) { ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue; ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue = indexBasedAccessExpr.isCompoundAssignmentLValue; } boolean isStringValue = containerExpression.getBType() != null && containerExpression.getBType().tag == TypeTags.STRING; if (!isStringValue) { checkExpr(containerExpression, this.env, symTable.noType); } if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY && containerExpression.getBType().tag != TypeTags.TABLE) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED, containerExpression.getBType()); resultType = symTable.semanticError; return; } BType actualType = checkIndexAccessExpr(indexBasedAccessExpr); BType exprType = containerExpression.getBType(); BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; if (actualType != symTable.semanticError && (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) { if (isAllReadonlyTypes(exprType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, exprType); resultType = symTable.semanticError; return; } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) && isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, getConstFieldName(indexExpr), exprType); resultType = symTable.semanticError; 
return; } } if (indexBasedAccessExpr.isLValue) { indexBasedAccessExpr.originalType = actualType; indexBasedAccessExpr.setBType(actualType); resultType = actualType; return; } this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType); } public void visit(BLangInvocation iExpr) { if (iExpr.expr == null) { checkFunctionInvocationExpr(iExpr); return; } if (invalidModuleAliasUsage(iExpr)) { return; } checkExpr(iExpr.expr, this.env, symTable.noType); BType varRefType = iExpr.expr.getBType(); switch (varRefType.tag) { case TypeTags.OBJECT: checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType); break; case TypeTags.RECORD: checkFieldFunctionPointer(iExpr, this.env); break; case TypeTags.NONE: dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name); break; case TypeTags.SEMANTIC_ERROR: break; default: checkInLangLib(iExpr, varRefType); } } public void visit(BLangErrorConstructorExpr errorConstructorExpr) { BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef; if (userProvidedTypeRef != null) { symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR); } validateErrorConstructorPositionalArgs(errorConstructorExpr); List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr); List<BType> errorDetailTypes = new ArrayList<>(); for (BType expandedCandidate : expandedCandidates) { BType detailType = ((BErrorType) expandedCandidate).detailType; errorDetailTypes.add(detailType); } BType detailCandidate; if (errorDetailTypes.size() == 1) { detailCandidate = errorDetailTypes.get(0); } else { detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes)); } BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr); BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env); int index = errorDetailTypes.indexOf(inferredDetailType); BType selectedCandidate = index < 0 ? 
symTable.semanticError : expandedCandidates.get(index); if (selectedCandidate != symTable.semanticError && (userProvidedTypeRef == null || userProvidedTypeRef.getBType() == selectedCandidate)) { checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType); resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); return; } if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) { dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType); } BErrorType errorType; if (userProvidedTypeRef != null && userProvidedTypeRef.getBType().tag == TypeTags.ERROR) { errorType = (BErrorType) userProvidedTypeRef.getBType(); } else if (expandedCandidates.size() == 1) { errorType = (BErrorType) expandedCandidates.get(0); } else { errorType = symTable.errorType; } List<BLangNamedArgsExpression> namedArgs = checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType); BType detailType = errorType.detailType; if (detailType.tag == TypeTags.MAP) { BType errorDetailTypeConstraint = ((BMapType) detailType).constraint; for (BLangNamedArgsExpression namedArgExpr: namedArgs) { if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) { dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType()); } } } else if (detailType.tag == TypeTags.RECORD) { BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType; LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream() .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED) .map(f -> f.name.value) .collect(Collectors.toCollection(LinkedList::new)); LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields; for (BLangNamedArgsExpression namedArg : namedArgs) { BField field = targetFields.get(namedArg.name.value); Location pos = namedArg.pos; if (field == null) { if (targetErrorDetailRec.sealed) { dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC, namedArg.name, targetErrorDetailRec); } else if (targetFields.isEmpty() && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) { dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE, namedArg.name, targetErrorDetailRec); } } else { missingRequiredFields.remove(namedArg.name.value); if (!types.isAssignable(namedArg.expr.getBType(), field.type)) { dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArg.name, field.type, namedArg.expr.getBType()); } } } for (String requiredField : missingRequiredFields) { dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField); } } if (userProvidedTypeRef != null) { errorConstructorExpr.setBType(userProvidedTypeRef.getBType()); } else { errorConstructorExpr.setBType(errorType); } resultType = errorConstructorExpr.getBType(); } private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) { if (errorConstructorExpr.positionalArgs.isEmpty()) { return; } checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType); int positionalArgCount = errorConstructorExpr.positionalArgs.size(); if (positionalArgCount > 1) { checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType); } } private BType checkExprSilent(BLangRecordLiteral recordLiteral, BType expType, SymbolEnv env) { 
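// Speculatively type-check the record literal against the candidate detail type with the
// diagnostic log muted, then restore the previous error count so this trial check leaves
// no diagnostics behind.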
boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int errorCount = this.dlog.errorCount(); this.dlog.mute(); BType type = checkExpr(recordLiteral, env, expType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } return type; } private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) { BLangRecordKeyValueField field = (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue(); field.valueExpr = (BLangExpression) namedArg.getExpression(); BLangLiteral expr = new BLangLiteral(); expr.value = namedArg.getName().value; expr.setBType(symTable.stringType); field.key = new BLangRecordKey(expr); recordLiteral.fields.add(field); } return recordLiteral; } private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) { BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef; if (errorTypeRef == null) { if (expType.tag == TypeTags.ERROR) { return List.of(expType); } else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) { return expandExpectedErrorTypes(expType); } } else { if (errorTypeRef.getBType().tag != TypeTags.ERROR) { if (errorTypeRef.getBType().tag != TypeTags.SEMANTIC_ERROR) { dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef); } } else { return List.of(errorTypeRef.getBType()); } } return List.of(symTable.errorType); } private List<BType> expandExpectedErrorTypes(BType candidateType) { List<BType> expandedCandidates = new ArrayList<>(); if (candidateType.tag == TypeTags.UNION) { for (BType memberType : ((BUnionType) candidateType).getMemberTypes()) { if (types.isAssignable(memberType, symTable.errorType)) { if (memberType.tag == TypeTags.INTERSECTION) { expandedCandidates.add(((BIntersectionType) memberType).effectiveType); } else { expandedCandidates.add(memberType); } } } } else if (types.isAssignable(candidateType, symTable.errorType)) { if (candidateType.tag == TypeTags.INTERSECTION) { expandedCandidates.add(((BIntersectionType) candidateType).effectiveType); } else { expandedCandidates.add(candidateType); } } return expandedCandidates; } public void visit(BLangInvocation.BLangActionInvocation aInv) { if (aInv.expr == null) { checkFunctionInvocationExpr(aInv); return; } if (invalidModuleAliasUsage(aInv)) { return; } checkExpr(aInv.expr, this.env, symTable.noType); BLangExpression varRef = aInv.expr; switch (varRef.getBType().tag) { case TypeTags.OBJECT: checkActionInvocation(aInv, (BObjectType) varRef.getBType()); break; case TypeTags.RECORD: checkFieldFunctionPointer(aInv, this.env); break; case TypeTags.NONE: dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name); resultType = symTable.semanticError; break; case TypeTags.SEMANTIC_ERROR: default: dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType()); resultType = symTable.semanticError; break; } } private boolean invalidModuleAliasUsage(BLangInvocation invocation) { Name pkgAlias = names.fromIdNode(invocation.pkgAlias); if (pkgAlias != Names.EMPTY) { dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE); return true; } return false; } public void visit(BLangLetExpression 
letExpression) { BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())), new Name(String.format("$let_symbol_%d$", letCount++)), env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner, letExpression.pos); letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol); for (BLangLetVariable letVariable : letExpression.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env); } BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType); types.checkType(letExpression, exprType, this.expType); } private void checkInLangLib(BLangInvocation iExpr, BType varRefType) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value, iExpr.expr.getBType()); resultType = symTable.semanticError; return; } if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) { return; } checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType); } private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType, BSymbol langLibMethodSymbol) { if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) { return false; } String packageId = langLibMethodSymbol.pkgID.name.value; if (!modifierFunctions.containsKey(packageId)) { return false; } String funcName = langLibMethodSymbol.name.value; if (!modifierFunctions.get(packageId).contains(funcName)) { return false; } if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) { return false; } if (funcName.equals("strip") && TypeTags.isXMLTypeTag(varRefType.tag)) { return false; } dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return true; } private boolean isFixedLengthList(BType type) { switch(type.tag) { case TypeTags.ARRAY: return (((BArrayType) type).state != BArrayState.OPEN); case TypeTags.TUPLE: return (((BTupleType) type).restType == null); case TypeTags.UNION: BUnionType unionType = (BUnionType) type; for (BType member : unionType.getMemberTypes()) { if (!isFixedLengthList(member)) { return false; } } return true; default: return false; } } private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) { String invocationName = iExpr.name.getValue(); if (!listLengthModifierFunctions.contains(invocationName)) { return; } if (isFixedLengthList(varRefType)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName, varRefType); resultType = symTable.semanticError; return; } if (isShiftOnIncompatibleTuples(varRefType, invocationName)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName, varRefType); resultType = symTable.semanticError; return; } } private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) { if ((varRefType.tag == TypeTags.TUPLE) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0) && hasDifferentTypeThanRest((BTupleType) varRefType)) { return true; } if ((varRefType.tag == TypeTags.UNION) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)) { BUnionType unionVarRef = (BUnionType) varRefType; boolean allMemberAreFixedShapeTuples = true; for (BType member : unionVarRef.getMemberTypes()) { if (member.tag != TypeTags.TUPLE) { allMemberAreFixedShapeTuples = false; break; } if 
(!hasDifferentTypeThanRest((BTupleType) member)) { allMemberAreFixedShapeTuples = false; break; } } return allMemberAreFixedShapeTuples; } return false; } private boolean hasDifferentTypeThanRest(BTupleType tupleType) { if (tupleType.restType == null) { return false; } for (BType member : tupleType.getTupleTypes()) { if (!types.isSameType(tupleType.restType, member)) { return true; } } return false; } private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) { BType type = checkExpr(iExpr.expr, env); BLangIdentifier invocationIdentifier = iExpr.name; if (type == symTable.semanticError) { return false; } BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier), type.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD, invocationIdentifier, type); return false; } if (fieldSymbol.kind != SymbolKind.FUNCTION) { checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD, fieldSymbol.type); return false; } iExpr.symbol = fieldSymbol; iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType); checkInvocationParamAndReturnType(iExpr); iExpr.functionPointerInvocation = true; return true; } private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos, DiagnosticErrorCode errCode, Object... diagMsgArgs) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(pos, errCode, diagMsgArgs); resultType = symTable.semanticError; } else { checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol); } } @Override public void visit(BLangObjectConstructorExpression objectCtorExpression) { if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) { BObjectType objectType = (BObjectType) objectCtorExpression.classNode.getBType(); if (objectCtorExpression.expectedType.tag == TypeTags.OBJECT) { BObjectType expObjType = (BObjectType) objectCtorExpression.expectedType; objectType.typeIdSet = expObjType.typeIdSet; } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) { if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) { dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR, objectCtorExpression.expectedType); resultType = symTable.semanticError; return; } } } visit(objectCtorExpression.typeInit); } private boolean isDefiniteObjectType(BType type, Set<BTypeIdSet> typeIdSets) { if (type.tag != TypeTags.OBJECT && type.tag != TypeTags.UNION) { return false; } Set<BType> visitedTypes = new HashSet<>(); if (!collectObjectTypeIds(type, typeIdSets, visitedTypes)) { return false; } return typeIdSets.size() <= 1; } private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) { if (type.tag == TypeTags.OBJECT) { var objectType = (BObjectType) type; typeIdSets.add(objectType.typeIdSet); return true; } if (type.tag == TypeTags.UNION) { if (!visitedTypes.add(type)) { return true; } for (BType member : ((BUnionType) type).getMemberTypes()) { if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) { return false; } } return true; } return false; } private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) { Set<BTypeIdSet> typeIdSets = new HashSet<>(); if (!isDefiniteObjectType(type, typeIdSets)) { return false; } if 
(typeIdSets.isEmpty()) { objectType.typeIdSet = BTypeIdSet.emptySet(); return true; } var typeIdIterator = typeIdSets.iterator(); if (typeIdIterator.hasNext()) { BTypeIdSet typeIdSet = typeIdIterator.next(); objectType.typeIdSet = typeIdSet; return true; } return true; } public void visit(BLangTypeInit cIExpr) { if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType); resultType = symTable.semanticError; return; } BType actualType; if (cIExpr.userDefinedType != null) { actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env); } else { actualType = expType; } if (actualType == symTable.semanticError) { resultType = symTable.semanticError; return; } if (actualType.tag == TypeTags.INTERSECTION) { actualType = ((BIntersectionType) actualType).effectiveType; } switch (actualType.tag) { case TypeTags.OBJECT: BObjectType actualObjectType = (BObjectType) actualType; if (isObjectConstructorExpr(cIExpr, actualObjectType)) { BLangClassDefinition classDefForConstructor = getClassDefinitionForObjectConstructorExpr(cIExpr, env); List<BLangType> typeRefs = classDefForConstructor.typeRefs; SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol); if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv, false); } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags, Flags.READONLY)) { handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv, true); } else { semanticAnalyzer.analyzeNode(classDefForConstructor, pkgEnv); } markConstructedObjectIsolatedness(actualObjectType); } if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return; } if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); } else { if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) { return; } } break; case TypeTags.STREAM: if (cIExpr.initInvocation.argExprs.size() > 1) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation); resultType = symTable.semanticError; return; } BStreamType actualStreamType = (BStreamType) actualType; if (actualStreamType.completionType != null) { BType completionType = actualStreamType.completionType; if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) { dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString()); resultType = symTable.semanticError; return; } } if (!cIExpr.initInvocation.argExprs.isEmpty()) { BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0); BType constructType = checkExpr(iteratorExpr, env, symTable.noType); BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType); if (constructType.tag != TypeTags.OBJECT) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR, expectedNextReturnType, constructType); resultType = 
symTable.semanticError; return; } BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType, BLangCompilerConstants.CLOSE_FUNC); if (closeFunc != null) { BType closeableIteratorType = symTable.langQueryModuleSymbol.scope .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type; if (!types.isAssignable(constructType, closeableIteratorType)) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } } else { BType iteratorType = symTable.langQueryModuleSymbol.scope .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type; if (!types.isAssignable(constructType, iteratorType)) { dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR, expectedNextReturnType, constructType); resultType = symTable.semanticError; return; } } BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType); if (nextReturnType != null) { types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } else { dlog.error(constructType.tsymbol.getPosition(), DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType); } } if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) { dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType, actualType); resultType = symTable.semanticError; return; } resultType = actualType; return; case TypeTags.UNION: List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType); BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType); cIExpr.initInvocation.setBType(symTable.nilType); if (matchedType.tag == TypeTags.OBJECT) { if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType); actualType = matchedType; break; } else { if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) { return; } } } types.checkType(cIExpr, matchedType, expType); cIExpr.setBType(matchedType); resultType = matchedType; return; default: dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType); resultType = symTable.semanticError; return; } if (cIExpr.initInvocation.getBType() == null) { cIExpr.initInvocation.setBType(symTable.nilType); } BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType()); resultType = types.checkType(cIExpr, actualTypeInitType, expType); } private BUnionType createNextReturnType(Location pos, BStreamType streamType) { BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS); recordType.restFieldType = symTable.noType; recordType.sealed = true; Name fieldName = Names.VALUE; BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC, fieldName, env.enclPkg.packageID, streamType.constraint, env.scope.owner, pos, VIRTUAL)); field.type = streamType.constraint; recordType.fields.put(field.name.value, field); recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, recordType, env.scope.owner, pos, VIRTUAL); recordType.tsymbol.scope = new Scope(env.scope.owner); recordType.tsymbol.scope.define(fieldName, field.symbol); 
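// The iterator's next() is expected to return the union of the value record built above,
// the members of the stream's completion type, and nil; assemble that union here.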
LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>(); retTypeMembers.add(recordType); retTypeMembers.addAll(types.getAllTypes(streamType.completionType)); retTypeMembers.add(symTable.nilType); BUnionType unionType = BUnionType.create(null); unionType.addAll(retTypeMembers); unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL); return unionType; } private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) { if (!cIExpr.initInvocation.argExprs.isEmpty() && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) { dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.name.value); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return false; } return true; } private BType getObjectConstructorReturnType(BType objType, BType initRetType) { if (initRetType.tag == TypeTags.UNION) { LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>(); retTypeMembers.add(objType); retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes()); retTypeMembers.remove(symTable.nilType); BUnionType unionType = BUnionType.create(null, retTypeMembers); unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, unionType, env.scope.owner, symTable.builtinPos, VIRTUAL); return unionType; } else if (initRetType.tag == TypeTags.NIL) { return objType; } return symTable.semanticError; } private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) { int objectCount = 0; for (BType memberType : lhsUnionType.getMemberTypes()) { int tag = memberType.tag; if (tag == TypeTags.OBJECT) { objectCount++; continue; } if (tag != TypeTags.INTERSECTION) { continue; } if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) { objectCount++; } } boolean containsSingleObject = objectCount == 1; List<BType> matchingLhsMemberTypes = new ArrayList<>(); for (BType memberType : lhsUnionType.getMemberTypes()) { if (memberType.tag != TypeTags.OBJECT) { continue; } if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, lhsUnionType.tsymbol); } if (containsSingleObject) { return Collections.singletonList(memberType); } BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc; if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) { matchingLhsMemberTypes.add(memberType); } } return matchingLhsMemberTypes; } private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) { if (matchingLhsMembers.isEmpty()) { dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } else if (matchingLhsMembers.size() == 1) { return matchingLhsMembers.get(0).tsymbol.type; } else { dlog.error(cIExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } } private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) { invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType)); if (function == null) { return invocationArguments.isEmpty(); } if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) { return true; } 
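// Split the invocation arguments into named and positional lists, match positional
// arguments against declared parameters (falling back to the rest parameter), then match
// named arguments; the invocation matches only if no required parameter remains unmatched.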
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); List<BLangExpression> positionalArgs = new ArrayList<>(); for (BLangExpression argument : invocationArguments) { if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) { namedArgs.add((BLangNamedArgsExpression) argument); } else { positionalArgs.add(argument); } } List<BVarSymbol> requiredParams = function.symbol.params.stream() .filter(param -> !param.isDefaultable) .collect(Collectors.toList()); if (requiredParams.size() > invocationArguments.size()) { return false; } List<BVarSymbol> defaultableParams = function.symbol.params.stream() .filter(param -> param.isDefaultable) .collect(Collectors.toList()); int givenRequiredParamCount = 0; for (int i = 0; i < positionalArgs.size(); i++) { if (function.symbol.params.size() > i) { givenRequiredParamCount++; BVarSymbol functionParam = function.symbol.params.get(i); if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); continue; } if (function.symbol.restParam != null) { BType restParamType = ((BArrayType) function.symbol.restParam.type).eType; if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) { return false; } continue; } return false; } for (BLangNamedArgsExpression namedArg : namedArgs) { boolean foundNamedArg = false; List<BVarSymbol> params = function.symbol.params; for (int i = givenRequiredParamCount; i < params.size(); i++) { BVarSymbol functionParam = params.get(i); if (!namedArg.name.value.equals(functionParam.name.value)) { continue; } foundNamedArg = true; BType namedArgExprType = checkExpr(namedArg.expr, env); if (!types.isAssignable(functionParam.type, namedArgExprType)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); } if (!foundNamedArg) { return false; } } return requiredParams.size() <= 0; } public void visit(BLangWaitForAllExpr waitForAllExpr) { switch (expType.tag) { case TypeTags.RECORD: checkTypesForRecords(waitForAllExpr); break; case TypeTags.MAP: checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint); LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypesForMap.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypesForMap.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap); resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol); break; case TypeTags.NONE: case TypeTags.ANY: checkTypesForMap(waitForAllExpr, expType); LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypes.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintType = BUnionType.create(null, memberTypes); resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol); break; default: dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos)); resultType = symTable.semanticError; break; } waitForAllExpr.setBType(resultType); if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType); } } private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr, Location pos) { BRecordType retType = new 
BRecordType(null, Flags.ANONYMOUS); List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs; for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BLangIdentifier fieldName; if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { fieldName = keyVal.key; } else { fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName; } BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName)); BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type; BField field = new BField(names.fromIdNode(keyVal.key), null, new BVarSymbol(0, names.fromIdNode(keyVal.key), env.enclPkg.packageID, fieldType, null, keyVal.pos, VIRTUAL)); retType.fields.put(field.name.value, field); } retType.restFieldType = symTable.noType; retType.sealed = true; retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null, pos, VIRTUAL); return retType; } private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) { LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BType bType = keyVal.keyExpr != null ? keyVal.keyExpr.getBType() : keyVal.valueExpr.getBType(); if (bType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) bType).constraint); } else { memberTypes.add(bType); } } return memberTypes; } private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) { List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs = waitForAllExpr.keyValuePairs; keyValuePairs.forEach(keyVal -> checkWaitKeyValExpr(keyVal, expType)); } private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) { List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs(); Map<String, BField> lhsFields = ((BRecordType) expType).fields; if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) { dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitExpr, waitExpr.pos)); resultType = symTable.semanticError; return; } for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) { String key = keyVal.key.value; if (!lhsFields.containsKey(key)) { if (((BRecordType) expType).sealed) { dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType); resultType = symTable.semanticError; } else { BType restFieldType = ((BRecordType) expType).restFieldType; checkWaitKeyValExpr(keyVal, restFieldType); } } else { checkWaitKeyValExpr(keyVal, lhsFields.get(key).type); } } checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos); if (symTable.semanticError != resultType) { resultType = expType; } } private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs, Location pos) { type.fields.values().forEach(field -> { boolean hasField = keyValPairs.stream().anyMatch(keyVal -> field.name.value.equals(keyVal.key.value)); if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name); } }); } private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) { BLangExpression expr; if (keyVal.keyExpr != null) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode (((BLangSimpleVarRef) keyVal.keyExpr).variableName)); 
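// Propagate the looked-up worker symbol's type to the key expression, then check the
// chosen expression against future<T>, where T is the expected type of this field.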
keyVal.keyExpr.setBType(symbol.type); expr = keyVal.keyExpr; } else { expr = keyVal.valueExpr; } BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null); checkExpr(expr, env, futureType); setEventualTypeForExpression(expr, type); } private void setEventualTypeForExpression(BLangExpression expression, BType currentExpectedType) { if (expression == null) { return; } if (isSimpleWorkerReference(expression)) { return; } BFutureType futureType = (BFutureType) expression.expectedType; BType currentType = futureType.constraint; if (types.containsErrorType(currentType)) { return; } BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType); if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) && !types.isAssignable(eventualType, currentExpectedType)) { dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); } futureType.constraint = eventualType; } private void setEventualTypeForWaitExpression(BLangExpression expression, Location pos) { if ((resultType == symTable.semanticError) || (types.containsErrorType(resultType))) { return; } if (isSimpleWorkerReference(expression)) { return; } BType currentExpectedType = ((BFutureType) expType).constraint; BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType); if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) { resultType = eventualType; return; } if (!types.isAssignable(eventualType, currentExpectedType)) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); resultType = symTable.semanticError; return; } if (resultType.tag == TypeTags.FUTURE) { ((BFutureType) resultType).constraint = eventualType; } else { resultType = eventualType; } } private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) { if ((resultType == symTable.semanticError) || (expression.getKind() != NodeKind.BINARY_EXPR) || (types.containsErrorType(resultType))) { return; } if (types.containsErrorType(resultType)) { return; } if (!isReferencingNonWorker((BLangBinaryExpr) expression)) { return; } BType currentExpectedType = ((BFutureType) expType).constraint; BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType); if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) { resultType = eventualType; return; } if (!types.isAssignable(eventualType, currentExpectedType)) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType, eventualType, expression); resultType = symTable.semanticError; return; } if (resultType.tag == TypeTags.FUTURE) { ((BFutureType) resultType).constraint = eventualType; } else { resultType = eventualType; } } private boolean isSimpleWorkerReference(BLangExpression expression) { if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } BLangSimpleVarRef simpleVarRef = ((BLangSimpleVarRef) expression); BSymbol varRefSymbol = simpleVarRef.symbol; if (varRefSymbol == null) { return false; } if (workerExists(env, simpleVarRef.variableName.value)) { return true; } return false; } private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) { BLangExpression lhsExpr = binaryExpr.lhsExpr; BLangExpression rhsExpr = binaryExpr.rhsExpr; if (isReferencingNonWorker(lhsExpr)) { return true; } return isReferencingNonWorker(rhsExpr); } 
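// Walks an alternate-wait operand: binary expressions are inspected on both sides, and a
// simple variable reference counts as a worker reference only if a worker with that name
// exists in the current environment.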
private boolean isReferencingNonWorker(BLangExpression expression) { if (expression.getKind() == NodeKind.BINARY_EXPR) { return isReferencingNonWorker((BLangBinaryExpr) expression); } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression; BSymbol varRefSymbol = simpleVarRef.symbol; String varRefSymbolName = varRefSymbol.getName().value; if (workerExists(env, varRefSymbolName)) { return false; } } return true; } public void visit(BLangTernaryExpr ternaryExpr) { BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType); SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env); BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType); SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env); BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType); if (condExprType == symTable.semanticError || thenType == symTable.semanticError || elseType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isAssignable(elseType, thenType)) { resultType = thenType; } else if (types.isAssignable(thenType, elseType)) { resultType = elseType; } else { dlog.error(ternaryExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, thenType, elseType); resultType = symTable.semanticError; } } else { resultType = expType; } } public void visit(BLangWaitExpr waitExpr) { expType = new BFutureType(TypeTags.FUTURE, expType, null); checkExpr(waitExpr.getExpression(), env, expType); if (resultType.tag == TypeTags.UNION) { LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>()); if (memberTypes.size() == 1) { resultType = memberTypes.toArray(new BType[0])[0]; } else { resultType = BUnionType.create(null, memberTypes); } } else if (resultType != symTable.semanticError) { resultType = ((BFutureType) resultType).constraint; } BLangExpression waitFutureExpression = waitExpr.getExpression(); if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) { setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos); } else { setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos); } waitExpr.setBType(resultType); if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint); } } private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) { for (BType memberType : unionType.getMemberTypes()) { if (memberType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) memberType).constraint); } else { memberTypes.add(memberType); } } return memberTypes; } @Override public void visit(BLangTrapExpr trapExpr) { boolean firstVisit = trapExpr.expr.getBType() == null; BType actualType; BType exprType = checkExpr(trapExpr.expr, env, expType); boolean definedWithVar = expType == symTable.noType; if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = trapExpr.getBType(); exprType = trapExpr.expr.getBType(); } } if (expType == symTable.semanticError || exprType == symTable.semanticError) { actualType = symTable.semanticError; } else { LinkedHashSet<BType> resultTypes = new LinkedHashSet<>(); if (exprType.tag == TypeTags.UNION) { resultTypes.addAll(((BUnionType) exprType).getMemberTypes()); } else { 
resultTypes.add(exprType); } resultTypes.add(symTable.errorType); actualType = BUnionType.create(null, resultTypes); } resultType = types.checkType(trapExpr, actualType, expType); if (definedWithVar && resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType); } } private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); expr.cloneAttempt++; BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if (errorCount == 0 && exprCompatibleType != symTable.semanticError) { return checkExpr(expr, env, binaryExpr.expectedType); } else { return checkExpr(expr, env); } } private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) { while (env != null && env.node != node) { env = env.enclEnv; } return env != null && env.enclEnv != null ? env.enclEnv.createClone() : new SymbolEnv(node, null); } private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) { SymbolEnv clone = env.createClone(); while (clone != null && clone.node != node) { clone = clone.enclEnv; } if (clone != null) { clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv)); } else { clone = new SymbolEnv(node, null); } return clone; } private BLangNode getLastInputNodeFromEnv(SymbolEnv env) { while (env != null && (env.node.getKind() != NodeKind.FROM && env.node.getKind() != NodeKind.JOIN)) { env = env.enclEnv; } return env != null ? 
env.node : null; } public void visit(BLangTransactionalExpr transactionalExpr) { resultType = types.checkType(transactionalExpr, symTable.booleanType, expType); } public void visit(BLangCommitExpr commitExpr) { BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(commitExpr, actualType, expType); } private BType getXMLConstituents(BType type) { BType constituent = null; if (type.tag == TypeTags.XML) { constituent = ((BXMLType) type).constraint; } else if (TypeTags.isXMLNonSequenceType(type.tag)) { constituent = type; } return constituent; } private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) { if (expType.tag != TypeTags.DECIMAL) { return; } switch (binaryExpr.opKind) { case ADD: case SUB: case MUL: case DIV: checkExpr(binaryExpr.lhsExpr, env, expType); checkExpr(binaryExpr.rhsExpr, env, expType); break; default: break; } } public void visit(BLangElvisExpr elvisExpr) { BType lhsType = checkExpr(elvisExpr.lhsExpr, env); BType actualType = symTable.semanticError; if (lhsType != symTable.semanticError) { if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) { BUnionType unionType = (BUnionType) lhsType; LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream() .filter(type -> type.tag != TypeTags.NIL) .collect(Collectors.toCollection(LinkedHashSet::new)); if (memberTypes.size() == 1) { actualType = memberTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, memberTypes); } } else { dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS, lhsType); } } BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType); BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isSameType(rhsReturnType, lhsReturnType)) { resultType = lhsReturnType; } else { dlog.error(elvisExpr.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType); resultType = symTable.semanticError; } } else { resultType = expType; } } @Override public void visit(BLangGroupExpr groupExpr) { resultType = checkExpr(groupExpr.expression, env, expType); } public void visit(BLangTypedescExpr accessExpr) { if (accessExpr.resolvedType == null) { accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env); } int resolveTypeTag = accessExpr.resolvedType.tag; final BType actualType; if (resolveTypeTag != TypeTags.TYPEDESC && resolveTypeTag != TypeTags.NONE) { actualType = new BTypedescType(accessExpr.resolvedType, null); } else { actualType = accessExpr.resolvedType; } resultType = types.checkType(accessExpr, actualType, expType); } public void visit(BLangUnaryExpr unaryExpr) { BType exprType; BType actualType = symTable.semanticError; if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = exprType; } } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = new BTypedescType(exprType, null); } } else { boolean decimalNegation = OperatorKind.SUB.equals(unaryExpr.operator) && expType.tag == TypeTags.DECIMAL; boolean isAdd = OperatorKind.ADD.equals(unaryExpr.operator); exprType 
= (decimalNegation || isAdd) ? checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType); if (symbol == symTable.notFoundSymbol) { dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES, unaryExpr.operator, exprType); } else { unaryExpr.opSymbol = (BOperatorSymbol) symbol; actualType = symbol.type.getReturnType(); } } } resultType = types.checkType(unaryExpr, actualType, expType); } public void visit(BLangTypeConversionExpr conversionExpr) { BType actualType = symTable.semanticError; for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) { annAttachment.attachPoints.add(AttachPoint.Point.TYPE); semanticAnalyzer.analyzeNode(annAttachment, this.env); } BLangExpression expr = conversionExpr.expr; if (conversionExpr.typeNode == null) { if (!conversionExpr.annAttachments.isEmpty()) { resultType = checkExpr(expr, env, this.expType); } return; } BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos, symResolver.resolveTypeNode(conversionExpr.typeNode, env)); conversionExpr.targetType = targetType; boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; int errorCount = this.dlog.errorCount(); this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) { checkExpr(expr, env, targetType); } else { checkExpr(expr, env, symTable.noType); } BType exprType = expr.getBType(); if (types.isTypeCastable(expr, exprType, targetType, this.env)) { actualType = targetType; } else if (exprType != symTable.semanticError && exprType != symTable.noType) { dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType); } resultType = types.checkType(conversionExpr, actualType, this.expType); } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType()); bLangLambdaFunction.capturedClosureEnv = env.createClone(); if (!this.nonErrorLoggingCheck) { env.enclPkg.lambdaFunctions.add(bLangLambdaFunction); } resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { BType expectedType = expType; if (expectedType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expectedType; BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE) .collect(Collectors.collectingAndThen(Collectors.toList(), list -> { if (list.size() != 1) { return null; } return list.get(0); } )); if (invokableType != null) { expectedType = invokableType; } } if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) { dlog.error(bLangArrowFunction.pos, DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS); resultType = symTable.semanticError; return; } BInvokableType expectedInvocation = (BInvokableType) expectedType; populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes); 
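/* Infer the arrow expression's return type from the expected invokable type: the body expression is checked against the expected return type, and if that expected return type is still NONE (not yet known), it is back-filled below from the inferred type of the body expression. */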
bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType)); if (expectedInvocation.retType.tag == TypeTags.NONE) { expectedInvocation.retType = bLangArrowFunction.body.expr.getBType(); } resultType = bLangArrowFunction.funcType = expectedInvocation; } public void visit(BLangXMLQName bLangXMLQName) { String prefix = bLangXMLQName.prefix.value; resultType = types.checkType(bLangXMLQName, symTable.stringType, expType); if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty() && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix); bLangXMLQName.setBType(symTable.semanticError); return; } if (bLangXMLQName.prefix.value.isEmpty()) { return; } BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix)); if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { return; } if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { logUndefinedSymbolError(bLangXMLQName.pos, prefix); bLangXMLQName.setBType(symTable.semanticError); return; } if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) { xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value, (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos); } if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) { resultType = symTable.semanticError; return; } bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol; bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI; } private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix, BPackageSymbol pkgSymbol, Location pos) { BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env, names.fromString(localname), SymTag.CONSTANT); if (constSymbol == symTable.notFoundSymbol) { if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) { dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname); } return null; } BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol; if (constantSymbol.literalType.tag != TypeTags.STRING) { dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType); return null; } String constVal = (String) constantSymbol.value.value; int s = constVal.indexOf('{'); int e = constVal.lastIndexOf('}'); if (e > s + 1) { pkgSymbol.isUsed = true; String nsURI = constVal.substring(s + 1, e); String local = constVal.substring(e); return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos, SOURCE); } dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname); return null; } public void visit(BLangXMLAttribute bLangXMLAttribute) { SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env); BLangXMLQName name = (BLangXMLQName) bLangXMLAttribute.name; checkExpr(name, xmlAttributeEnv, symTable.stringType); if (name.prefix.value.isEmpty()) { name.namespaceURI = null; } checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType); symbolEnter.defineNode(bLangXMLAttribute, env); } public 
void visit(BLangXMLElementLiteral bLangXMLElementLiteral) { SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env); Set<String> usedPrefixes = new HashSet<>(); BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix; if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) { usedPrefixes.add(elemNamePrefix.value); } for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) { if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) { BLangXMLQuotedString value = attribute.value; if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) { dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION); } checkExpr(attribute, xmlElementEnv, symTable.noType); } BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix; if (prefix != null && !prefix.value.isEmpty()) { usedPrefixes.add(prefix.value); } } bLangXMLElementLiteral.attributes.forEach(attribute -> { if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) { checkExpr(attribute, xmlElementEnv, symTable.noType); } }); Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv); Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX); if (namespaces.containsKey(defaultNs)) { bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs); } for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) { if (usedPrefixes.contains(nsEntry.getKey().value)) { bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue()); } } validateTags(bLangXMLElementLiteral, xmlElementEnv); bLangXMLElementLiteral.modifiedChildren = concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType, this.expType); if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) { markChildrenAsImmutable(bLangXMLElementLiteral); } } private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) { BLangXMLQName attrName = (BLangXMLQName) attribute.name; return (attrName.prefix.value.isEmpty() && attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) || attrName.prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE); } public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) { if (childXMLExpressions.getKind() == NodeKind.XML_ELEMENT_LITERAL) { return symTable.xmlElementType; } if (childXMLExpressions.getKind() == NodeKind.XML_TEXT_LITERAL) { return symTable.xmlTextType; } if (childXMLExpressions.getKind() == NodeKind.XML_PI_LITERAL) { return symTable.xmlPIType; } return symTable.xmlCommentType; } public void muteErrorLog() { this.nonErrorLoggingCheck = true; this.dlog.mute(); } public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) { this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(errorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } } public BType getXMLSequenceType(BType xmlSubType) { switch (xmlSubType.tag) { case TypeTags.XML_ELEMENT: return new BXMLType(symTable.xmlElementType, null); case TypeTags.XML_COMMENT: return new BXMLType(symTable.xmlCommentType, null); case TypeTags.XML_PI: return new BXMLType(symTable.xmlPIType, null); default: return symTable.xmlTextType; } } 
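/* Type-checks an XML sequence literal. The expected type must be xml, xml:Text, a union of XML types, or no type; each item in the sequence is checked individually, the distinct item result types are collected, and the overall sequence type is derived from them (a single-kind sequence narrows to the corresponding xml<T> sequence type, otherwise the general xml type is used). */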
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT && expType != symTable.noType) { dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, "XML Sequence"); resultType = symTable.semanticError; return; } List<BType> xmlTypesInSequence = new ArrayList<>(); for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) { resultType = checkExpr(expressionItem, env, expType); if (!xmlTypesInSequence.contains(resultType)) { xmlTypesInSequence.add(resultType); } } if (expType.tag == TypeTags.XML || expType == symTable.noType) { if (xmlTypesInSequence.size() == 1) { resultType = getXMLSequenceType(xmlTypesInSequence.get(0)); return; } resultType = symTable.xmlType; return; } if (expType.tag == TypeTags.XML_TEXT) { resultType = symTable.xmlTextType; return; } for (BType item : ((BUnionType) expType).getMemberTypes()) { if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) { dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.xmlType); resultType = symTable.semanticError; return; } } resultType = symTable.xmlType; } public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) { List<BLangExpression> literalValues = bLangXMLTextLiteral.textFragments; checkStringTemplateExprs(literalValues); BLangExpression xmlExpression = literalValues.get(0); if (literalValues.size() == 1 && xmlExpression.getKind() == NodeKind.LITERAL && ((String) ((BLangLiteral) xmlExpression).value).isEmpty()) { resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlNeverType, expType); return; } resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType); } public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) { checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType, this.expType); } public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) { checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType); checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType, this.expType); } public void visit(BLangXMLQuotedString bLangXMLQuotedString) { checkStringTemplateExprs(bLangXMLQuotedString.textFragments); resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType); } public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { dlog.error(xmlAttributeAccessExpr.pos, DiagnosticErrorCode.DEPRECATED_XML_ATTRIBUTE_ACCESS); resultType = symTable.semanticError; } public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { checkStringTemplateExprs(stringTemplateLiteral.exprs); resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType); if (type == symTable.semanticError) { resultType = type; return; } BObjectType literalType = (BObjectType) type; BType stringsType = literalType.fields.get("strings").type; if 
(evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS, rawTemplateLiteral.pos)) { type = symTable.semanticError; } BType insertionsType = literalType.fields.get("insertions").type; if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS, rawTemplateLiteral.pos)) { type = symTable.semanticError; } resultType = type; } private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) { if (expType == symTable.noType || containsAnyType(expType)) { return symTable.rawTemplateType; } BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos); BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE); if (type == symTable.semanticError) { return type; } if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type); return symTable.semanticError; } BObjectType litObjType = (BObjectType) type; BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol; if (litObjType.fields.size() > 2) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType); type = symTable.semanticError; } if (!objTSymbol.attachedFuncs.isEmpty()) { dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType); type = symTable.semanticError; } return type; } private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType, DiagnosticCode code, Location pos) { BType listType = fieldType.tag != TypeTags.INTERSECTION ? fieldType : ((BIntersectionType) fieldType).effectiveType; boolean errored = false; if (listType.tag == TypeTags.ARRAY) { BArrayType arrayType = (BArrayType) listType; if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) { dlog.error(pos, code, arrayType.size, exprs.size()); return false; } for (BLangExpression expr : exprs) { errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored; } } else if (listType.tag == TypeTags.TUPLE) { BTupleType tupleType = (BTupleType) listType; final int size = exprs.size(); final int requiredItems = tupleType.tupleTypes.size(); if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) { dlog.error(pos, code, requiredItems, size); return false; } int i; List<BType> memberTypes = tupleType.tupleTypes; for (i = 0; i < requiredItems; i++) { errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored; } if (size > requiredItems) { for (; i < size; i++) { errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored; } } } else { throw new IllegalStateException("Expected a list type, but found: " + listType); } return errored; } private boolean containsAnyType(BType type) { if (type == symTable.anyType) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().contains(symTable.anyType); } return false; } private BType getCompatibleRawTemplateType(BType expType, Location pos) { if (expType.tag != TypeTags.UNION) { return expType; } BUnionType unionType = (BUnionType) expType; List<BType> compatibleTypes = new ArrayList<>(); for (BType type : unionType.getMemberTypes()) { if (types.isAssignable(type, symTable.rawTemplateType)) { compatibleTypes.add(type); } } if (compatibleTypes.size() == 0) { return 
expType; } if (compatibleTypes.size() > 1) { dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType, expType); return symTable.semanticError; } return compatibleTypes.get(0); } @Override public void visit(BLangIntRangeExpression intRangeExpression) { checkExpr(intRangeExpression.startExpr, env, symTable.intType); checkExpr(intRangeExpression.endExpr, env, symTable.intType); resultType = new BArrayType(symTable.intType); } @Override public void visit(BLangRestArgsExpression bLangRestArgExpression) { resultType = checkExpr(bLangRestArgExpression.expr, env, expType); } @Override public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) { if (expType.tag != TypeTags.TYPEDESC) { dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc); resultType = symTable.semanticError; return; } resultType = expType; } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType); bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType()); } @Override public void visit(BLangMatchExpression bLangMatchExpression) { SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env); checkExpr(bLangMatchExpression.expr, matchExprEnv); bLangMatchExpression.patternClauses.forEach(pattern -> { if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) { symbolEnter.defineNode(pattern.variable, matchExprEnv); } checkExpr(pattern.expr, matchExprEnv, expType); pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv)); }); LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression); BType actualType; if (matchExprTypes.contains(symTable.semanticError)) { actualType = symTable.semanticError; } else if (matchExprTypes.size() == 1) { actualType = matchExprTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, matchExprTypes); } resultType = types.checkType(bLangMatchExpression, actualType, expType); } @Override public void visit(BLangCheckedExpr checkedExpr) { checkWithinQueryExpr = isWithinQuery(); visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangQueryExpr queryExpr) { boolean cleanPrevEnvs = false; if (prevEnvs.empty()) { prevEnvs.push(env); cleanPrevEnvs = true; } if (breakToParallelQueryEnv) { queryEnvs.push(prevEnvs.peek()); } else { queryEnvs.push(env); } selectClauses.push(queryExpr.getSelectClause()); List<BLangNode> clauses = queryExpr.getQueryClauses(); BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection(); clauses.forEach(clause -> clause.accept(this)); BType actualType = resolveQueryType(queryEnvs.peek(), selectClauses.peek().expression, collectionNode.getBType(), expType, queryExpr); actualType = (actualType == symTable.semanticError) ? 
actualType : types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); selectClauses.pop(); queryEnvs.pop(); if (cleanPrevEnvs) { prevEnvs.pop(); } if (actualType.tag == TypeTags.TABLE) { BTableType tableType = (BTableType) actualType; tableType.constraintPos = queryExpr.pos; tableType.isTypeInlineDefined = true; if (!validateTableType(tableType, null)) { resultType = symTable.semanticError; return; } } checkWithinQueryExpr = false; resultType = actualType; } private boolean isWithinQuery() { return !queryEnvs.isEmpty() && !selectClauses.isEmpty(); } private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType, BType targetType, BLangQueryExpr queryExpr) { List<BType> resultTypes = types.getAllTypes(targetType).stream() .filter(t -> !types.isAssignable(t, symTable.errorType)) .filter(t -> !types.isAssignable(t, symTable.nilType)) .collect(Collectors.toList()); if (resultTypes.isEmpty()) { resultTypes.add(symTable.noType); } BType actualType = symTable.semanticError; List<BType> selectTypes = new ArrayList<>(); List<BType> resolvedTypes = new ArrayList<>(); BType selectType, resolvedType; for (BType type : resultTypes) { switch (type.tag) { case TypeTags.ARRAY: selectType = checkExpr(selectExp, env, ((BArrayType) type).eType); resolvedType = new BArrayType(selectType); break; case TypeTags.TABLE: selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint, true, true)); resolvedType = symTable.tableType; break; case TypeTags.STREAM: selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint, true, true)); resolvedType = symTable.streamType; break; case TypeTags.STRING: case TypeTags.XML: selectType = checkExpr(selectExp, env, type); resolvedType = selectType; break; case TypeTags.NONE: default: selectType = checkExpr(selectExp, env, type); resolvedType = getNonContextualQueryType(selectType, collectionType); break; } if (selectType != symTable.semanticError) { if (resolvedType.tag == TypeTags.STREAM) { queryExpr.isStream = true; } if (resolvedType.tag == TypeTags.TABLE) { queryExpr.isTable = true; } selectTypes.add(selectType); resolvedTypes.add(resolvedType); } } if (selectTypes.size() == 1) { BType errorType = getErrorType(collectionType, queryExpr); selectType = selectTypes.get(0); if (queryExpr.isStream) { return new BStreamType(TypeTags.STREAM, selectType, errorType, null); } else if (queryExpr.isTable) { actualType = getQueryTableType(queryExpr, selectType); } else { actualType = resolvedTypes.get(0); } if (errorType != null && errorType.tag != TypeTags.NIL) { return BUnionType.create(null, actualType, errorType); } else { return actualType; } } else if (selectTypes.size() > 1) { dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes); return actualType; } else { return actualType; } } private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) { final BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null); if (!queryExpr.fieldNameIdentifierList.isEmpty()) { tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream() .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList()); return BUnionType.create(null, tableType, symTable.errorType); } return tableType; } private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) { if (collectionType.tag == TypeTags.SEMANTIC_ERROR) { return null; } BType returnType = null, errorType = null; switch 
(collectionType.tag) { case TypeTags.STREAM: errorType = ((BStreamType) collectionType).completionType; break; case TypeTags.OBJECT: returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType); break; default: BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); if (itrSymbol == this.symTable.notFoundSymbol) { return null; } BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol; returnType = types.getResultTypeOfNextInvocation((BObjectType) invokableSymbol.retType); } List<BType> errorTypes = new ArrayList<>(); if (returnType != null) { types.getAllTypes(returnType).stream() .filter(t -> types.isAssignable(t, symTable.errorType)) .forEach(errorTypes::add); } if (checkWithinQueryExpr && queryExpr.isStream) { if (errorTypes.isEmpty()) { errorTypes.add(symTable.nilType); } errorTypes.add(symTable.errorType); } if (!errorTypes.isEmpty()) { if (errorTypes.size() == 1) { errorType = errorTypes.get(0); } else { errorType = BUnionType.create(null, errorTypes.toArray(new BType[0])); } } return errorType; } private BType getNonContextualQueryType(BType staticType, BType basicType) { BType resultType; switch (basicType.tag) { case TypeTags.TABLE: resultType = symTable.tableType; break; case TypeTags.STREAM: resultType = symTable.streamType; break; case TypeTags.XML: resultType = new BXMLType(staticType, null); break; case TypeTags.STRING: resultType = symTable.stringType; break; default: resultType = new BArrayType(staticType); break; } return resultType; } @Override public void visit(BLangQueryAction queryAction) { if (prevEnvs.empty()) { prevEnvs.push(env); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek()); selectClauses.push(null); BLangDoClause doClause = queryAction.getDoClause(); List<BLangNode> clauses = queryAction.getQueryClauses(); clauses.forEach(clause -> clause.accept(this)); semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek())); BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); selectClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); } @Override public void visit(BLangFromClause fromClause) { boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv; this.breakToParallelQueryEnv = true; SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop()); fromClause.env = fromEnv; queryEnvs.push(fromEnv); checkExpr(fromClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(fromClause); handleInputClauseVariables(fromClause, queryEnvs.peek()); this.breakToParallelQueryEnv = prevBreakToParallelEnv; } @Override public void visit(BLangJoinClause joinClause) { boolean prevBreakEnv = this.breakToParallelQueryEnv; this.breakToParallelQueryEnv = true; SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop()); joinClause.env = joinEnv; queryEnvs.push(joinEnv); checkExpr(joinClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(joinClause); handleInputClauseVariables(joinClause, queryEnvs.peek()); if (joinClause.onClause != null) { ((BLangOnClause) joinClause.onClause).accept(this); } this.breakToParallelQueryEnv = prevBreakEnv; } @Override public void visit(BLangLetClause letClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop()); letClause.env = 
letEnv; queryEnvs.push(letEnv); for (BLangLetVariable letVariable : letClause.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv); } } @Override public void visit(BLangWhereClause whereClause) { whereClause.env = handleFilterClauses(whereClause.expression); } @Override public void visit(BLangSelectClause selectClause) { SymbolEnv selectEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop()); selectClause.env = selectEnv; queryEnvs.push(selectEnv); } @Override public void visit(BLangDoClause doClause) { SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop()); doClause.env = letEnv; queryEnvs.push(letEnv); } @Override public void visit(BLangOnConflictClause onConflictClause) { BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType); if (!types.isAssignable(exprType, symTable.errorType)) { dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, symTable.errorType, exprType); } } @Override public void visit(BLangLimitClause limitClause) { BType exprType = checkExpr(limitClause.expression, queryEnvs.peek()); if (!types.isAssignable(exprType, symTable.intType)) { dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType, exprType); } } @Override public void visit(BLangOnClause onClause) { BType lhsType, rhsType; BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek()); onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode); lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv); onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode); rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek()); if (!types.isAssignable(lhsType, rhsType)) { dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType); } } @Override public void visit(BLangOrderByClause orderByClause) { orderByClause.env = queryEnvs.peek(); for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) { BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), orderByClause.env); if (!types.isOrderedType(exprType, false)) { dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED); } } } @Override public void visit(BLangDo doNode) { if (doNode.onFailClause != null) { doNode.onFailClause.accept(this); } } public void visit(BLangOnFailClause onFailClause) { onFailClause.body.stmts.forEach(stmt -> stmt.accept(this)); } private SymbolEnv handleFilterClauses (BLangExpression filterExpression) { checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType); BType actualType = filterExpression.getBType(); if (TypeTags.TUPLE == actualType.tag) { dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.booleanType, actualType); } SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, selectClauses.peek(), queryEnvs.pop()); queryEnvs.push(filterEnv); return filterEnv; } private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) { if (bLangInputClause.variableDefinitionNode == null) { return; } BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable(); if (bLangInputClause.isDeclaredWithVar) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv); if 
(types.isAssignable(bLangInputClause.varType, typeNodeType)) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } if (typeNodeType != symTable.semanticError) { dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bLangInputClause.varType, typeNodeType); } semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) { String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic"; BLangExpression exprWithCheckingKeyword = checkedExpr.expr; boolean firstVisit = exprWithCheckingKeyword.getBType() == null; BType typeOfExprWithCheckingKeyword; if (expType == symTable.noType) { typeOfExprWithCheckingKeyword = symTable.noType; } else { typeOfExprWithCheckingKeyword = BUnionType.create(null, expType, symTable.errorType); } if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) { rewriteWithEnsureTypeFunc(checkedExpr, typeOfExprWithCheckingKeyword); } BType exprType = checkExpr(checkedExpr.expr, env, typeOfExprWithCheckingKeyword); if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = checkedExpr.getBType(); exprType = checkedExpr.expr.getBType(); } } boolean isErrorType = types.isAssignable(exprType, symTable.errorType); if (exprType.tag != TypeTags.UNION && !isErrorType) { if (exprType.tag == TypeTags.READONLY) { checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{ add(symTable.errorType); }}; resultType = symTable.anyAndReadonly; return; } else if (exprType != symTable.semanticError) { dlog.error(checkedExpr.expr.pos, DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); } checkedExpr.setBType(symTable.semanticError); return; } List<BType> errorTypes = new ArrayList<>(); List<BType> nonErrorTypes = new ArrayList<>(); if (!isErrorType) { for (BType memberType : ((BUnionType) exprType).getMemberTypes()) { if (memberType.tag == TypeTags.READONLY) { errorTypes.add(symTable.errorType); nonErrorTypes.add(symTable.anyAndReadonly); continue; } if (types.isAssignable(memberType, symTable.errorType)) { errorTypes.add(memberType); continue; } nonErrorTypes.add(memberType); } } else { errorTypes.add(exprType); } checkedExpr.equivalentErrorTypeList = errorTypes; if (errorTypes.isEmpty()) { dlog.error(checkedExpr.expr.pos, DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); checkedExpr.setBType(symTable.semanticError); return; } BType actualType; if (nonErrorTypes.size() == 0) { actualType = symTable.neverType; } else if (nonErrorTypes.size() == 1) { actualType = nonErrorTypes.get(0); } else { actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes)); } if (actualType.tag == TypeTags.NEVER) { dlog.error(checkedExpr.pos, DiagnosticErrorCode.NEVER_TYPE_NOT_ALLOWED_WITH_CHECKED_EXPR, operatorType); } resultType = types.checkType(checkedExpr, actualType, expType); } private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) { BType rhsType = getCandidateType(checkedExpr, type); if (rhsType == symTable.semanticError) { rhsType = getCandidateType(checkedExpr, rhsType); } BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType); if (!types.isLax(candidateLaxType)) { return; } ArrayList<BLangExpression> argExprs = new ArrayList<>(); BType typedescType = new 
BTypedescType(expType, null); BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = expType; typedescExpr.setBType(typedescType); argExprs.add(typedescExpr); BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE, argExprs, checkedExpr.expr, checkedExpr.pos); invocation.symbol = symResolver.lookupLangLibMethod(type, names.fromString(invocation.name.value)); invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); checkedExpr.expr = invocation; } private BType getCandidateLaxType(BLangNode expr, BType rhsType) { if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { return types.getSafeType(rhsType, false, true); } return rhsType; } private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; int prevErrorCount = this.dlog.errorCount(); this.dlog.resetErrorCount(); this.dlog.mute(); checkedExpr.expr.cloneAttempt++; BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr); BType rhsType; if (checkExprCandidateType == symTable.semanticError) { rhsType = checkExpr(clone, env); } else { rhsType = checkExpr(clone, env, checkExprCandidateType); } this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; this.dlog.setErrorCount(prevErrorCount); if (!prevNonErrorLoggingCheck) { this.dlog.unmute(); } return rhsType; } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { resultType = serviceConstructorExpr.serviceNode.symbol.type; } @Override public void visit(BLangTypeTestExpr typeTestExpr) { typeTestExpr.typeNode.setBType(symResolver.resolveTypeNode(typeTestExpr.typeNode, env)); checkExpr(typeTestExpr.expr, env); resultType = types.checkType(typeTestExpr, symTable.booleanType, expType); } public void visit(BLangAnnotAccessExpr annotAccessExpr) { checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc); BType actualType = symTable.semanticError; BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env, names.fromString(annotAccessExpr.pkgAlias.getValue()), names.fromString(annotAccessExpr.annotationName.getValue())); if (symbol == this.symTable.notFoundSymbol) { this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION, annotAccessExpr.annotationName.getValue()); } else { annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol; BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? 
symTable.trueType : ((BAnnotationSymbol) symbol).attachedType.type; actualType = BUnionType.create(null, annotType, symTable.nilType); } this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType); } private boolean isValidVariableReference(BLangExpression varRef) { switch (varRef.getKind()) { case SIMPLE_VARIABLE_REF: case RECORD_VARIABLE_REF: case TUPLE_VARIABLE_REF: case ERROR_VARIABLE_REF: case FIELD_BASED_ACCESS_EXPR: case INDEX_BASED_ACCESS_EXPR: case XML_ATTRIBUTE_ACCESS_EXPR: return true; default: dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType()); return false; } } private BType getEffectiveReadOnlyType(Location pos, BType origTargetType) { if (origTargetType == symTable.readonlyType) { if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } if (origTargetType.tag != TypeTags.UNION) { return origTargetType; } boolean hasReadOnlyType = false; LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>(); for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) { if (memberType == symTable.readonlyType) { hasReadOnlyType = true; continue; } nonReadOnlyTypes.add(memberType); } if (!hasReadOnlyType) { return origTargetType; } if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes); nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>())); return nonReadOnlyUnion; } private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) { SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env); bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv)); return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType); } private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) { if (paramTypes.size() != bLangArrowFunction.params.size()) { dlog.error(bLangArrowFunction.pos, DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH, paramTypes.size(), bLangArrowFunction.params.size()); resultType = symTable.semanticError; bLangArrowFunction.params.forEach(param -> param.setBType(symTable.semanticError)); return; } for (int i = 0; i < bLangArrowFunction.params.size(); i++) { BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i); BType bType = paramTypes.get(i); BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); valueTypeNode.setTypeKind(bType.getKind()); valueTypeNode.pos = symTable.builtinPos; paramIdentifier.setTypeNode(valueTypeNode); paramIdentifier.setBType(bType); } } private void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) { if (env.enclVarSym == varSymbol) { dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name); } } public List<BType> getListWithErrorTypes(int count) { List<BType> list = new ArrayList<>(count); for (int i = 0; i < count; i++) { list.add(symTable.semanticError); } return list; } private void 
checkFunctionInvocationExpr(BLangInvocation iExpr) { Name funcName = names.fromIdNode(iExpr.name); Name pkgAlias = names.fromIdNode(iExpr.pkgAlias); BSymbol funcSymbol = symTable.notFoundSymbol; BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr)); if (pkgSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias); } else { if (funcSymbol == symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { funcSymbol = symbol; } if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) && (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) { funcSymbol = symbol; } } if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) { BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol; } } if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) { if (!missingNodesHelper.isMissingNode(funcName)) { dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName); } iExpr.argExprs.forEach(arg -> checkExpr(arg, env)); resultType = symTable.semanticError; return; } if (isFunctionPointer(funcSymbol)) { iExpr.functionPointerInvocation = true; markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID); if (langLibPackageID) { this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); } iExpr.symbol = funcSymbol; checkInvocationParamAndReturnType(iExpr); if (langLibPackageID && !iExpr.argExprs.isEmpty()) { checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol); } } protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) { BLangInvokableNode encInvokable = env.enclInvokable; if (symbol.closure == true || (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE && env.node.getKind() != NodeKind.ARROW_EXPR) { return; } if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) && !isFunctionArgument(symbol, encInvokable.requiredParams)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.node.getKind() == NodeKind.ARROW_EXPR && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol) { resolvedSymbol.closure = true; ((BLangArrowFunction) env.node).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) { 
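/* The referencing expression sits inside a record type node (e.g. a field default value expression): resolve the symbol in the enclosing invokable's scope and, if found and the enclosing invokable is not an attached function, mark it as a closure variable. */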
SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } BLangNode node = env.node; SymbolEnv cEnv = env; while (node != null && node.getKind() != NodeKind.FUNCTION) { if (node.getKind() == NodeKind.ON_FAIL) { BLangOnFailClause onFailClause = (BLangOnFailClause) node; SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !resolvedSymbol.closure) { onFailClause.possibleClosureSymbols.add(resolvedSymbol); } break; } else { SymbolEnv enclEnv = cEnv.enclEnv; if (enclEnv == null) { break; } cEnv = enclEnv; node = cEnv.node; } } } private boolean isNotFunction(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) { return false; } if (isFunctionPointer(funcSymbol)) { return false; } return true; } private boolean isFunctionPointer(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) { return false; } return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE && funcSymbol.kind == SymbolKind.FUNCTION && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE; } private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr, BType expectedType) { List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) { BType target = getErrorCtorNamedArgTargetType(namedArgsExpression, expectedType); BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression); BType type = checkExpr(clone, env, target); if (type == symTable.semanticError) { checkExpr(namedArgsExpression, env); } else { checkExpr(namedArgsExpression, env, target); } namedArgs.add(namedArgsExpression); } return namedArgs; } private BType getErrorCtorNamedArgTargetType(BLangNamedArgsExpression namedArgsExpression, BType expectedType) { if (expectedType == symTable.semanticError) { return symTable.semanticError; } if (expectedType.tag == TypeTags.MAP) { return ((BMapType) expectedType).constraint; } if (expectedType.tag != TypeTags.RECORD) { return symTable.semanticError; } BRecordType recordType = (BRecordType) expectedType; BField targetField = recordType.fields.get(namedArgsExpression.name.value); if (targetField != null) { return targetField.type; } if (!recordType.sealed && !recordType.fields.isEmpty()) { dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name, recordType); } return recordType.sealed ? 
symTable.noType : recordType.restFieldType; } private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) { if (objectType.getKind() == TypeKind.SERVICE && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION); return; } Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value)); BSymbol funcSymbol = symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol); if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) { if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) { dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value, objectType); resultType = symTable.semanticError; return; } } else { iExpr.symbol = funcSymbol; } if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } checkInvocationParamAndReturnType(iExpr); } private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) { BLangValueExpression varRef = (BLangValueExpression) aInv.expr; if (((varRef.symbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT) && !aInv.async) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType()); this.resultType = symTable.semanticError; aInv.symbol = symTable.notFoundSymbol; return; } BVarSymbol epSymbol = (BVarSymbol) varRef.symbol; Name remoteMethodQName = names .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value)); Name actionName = names.fromIdNode(aInv.name); BSymbol remoteFuncSymbol = symResolver .lookupMemberSymbol(aInv.pos, epSymbol.type.tsymbol.scope, env, remoteMethodQName, SymTag.FUNCTION); if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) { dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType); resultType = symTable.semanticError; return; } if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName); this.resultType = symTable.semanticError; return; } if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && Symbols.isFlagOn(expType.flags, Flags.CLIENT) && types.isNeverTypeOrStructureTypeWithARequiredNeverMember ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) { dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL); } aInv.symbol = remoteFuncSymbol; checkInvocationParamAndReturnType(aInv); } private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) { return getLangLibMethod(iExpr, bType) != symTable.notFoundSymbol; } private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) { Name funcName = names.fromString(iExpr.name.value); BSymbol funcSymbol = 
symResolver.lookupLangLibMethod(bType, funcName); if (funcSymbol == symTable.notFoundSymbol) { return symTable.notFoundSymbol; } iExpr.symbol = funcSymbol; iExpr.langLibInvocation = true; SymbolEnv enclEnv = this.env; this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); iExpr.argExprs.add(0, iExpr.expr); checkInvocationParamAndReturnType(iExpr); this.env = enclEnv; return funcSymbol; } private void checkInvocationParamAndReturnType(BLangInvocation iExpr) { BType actualType = checkInvocationParam(iExpr); resultType = types.checkType(iExpr, actualType, this.expType); } private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams, Set<String> requiredParamNames) { if (openIncRecordParams.size() != 1) { return null; } LinkedHashMap<String, BField> fields = ((BRecordType) openIncRecordParams.get(0).type).fields; for (String paramName : requiredParamNames) { if (!fields.containsKey(paramName)) { return null; } } return openIncRecordParams.get(0); } private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol, List<BVarSymbol> incRecordParams) { Set<String> requiredParamNames = new HashSet<>(); List<BVarSymbol> openIncRecordParams = new ArrayList<>(); for (BVarSymbol paramSymbol : invokableSymbol.params) { if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) && paramSymbol.type.getKind() == TypeKind.RECORD) { boolean recordWithDisallowFieldsOnly = true; LinkedHashMap<String, BField> fields = ((BRecordType) paramSymbol.type).fields; for (String fieldName : fields.keySet()) { BField field = fields.get(fieldName); if (field.symbol.type.tag != TypeTags.NEVER) { recordWithDisallowFieldsOnly = false; incRecordParams.add(field.symbol); requiredParamNames.add(fieldName); } } if (recordWithDisallowFieldsOnly && ((BRecordType) paramSymbol.type).restFieldType != symTable.noType) { openIncRecordParams.add(paramSymbol); } } else { requiredParamNames.add(paramSymbol.name.value); } } return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames); } private BType checkInvocationParam(BLangInvocation iExpr) { if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE); return symTable.semanticError; } if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) { dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type); return symTable.noType; } BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol); List<BType> paramTypes = ((BInvokableType) invokableSymbol.type).getParameterTypes(); List<BVarSymbol> incRecordParams = new ArrayList<>(); BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol, incRecordParams); int parameterCountForPositionalArgs = paramTypes.size(); int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size(); iExpr.requiredArgs = new ArrayList<>(); for (BVarSymbol symbol : invokableSymbol.params) { if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) || symbol.type.tag != TypeTags.RECORD) { continue; } LinkedHashMap<String, BField> fields = ((BRecordType) symbol.type).fields; if (fields.isEmpty()) { continue; } for (String field : fields.keySet()) { if (fields.get(field).type.tag != TypeTags.NEVER) { parameterCountForNamedArgs = parameterCountForNamedArgs - 1; break; } } } int i = 0; BLangExpression vararg = null; boolean foundNamedArg = false; for 
(BLangExpression expr : iExpr.argExprs) { switch (expr.getKind()) { case NAMED_ARGS_EXPR: foundNamedArg = true; if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) { iExpr.requiredArgs.add(expr); } else { dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); } i++; break; case REST_ARGS_EXPR: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG); continue; } vararg = expr; break; default: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG); } if (i < parameterCountForPositionalArgs) { iExpr.requiredArgs.add(expr); } else { iExpr.restArgs.add(expr); } i++; break; } } return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams, incRecordParamAllowAdditionalFields); } private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg, List<BVarSymbol> incRecordParams, BVarSymbol incRecordParamAllowAdditionalFields) { BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol; BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type; BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol; List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params); List<BLangExpression> nonRestArgs = iExpr.requiredArgs; List<BVarSymbol> valueProvidedParams = new ArrayList<>(); List<BVarSymbol> requiredParams = new ArrayList<>(); List<BVarSymbol> requiredIncRecordParams = new ArrayList<>(); for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.isDefaultable) { continue; } requiredParams.add(nonRestParam); } for (BVarSymbol incRecordParam : incRecordParams) { if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) { requiredIncRecordParams.add(incRecordParam); } } int i = 0; for (; i < nonRestArgs.size(); i++) { BLangExpression arg = nonRestArgs.get(i); if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) { BType expectedType = paramTypes.get(i); types.checkType(arg.pos, arg.getBType(), expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES); types.setImplicitCastExpr(arg, arg.getBType(), expectedType); } if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) { if (i < nonRestParams.size()) { BVarSymbol param = nonRestParams.get(i); checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation); valueProvidedParams.add(param); requiredParams.remove(param); continue; } break; } if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) { BLangIdentifier argName = ((NamedArgNode) arg).getName(); BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr, nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields); if (varSym == null) { dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName); break; } requiredParams.remove(varSym); requiredIncRecordParams.remove(varSym); if (valueProvidedParams.contains(varSym)) { dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value); continue; } checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation); valueProvidedParams.add(varSym); } } BVarSymbol restParam = invokableTypeSymbol.restParam; boolean errored = false; if (!requiredParams.isEmpty() && vararg == null) { for (BVarSymbol requiredParam : requiredParams) { if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) { dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, 
requiredParam.name, iExpr.name.value); errored = true; } } } if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) { for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) { for (BVarSymbol requiredParam : requiredParams) { if (requiredParam.type == requiredIncRecordParam.owner.type) { dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredIncRecordParam.name, iExpr.name.value); errored = true; } } } } if (restParam == null && (!iExpr.restArgs.isEmpty() || (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) { dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); errored = true; } if (errored) { return symTable.semanticError; } BType listTypeRestArg = restParam == null ? null : restParam.type; BRecordType mappingTypeRestArg = null; if (vararg != null && nonRestArgs.size() < nonRestParams.size()) { PackageID pkgID = env.enclPkg.symbol.pkgID; List<BType> tupleMemberTypes = new ArrayList<>(); BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL); mappingTypeRestArg = new BRecordType(recordSymbol); LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); BType tupleRestType = null; BVarSymbol fieldSymbol; for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) { BType paramType = paramTypes.get(j); BVarSymbol nonRestParam = nonRestParams.get(j); Name paramName = nonRestParam.name; tupleMemberTypes.add(paramType); boolean required = requiredParams.contains(nonRestParam); fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(required ? Flag.REQUIRED : Flag.OPTIONAL); }}), paramName, pkgID, paramType, recordSymbol, null, VIRTUAL); fields.put(paramName.value, new BField(paramName, null, fieldSymbol)); } if (listTypeRestArg != null) { if (listTypeRestArg.tag == TypeTags.ARRAY) { tupleRestType = ((BArrayType) listTypeRestArg).eType; } else if (listTypeRestArg.tag == TypeTags.TUPLE) { BTupleType restTupleType = (BTupleType) listTypeRestArg; tupleMemberTypes.addAll(restTupleType.tupleTypes); if (restTupleType.restType != null) { tupleRestType = restTupleType.restType; } } } BTupleType tupleType = new BTupleType(tupleMemberTypes); tupleType.restType = tupleRestType; listTypeRestArg = tupleType; mappingTypeRestArg.sealed = true; mappingTypeRestArg.restFieldType = symTable.noType; mappingTypeRestArg.fields = fields; recordSymbol.type = mappingTypeRestArg; mappingTypeRestArg.tsymbol = recordSymbol; } if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) { dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); return symTable.semanticError; } BType restType = null; if (vararg != null && !iExpr.restArgs.isEmpty()) { BType elementType = ((BArrayType) listTypeRestArg).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); } checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation); iExpr.restArgs.add(vararg); restType = this.resultType; } else if (vararg != null) { iExpr.restArgs.add(vararg); if (mappingTypeRestArg != null) { LinkedHashSet<BType> restTypes = new LinkedHashSet<>(); restTypes.add(listTypeRestArg); restTypes.add(mappingTypeRestArg); BType actualType = BUnionType.create(null, restTypes); checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation); } else { checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation); } restType = this.resultType; } else if 
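/* Descriptive note (added): no vararg was supplied, only individual rest args. Each one is checked against the rest param's array element type, or positionally against the tuple member types (falling back to the tuple's rest type) when the rest param's effective type is a tuple. */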
(!iExpr.restArgs.isEmpty()) { if (listTypeRestArg.tag == TypeTags.ARRAY) { BType elementType = ((BArrayType) listTypeRestArg).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); if (restType != symTable.semanticError && this.resultType == symTable.semanticError) { restType = this.resultType; } } } else { BTupleType tupleType = (BTupleType) listTypeRestArg; List<BType> tupleMemberTypes = tupleType.tupleTypes; BType tupleRestType = tupleType.restType; int tupleMemCount = tupleMemberTypes.size(); for (int j = 0; j < iExpr.restArgs.size(); j++) { BLangExpression restArg = iExpr.restArgs.get(j); BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType; checkTypeParamExpr(restArg, this.env, memType, true); if (restType != symTable.semanticError && this.resultType == symTable.semanticError) { restType = this.resultType; } } } } BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType()); if (restType != symTable.semanticError && Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) && Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) { retType = unifier.build(retType, expType, iExpr, types, symTable, dlog); } boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID); String sortFuncName = "sort"; if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) { checkArrayLibSortFuncArgs(iExpr); } if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) { return this.generateFutureType(invokableSymbol, retType); } else { return retType; } } private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) { if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).getBType()); } if (iExpr.argExprs.size() != 3) { return; } BLangExpression keyFunction = iExpr.argExprs.get(2); BType keyFunctionType = keyFunction.getBType(); if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (keyFunctionType.tag == TypeTags.NIL) { if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).getBType()); } return; } Location pos; BType returnType; if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { pos = keyFunction.pos; returnType = keyFunction.getBType().getReturnType(); } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) { BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction); pos = arrowFunction.body.expr.pos; returnType = arrowFunction.body.expr.getBType(); if (returnType.tag == TypeTags.SEMANTIC_ERROR) { return; } } else { BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction; pos = keyLambdaFunction.function.pos; returnType = keyLambdaFunction.function.getBType().getReturnType(); } if (!types.isOrderedType(returnType, false)) { dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType); } } private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr, List<BVarSymbol> nonRestParams, List<BVarSymbol> incRecordParams, BVarSymbol incRecordParamAllowAdditionalFields) { for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.getName().value.equals(argName.value)) { return nonRestParam; } } for (BVarSymbol incRecordParam : incRecordParams) { if 
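/* Descriptive note (added): a named arg that matches no regular parameter may still match a field of an included record parameter; failing that, an open included record param (one allowing additional fields) accepts it as a rest field, and a fresh virtual symbol is created for the argument name. */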
(incRecordParam.getName().value.equals(argName.value)) { return incRecordParam; } } if (incRecordParamAllowAdditionalFields != null) { BRecordType incRecordType = (BRecordType) incRecordParamAllowAdditionalFields.type; checkExpr(expr, env, incRecordType.restFieldType); if (!incRecordType.fields.containsKey(argName.value)) { return new BVarSymbol(0, names.fromIdNode(argName), null, symTable.noType, null, argName.pos, VIRTUAL); } } return null; } private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) { boolean isWorkerStart = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX); return new BFutureType(TypeTags.FUTURE, retType, null, isWorkerStart); } private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral); } private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { if (typeParamAnalyzer.notRequireTypeParams(env)) { checkExpr(arg, env, expectedType); return; } if (requireTypeInference(arg, inferTypeForNumericLiteral)) { BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env); BType inferredType = checkExpr(arg, env, expType); typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType); return; } checkExpr(arg, env, expectedType); typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType); } private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) { switch (expr.getKind()) { case GROUP_EXPR: return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral); case ARROW_EXPR: case LIST_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: return true; case NUMERIC_LITERAL: return inferTypeForNumericLiteral; default: return false; } } private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) { BType fieldType = symTable.semanticError; boolean keyValueField = field.isKeyValueField(); boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP; boolean readOnlyConstructorField = false; String fieldName = null; Location pos = null; BLangExpression valueExpr = null; if (keyValueField) { valueExpr = ((BLangRecordKeyValueField) field).valueExpr; } else if (!spreadOpField) { valueExpr = (BLangRecordVarNameField) field; } switch (mappingType.tag) { case TypeTags.RECORD: if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; fieldType = checkRecordLiteralKeyExpr(key.expr, key.computedKey, (BRecordType) mappingType); readOnlyConstructorField = keyValField.readonly; pos = key.expr.pos; fieldName = getKeyValueFieldName(keyValField); } else if (spreadOpField) { BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; checkExpr(spreadExpr, this.env); BType spreadExprType = spreadExpr.getBType(); if (spreadExprType.tag == TypeTags.MAP) { return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint, getAllFieldType((BRecordType) mappingType), DiagnosticErrorCode.INCOMPATIBLE_TYPES); } if (spreadExprType.tag != TypeTags.RECORD) { dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadExprType); return symTable.semanticError; } boolean errored = false; for (BField bField : ((BRecordType) 
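/* Descriptive note (added): spreading a record into a record-typed mapping constructor. Each field of the spread expression's record type must be assignable to the corresponding field (or rest field) type of the target record; incompatible fields are reported individually. E.g. (illustrative Ballerina) `record {| int a; |} r = {...s};` requires the `a` field of `s` to be assignable to int. */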
spreadExprType).fields.values()) { BType specFieldType = bField.type; BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, this.env, bField.name, (BRecordType) mappingType); if (expectedFieldType != symTable.semanticError && !types.isAssignable(specFieldType, expectedFieldType)) { dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD, expectedFieldType, bField.name, specFieldType); if (!errored) { errored = true; } } } return errored ? symTable.semanticError : symTable.noType; } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; fieldType = checkRecordLiteralKeyExpr(varNameField, false, (BRecordType) mappingType); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } break; case TypeTags.MAP: if (spreadOpField) { BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; BType spreadOpType = checkExpr(spreadExp, this.env); BType spreadOpMemberType; switch (spreadOpType.tag) { case TypeTags.RECORD: List<BType> types = new ArrayList<>(); BRecordType recordType = (BRecordType) spreadOpType; for (BField recField : recordType.fields.values()) { types.add(recField.type); } if (!recordType.sealed) { types.add(recordType.restFieldType); } spreadOpMemberType = getRepresentativeBroadType(types); break; case TypeTags.MAP: spreadOpMemberType = ((BMapType) spreadOpType).constraint; break; default: dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadOpType); return symTable.semanticError; } return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint, DiagnosticErrorCode.INCOMPATIBLE_TYPES); } boolean validMapKey; if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey); readOnlyConstructorField = keyValField.readonly; pos = key.pos; fieldName = getKeyValueFieldName(keyValField); } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } fieldType = validMapKey ? 
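/* Descriptive note (added): keys of a map-typed constructor must be identifiers, string literals, or computed string keys; every valid key maps to the map's constraint type, and an invalid key poisons the field type. */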
((BMapType) mappingType).constraint : symTable.semanticError; break; } if (readOnlyConstructorField) { if (types.isSelectivelyImmutableType(fieldType)) { fieldType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) fieldType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } else if (!types.isInherentlyImmutableType(fieldType)) { dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType); fieldType = symTable.semanticError; } } if (spreadOpField) { valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } BLangExpression exprToCheck = valueExpr; if (this.nonErrorLoggingCheck) { exprToCheck = nodeCloner.cloneNode(valueExpr); } else { ((BLangNode) field).setBType(fieldType); } return checkExpr(exprToCheck, this.env, fieldType); } private BType checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, BRecordType recordType) { Name fieldName; if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (recordType.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(recordType.restFieldType); } return BUnionType.create(null, fieldTypes); } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr; fieldName = names.fromIdNode(varRef.variableName); } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) { fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value); } else { dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY); return symTable.semanticError; } return checkRecordLiteralKeyByName(keyExpr.pos, this.env, fieldName, recordType); } private BType checkRecordLiteralKeyByName(Location location, SymbolEnv env, Name key, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(location, env, key, recordType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { return fieldSymbol.type; } if (recordType.sealed) { dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key, recordType.tsymbol.type.getKind().typeName(), recordType); return symTable.semanticError; } return recordType.restFieldType; } private BType getAllFieldType(BRecordType recordType) { LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BField field : recordType.fields.values()) { possibleTypes.add(field.type); } BType restFieldType = recordType.restFieldType; if (restFieldType != null && restFieldType != symTable.noType) { possibleTypes.add(restFieldType); } return BUnionType.create(null, possibleTypes); } private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) { if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.getBType() == symTable.semanticError) { return false; } return true; } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF || (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING)) { return true; } dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY); return false; } private BType addNilForNillableAccessType(BType actualType) { if (actualType.isNullable()) { return actualType; } return BUnionType.create(null, 
actualType, symTable.nilType); } private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { return symTable.semanticError; } if (recordType.sealed) { return symTable.semanticError; } return recordType.restFieldType; } private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess, Name fieldName, BObjectType objectType) { BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos, this.env, fieldName, objectType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, fieldName.value)); fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { dlog.error(bLangFieldBasedAccess.field.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol); return symTable.semanticError; } if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) && !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) { fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol); fieldSymbol.flags &= ~Flags.ISOLATED; fieldSymbol.type.flags &= ~Flags.ISOLATED; } bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkTupleFieldType(BType tupleType, int indexValue) { BTupleType bTupleType = (BTupleType) tupleType; if (bTupleType.tupleTypes.size() <= indexValue && bTupleType.restType != null) { return bTupleType.restType; } else if (indexValue < 0 || bTupleType.tupleTypes.size() <= indexValue) { return symTable.semanticError; } return bTupleType.tupleTypes.get(indexValue); } private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) { BLangExpression startTagName = bLangXMLElementLiteral.startTagName; checkExpr(startTagName, xmlElementEnv, symTable.stringType); BLangExpression endTagName = bLangXMLElementLiteral.endTagName; if (endTagName == null) { return; } checkExpr(endTagName, xmlElementEnv, symTable.stringType); if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME && startTagName.equals(endTagName)) { return; } if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) { return; } dlog.error(bLangXMLElementLiteral.pos, 
DiagnosticErrorCode.XML_TAGS_MISMATCH); } private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) { for (BLangExpression expr : exprs) { checkExpr(expr, env); BType type = expr.getBType(); if (type == symTable.semanticError) { continue; } if (!types.isNonNilSimpleBasicTypeOrString(type)) { dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType), type); } } } /** * Concatenate the consecutive text type nodes, and get the reduced set of children. * * @param exprs Child nodes * @param xmlElementEnv Symbol environment of the enclosing XML element literal * @return Reduced set of children */ private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) { List<BLangExpression> newChildren = new ArrayList<>(); List<BLangExpression> tempConcatExpressions = new ArrayList<>(); for (BLangExpression expr : exprs) { BType exprType; if (expr.getKind() == NodeKind.QUERY_EXPR) { exprType = checkExpr(expr, xmlElementEnv, expType); } else { exprType = checkExpr(expr, xmlElementEnv); } if (TypeTags.isXMLTypeTag(exprType.tag)) { if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); tempConcatExpressions = new ArrayList<>(); } newChildren.add(expr); continue; } BType type = expr.getBType(); if (type.tag >= TypeTags.JSON) { if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) { dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType, symTable.xmlType), type); } continue; } tempConcatExpressions.add(expr); } if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); } return newChildren; } private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.textFragments = exprs; xmlTextLiteral.pos = exprs.get(0).pos; xmlTextLiteral.setBType(symTable.xmlType); return xmlTextLiteral; } private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) { accessExpr.originalType = actualType; BUnionType unionType = BUnionType.create(null, actualType); if (returnsNull(accessExpr)) { unionType.add(symTable.nilType); } BType parentType = accessExpr.expr.getBType(); if (accessExpr.errorSafeNavigation && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) { unionType.add(symTable.errorType); } if (unionType.getMemberTypes().size() == 1) { return unionType.getMemberTypes().toArray(new BType[0])[0]; } return unionType; } private boolean returnsNull(BLangAccessExpression accessExpr) { BType parentType = accessExpr.expr.getBType(); if (parentType.isNullable() && parentType.tag != TypeTags.JSON) { return true; } if (parentType.tag != TypeTags.MAP) { return false; } if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR && accessExpr.expr.getBType().tag == TypeTags.MAP) { BType constraintType = ((BMapType) accessExpr.expr.getBType()).constraint; return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON; } return false; } private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == 
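/* Descriptive note (added): field access on an object or on a union of object types. For a union, the field must resolve in every member type, and the result is the union of the individual field types (collapsed to a single type when they all agree). */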
TypeTags.OBJECT) { return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memType); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return symTable.semanticError; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType == symTable.semanticError) { return fieldType; } return BUnionType.create(null, fieldType, symTable.nilType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.isEmpty()) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType 
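/* Descriptive note (added): several distinct member field types are unioned here; when some union member lacked the field entirely, nil is added below so the optional access stays well-typed. */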
= BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType; } private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) { actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) { actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); if (actualType != symTable.semanticError) { fieldAccessExpr.originalType = actualType; return actualType; } if (!fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_NON_REQUIRED_FIELD, varRefType, fieldName); return actualType; } actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType); } } else if (types.isLax(varRefType)) { if (fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT, varRefType); return symTable.semanticError; } if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } BType laxFieldAccessType = getLaxFieldAccessType(varRefType); actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.originalType = laxFieldAccessType; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (fieldAccessExpr.isLValue) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE); } actualType = symTable.xmlType; fieldAccessExpr.originalType = actualType; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType); } return actualType; } private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) { BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldAccess = fieldAccessExpr; String nsPrefix = nsPrefixedFieldAccess.nsPrefix.value; BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix)); if (nsSymbol == symTable.notFoundSymbol) { dlog.error(nsPrefixedFieldAccess.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsPrefixedFieldAccess.nsPrefix); } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst( nsPrefixedFieldAccess.field.value, 
nsPrefixedFieldAccess.nsPrefix.value, (BPackageSymbol) nsSymbol, fieldAccessExpr.pos); } else { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) nsSymbol; } } private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) { return fieldBasedAccess.originalType != null && types.isLax(fieldBasedAccess.originalType); } private BType getLaxFieldAccessType(BType exprType) { switch (exprType.tag) { case TypeTags.JSON: return symTable.jsonType; case TypeTags.XML: case TypeTags.XML_ELEMENT: return symTable.stringType; case TypeTags.MAP: return ((BMapType) exprType).constraint; case TypeTags.UNION: BUnionType unionType = (BUnionType) exprType; if (types.isSameType(symTable.jsonType, unionType)) { return symTable.jsonType; } LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType))); return memberTypes.size() == 1 ? memberTypes.iterator().next() : BUnionType.create(null, memberTypes); } return symTable.semanticError; } private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; boolean nillableExprType = false; BType effectiveType = varRefType; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); } } if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) { actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName); if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD, varRefType, fieldName); } fieldAccessExpr.nilSafeNavigation = nillableExprType; fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType : types.getTypeWithoutNil(actualType); } else if (types.isLax(effectiveType)) { BType laxFieldAccessType = getLaxFieldAccessType(effectiveType); actualType = accessCouldResultInError(effectiveType) ? BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); actualType = accessCouldResultInError(effectiveType) ? 
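/* Descriptive note (added): chained lax access (the previous access had a lax original type such as json) can fail at runtime, so `error` is added to the static result type whenever the underlying type could produce one; e.g. (illustrative Ballerina) for `json j`, the type of `j?.a?.b` includes `error`. */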
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType); } if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private boolean accessCouldResultInError(BType type) { if (type.tag == TypeTags.JSON) { return true; } if (type.tag == TypeTags.MAP) { return false; } if (type.tag == TypeTags.XML) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream().anyMatch(this::accessCouldResultInError); } else { return false; } } private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) { BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType()); boolean nillableExprType = false; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } if (nillableExprType) { varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); if (!types.isSubTypeOfMapping(varRefType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } } } } BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; BType actualType = symTable.semanticError; if (types.isSubTypeOfMapping(varRefType)) { checkExpr(indexExpr, this.env, symTable.stringType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType); if (actualType == symTable.semanticError) { if (indexExpr.getBType().tag == TypeTags.STRING && isConst(indexExpr)) { String fieldName = getConstFieldName(indexExpr); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD, fieldName, indexBasedAccessExpr.expr.getBType()); return actualType; } dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType()); return actualType; } indexBasedAccessExpr.nilSafeNavigation = nillableExprType; indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? 
actualType : types.getTypeWithoutNil(actualType); } else if (types.isSubTypeOfList(varRefType)) { checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType); indexBasedAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { if (indexExpr.getBType().tag == TypeTags.INT && isConst(indexExpr)) { dlog.error(indexBasedAccessExpr.indexExpr.pos, DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr)); return actualType; } dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType()); return actualType; } } else if (types.isAssignable(varRefType, symTable.stringType)) { if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.getBType() == symTable.semanticError) { return symTable.semanticError; } indexBasedAccessExpr.originalType = symTable.stringType; actualType = symTable.stringType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (indexBasedAccessExpr.isLValue) { indexExpr.setBType(symTable.semanticError); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE); return actualType; } BType type = checkExpr(indexExpr, this.env, symTable.intType); if (type == symTable.semanticError) { return type; } indexBasedAccessExpr.originalType = varRefType; actualType = varRefType; } else if (varRefType.tag == TypeTags.TABLE) { if (indexBasedAccessExpr.isLValue) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS, varRefType); return symTable.semanticError; } BTableType tableType = (BTableType) indexBasedAccessExpr.expr.getBType(); BType keyTypeConstraint = tableType.keyTypeConstraint; if (tableType.keyTypeConstraint == null) { keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.getBType()). 
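/* Descriptive note (added): member access on a table. When no explicit key type constraint exists, one is derived from the declared key field names and the row (constraint) type; keyless tables reject member access. */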
fieldNameList, ((BTableType) indexBasedAccessExpr.expr.getBType()).constraint); if (keyTypeConstraint == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE, indexBasedAccessExpr.expr); return symTable.semanticError; } } if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) { checkExpr(indexExpr, this.env, keyTypeConstraint); if (indexExpr.getBType() == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } else { List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr) indexBasedAccessExpr.indexExpr).multiKeyIndexExprs; List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes; if (keyConstraintTypes.size() != multiKeyExpressionList.size()) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } for (int i = 0; i < multiKeyExpressionList.size(); i++) { BLangExpression keyExpr = multiKeyExpressionList.get(i); checkExpr(keyExpr, this.env, keyConstraintTypes.get(i)); if (keyExpr.getBType() == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } } if (expType.tag != TypeTags.NONE) { BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType); if (resultType == symTable.semanticError) { return symTable.semanticError; } } BType constraint = tableType.constraint; actualType = addNilForNillableAccessType(constraint); indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType : types.getTypeWithoutNil(actualType); } else if (varRefType == symTable.semanticError) { indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError); return symTable.semanticError; } else { indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError); dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS, indexBasedAccessExpr.expr.getBType()); return symTable.semanticError; } if (nillableExprType && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private Long getConstIndex(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value : (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private String getConstFieldName(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value : (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType, BArrayType arrayType) { BType actualType = symTable.semanticError; switch (indexExprType.tag) { case TypeTags.INT: BLangExpression indexExpr = indexBasedAccess.indexExpr; if (!isConst(indexExpr) || arrayType.state == BArrayState.OPEN) { actualType = arrayType.eType; break; } actualType = getConstIndex(indexExpr) >= arrayType.size ? 
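/* Descriptive note (added): a constant index into a closed array is bounds-checked at compile time; e.g. (illustrative Ballerina) with `int[3] a = [1, 2, 3];`, the access `a[4]` is reported as list index out of range by the caller. */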
symTable.semanticError : arrayType.eType; break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) indexExprType; boolean validIndexExists = false; for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); if (indexValue >= 0 && (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) { validIndexExists = true; break; } } if (!validIndexExists) { return symTable.semanticError; } actualType = arrayType.eType; break; case TypeTags.UNION: List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(matchedType -> (BFiniteType) matchedType) .collect(Collectors.toList()); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType); if (elementType == symTable.semanticError) { return symTable.semanticError; } actualType = arrayType.eType; } return actualType; } private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.ARRAY) { return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type); } if (type.tag == TypeTags.TUPLE) { return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType()); } LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.INT: if (isConst(indexExpr)) { actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue()); } else { BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType(); LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>()); actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); } break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); BType fieldType = checkTupleFieldType(tuple, indexValue); if (fieldType.tag != TypeTags.SEMANTIC_ERROR) { possibleTypes.add(fieldType); } } if (possibleTypes.size() == 0) { return symTable.semanticError; } actualType = possibleTypes.size() == 1 ? 
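/* Descriptive note (added): a finite-typed index selects the union of the tuple member types at each possible position; e.g. (illustrative Ballerina) indexing `[int, string] t` with an index of type `0|1` yields `int|string`. */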
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) { tupleType.tupleTypes .forEach(memberType -> { if (memberType.tag == TypeTags.UNION) { collectMemberTypes((BUnionType) memberType, memberTypes); } else { memberTypes.add(memberType); } }); return memberTypes; } private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.MAP) { BType constraint = ((BMapType) type).constraint; return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint); } if (type.tag == TypeTags.RECORD) { return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType()); } BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? 
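/* Descriptive note (added): when some union member had no matching mapping field, nil is added to the result so the member access type stays sound. */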
addNilForNillableAccessType(fieldType) : fieldType; } private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.STRING: if (isConst(indexExpr)) { String fieldName = IdentifierUtils.escapeSpecialCharacters(getConstFieldName(indexExpr)); actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType != symTable.semanticError) { return actualType; } actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { return actualType; } if (actualType == symTable.neverType) { return actualType; } return addNilForNillableAccessType(actualType); } if (accessExpr.isLValue) { return actualType; } return addNilForNillableAccessType(actualType); } LinkedHashSet<BType> fieldTypes = record.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (record.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(record.restFieldType); } if (fieldTypes.stream().noneMatch(BType::isNullable)) { fieldTypes.add(symTable.nilType); } actualType = BUnionType.create(null, fieldTypes); break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { String fieldName = (String) ((BLangLiteral) finiteMember).value; BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); } if (fieldType != symTable.semanticError) { fieldType = addNilForNillableAccessType(fieldType); } } if (fieldType.tag == TypeTags.SEMANTIC_ERROR) { continue; } possibleTypes.add(fieldType); } if (possibleTypes.isEmpty()) { return symTable.semanticError; } if (possibleTypes.stream().noneMatch(BType::isNullable)) { possibleTypes.add(symTable.nilType); } actualType = possibleTypes.size() == 1 ? 
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private List<BType> getTypesList(BType type) { if (type.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) type; return new ArrayList<>(unionType.getMemberTypes()); } else { return Lists.of(type); } } private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) { List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType()); LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>(); for (BType type : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { BType patternExprType = pattern.expr.getBType(); matchExprTypes.addAll(getTypesList(patternExprType)); if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) { return new LinkedHashSet<BType>() { { add(symTable.semanticError); } }; } assignable = this.types.isAssignable(type, pattern.variable.getBType()); if (assignable) { break; } } if (!assignable) { matchExprTypes.add(type); } } return matchExprTypes; } private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) { if (encounteredTypes.contains(type)) { return false; } encounteredTypes.add(type); switch (type.tag) { case TypeTags.UNION: for (BType bType1 : ((BUnionType) type).getMemberTypes()) { if (couldHoldTableValues(bType1, encounteredTypes)) { return true; } } return false; case TypeTags.MAP: return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes); case TypeTags.RECORD: BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (couldHoldTableValues(field.type, encounteredTypes)) { return true; } } return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes); case TypeTags.ARRAY: return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes); case TypeTags.TUPLE: for (BType bType : ((BTupleType) type).getTupleTypes()) { if (couldHoldTableValues(bType, encounteredTypes)) { return true; } } return false; } return false; } private boolean isConst(BLangExpression expression) { if 
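/* Descriptive note (added): an expression is treated as constant if it is a valid constant-expression node (e.g. a literal) or a simple var ref bound to a constant symbol. */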
(ConstantAnalyzer.isValidConstantExpressionNode(expression)) { return true; } if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; } private Name getCurrentCompUnit(BLangNode node) { return names.fromString(node.pos.lineRange().filePath()); } private BType getRepresentativeBroadType(List<BType> inferredTypeList) { for (int i = 0; i < inferredTypeList.size(); i++) { BType type = inferredTypeList.get(i); if (type.tag == TypeTags.SEMANTIC_ERROR) { return type; } for (int j = i + 1; j < inferredTypeList.size(); j++) { BType otherType = inferredTypeList.get(j); if (otherType.tag == TypeTags.SEMANTIC_ERROR) { return otherType; } if (types.isAssignable(otherType, type)) { inferredTypeList.remove(j); j -= 1; continue; } if (types.isAssignable(type, otherType)) { inferredTypeList.remove(i); i -= 1; break; } } } if (inferredTypeList.size() == 1) { return inferredTypeList.get(0); } return BUnionType.create(null, inferredTypeList.toArray(new BType[0])); } private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>(); List<BType> restFieldTypes = new ArrayList<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValue.key; BLangExpression expression = keyValue.valueExpr; BLangExpression keyExpr = key.expr; if (key.computedKey) { checkExpr(keyExpr, env, symTable.stringType); BType exprType = checkExpr(expression, env, expType); if (isUniqueType(restFieldTypes, exprType)) { restFieldTypes.add(exprType); } } else { addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr), keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) : checkExpr(expression, env, expType), true, keyValue.readonly); } } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType); int typeTag = type.tag; if (typeTag == TypeTags.MAP) { BType constraintType = ((BMapType) type).constraint; if (isUniqueType(restFieldTypes, constraintType)) { restFieldTypes.add(constraintType); } } if (type.tag != TypeTags.RECORD) { continue; } BRecordType recordType = (BRecordType) type; for (BField recField : recordType.fields.values()) { addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type, !Symbols.isOptional(recField.symbol), false); } if (!recordType.sealed) { BType restFieldType = recordType.restFieldType; if (isUniqueType(restFieldTypes, restFieldType)) { restFieldTypes.add(restFieldType); } } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField), varNameField.readonly ? 
checkExpr(varNameField, env, symTable.readonlyType) : checkExpr(varNameField, env, expType), true, varNameField.readonly); } } LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); boolean allReadOnlyNonRestFields = true; for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) { FieldInfo fieldInfo = entry.getValue(); List<BType> types = fieldInfo.types; if (types.contains(symTable.semanticError)) { return symTable.semanticError; } String key = entry.getKey(); Name fieldName = names.fromString(key); BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0])); Set<Flag> flags = new HashSet<>(); if (fieldInfo.required) { flags.add(Flag.REQUIRED); } else { flags.add(Flag.OPTIONAL); } if (fieldInfo.readonly) { flags.add(Flag.READONLY); } else if (allReadOnlyNonRestFields) { allReadOnlyNonRestFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol, symTable.builtinPos, VIRTUAL); fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = fields; if (restFieldTypes.contains(symTable.semanticError)) { return symTable.semanticError; } if (restFieldTypes.isEmpty()) { recordType.sealed = true; recordType.restFieldType = symTable.noType; } else if (restFieldTypes.size() == 1) { recordType.restFieldType = restFieldTypes.get(0); } else { recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0])); } recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) { recordType.flags |= Flags.READONLY; recordSymbol.flags |= Flags.READONLY; } BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); return recordType; } private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location, SymbolOrigin origin) { BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)), pkgID, null, env.scope.owner, location, origin); BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null); BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol( Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false, symTable.builtinPos, VIRTUAL); initFuncSymbol.retType = symTable.nilType; recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, location); recordSymbol.scope = new Scope(recordSymbol); recordSymbol.scope.define( names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value), recordSymbol.initializerFunc.symbol); return recordSymbol; } private String getKeyName(BLangExpression key) { return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? 
((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value; } private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString, BType exprType, boolean required, boolean readonly) { if (!nonRestFieldTypes.containsKey(keyString)) { nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required, readonly)); return; } FieldInfo fieldInfo = nonRestFieldTypes.get(keyString); List<BType> typeList = fieldInfo.types; if (isUniqueType(typeList, exprType)) { typeList.add(exprType); } if (required && !fieldInfo.required) { fieldInfo.required = true; } } private boolean isUniqueType(List<BType> typeList, BType type) { boolean isRecord = type.tag == TypeTags.RECORD; for (BType bType : typeList) { if (isRecord) { if (type == bType) { return false; } } else if (types.isSameType(type, bType)) { return false; } } return true; } private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType, BType expType) { if (expType == symTable.semanticError) { return expType; } boolean unionExpType = expType.tag == TypeTags.UNION; if (expType == mutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) { return mutableXmlSubType; } BXMLSubType immutableXmlSubType = (BXMLSubType) ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable, anonymousModelHelper, names); if (expType == immutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) { return immutableXmlSubType; } if (!unionExpType) { dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } List<BType> compatibleTypes = new ArrayList<>(); for (BType memberType : ((BUnionType) expType).getMemberTypes()) { if (compatibleTypes.contains(memberType)) { continue; } if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) { compatibleTypes.add(memberType); continue; } if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) { compatibleTypes.add(mutableXmlSubType); continue; } if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) { compatibleTypes.add(immutableXmlSubType); } } if (compatibleTypes.isEmpty()) { dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } if (compatibleTypes.size() == 1) { return compatibleTypes.get(0); } dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) { for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) { BType childType = modifiedChild.getBType(); if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) { continue; } modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types, (SelectivelyImmutableReferenceType) childType, env, symTable, anonymousModelHelper, names)); if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) { markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild); } } } private void logUndefinedSymbolError(Location pos, String name) { if (!missingNodesHelper.isMissingNode(name)) { dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name); } } private void 
markTypeAsIsolated(BType actualType) { actualType.flags |= Flags.ISOLATED; actualType.tsymbol.flags |= Flags.ISOLATED; } private boolean isObjectConstructorExpr(BLangTypeInit cIExpr, BType actualType) { return cIExpr.getType() != null && Symbols.isFlagOn(actualType.tsymbol.flags, Flags.ANONYMOUS); } private BLangClassDefinition getClassDefinitionForObjectConstructorExpr(BLangTypeInit cIExpr, SymbolEnv env) { List<BLangClassDefinition> classDefinitions = env.enclPkg.classDefinitions; BLangUserDefinedType userDefinedType = (BLangUserDefinedType) cIExpr.getType(); BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(userDefinedType.pos, env, names.fromIdNode(userDefinedType.pkgAlias), names.fromIdNode(userDefinedType.typeName)); for (BLangClassDefinition classDefinition : classDefinitions) { if (classDefinition.symbol == symbol) { return classDefinition; } } return null; } private void handleObjectConstrExprForReadOnly(BLangTypeInit cIExpr, BObjectType actualObjectType, BLangClassDefinition classDefForConstructor, SymbolEnv env, boolean logErrors) { boolean hasNeverReadOnlyField = false; for (BField field : actualObjectType.fields.values()) { BType fieldType = field.type; if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) { semanticAnalyzer.analyzeNode(classDefForConstructor, env); hasNeverReadOnlyField = true; if (!logErrors) { return; } dlog.error(field.pos, DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE, fieldType); } } if (hasNeverReadOnlyField) { return; } classDefForConstructor.flagSet.add(Flag.READONLY); actualObjectType.flags |= Flags.READONLY; actualObjectType.tsymbol.flags |= Flags.READONLY; ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types, anonymousModelHelper, symTable, names, cIExpr.pos); semanticAnalyzer.analyzeNode(classDefForConstructor, env); } private void markConstructedObjectIsolatedness(BObjectType actualObjectType) { if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) { markTypeAsIsolated(actualObjectType); return; } for (BField field : actualObjectType.fields.values()) { if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) || !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) { return; } } markTypeAsIsolated(actualObjectType); } private void markLeafNode(BLangAccessExpression accessExpression) { BLangNode parent = accessExpression.parent; if (parent == null) { accessExpression.leafNode = true; return; } NodeKind kind = parent.getKind(); while (kind == NodeKind.GROUP_EXPR) { parent = parent.parent; if (parent == null) { accessExpression.leafNode = true; break; } kind = parent.getKind(); } if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) { accessExpression.leafNode = true; } } private static class FieldInfo { List<BType> types; boolean required; boolean readonly; private FieldInfo(List<BType> types, boolean required, boolean readonly) { this.types = types; this.required = required; this.readonly = readonly; } } }
For example, `cannot use type inclusion with more than one open record with different rest descriptor types`.
private void defineFieldsOfObjectOrRecordTypeDef(BLangTypeDefinition typeDef, SymbolEnv pkgEnv) { NodeKind nodeKind = typeDef.typeNode.getKind(); if (nodeKind != NodeKind.OBJECT_TYPE && nodeKind != NodeKind.RECORD_TYPE) { return; } BStructureType structureType = (BStructureType) typeDef.symbol.type; BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDef.typeNode; SymbolEnv typeDefEnv = SymbolEnv.createTypeEnv(structureTypeNode, typeDef.symbol.scope, pkgEnv); resolveFields(structureType, structureTypeNode, typeDefEnv); if (typeDef.symbol.kind != SymbolKind.RECORD) { return; } BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) structureTypeNode; BRecordType recordType = (BRecordType) structureType; recordType.sealed = recordTypeNode.sealed; if (recordTypeNode.sealed && recordTypeNode.restFieldType != null) { dlog.error(recordTypeNode.restFieldType.pos, DiagnosticErrorCode.REST_FIELD_NOT_ALLOWED_IN_SEALED_RECORDS); return; } if (recordTypeNode.restFieldType != null) { recordType.restFieldType = symResolver.resolveTypeNode(recordTypeNode.restFieldType, typeDefEnv); return; } if (!recordTypeNode.sealed) { recordType.restFieldType = symTable.anydataType; return; } for (BLangType typeRef : recordTypeNode.typeRefs) { if (typeRef.type.tag != TypeTags.RECORD) { continue; } BType restFieldType = ((BRecordType) typeRef.type).restFieldType; if (restFieldType == null || restFieldType == symTable.noType) { continue; } if (recordType.restFieldType != null && recordType.restFieldType.tag != restFieldType.tag) { recordType.restFieldType = symTable.noType; dlog.error(recordTypeNode.pos, DiagnosticErrorCode.CANNOT_USE_TYPE_INCLUSION_WITH_MORE_THAN_ONE_OPEN_RECORD); return; } recordType.restFieldType = restFieldType; recordType.sealed = false; } if (recordType.restFieldType != null) { return; } recordType.restFieldType = symTable.noType; }
DiagnosticErrorCode.CANNOT_USE_TYPE_INCLUSION_WITH_MORE_THAN_ONE_OPEN_RECORD);
private void defineFieldsOfObjectOrRecordTypeDef(BLangTypeDefinition typeDef, SymbolEnv pkgEnv) { NodeKind nodeKind = typeDef.typeNode.getKind(); if (nodeKind != NodeKind.OBJECT_TYPE && nodeKind != NodeKind.RECORD_TYPE) { return; } BStructureType structureType = (BStructureType) typeDef.symbol.type; BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDef.typeNode; SymbolEnv typeDefEnv = SymbolEnv.createTypeEnv(structureTypeNode, typeDef.symbol.scope, pkgEnv); resolveFields(structureType, structureTypeNode, typeDefEnv); if (typeDef.symbol.kind != SymbolKind.RECORD) { return; } BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) structureTypeNode; BRecordType recordType = (BRecordType) structureType; recordType.sealed = recordTypeNode.sealed; if (recordTypeNode.sealed && recordTypeNode.restFieldType != null) { dlog.error(recordTypeNode.restFieldType.pos, DiagnosticErrorCode.REST_FIELD_NOT_ALLOWED_IN_SEALED_RECORDS); return; } if (recordTypeNode.restFieldType != null) { recordType.restFieldType = symResolver.resolveTypeNode(recordTypeNode.restFieldType, typeDefEnv); return; } if (!recordTypeNode.sealed) { recordType.restFieldType = symTable.anydataType; return; } for (BLangType typeRef : recordTypeNode.typeRefs) { if (typeRef.type.tag != TypeTags.RECORD) { continue; } BType restFieldType = ((BRecordType) typeRef.type).restFieldType; if (restFieldType == symTable.noType) { continue; } if (recordType.restFieldType != null && !types.isSameType(recordType.restFieldType, restFieldType)) { recordType.restFieldType = symTable.noType; dlog.error(recordTypeNode.pos, DiagnosticErrorCode. CANNOT_USE_TYPE_INCLUSION_WITH_MORE_THAN_ONE_OPEN_RECORD_WITH_DIFFERENT_REST_DESCRIPTOR_TYPES); return; } recordType.restFieldType = restFieldType; recordType.sealed = false; } if (recordType.restFieldType != null) { return; } recordType.restFieldType = symTable.noType; }
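The revised `defineFieldsOfObjectOrRecordTypeDef` above tightens the rest-field check from a tag comparison to `types.isSameType`, so including multiple open records is rejected only when their rest descriptor types actually differ. A minimal, self-contained sketch of that merge rule, using a toy record descriptor rather than the compiler's `BRecordType`:

```java
import java.util.List;
import java.util.Optional;

// Toy model of the rule: included open records may contribute a rest field
// only if all of them agree on the rest descriptor type.
public class RestFieldMerge {

    record RecordDesc(String name, Optional<String> restFieldType) {}

    // Returns the common rest descriptor type, or empty when two open records
    // disagree (the error case the new diagnostic code reports).
    static Optional<String> mergeRestFieldTypes(List<RecordDesc> included) {
        String merged = null;
        for (RecordDesc rec : included) {
            if (rec.restFieldType().isEmpty()) {
                continue; // closed record: contributes no rest field
            }
            String rest = rec.restFieldType().get();
            if (merged != null && !merged.equals(rest)) {
                return Optional.empty(); // different rest descriptor types
            }
            merged = rest;
        }
        return Optional.ofNullable(merged);
    }

    public static void main(String[] args) {
        var a = new RecordDesc("A", Optional.of("string"));
        var b = new RecordDesc("B", Optional.of("int"));
        // Prints Optional.empty: two open records with different rest types.
        System.out.println(mergeRestFieldTypes(List.of(a, b)));
    }
}
```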
class fields and object fields defineReferencedClassFields(classDefinition, typeDefEnv, objType, false); }
class fields and object fields defineReferencedClassFields(classDefinition, typeDefEnv, objType, false); }
``` type LE EmptyListener|Listener; listener LE lsn = new Listener(); ``` In this scenario, this method would return `EmptyListener`, right? I was wondering if that could cause issues because, for example, when we consider the `attach` method, we'll be retrieving the `attach` of `EmptyListener` instead of `Listener`, even though the actual value is a `Listener`. Basic scenarios seem to work as expected, though.
private BType getListenerType(BType type) { if (type.tag == TypeTags.UNION) { for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (types.checkListenerCompatibility(memberType)) { return memberType; } } } return type; }
return memberType;
private BType getListenerType(BType type) { if (type.tag == TypeTags.UNION) { for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (types.checkListenerCompatibility(memberType)) { return memberType; } } } return type; }
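The concern raised in the comment above is that `getListenerType` returns the first listener-compatible member of the union, making the result order dependent rather than value dependent. A hypothetical, self-contained illustration of that order dependence (the `isListener` predicate is a stand-in for `types.checkListenerCompatibility`):

```java
import java.util.List;
import java.util.function.Predicate;

public class FirstCompatibleMember {

    // Returns the first union member the predicate accepts; the runtime value
    // of the variable plays no part in the choice.
    static String firstListenerType(List<String> unionMembers, Predicate<String> isListener) {
        for (String member : unionMembers) {
            if (isListener.test(member)) {
                return member;
            }
        }
        return "<union itself>";
    }

    public static void main(String[] args) {
        // Both members are listener-compatible, so EmptyListener wins purely
        // because it appears first in `EmptyListener|Listener`.
        System.out.println(firstListenerType(List.of("EmptyListener", "Listener"), m -> true));
    }
}
```

Under this reading, methods such as `attach` would be looked up on `EmptyListener`'s type symbol even when the assigned value is a `Listener`, exactly as the comment suggests.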
class ServiceDesugar { private static final CompilerContext.Key<ServiceDesugar> SERVICE_DESUGAR_KEY = new CompilerContext.Key<>(); private static final String START_METHOD = "start"; private static final String GRACEFUL_STOP = "gracefulStop"; private static final String ATTACH_METHOD = "attach"; private static final String LISTENER = "$LISTENER"; private final SymbolTable symTable; private final SymbolResolver symResolver; private final Names names; private HttpFiltersDesugar httpFiltersDesugar; private TransactionDesugar transactionDesugar; private final Types types; public static ServiceDesugar getInstance(CompilerContext context) { ServiceDesugar desugar = context.get(SERVICE_DESUGAR_KEY); if (desugar == null) { desugar = new ServiceDesugar(context); } return desugar; } private ServiceDesugar(CompilerContext context) { context.put(SERVICE_DESUGAR_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.names = Names.getInstance(context); this.httpFiltersDesugar = HttpFiltersDesugar.getInstance(context); this.transactionDesugar = TransactionDesugar.getInstance(context); this.types = Types.getInstance(context); } void rewriteListeners(List<BLangSimpleVariable> variables, SymbolEnv env, BLangFunction startFunction, BLangFunction stopFunction) { variables.stream().filter(varNode -> Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER)) .forEach(varNode -> rewriteListener(varNode, env, startFunction, stopFunction)); } private void rewriteListener(BLangSimpleVariable variable, SymbolEnv env, BLangFunction startFunction, BLangFunction stopFunction) { rewriteListenerLifeCycleFunction(startFunction, variable, env, START_METHOD); rewriteListenerLifeCycleFunction(stopFunction, variable, env, GRACEFUL_STOP); } private void rewriteListenerLifeCycleFunction(BLangFunction lifeCycleFunction, BLangSimpleVariable variable, SymbolEnv env, String method) { final Location pos = variable.pos; BTypeSymbol listenerTypeSymbol = getListenerType(variable.type).tsymbol; final Name functionName = names .fromString(Symbols.getAttachedFuncSymbolName(listenerTypeSymbol.name.value, method)); BInvokableSymbol methodInvocationSymbol = (BInvokableSymbol) symResolver .lookupMemberSymbol(pos, listenerTypeSymbol.scope, env, functionName, SymTag.INVOKABLE); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, variable.symbol); addMethodInvocation(pos, varRef, methodInvocationSymbol, Collections.emptyList(), (BLangBlockFunctionBody) lifeCycleFunction.body); } BLangBlockStmt rewriteServiceVariables(List<BLangService> services, SymbolEnv env) { BLangBlockStmt attachmentsBlock = (BLangBlockStmt) TreeBuilder.createBlockNode(); services.forEach(service -> rewriteServiceVariable(service, env, attachmentsBlock)); return attachmentsBlock; } void rewriteServiceVariable(BLangService service, SymbolEnv env, BLangBlockStmt attachments) { final Location pos = service.pos; ASTBuilderUtil.defineVariable(service.serviceVariable, env.enclPkg.symbol, names); env.enclPkg.globalVars.add(service.serviceVariable); int count = 0; for (BLangExpression attachExpr : service.attachedExprs) { BLangSimpleVarRef listenerVarRef; if (attachExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { listenerVarRef = (BLangSimpleVarRef) attachExpr; } else { BLangSimpleVariable listenerVar = ASTBuilderUtil .createVariable(pos, LISTENER + service.name.value + UNDERSCORE + count++, attachExpr.type, attachExpr, null); ASTBuilderUtil.defineVariable(listenerVar, env.enclPkg.symbol, names); 
listenerVar.symbol.flags |= Flags.LISTENER; env.enclPkg.globalVars.add(listenerVar); listenerVarRef = ASTBuilderUtil.createVariableRef(pos, listenerVar.symbol); } BTypeSymbol listenerTypeSymbol = getListenerType(listenerVarRef.type).tsymbol; final Name functionName = names .fromString(Symbols.getAttachedFuncSymbolName(listenerTypeSymbol.name.value, ATTACH_METHOD)); BInvokableSymbol methodRef = (BInvokableSymbol) symResolver .lookupMemberSymbol(pos, listenerTypeSymbol.scope, env, functionName, SymTag.INVOKABLE); List<BLangExpression> args = new ArrayList<>(); args.add(ASTBuilderUtil.createVariableRef(pos, service.serviceVariable.symbol)); if (service.getServiceNameLiteral() == null) { BLangListConstructorExpr.BLangArrayLiteral arrayLiteral = ASTBuilderUtil.createEmptyArrayLiteral(service.getPosition(), symTable.arrayStringType); for (IdentifierNode path : service.getAbsolutePath()) { var literal = ASTBuilderUtil.createLiteral(path.getPosition(), symTable.stringType, path.getValue()); arrayLiteral.exprs.add(literal); } args.add(arrayLiteral); } else { args.add((BLangExpression) service.getServiceNameLiteral()); } addMethodInvocation(pos, listenerVarRef, methodRef, args, attachments); } } private void addMethodInvocation(Location pos, BLangSimpleVarRef varRef, BInvokableSymbol methodRefSymbol, List<BLangExpression> args, BlockNode body) { final BLangInvocation methodInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, methodRefSymbol, args, symResolver); BLangCheckedExpr listenerCheckExpr = ASTBuilderUtil.createCheckExpr(pos, varRef, getListenerType(varRef.type)); listenerCheckExpr.equivalentErrorTypeList.add(symTable.errorType); methodInvocation.expr = listenerCheckExpr; BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(pos, methodInvocation, symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt expressionStmt = ASTBuilderUtil.createExpressionStmt(pos, body); expressionStmt.expr = checkedExpr; expressionStmt.expr.pos = pos; } void engageCustomServiceDesugar(BLangService service, SymbolEnv env) { service.serviceClass.functions.stream().filter(fun -> Symbols.isFlagOn(fun.symbol.flags, Flags.RESOURCE)) .forEach(func -> engageCustomResourceDesugar(func, env)); } private void engageCustomResourceDesugar(BLangFunction functionNode, SymbolEnv env) { if (Symbols.isFlagOn(functionNode.symbol.flags, Flags.TRANSACTIONAL)) { BLangExpressionStmt stmt = new BLangExpressionStmt(transactionDesugar .createBeginParticipantInvocation(functionNode.pos)); ((BLangBlockFunctionBody) functionNode.body).stmts.add(0, stmt); } httpFiltersDesugar.addHttpFilterStatementsToResource(functionNode, env); } }
class ServiceDesugar { private static final CompilerContext.Key<ServiceDesugar> SERVICE_DESUGAR_KEY = new CompilerContext.Key<>(); private static final String START_METHOD = "start"; private static final String GRACEFUL_STOP = "gracefulStop"; private static final String ATTACH_METHOD = "attach"; private static final String LISTENER = "$LISTENER"; private final SymbolTable symTable; private final SymbolResolver symResolver; private final Names names; private HttpFiltersDesugar httpFiltersDesugar; private TransactionDesugar transactionDesugar; private final Types types; public static ServiceDesugar getInstance(CompilerContext context) { ServiceDesugar desugar = context.get(SERVICE_DESUGAR_KEY); if (desugar == null) { desugar = new ServiceDesugar(context); } return desugar; } private ServiceDesugar(CompilerContext context) { context.put(SERVICE_DESUGAR_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.names = Names.getInstance(context); this.httpFiltersDesugar = HttpFiltersDesugar.getInstance(context); this.transactionDesugar = TransactionDesugar.getInstance(context); this.types = Types.getInstance(context); } void rewriteListeners(List<BLangSimpleVariable> variables, SymbolEnv env, BLangFunction startFunction, BLangFunction stopFunction) { variables.stream().filter(varNode -> Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER)) .forEach(varNode -> rewriteListener(varNode, env, startFunction, stopFunction)); } private void rewriteListener(BLangSimpleVariable variable, SymbolEnv env, BLangFunction startFunction, BLangFunction stopFunction) { rewriteListenerLifeCycleFunction(startFunction, variable, env, START_METHOD); rewriteListenerLifeCycleFunction(stopFunction, variable, env, GRACEFUL_STOP); } private void rewriteListenerLifeCycleFunction(BLangFunction lifeCycleFunction, BLangSimpleVariable variable, SymbolEnv env, String method) { final Location pos = variable.pos; BTypeSymbol listenerTypeSymbol = getListenerType(variable.type).tsymbol; final Name functionName = names .fromString(Symbols.getAttachedFuncSymbolName(listenerTypeSymbol.name.value, method)); BInvokableSymbol methodInvocationSymbol = (BInvokableSymbol) symResolver .lookupMemberSymbol(pos, listenerTypeSymbol.scope, env, functionName, SymTag.INVOKABLE); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, variable.symbol); addMethodInvocation(pos, varRef, methodInvocationSymbol, Collections.emptyList(), (BLangBlockFunctionBody) lifeCycleFunction.body); } BLangBlockStmt rewriteServiceVariables(List<BLangService> services, SymbolEnv env) { BLangBlockStmt attachmentsBlock = (BLangBlockStmt) TreeBuilder.createBlockNode(); services.forEach(service -> rewriteServiceVariable(service, env, attachmentsBlock)); return attachmentsBlock; } void rewriteServiceVariable(BLangService service, SymbolEnv env, BLangBlockStmt attachments) { final Location pos = service.pos; ASTBuilderUtil.defineVariable(service.serviceVariable, env.enclPkg.symbol, names); env.enclPkg.globalVars.add(service.serviceVariable); int count = 0; for (BLangExpression attachExpr : service.attachedExprs) { BLangSimpleVarRef listenerVarRef; if (attachExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { listenerVarRef = (BLangSimpleVarRef) attachExpr; } else { BLangSimpleVariable listenerVar = ASTBuilderUtil .createVariable(pos, LISTENER + service.name.value + UNDERSCORE + count++, attachExpr.type, attachExpr, null); ASTBuilderUtil.defineVariable(listenerVar, env.enclPkg.symbol, names); 
listenerVar.symbol.flags |= Flags.LISTENER; env.enclPkg.globalVars.add(listenerVar); listenerVarRef = ASTBuilderUtil.createVariableRef(pos, listenerVar.symbol); } if (types.containsErrorType(listenerVarRef.type)) { BLangCheckedExpr listenerCheckExpr = ASTBuilderUtil.createCheckExpr(pos, listenerVarRef, getListenerType(listenerVarRef.type)); listenerCheckExpr.equivalentErrorTypeList.add(symTable.errorType); BLangSimpleVariable listenerWithoutErrors = ASTBuilderUtil.createVariable(pos, LISTENER + "$CheckTemp" + count, getListenerTypeWithoutError(listenerVarRef.type), listenerCheckExpr, null); ASTBuilderUtil.defineVariable(listenerWithoutErrors, env.enclPkg.symbol, names); env.enclPkg.globalVars.add(listenerWithoutErrors); BLangSimpleVarRef checkedRef = ASTBuilderUtil.createVariableRef(pos, listenerWithoutErrors.symbol); listenerVarRef = checkedRef; } BTypeSymbol listenerTypeSymbol = getListenerType(listenerVarRef.type).tsymbol; final Name functionName = names .fromString(Symbols.getAttachedFuncSymbolName(listenerTypeSymbol.name.value, ATTACH_METHOD)); BInvokableSymbol methodRef = (BInvokableSymbol) symResolver .lookupMemberSymbol(pos, listenerTypeSymbol.scope, env, functionName, SymTag.INVOKABLE); List<BLangExpression> args = new ArrayList<>(); args.add(ASTBuilderUtil.createVariableRef(pos, service.serviceVariable.symbol)); if (service.getServiceNameLiteral() == null) { BLangListConstructorExpr.BLangArrayLiteral arrayLiteral = ASTBuilderUtil.createEmptyArrayLiteral(service.getPosition(), symTable.arrayStringType); for (IdentifierNode path : service.getAbsolutePath()) { var literal = ASTBuilderUtil.createLiteral(path.getPosition(), symTable.stringType, path.getValue()); arrayLiteral.exprs.add(literal); } args.add(arrayLiteral); } else { args.add((BLangExpression) service.getServiceNameLiteral()); } addMethodInvocation(pos, listenerVarRef, methodRef, args, attachments); } } private BType getListenerTypeWithoutError(BType type) { if (type.tag == TypeTags.UNION) { LinkedHashSet<BType> members = new LinkedHashSet<>(); for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (types.isAssignable(memberType, symTable.errorType)) { continue; } members.add(memberType); } return BUnionType.create(null, members); } return type; } private void addMethodInvocation(Location pos, BLangSimpleVarRef varRef, BInvokableSymbol methodRefSymbol, List<BLangExpression> args, BlockNode body) { final BLangInvocation methodInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, methodRefSymbol, args, symResolver); BType listenerType = getListenerType(varRef.type); if (!types.isSameType(listenerType, varRef.type)) { BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); castExpr.expr = varRef; castExpr.type = listenerType; castExpr.targetType = castExpr.type; methodInvocation.expr = castExpr; } else { methodInvocation.expr = varRef; } BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(pos, methodInvocation, symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt expressionStmt = ASTBuilderUtil.createExpressionStmt(pos, body); expressionStmt.expr = checkedExpr; expressionStmt.expr.pos = pos; } void engageCustomServiceDesugar(BLangService service, SymbolEnv env) { service.serviceClass.functions.stream().filter(fun -> Symbols.isFlagOn(fun.symbol.flags, Flags.RESOURCE)) .forEach(func -> engageCustomResourceDesugar(func, env)); } private void engageCustomResourceDesugar(BLangFunction functionNode, SymbolEnv 
env) { if (Symbols.isFlagOn(functionNode.symbol.flags, Flags.TRANSACTIONAL)) { BLangExpressionStmt stmt = new BLangExpressionStmt(transactionDesugar .createBeginParticipantInvocation(functionNode.pos)); ((BLangBlockFunctionBody) functionNode.body).stmts.add(0, stmt); } httpFiltersDesugar.addHttpFilterStatementsToResource(functionNode, env); } }
I modified this logic to work regardless of the position of the delimiter.
public void handle(Buffer buffer) { try { byte[] bytes = buffer.getBytes(); MediaType mediaType = response.getMediaType(); if (isNewlineDelimited) { String charset = mediaType.getParameters().get(MediaType.CHARSET_PARAMETER); charset = charset == null ? "UTF-8" : charset; byte[] separator = "\n".getBytes(charset); int start = 0; while (start < bytes.length) { int end = bytes.length; for (int i = start; i < end; i++) { if (bytes[i] == separator[0]) { int j; boolean matches = true; for (j = 1; j < separator.length; j++) { if (bytes[i + j] != separator[j]) { matches = false; break; } } if (matches) { end = i; break; } } } if (start < end) { ByteArrayInputStream in = new ByteArrayInputStream(bytes, start, end); R item = restClientRequestContext.readEntity(in, responseType, mediaType, response.getMetadata()); multiRequest.emitter.emit(item); } start = end + separator.length; } } else { ByteArrayInputStream in = new ByteArrayInputStream(bytes); R item = restClientRequestContext.readEntity(in, responseType, mediaType, response.getMetadata()); multiRequest.emitter.emit(item); } } catch (Throwable t) { multiRequest.emitter.fail(t); } }
String charset = mediaType.getParameters().get(MediaType.CHARSET_PARAMETER);
public void handle(Buffer buffer) { try { byte[] bytes = buffer.getBytes(); MediaType mediaType = response.getMediaType(); if (isNewlineDelimited) { String charset = mediaType.getParameters().get(MediaType.CHARSET_PARAMETER); charset = charset == null ? "UTF-8" : charset; byte[] separator = "\n".getBytes(charset); int start = 0; while (start < bytes.length) { int end = bytes.length; for (int i = start; i < end; i++) { if (bytes[i] == separator[0]) { int j; boolean matches = true; for (j = 1; j < separator.length; j++) { if (bytes[i + j] != separator[j]) { matches = false; break; } } if (matches) { end = i; break; } } } if (start < end) { ByteArrayInputStream in = new ByteArrayInputStream(bytes, start, end); R item = restClientRequestContext.readEntity(in, responseType, mediaType, response.getMetadata()); multiRequest.emitter.emit(item); } start = end + separator.length; } } else { ByteArrayInputStream in = new ByteArrayInputStream(bytes); R item = restClientRequestContext.readEntity(in, responseType, mediaType, response.getMetadata()); multiRequest.emitter.emit(item); } } catch (Throwable t) { multiRequest.emitter.fail(t); } }
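The rewritten `handle` above scans for the separator at any offset within the buffer instead of only at a fixed boundary. Below is a standalone sketch of the same multi-byte separator scan, assuming nothing beyond the JDK; unlike the snippet above, it also bounds the inner comparison so the pattern is never read past the end of the array, and it slices with an (offset, length) pair rather than an end offset:

```java
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class SeparatorScan {

    static List<String> split(byte[] bytes, byte[] separator) {
        List<String> chunks = new ArrayList<>();
        int start = 0;
        while (start < bytes.length) {
            int end = bytes.length;
            // Look for the separator anywhere from `start`, never reading
            // past the end of the array.
            for (int i = start; i <= bytes.length - separator.length; i++) {
                boolean matches = true;
                for (int j = 0; j < separator.length; j++) {
                    if (bytes[i + j] != separator[j]) {
                        matches = false;
                        break;
                    }
                }
                if (matches) {
                    end = i;
                    break;
                }
            }
            if (start < end) {
                // (offset, length) — not (offset, end offset).
                chunks.add(new String(bytes, start, end - start, StandardCharsets.UTF_8));
            }
            start = end + separator.length;
        }
        return chunks;
    }

    public static void main(String[] args) {
        byte[] data = "a\nbb\nccc".getBytes(StandardCharsets.UTF_8);
        System.out.println(split(data, "\n".getBytes(StandardCharsets.UTF_8))); // [a, bb, ccc]
    }
}
```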
class MultiRequest<R> { private final AtomicReference<Runnable> onCancel = new AtomicReference<>(); private final MultiEmitter<? super R> emitter; private static final Runnable CLEARED = () -> { }; public MultiRequest(MultiEmitter<? super R> emitter) { this.emitter = emitter; emitter.onTermination(() -> { if (emitter.isCancelled()) { this.cancel(); } }); } void emit(R item) { if (!isCancelled()) { emitter.emit(item); } } void fail(Throwable t) { if (!isCancelled()) { emitter.fail(t); cancel(); } } void complete() { if (!isCancelled()) { emitter.complete(); cancel(); } } public boolean isCancelled() { return onCancel.get() == CLEARED; } private void cancel() { Runnable action = onCancel.getAndSet(CLEARED); if (action != null && action != CLEARED) { action.run(); } } public void onCancel(Runnable onCancel) { if (this.onCancel.compareAndSet(null, onCancel)) { } else if (this.onCancel.get() == CLEARED) { if (onCancel != null) onCancel.run(); } else { throw new IllegalArgumentException("onCancel was already called"); } } }
class MultiRequest<R> { private final AtomicReference<Runnable> onCancel = new AtomicReference<>(); private final MultiEmitter<? super R> emitter; private static final Runnable CLEARED = () -> { }; public MultiRequest(MultiEmitter<? super R> emitter) { this.emitter = emitter; emitter.onTermination(() -> { if (emitter.isCancelled()) { this.cancel(); } }); } void emit(R item) { if (!isCancelled()) { emitter.emit(item); } } void fail(Throwable t) { if (!isCancelled()) { emitter.fail(t); cancel(); } } void complete() { if (!isCancelled()) { emitter.complete(); cancel(); } } public boolean isCancelled() { return onCancel.get() == CLEARED; } private void cancel() { Runnable action = onCancel.getAndSet(CLEARED); if (action != null && action != CLEARED) { action.run(); } } public void onCancel(Runnable onCancel) { if (this.onCancel.compareAndSet(null, onCancel)) { } else if (this.onCancel.get() == CLEARED) { if (onCancel != null) onCancel.run(); } else { throw new IllegalArgumentException("onCancel was already called"); } } }
We can use methods on `EnvironmentSettings` to check the runtime mode and planner.
private TableEnvironment createTableEnvironment() { EnvironmentSettings settings = EnvironmentSettings.fromConfiguration(flinkConfig); TableConfig config = new TableConfig(); config.addConfiguration(flinkConfig); if (flinkConfig.get(ExecutionOptions.RUNTIME_MODE).equals(RuntimeExecutionMode.BATCH) && flinkConfig.get(TableConfigOptions.TABLE_PLANNER).equals(PlannerType.OLD)) { ExecutionEnvironment execEnv = createExecutionEnvironment(); return new BatchTableEnvironmentImpl( execEnv, config, sessionState.catalogManager, sessionState.moduleManager); } else { StreamExecutionEnvironment streamExecEnv = createStreamExecutionEnvironment(); final Map<String, String> executorProperties = settings.toExecutorProperties(); Executor executor = lookupExecutor(executorProperties, streamExecEnv); return createStreamTableEnvironment( streamExecEnv, settings, config, executor, sessionState.catalogManager, sessionState.moduleManager, sessionState.functionCatalog, classLoader); } }
&& flinkConfig.get(TableConfigOptions.TABLE_PLANNER).equals(PlannerType.OLD)) {
private TableEnvironment createTableEnvironment() { EnvironmentSettings settings = EnvironmentSettings.fromConfiguration(flinkConfig); TableConfig config = new TableConfig(); config.addConfiguration(flinkConfig); if (!settings.isStreamingMode() && !settings.isBlinkPlanner()) { ExecutionEnvironment execEnv = createExecutionEnvironment(); return new BatchTableEnvironmentImpl( execEnv, config, sessionState.catalogManager, sessionState.moduleManager); } else { StreamExecutionEnvironment streamExecEnv = createStreamExecutionEnvironment(); final Map<String, String> executorProperties = settings.toExecutorProperties(); Executor executor = lookupExecutor(executorProperties, streamExecEnv); return createStreamTableEnvironment( streamExecEnv, settings, config, executor, sessionState.catalogManager, sessionState.moduleManager, sessionState.functionCatalog, classLoader); } }
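The revised `createTableEnvironment` derives both checks from the single `EnvironmentSettings` object rather than reading the raw `ExecutionOptions.RUNTIME_MODE` and `TableConfigOptions.TABLE_PLANNER` options. A minimal sketch of that branching rule; the `Settings` record here is a stand-in for Flink's `EnvironmentSettings`, not the real API:

```java
public class PlannerSelection {

    // Stand-in for EnvironmentSettings: just the two flags the branch needs.
    record Settings(boolean streamingMode, boolean blinkPlanner) {}

    static String chooseEnvironment(Settings settings) {
        // Only the legacy (old planner) batch combination takes the batch path.
        if (!settings.streamingMode() && !settings.blinkPlanner()) {
            return "BatchTableEnvironmentImpl";
        }
        return "StreamTableEnvironmentImpl";
    }

    public static void main(String[] args) {
        System.out.println(chooseEnvironment(new Settings(false, false))); // BatchTableEnvironmentImpl
        System.out.println(chooseEnvironment(new Settings(true, true)));   // StreamTableEnvironmentImpl
    }
}
```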
class ExecutionContext { private final Configuration flinkConfig; private final SessionState sessionState; private final URLClassLoader classLoader; private final TableEnvironment tableEnv; public ExecutionContext( Configuration flinkConfig, URLClassLoader classLoader, SessionState sessionState) { this.flinkConfig = flinkConfig; this.sessionState = sessionState; this.classLoader = classLoader; this.tableEnv = createTableEnvironment(); } /** * Create a new {@link ExecutionContext}. * * <p>It just copies from the {@link ExecutionContext} and rebuild a new {@link * TableEnvironment}. */ public ExecutionContext(ExecutionContext context) { this.flinkConfig = context.flinkConfig; this.sessionState = context.sessionState; this.classLoader = context.classLoader; this.tableEnv = createTableEnvironment(); } /** * Executes the given supplier using the execution context's classloader as thread classloader. */ public <R> R wrapClassLoader(Supplier<R> supplier) { try (TemporaryClassLoaderContext ignored = TemporaryClassLoaderContext.of(classLoader)) { return supplier.get(); } } public TableEnvironment getTableEnvironment() { return tableEnv; } private TableEnvironment createStreamTableEnvironment( StreamExecutionEnvironment env, EnvironmentSettings settings, TableConfig config, Executor executor, CatalogManager catalogManager, ModuleManager moduleManager, FunctionCatalog functionCatalog, ClassLoader userClassLoader) { final Map<String, String> plannerProperties = settings.toPlannerProperties(); final Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties) .create( plannerProperties, executor, config, functionCatalog, catalogManager); return new StreamTableEnvironmentImpl( catalogManager, moduleManager, functionCatalog, config, env, planner, executor, settings.isStreamingMode(), userClassLoader); } private Executor lookupExecutor( Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) { try { ExecutorFactory executorFactory = ComponentFactoryService.find(ExecutorFactory.class, executorProperties); Method createMethod = executorFactory .getClass() .getMethod("create", Map.class, StreamExecutionEnvironment.class); return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment); } catch (Exception e) { throw new TableException( "Could not instantiate the executor. Make sure a planner module is on the classpath", e); } } private StreamExecutionEnvironment createStreamExecutionEnvironment() { return new StreamExecutionEnvironment(new Configuration(flinkConfig), classLoader); } private ExecutionEnvironment createExecutionEnvironment() { ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment(); execEnv.getConfiguration().addAll(flinkConfig); return execEnv; } }
class ExecutionContext { private final Configuration flinkConfig; private final SessionState sessionState; private final URLClassLoader classLoader; private final TableEnvironment tableEnv; public ExecutionContext( Configuration flinkConfig, URLClassLoader classLoader, SessionState sessionState) { this.flinkConfig = flinkConfig; this.sessionState = sessionState; this.classLoader = classLoader; this.tableEnv = createTableEnvironment(); } /** * Create a new {@link ExecutionContext}. * * <p>It just copies from the {@link ExecutionContext} and rebuild a new {@link * TableEnvironment}. */ public ExecutionContext(ExecutionContext context) { this.flinkConfig = context.flinkConfig; this.sessionState = context.sessionState; this.classLoader = context.classLoader; this.tableEnv = createTableEnvironment(); } /** * Executes the given supplier using the execution context's classloader as thread classloader. */ public <R> R wrapClassLoader(Supplier<R> supplier) { try (TemporaryClassLoaderContext ignored = TemporaryClassLoaderContext.of(classLoader)) { return supplier.get(); } } public TableEnvironment getTableEnvironment() { return tableEnv; } private TableEnvironment createStreamTableEnvironment( StreamExecutionEnvironment env, EnvironmentSettings settings, TableConfig config, Executor executor, CatalogManager catalogManager, ModuleManager moduleManager, FunctionCatalog functionCatalog, ClassLoader userClassLoader) { final Map<String, String> plannerProperties = settings.toPlannerProperties(); final Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties) .create( plannerProperties, executor, config, functionCatalog, catalogManager); return new StreamTableEnvironmentImpl( catalogManager, moduleManager, functionCatalog, config, env, planner, executor, settings.isStreamingMode(), userClassLoader); } private Executor lookupExecutor( Map<String, String> executorProperties, StreamExecutionEnvironment executionEnvironment) { try { ExecutorFactory executorFactory = ComponentFactoryService.find(ExecutorFactory.class, executorProperties); Method createMethod = executorFactory .getClass() .getMethod("create", Map.class, StreamExecutionEnvironment.class); return (Executor) createMethod.invoke(executorFactory, executorProperties, executionEnvironment); } catch (Exception e) { throw new TableException( "Could not instantiate the executor. Make sure a planner module is on the classpath", e); } } private StreamExecutionEnvironment createStreamExecutionEnvironment() { return new StreamExecutionEnvironment(new Configuration(flinkConfig), classLoader); } private ExecutionEnvironment createExecutionEnvironment() { ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment(); execEnv.getConfiguration().addAll(flinkConfig); return execEnv; } }
Hmm, we could also read the session status as `PREPARED`, and active as `false`, but then the session could be activated while we delete it. I don't remember without looking at the code, but I believe those writes are protected by a lock. Alternatively, we could do the deletion with a znode version condition ... which I'm not sure we expose through our own APIs >_<
public void deleteExpiredRemoteAndLocalSessions(Clock clock, Predicate<Session> sessionIsActiveForApplication) { Set<Long> sessions = getLocalSessionsIdsFromFileSystem(); sessions.addAll(getRemoteSessionsFromZooKeeper()); log.log(Level.FINE, () -> "Sessions for tenant " + tenantName + ": " + sessions); Set<Long> newSessions = findNewSessionsInFileSystem(); sessions.removeAll(newSessions); int deleteMax = (int) Math.min(1000, Math.max(50, sessions.size() * 0.05)); int deleted = 0; for (Long sessionId : sessions) { try { Session session = remoteSessionCache.get(sessionId); if (session == null) session = new RemoteSession(tenantName, sessionId, createSessionZooKeeperClient(sessionId)); Session.Status status = session.getStatus(); boolean activeForApplication = sessionIsActiveForApplication.test(session); if (status == ACTIVATE && activeForApplication) continue; Instant createTime = session.getCreateTime(); boolean hasExpired = hasExpired(createTime); if ( ! hasExpired) continue; log.log(Level.FINE, () -> "Remote session " + sessionId + " for " + tenantName + " has expired, deleting it"); deleteRemoteSessionFromZooKeeper(session); deleted++; log.log(Level.FINE, () -> "Expired local session is candidate for deletion: " + sessionId + ", created: " + createTime + ", status " + status + ", can be deleted: " + canBeDeleted(sessionId, status)); if (canBeDeleted(sessionId, status)) { deleteLocalSession(sessionId); deleted++; } else if (isOldAndCanBeDeleted(sessionId, createTime)) { var localSession = getOptionalSessionFromFileSystem(sessionId); if (localSession.isEmpty()) continue; var applicationId = localSession.get().getOptionalApplicationId(); if (applicationId.isEmpty()) continue; if ( ! activeForApplication) { log.log(Level.FINE, () -> "Will delete expired session " + sessionId + " created " + createTime + " for '" + applicationId + "'"); deleteLocalSession(sessionId); deleted++; } } if (deleted >= deleteMax) return; } catch (Throwable e) { log.log(Level.WARNING, "Error when deleting expired sessions ", e); } } log.log(Level.FINE, () -> "Done deleting expired sessions"); }
deleted++;
public void deleteExpiredRemoteAndLocalSessions(Clock clock, Predicate<Session> sessionIsActiveForApplication) { Set<Long> sessions = getLocalSessionsIdsFromFileSystem(); sessions.addAll(getRemoteSessionsFromZooKeeper()); log.log(Level.FINE, () -> "Sessions for tenant " + tenantName + ": " + sessions); Set<Long> newSessions = findNewSessionsInFileSystem(); sessions.removeAll(newSessions); int deleteMax = (int) Math.min(1000, Math.max(50, sessions.size() * 0.05)); int deleted = 0; for (Long sessionId : sessions) { try { Session session = remoteSessionCache.get(sessionId); if (session == null) session = new RemoteSession(tenantName, sessionId, createSessionZooKeeperClient(sessionId)); var applicationId = session.getApplicationId(); try (var ignored = applicationRepo.lock(applicationId)) { Session.Status status = session.getStatus(); boolean activeForApplication = sessionIsActiveForApplication.test(session); if (status == ACTIVATE && activeForApplication) continue; Instant createTime = session.getCreateTime(); boolean hasExpired = hasExpired(createTime); if (! hasExpired) continue; log.log(Level.FINE, () -> "Remote session " + sessionId + " for " + tenantName + " has expired, deleting it"); deleteRemoteSessionFromZooKeeper(session); deleted++; log.log(Level.FINE, () -> "Expired local session is candidate for deletion: " + sessionId + ", created: " + createTime + ", status " + status + ", can be deleted: " + canBeDeleted(sessionId, status)); if (canBeDeleted(sessionId, status)) { deleteLocalSession(sessionId); deleted++; } else if (isOldAndCanBeDeleted(sessionId, createTime)) { var localSession = getOptionalSessionFromFileSystem(sessionId); if (localSession.isEmpty()) continue; if (! activeForApplication) { log.log(Level.FINE, () -> "Will delete expired session " + sessionId + " created " + createTime + " for '" + applicationId + "'"); deleteLocalSession(sessionId); deleted++; } } if (deleted >= deleteMax) return; } } catch (Throwable e) { log.log(Level.WARNING, "Error when deleting expired sessions ", e); } } log.log(Level.FINE, () -> "Done deleting expired sessions"); }
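The fix above wraps the status read and the deletion in `applicationRepo.lock(applicationId)`, closing the window the comment describes where a session could be activated between the check and the delete. A generic check-then-delete sketch under a per-application lock; the names here are illustrative, not the config server's API:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;

public class GuardedSessionDelete {

    enum Status { PREPARED, ACTIVATE }

    private final Map<String, ReentrantLock> locks = new ConcurrentHashMap<>();
    private final Map<String, Status> sessions = new ConcurrentHashMap<>();

    // Deletes the application's session only if it is not active; the status
    // check and the removal happen under one per-application lock, so a
    // concurrent activation (taking the same lock) cannot interleave.
    boolean deleteIfInactive(String applicationId) {
        ReentrantLock lock = locks.computeIfAbsent(applicationId, id -> new ReentrantLock());
        lock.lock();
        try {
            if (sessions.get(applicationId) == Status.ACTIVATE) {
                return false;
            }
            sessions.remove(applicationId);
            return true;
        } finally {
            lock.unlock();
        }
    }

    void activate(String applicationId) {
        ReentrantLock lock = locks.computeIfAbsent(applicationId, id -> new ReentrantLock());
        lock.lock();
        try {
            sessions.put(applicationId, Status.ACTIVATE);
        } finally {
            lock.unlock();
        }
    }
}
```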
class SessionRepository { private static final Logger log = Logger.getLogger(SessionRepository.class.getName()); private static final FilenameFilter sessionApplicationsFilter = (dir, name) -> name.matches("\\d+"); private static final long nonExistingActiveSessionId = 0; private final Object monitor = new Object(); private final Map<Long, LocalSession> localSessionCache = Collections.synchronizedMap(new HashMap<>()); private final Map<Long, RemoteSession> remoteSessionCache = Collections.synchronizedMap(new HashMap<>()); private final Map<Long, SessionStateWatcher> sessionStateWatchers = Collections.synchronizedMap(new HashMap<>()); private final Clock clock; private final Curator curator; private final Executor zkWatcherExecutor; private final FileDistributionFactory fileDistributionFactory; private final FlagSource flagSource; private final TenantFileSystemDirs tenantFileSystemDirs; private final Metrics metrics; private final MetricUpdater metricUpdater; private final Curator.DirectoryCache directoryCache; private final TenantApplications applicationRepo; private final SessionPreparer sessionPreparer; private final Path sessionsPath; private final TenantName tenantName; private final OnnxModelCost onnxModelCost; private final List<EndpointCertificateSecretStore> endpointCertificateSecretStores; private final SessionCounter sessionCounter; private final SecretStore secretStore; private final HostProvisionerProvider hostProvisionerProvider; private final ConfigserverConfig configserverConfig; private final ConfigServerDB configServerDB; private final Zone zone; private final ModelFactoryRegistry modelFactoryRegistry; private final ConfigDefinitionRepo configDefinitionRepo; private final int maxNodeSize; private final LongFlag expiryTimeFlag; private final BooleanFlag writeSessionData; private final BooleanFlag readSessionData; public SessionRepository(TenantName tenantName, TenantApplications applicationRepo, SessionPreparer sessionPreparer, Curator curator, Metrics metrics, StripedExecutor<TenantName> zkWatcherExecutor, FileDistributionFactory fileDistributionFactory, FlagSource flagSource, ExecutorService zkCacheExecutor, SecretStore secretStore, HostProvisionerProvider hostProvisionerProvider, ConfigserverConfig configserverConfig, ConfigServerDB configServerDB, Zone zone, Clock clock, ModelFactoryRegistry modelFactoryRegistry, ConfigDefinitionRepo configDefinitionRepo, int maxNodeSize, OnnxModelCost onnxModelCost, List<EndpointCertificateSecretStore> endpointCertificateSecretStores) { this.tenantName = tenantName; this.onnxModelCost = onnxModelCost; this.endpointCertificateSecretStores = endpointCertificateSecretStores; sessionCounter = new SessionCounter(curator, tenantName); this.sessionsPath = TenantRepository.getSessionsPath(tenantName); this.clock = clock; this.curator = curator; this.zkWatcherExecutor = command -> zkWatcherExecutor.execute(tenantName, command); this.fileDistributionFactory = fileDistributionFactory; this.flagSource = flagSource; this.tenantFileSystemDirs = new TenantFileSystemDirs(configServerDB, tenantName); this.applicationRepo = applicationRepo; this.sessionPreparer = sessionPreparer; this.metrics = metrics; this.metricUpdater = metrics.getOrCreateMetricUpdater(Metrics.createDimensions(tenantName)); this.secretStore = secretStore; this.hostProvisionerProvider = hostProvisionerProvider; this.configserverConfig = configserverConfig; this.configServerDB = configServerDB; this.zone = zone; this.modelFactoryRegistry = modelFactoryRegistry; 
this.configDefinitionRepo = configDefinitionRepo; this.maxNodeSize = maxNodeSize; this.expiryTimeFlag = PermanentFlags.CONFIG_SERVER_SESSION_EXPIRY_TIME.bindTo(flagSource); this.writeSessionData = Flags.WRITE_CONFIG_SERVER_SESSION_DATA_AS_ONE_BLOB.bindTo(flagSource); this.readSessionData = Flags.READ_CONFIG_SERVER_SESSION_DATA_AS_ONE_BLOB.bindTo(flagSource); loadSessions(); this.directoryCache = curator.createDirectoryCache(sessionsPath.getAbsolute(), false, false, zkCacheExecutor); this.directoryCache.addListener(this::childEvent); this.directoryCache.start(); } private void loadSessions() { ExecutorService executor = Executors.newFixedThreadPool(Math.max(8, Runtime.getRuntime().availableProcessors()), new DaemonThreadFactory("load-sessions-")); loadSessions(executor); } void loadSessions(ExecutorService executor) { loadRemoteSessions(executor); try { executor.shutdown(); if ( ! executor.awaitTermination(1, TimeUnit.MINUTES)) log.log(Level.INFO, "Executor did not terminate"); } catch (InterruptedException e) { log.log(Level.WARNING, "Shutdown of executor for loading sessions failed: " + Exceptions.toMessageString(e)); } } public void addLocalSession(LocalSession session) { long sessionId = session.getSessionId(); localSessionCache.put(sessionId, session); if (remoteSessionCache.get(sessionId) == null) createRemoteSession(sessionId); } public LocalSession getLocalSession(long sessionId) { return localSessionCache.get(sessionId); } /** Returns a copy of local sessions */ public Collection<LocalSession> getLocalSessions() { return List.copyOf(localSessionCache.values()); } private LocalSession getSessionFromFile(long sessionId) { SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); File sessionDir = getAndValidateExistingSessionAppDir(sessionId); ApplicationPackage applicationPackage = FilesApplicationPackage.fromFile(sessionDir); return new LocalSession(tenantName, sessionId, applicationPackage, sessionZKClient); } public Set<Long> getLocalSessionsIdsFromFileSystem() { File[] sessions = tenantFileSystemDirs.sessionsPath().listFiles(sessionApplicationsFilter); if (sessions == null) return Set.of(); Set<Long> sessionIds = new HashSet<>(); for (File session : sessions) { long sessionId = Long.parseLong(session.getName()); sessionIds.add(sessionId); } return sessionIds; } public ConfigChangeActions prepareLocalSession(Session session, DeployLogger logger, PrepareParams params, Instant now) { params.vespaVersion().ifPresent(version -> { if ( ! params.isBootstrap() && ! modelFactoryRegistry.allVersions().contains(version)) throw new UnknownVespaVersionException("Vespa version '" + version + "' not known by this config server"); }); ApplicationId applicationId = params.getApplicationId(); applicationRepo.createApplication(applicationId); logger.log(Level.FINE, "Created application " + applicationId); long sessionId = session.getSessionId(); SessionZooKeeperClient sessionZooKeeperClient = createSessionZooKeeperClient(sessionId); Optional<CompletionWaiter> waiter = params.isDryRun() ? 
Optional.empty() : Optional.of(sessionZooKeeperClient.createPrepareWaiter()); Optional<ApplicationVersions> activeApplicationVersions = activeApplicationVersions(applicationId); try (var transaction = new CuratorTransaction(curator)) { applicationRepo.createWritePrepareTransaction(transaction, applicationId, sessionId, getActiveSessionId(applicationId)) .commit(); } ConfigChangeActions actions = sessionPreparer.prepare(applicationRepo, logger, params, activeApplicationVersions, now, getSessionAppDir(sessionId), session.getApplicationPackage(), sessionZooKeeperClient) .getConfigChangeActions(); setPrepared(session); waiter.ifPresent(w -> w.awaitCompletion(params.getTimeoutBudget().timeLeft())); return actions; } /** * Creates a new deployment session from an already existing session. * * @param existingSession the session to use as base * @param internalRedeploy whether this session is for a system internal redeploy — not an application package change * @param timeoutBudget timeout for creating session and waiting for other servers. * @return a new session */ public LocalSession createSessionFromExisting(Session existingSession, boolean internalRedeploy, TimeoutBudget timeoutBudget, DeployLogger deployLogger) { ApplicationId applicationId = existingSession.getApplicationId(); File existingApp = getSessionAppDir(existingSession.getSessionId()); Instant created = clock.instant(); LocalSession session = createSessionFromApplication(existingApp, applicationId, internalRedeploy, timeoutBudget, deployLogger, created); applicationRepo.createApplication(applicationId); write(existingSession, session, applicationId, created); return session; } /** * Creates a new deployment session from an application package. * * @param applicationDirectory a File pointing to an application. * @param applicationId application id for this new session. * @param timeoutBudget Timeout for creating session and waiting for other servers. * @return a new session */ public LocalSession createSessionFromApplicationPackage(File applicationDirectory, ApplicationId applicationId, TimeoutBudget timeoutBudget, DeployLogger deployLogger) { LocalSession session = createSessionFromApplication(applicationDirectory, applicationId, false, timeoutBudget, deployLogger, clock.instant()); applicationRepo.createApplication(applicationId); return session; } /** * Creates a local session based on a remote session and the distributed application package. * Does not wait for session being created on other servers. 
*/ private void createLocalSession(File applicationFile, ApplicationId applicationId, long sessionId) { try { ApplicationPackage applicationPackage = createApplicationPackage(applicationFile, applicationId, sessionId, false, Optional.empty()); createLocalSession(sessionId, applicationPackage); } catch (Exception e) { throw new RuntimeException("Error creating session " + sessionId, e); } } public void deleteLocalSession(long sessionId) { log.log(Level.FINE, () -> "Deleting local session " + sessionId); SessionStateWatcher watcher = sessionStateWatchers.remove(sessionId); if (watcher != null) watcher.close(); localSessionCache.remove(sessionId); NestedTransaction transaction = new NestedTransaction(); transaction.add(FileTransaction.from(FileOperations.delete(getSessionAppDir(sessionId).getAbsolutePath()))); transaction.commit(); } private void deleteAllSessions() { for (LocalSession session : getLocalSessions()) { deleteLocalSession(session.getSessionId()); } } public RemoteSession getRemoteSession(long sessionId) { return remoteSessionCache.get(sessionId); } /** Returns a copy of remote sessions */ public Collection<RemoteSession> getRemoteSessions() { return List.copyOf(remoteSessionCache.values()); } public List<Long> getRemoteSessionsFromZooKeeper() { return getSessionList(curator.getChildren(sessionsPath)); } public RemoteSession createRemoteSession(long sessionId) { SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); RemoteSession session = new RemoteSession(tenantName, sessionId, sessionZKClient); loadSessionIfActive(session); remoteSessionCache.put(sessionId, session); updateSessionStateWatcher(sessionId); return session; } public int deleteExpiredRemoteSessions(Predicate<Session> sessionIsActiveForApplication) { List<Long> remoteSessionsFromZooKeeper = getRemoteSessionsFromZooKeeper(); log.log(Level.FINE, () -> "Remote sessions for tenant " + tenantName + ": " + remoteSessionsFromZooKeeper); int deleted = 0; int deleteMax = (int) Math.min(1000, Math.max(50, remoteSessionsFromZooKeeper.size() * 0.05)); for (Long sessionId : remoteSessionsFromZooKeeper) { Session session = remoteSessionCache.get(sessionId); if (session == null) session = new RemoteSession(tenantName, sessionId, createSessionZooKeeperClient(sessionId)); if (session.getStatus() == Session.Status.ACTIVATE && sessionIsActiveForApplication.test(session)) continue; if (sessionHasExpired(session.getCreateTime())) { log.log(Level.FINE, () -> "Remote session " + sessionId + " for " + tenantName + " has expired, deleting it"); deleteRemoteSessionFromZooKeeper(session); deleted++; } if (deleted >= deleteMax) break; } return deleted; } public void deactivateSession(long sessionId) { var s = remoteSessionCache.get(sessionId); if (s == null) return; remoteSessionCache.put(sessionId, s.deactivated()); } public void deleteRemoteSessionFromZooKeeper(Session session) { SessionZooKeeperClient sessionZooKeeperClient = createSessionZooKeeperClient(session.getSessionId()); Transaction transaction = sessionZooKeeperClient.deleteTransaction(); transaction.commit(); transaction.close(); } private boolean sessionHasExpired(Instant created) { var expiryTime = Duration.ofSeconds(expiryTimeFlag.value()); return created.plus(expiryTime).isBefore(clock.instant()); } private List<Long> getSessionListFromDirectoryCache(List<ChildData> children) { return getSessionList(children.stream() .map(child -> Path.fromString(child.getPath()).getName()) .toList()); } private List<Long> getSessionList(List<String> children) { 
return children.stream().map(Long::parseLong).toList(); } private void loadRemoteSessions(ExecutorService executor) throws NumberFormatException { Map<Long, Future<?>> futures = new HashMap<>(); for (long sessionId : getRemoteSessionsFromZooKeeper()) { futures.put(sessionId, executor.submit(() -> sessionAdded(sessionId))); } futures.forEach((sessionId, future) -> { try { future.get(); log.log(Level.FINE, () -> "Remote session " + sessionId + " loaded"); } catch (ExecutionException | InterruptedException e) { throw new RuntimeException("Could not load remote session " + sessionId, e); } }); } /** * A session for which we don't have a watcher, i.e. hitherto unknown to us. * * @param sessionId session id for the new session */ public void sessionAdded(long sessionId) { if (hasStatusDeleted(sessionId)) return; log.log(Level.FINE, () -> "Adding remote session " + sessionId); Session session = createRemoteSession(sessionId); if (session.getStatus() == NEW) { log.log(Level.FINE, () -> session.logPre() + "Confirming upload for session " + sessionId); confirmUpload(session); } createLocalSessionFromDistributedApplicationPackage(sessionId); } private boolean hasStatusDeleted(long sessionId) { SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); RemoteSession session = new RemoteSession(tenantName, sessionId, sessionZKClient); return session.getStatus() == Session.Status.DELETE; } void activate(long sessionId) { createLocalSessionFromDistributedApplicationPackage(sessionId); RemoteSession session = remoteSessionCache.get(sessionId); if (session == null) return; CompletionWaiter waiter = createSessionZooKeeperClient(sessionId).getActiveWaiter(); log.log(Level.FINE, () -> session.logPre() + "Activating " + sessionId); applicationRepo.activateApplication(ensureApplicationLoaded(session), sessionId); log.log(Level.FINE, () -> session.logPre() + "Notifying " + waiter); notifyCompletion(waiter); log.log(Level.INFO, session.logPre() + "Session activated: " + sessionId); } private void loadSessionIfActive(RemoteSession session) { for (ApplicationId applicationId : applicationRepo.activeApplications()) { Optional<Long> activeSession = applicationRepo.activeSessionOf(applicationId); if (activeSession.isPresent() && activeSession.get() == session.getSessionId()) { log.log(Level.FINE, () -> "Found active application for session " + session.getSessionId() + " , loading it"); applicationRepo.activateApplication(ensureApplicationLoaded(session), session.getSessionId()); log.log(Level.INFO, session.logPre() + "Application activated successfully: " + applicationId + " (generation " + session.getSessionId() + ")"); return; } } } void prepareRemoteSession(long sessionId) { createLocalSessionFromDistributedApplicationPackage(sessionId); RemoteSession session = remoteSessionCache.get(sessionId); if (session == null) return; SessionZooKeeperClient sessionZooKeeperClient = createSessionZooKeeperClient(sessionId); CompletionWaiter waiter = sessionZooKeeperClient.getPrepareWaiter(); ensureApplicationLoaded(session); notifyCompletion(waiter); } public ApplicationVersions ensureApplicationLoaded(RemoteSession session) { if (session.applicationVersions().isPresent()) { return session.applicationVersions().get(); } Optional<Long> activeSessionId = getActiveSessionId(session.getApplicationId()); Optional<ApplicationVersions> previousActiveApplicationVersions = activeSessionId.filter(session::isNewerThan) .flatMap(this::activeApplicationVersions); ApplicationVersions applicationVersions = 
loadApplication(session, previousActiveApplicationVersions); RemoteSession activated = session.activated(applicationVersions); long sessionId = activated.getSessionId(); remoteSessionCache.put(sessionId, activated); updateSessionStateWatcher(sessionId); return applicationVersions; } void confirmUpload(Session session) { CompletionWaiter waiter = createSessionZooKeeperClient(session.getSessionId()).getUploadWaiter(); long sessionId = session.getSessionId(); log.log(Level.FINE, () -> "Notifying upload waiter for session " + sessionId); notifyCompletion(waiter); log.log(Level.FINE, () -> "Done notifying upload for session " + sessionId); } void notifyCompletion(CompletionWaiter completionWaiter) { try { completionWaiter.notifyCompletion(); } catch (RuntimeException e) { Set<Class<? extends KeeperException>> acceptedExceptions = Set.of(KeeperException.NoNodeException.class, KeeperException.NodeExistsException.class); Class<? extends Throwable> exceptionClass = e.getCause().getClass(); if (acceptedExceptions.contains(exceptionClass)) log.log(Level.FINE, () -> "Not able to notify completion for session (" + completionWaiter + ")," + " node " + (exceptionClass.equals(KeeperException.NoNodeException.class) ? "has been deleted" : "already exists")); else throw e; } } private ApplicationVersions loadApplication(Session session, Optional<ApplicationVersions> previousApplicationVersions) { log.log(Level.FINE, () -> "Loading application for " + session); SessionZooKeeperClient sessionZooKeeperClient = createSessionZooKeeperClient(session.getSessionId()); ActivatedModelsBuilder builder = new ActivatedModelsBuilder(session.getTenantName(), session.getSessionId(), sessionZooKeeperClient, previousApplicationVersions, sessionPreparer.getExecutor(), curator, metrics, flagSource, secretStore, hostProvisionerProvider, configserverConfig, zone, modelFactoryRegistry, configDefinitionRepo, onnxModelCost, endpointCertificateSecretStores); return ApplicationVersions.fromList(builder.buildModels(session.getApplicationId(), session.getDockerImageRepository(), session.getVespaVersion(), sessionZooKeeperClient.loadApplicationPackage(), new AllocatedHostsFromAllModels(), clock.instant())); } private void nodeChanged() { zkWatcherExecutor.execute(() -> { Multiset<Session.Status> sessionMetrics = HashMultiset.create(); getRemoteSessions().forEach(session -> sessionMetrics.add(session.getStatus())); metricUpdater.setNewSessions(sessionMetrics.count(NEW)); metricUpdater.setPreparedSessions(sessionMetrics.count(PREPARE)); metricUpdater.setActivatedSessions(sessionMetrics.count(ACTIVATE)); metricUpdater.setDeactivatedSessions(sessionMetrics.count(DEACTIVATE)); }); } @SuppressWarnings("unused") private void childEvent(CuratorFramework ignored, PathChildrenCacheEvent event) { zkWatcherExecutor.execute(() -> { log.log(Level.FINE, () -> "Got child event: " + event); switch (event.getType()) { case CHILD_ADDED, CHILD_REMOVED, CONNECTION_RECONNECTED -> sessionsChanged(); } }); } private void write(Session existingSession, LocalSession session, ApplicationId applicationId, Instant created) { SessionSerializer sessionSerializer = new SessionSerializer(); sessionSerializer.write(session.getSessionZooKeeperClient(), applicationId, created, existingSession.getApplicationPackageReference(), existingSession.getDockerImageRepository(), existingSession.getVespaVersion(), existingSession.getAthenzDomain(), existingSession.getQuota(), existingSession.getTenantSecretStores(), existingSession.getOperatorCertificates(), 
existingSession.getCloudAccount(), existingSession.getDataplaneTokens(), ActivationTriggers.empty(), writeSessionData); } public SessionData read(Session session) { return new SessionSerializer().read(session.getSessionZooKeeperClient(), readSessionData); } public void deleteExpiredSessions(Predicate<Session> sessionIsActiveForApplication) { log.log(Level.FINE, () -> "Deleting expired local sessions for tenant '" + tenantName + "'"); Set<Long> sessionIdsToDelete = new HashSet<>(); Set<Long> newSessions = findNewSessionsInFileSystem(); try { for (long sessionId : getLocalSessionsIdsFromFileSystem()) { if (newSessions.contains(sessionId)) continue; log.log(Level.FINE, () -> "Candidate local session for deletion: " + sessionId + ", created (on disk): " + created(getSessionAppDir(sessionId))); var sessionZooKeeperClient = createSessionZooKeeperClient(sessionId); Instant createTime = sessionZooKeeperClient.readCreateTime(); Session.Status status = sessionZooKeeperClient.readStatus(); var expired = sessionLifeTimeElapsed(createTime); log.log(Level.FINE, () -> "Candidate local session for deletion: " + sessionId + ", created (in zk): " + createTime + ", status " + status + ", can be deleted: " + canBeDeleted(sessionId, status) + ", hasExpired: " + expired); if (expired && canBeDeleted(sessionId, status)) { log.log(Level.FINE, () -> " expired, can be deleted: " + sessionId); sessionIdsToDelete.add(sessionId); } else if (createTime.plus(Duration.ofDays(1)).isBefore(clock.instant())) { LocalSession session; log.log(Level.FINE, () -> "not expired, but more than 1 day old: " + sessionId); try { session = getSessionFromFile(sessionId); } catch (Exception e) { log.log(Level.FINE, () -> "could not get session from file: " + sessionId + ": " + e.getMessage()); continue; } Optional<ApplicationId> applicationId = session.getOptionalApplicationId(); if (applicationId.isEmpty()) continue; if ( ! sessionIsActiveForApplication.test(session)) { sessionIdsToDelete.add(sessionId); log.log(Level.FINE, () -> "Will delete inactive session " + sessionId + " created " + createTime + " for '" + applicationId + "'"); } } } sessionIdsToDelete.forEach(this::deleteLocalSession); } catch (Throwable e) { log.log(Level.WARNING, "Error when purging old sessions ", e); } log.log(Level.FINE, () -> "Done purging old sessions"); } private boolean sessionLifeTimeElapsed(Instant created) { var sessionLifetime = Duration.ofSeconds(configserverConfig.sessionLifetime()); return created.plus(sessionLifetime).isBefore(clock.instant()); } private Optional<LocalSession> getOptionalSessionFromFileSystem(long sessionId) { try { return Optional.of(getSessionFromFile(sessionId)); } catch (Exception e) { log.log(Level.FINE, () -> "could not get session from file: " + sessionId + ": " + e.getMessage()); } return Optional.empty(); } private boolean isOldAndCanBeDeleted(long sessionId, Instant createTime) { Duration oneDay = Duration.ofDays(1); Duration expiry = Duration.ofSeconds(expiryTimeFlag.value()).compareTo(oneDay) >= 0 ? Duration.ofSeconds(expiryTimeFlag.value()) : oneDay; if (createTime.plus(expiry).isBefore(clock.instant())) { log.log(Level.FINE, () -> "more than 1 day old: " + sessionId); return true; } else { return false; } } private boolean hasExpired(Instant created) { Duration expiryTime = Duration.ofSeconds(expiryTimeFlag.value()); return created.plus(expiryTime).isBefore(clock.instant()); } private boolean canBeDeleted(long sessionId, Session.Status status) { return ( ! 
List.of(UNKNOWN, ACTIVATE).contains(status)) || oldSessionDirWithUnknownStatus(sessionId, status); } private boolean oldSessionDirWithUnknownStatus(long sessionId, Session.Status status) { Duration expiryTime = Duration.ofHours(configserverConfig.keepSessionsWithUnknownStatusHours()); File sessionDir = tenantFileSystemDirs.getUserApplicationDir(sessionId); return sessionDir.exists() && status == UNKNOWN && created(sessionDir).plus(expiryTime).isBefore(clock.instant()); } private Set<Long> findNewSessionsInFileSystem() { File[] sessions = tenantFileSystemDirs.sessionsPath().listFiles(sessionApplicationsFilter); Set<Long> newSessions = new HashSet<>(); if (sessions != null) { for (File session : sessions) { try { if (Files.getLastModifiedTime(session.toPath()).toInstant() .isAfter(clock.instant().minus(Duration.ofSeconds(30)))) newSessions.add(Long.parseLong(session.getName())); } catch (IOException e) { log.log(Level.FINE, "Unable to find last modified time for " + session.toPath()); } } } return newSessions; } private Instant created(File file) { BasicFileAttributes fileAttributes; try { fileAttributes = readAttributes(file.toPath(), BasicFileAttributes.class); return fileAttributes.creationTime().toInstant(); } catch (IOException e) { throw new UncheckedIOException(e); } } private void ensureSessionPathDoesNotExist(long sessionId) { Path sessionPath = getSessionPath(sessionId); if (curator.exists(sessionPath)) { throw new IllegalArgumentException("Path " + sessionPath.getAbsolute() + " already exists in ZooKeeper"); } } private ApplicationPackage createApplication(File userDir, File configApplicationDir, ApplicationId applicationId, long sessionId, Optional<Long> currentlyActiveSessionId, boolean internalRedeploy, Optional<DeployLogger> deployLogger) { long deployTimestamp = System.currentTimeMillis(); DeployData deployData = new DeployData(userDir.getAbsolutePath(), applicationId, deployTimestamp, internalRedeploy, sessionId, currentlyActiveSessionId.orElse(nonExistingActiveSessionId)); FilesApplicationPackage app = FilesApplicationPackage.fromFileWithDeployData(configApplicationDir, deployData); validateFileExtensions(applicationId, deployLogger, app); return app; } private void validateFileExtensions(ApplicationId applicationId, Optional<DeployLogger> deployLogger, FilesApplicationPackage app) { try { app.validateFileExtensions(); } catch (IllegalArgumentException e) { if (configserverConfig.hostedVespa()) { UnboundStringFlag flag = PermanentFlags.APPLICATION_FILES_WITH_UNKNOWN_EXTENSION; String value = flag.bindTo(flagSource).with(INSTANCE_ID, applicationId.serializedForm()).value(); switch (value) { case "FAIL" -> throw new InvalidApplicationException(e); case "LOG" -> deployLogger.ifPresent(logger -> logger.logApplicationPackage(Level.WARNING, e.getMessage())); default -> log.log(Level.WARNING, "Unknown value for flag " + flag.id() + ": " + value); } } else { deployLogger.ifPresent(logger -> logger.logApplicationPackage(Level.WARNING, e.getMessage())); } } } private LocalSession createSessionFromApplication(File applicationDirectory, ApplicationId applicationId, boolean internalRedeploy, TimeoutBudget timeoutBudget, DeployLogger deployLogger, Instant created) { long sessionId = getNextSessionId(); try { ensureSessionPathDoesNotExist(sessionId); ApplicationPackage app = createApplicationPackage(applicationDirectory, applicationId, sessionId, internalRedeploy, Optional.of(deployLogger)); log.log(Level.FINE, () -> TenantRepository.logPre(tenantName) + "Creating session " + sessionId + 
" in ZooKeeper"); SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); sessionZKClient.createNewSession(created); CompletionWaiter waiter = sessionZKClient.getUploadWaiter(); LocalSession session = new LocalSession(tenantName, sessionId, app, sessionZKClient); waiter.awaitCompletion(Duration.ofSeconds(Math.min(120, timeoutBudget.timeLeft().getSeconds()))); addLocalSession(session); return session; } catch (IOException e) { throw new RuntimeException("Error creating session " + sessionId, e); } } private ApplicationPackage createApplicationPackage(File applicationDirectory, ApplicationId applicationId, long sessionId, boolean internalRedeploy, Optional<DeployLogger> deployLogger) throws IOException { synchronized (monitor) { Optional<Long> activeSessionId = getActiveSessionId(applicationId); File userApplicationDir = getSessionAppDir(sessionId); copyApp(applicationDirectory, userApplicationDir); ApplicationPackage applicationPackage = createApplication(applicationDirectory, userApplicationDir, applicationId, sessionId, activeSessionId, internalRedeploy, deployLogger); applicationPackage.writeMetaData(); return applicationPackage; } } public Optional<ApplicationVersions> activeApplicationVersions(ApplicationId appId) { return applicationRepo.activeSessionOf(appId).flatMap(this::activeApplicationVersions); } private Optional<ApplicationVersions> activeApplicationVersions(long sessionId) { try { return Optional.ofNullable(getRemoteSession(sessionId)).map(this::ensureApplicationLoaded); } catch (IllegalArgumentException e) { return Optional.empty(); } } private void copyApp(File sourceDir, File destinationDir) throws IOException { if (destinationDir.exists()) { log.log(Level.INFO, "Destination dir " + destinationDir + " already exists, app has already been copied"); return; } if (! 
sourceDir.isDirectory()) throw new IllegalArgumentException(sourceDir.getAbsolutePath() + " is not a directory"); java.nio.file.Path tempDestinationDir = null; try { tempDestinationDir = Files.createTempDirectory(destinationDir.getParentFile().toPath(), "app-package"); log.log(Level.FINE, "Copying dir " + sourceDir.getAbsolutePath() + " to " + tempDestinationDir.toFile().getAbsolutePath()); IOUtils.copyDirectory(sourceDir, tempDestinationDir.toFile()); moveSearchDefinitionsToSchemasDir(tempDestinationDir); log.log(Level.FINE, "Moving " + tempDestinationDir + " to " + destinationDir.getAbsolutePath()); Files.move(tempDestinationDir, destinationDir.toPath(), StandardCopyOption.ATOMIC_MOVE); } finally { if (tempDestinationDir != null) IOUtils.recursiveDeleteDir(tempDestinationDir.toFile()); } } private void moveSearchDefinitionsToSchemasDir(java.nio.file.Path applicationDir) throws IOException { File schemasDir = applicationDir.resolve(ApplicationPackage.SCHEMAS_DIR.getRelative()).toFile(); File sdDir = applicationDir.resolve(ApplicationPackage.SEARCH_DEFINITIONS_DIR.getRelative()).toFile(); if (sdDir.exists() && sdDir.isDirectory()) { try { File[] sdFiles = sdDir.listFiles(); if (sdFiles != null) { Files.createDirectories(schemasDir.toPath()); List.of(sdFiles).forEach(file -> Exceptions.uncheck( () -> Files.move(file.toPath(), schemasDir.toPath().resolve(file.toPath().getFileName()), StandardCopyOption.REPLACE_EXISTING))); } Files.delete(sdDir.toPath()); } catch (IOException | UncheckedIOException e) { if (schemasDir.exists() && schemasDir.isDirectory()) throw new InvalidApplicationException( "Both " + ApplicationPackage.SCHEMAS_DIR.getRelative() + "/ and " + ApplicationPackage.SEARCH_DEFINITIONS_DIR + "/ exist in application package, please remove " + ApplicationPackage.SEARCH_DEFINITIONS_DIR + "/", e); else throw e; } } } /** * Returns a new session instance for the given session id. */ void createSessionFromId(long sessionId) { File sessionDir = getAndValidateExistingSessionAppDir(sessionId); ApplicationPackage applicationPackage = FilesApplicationPackage.fromFile(sessionDir); createLocalSession(sessionId, applicationPackage); } void createLocalSession(long sessionId, ApplicationPackage applicationPackage) { SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); LocalSession session = new LocalSession(tenantName, sessionId, applicationPackage, sessionZKClient); addLocalSession(session); } /** * Create a new local session for the given session id if it does not already exist and * will add the session to the local session cache. If there is no remote session matching * the session id the remote session will also be created. 
*/ public void createLocalSessionFromDistributedApplicationPackage(long sessionId) { if (applicationRepo.sessionExistsInFileSystem(sessionId)) { log.log(Level.FINE, () -> "Local session for session id " + sessionId + " already exists"); createSessionFromId(sessionId); return; } SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); var fileReference = sessionZKClient.readApplicationPackageReference(); log.log(Level.FINE, () -> "File reference for session id " + sessionId + ": " + fileReference); if (fileReference.isEmpty()) return; Optional<File> sessionDir = fileDistributionFactory.fileDirectory().getFile(fileReference.get()); if (sessionDir.isEmpty()) return; ApplicationId applicationId = sessionZKClient.readApplicationId(); log.log(Level.FINE, () -> "Creating local session for tenant '" + tenantName + "' with session id " + sessionId); createLocalSession(sessionDir.get(), applicationId, sessionId); } private Optional<Long> getActiveSessionId(ApplicationId applicationId) { return applicationRepo.activeSessionOf(applicationId); } private long getNextSessionId() { return sessionCounter.nextSessionId(); } public Path getSessionPath(long sessionId) { return sessionsPath.append(String.valueOf(sessionId)); } Path getSessionStatePath(long sessionId) { return getSessionPath(sessionId).append(ZKApplication.SESSIONSTATE_ZK_SUBPATH); } public SessionZooKeeperClient createSessionZooKeeperClient(long sessionId) { return new SessionZooKeeperClient(curator, tenantName, sessionId, configserverConfig, fileDistributionFactory.createFileManager(getSessionAppDir(sessionId)), maxNodeSize); } private File getAndValidateExistingSessionAppDir(long sessionId) { File appDir = getSessionAppDir(sessionId); if (!appDir.exists() || !appDir.isDirectory()) { throw new IllegalArgumentException("Unable to find correct application directory for session " + sessionId); } return appDir; } private File getSessionAppDir(long sessionId) { return new TenantFileSystemDirs(configServerDB, tenantName).getUserApplicationDir(sessionId); } private void updateSessionStateWatcher(long sessionId) { sessionStateWatchers.computeIfAbsent(sessionId, (id) -> { Curator.FileCache fileCache = curator.createFileCache(getSessionStatePath(id).getAbsolute(), false); fileCache.addListener(this::nodeChanged); return new SessionStateWatcher(fileCache, id, metricUpdater, zkWatcherExecutor, this); }); } @Override public String toString() { return getLocalSessions().toString(); } public Clock clock() { return clock; } public void close() { deleteAllSessions(); tenantFileSystemDirs.delete(); try { if (directoryCache != null) { directoryCache.close(); } } catch (Exception e) { log.log(Level.WARNING, "Exception when closing path cache", e); } finally { checkForRemovedSessions(new ArrayList<>()); } } private void sessionsChanged() throws NumberFormatException { List<Long> sessions = getSessionListFromDirectoryCache(directoryCache.getCurrentData()); checkForRemovedSessions(sessions); checkForAddedSessions(sessions); } private void checkForRemovedSessions(List<Long> existingSessions) { for (Iterator<RemoteSession> it = remoteSessionCache.values().iterator(); it.hasNext(); ) { long sessionId = it.next().sessionId; if (existingSessions.contains(sessionId)) continue; SessionStateWatcher watcher = sessionStateWatchers.remove(sessionId); if (watcher != null) watcher.close(); it.remove(); metricUpdater.incRemovedSessions(); } } private void checkForAddedSessions(List<Long> sessions) { for (Long sessionId : sessions) if 
(remoteSessionCache.get(sessionId) == null) sessionAdded(sessionId); } public Transaction createActivateTransaction(Session session) { Transaction transaction = createSetStatusTransaction(session, ACTIVATE); transaction.add(applicationRepo.createWriteActiveTransaction(transaction, session.getApplicationId(), session.getSessionId()).operations()); return transaction; } public Transaction createSetStatusTransaction(Session session, Session.Status status) { return session.sessionZooKeeperClient.createWriteStatusTransaction(status); } void setPrepared(Session session) { session.setStatus(PREPARE); } private static class FileTransaction extends AbstractTransaction { public static FileTransaction from(FileOperation operation) { FileTransaction transaction = new FileTransaction(); transaction.add(operation); return transaction; } @Override public void prepare() { } @Override public void commit() { for (Operation operation : operations()) ((FileOperation)operation).commit(); } } /** Factory for file operations */ private static class FileOperations { /** Creates an operation which recursively deletes the given path */ public static DeleteOperation delete(String pathToDelete) { return new DeleteOperation(pathToDelete); } } private interface FileOperation extends Transaction.Operation { void commit(); } /** * Recursively deletes this path and everything below. * Succeeds with no action if the path does not exist. */ private static class DeleteOperation implements FileOperation { private final String pathToDelete; DeleteOperation(String pathToDelete) { this.pathToDelete = pathToDelete; } @Override public void commit() { IOUtils.recursiveDeleteDir(new File(pathToDelete)); } } }
class SessionRepository { private static final Logger log = Logger.getLogger(SessionRepository.class.getName()); private static final FilenameFilter sessionApplicationsFilter = (dir, name) -> name.matches("\\d+"); private static final long nonExistingActiveSessionId = 0; private final Object monitor = new Object(); private final Map<Long, LocalSession> localSessionCache = Collections.synchronizedMap(new HashMap<>()); private final Map<Long, RemoteSession> remoteSessionCache = Collections.synchronizedMap(new HashMap<>()); private final Map<Long, SessionStateWatcher> sessionStateWatchers = Collections.synchronizedMap(new HashMap<>()); private final Clock clock; private final Curator curator; private final Executor zkWatcherExecutor; private final FileDistributionFactory fileDistributionFactory; private final FlagSource flagSource; private final TenantFileSystemDirs tenantFileSystemDirs; private final Metrics metrics; private final MetricUpdater metricUpdater; private final Curator.DirectoryCache directoryCache; private final TenantApplications applicationRepo; private final SessionPreparer sessionPreparer; private final Path sessionsPath; private final TenantName tenantName; private final OnnxModelCost onnxModelCost; private final List<EndpointCertificateSecretStore> endpointCertificateSecretStores; private final SessionCounter sessionCounter; private final SecretStore secretStore; private final HostProvisionerProvider hostProvisionerProvider; private final ConfigserverConfig configserverConfig; private final ConfigServerDB configServerDB; private final Zone zone; private final ModelFactoryRegistry modelFactoryRegistry; private final ConfigDefinitionRepo configDefinitionRepo; private final int maxNodeSize; private final LongFlag expiryTimeFlag; private final BooleanFlag writeSessionData; private final BooleanFlag readSessionData; public SessionRepository(TenantName tenantName, TenantApplications applicationRepo, SessionPreparer sessionPreparer, Curator curator, Metrics metrics, StripedExecutor<TenantName> zkWatcherExecutor, FileDistributionFactory fileDistributionFactory, FlagSource flagSource, ExecutorService zkCacheExecutor, SecretStore secretStore, HostProvisionerProvider hostProvisionerProvider, ConfigserverConfig configserverConfig, ConfigServerDB configServerDB, Zone zone, Clock clock, ModelFactoryRegistry modelFactoryRegistry, ConfigDefinitionRepo configDefinitionRepo, int maxNodeSize, OnnxModelCost onnxModelCost, List<EndpointCertificateSecretStore> endpointCertificateSecretStores) { this.tenantName = tenantName; this.onnxModelCost = onnxModelCost; this.endpointCertificateSecretStores = endpointCertificateSecretStores; sessionCounter = new SessionCounter(curator, tenantName); this.sessionsPath = TenantRepository.getSessionsPath(tenantName); this.clock = clock; this.curator = curator; this.zkWatcherExecutor = command -> zkWatcherExecutor.execute(tenantName, command); this.fileDistributionFactory = fileDistributionFactory; this.flagSource = flagSource; this.tenantFileSystemDirs = new TenantFileSystemDirs(configServerDB, tenantName); this.applicationRepo = applicationRepo; this.sessionPreparer = sessionPreparer; this.metrics = metrics; this.metricUpdater = metrics.getOrCreateMetricUpdater(Metrics.createDimensions(tenantName)); this.secretStore = secretStore; this.hostProvisionerProvider = hostProvisionerProvider; this.configserverConfig = configserverConfig; this.configServerDB = configServerDB; this.zone = zone; this.modelFactoryRegistry = modelFactoryRegistry; 
this.configDefinitionRepo = configDefinitionRepo; this.maxNodeSize = maxNodeSize; this.expiryTimeFlag = PermanentFlags.CONFIG_SERVER_SESSION_EXPIRY_TIME.bindTo(flagSource); this.writeSessionData = Flags.WRITE_CONFIG_SERVER_SESSION_DATA_AS_ONE_BLOB.bindTo(flagSource); this.readSessionData = Flags.READ_CONFIG_SERVER_SESSION_DATA_AS_ONE_BLOB.bindTo(flagSource); loadSessions(); this.directoryCache = curator.createDirectoryCache(sessionsPath.getAbsolute(), false, false, zkCacheExecutor); this.directoryCache.addListener(this::childEvent); this.directoryCache.start(); } private void loadSessions() { ExecutorService executor = Executors.newFixedThreadPool(Math.max(8, Runtime.getRuntime().availableProcessors()), new DaemonThreadFactory("load-sessions-")); loadSessions(executor); } void loadSessions(ExecutorService executor) { loadRemoteSessions(executor); try { executor.shutdown(); if ( ! executor.awaitTermination(1, TimeUnit.MINUTES)) log.log(Level.INFO, "Executor did not terminate"); } catch (InterruptedException e) { log.log(Level.WARNING, "Shutdown of executor for loading sessions failed: " + Exceptions.toMessageString(e)); } } public void addLocalSession(LocalSession session) { long sessionId = session.getSessionId(); localSessionCache.put(sessionId, session); if (remoteSessionCache.get(sessionId) == null) createRemoteSession(sessionId); } public LocalSession getLocalSession(long sessionId) { return localSessionCache.get(sessionId); } /** Returns a copy of local sessions */ public Collection<LocalSession> getLocalSessions() { return List.copyOf(localSessionCache.values()); } private LocalSession getSessionFromFile(long sessionId) { SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); File sessionDir = getAndValidateExistingSessionAppDir(sessionId); ApplicationPackage applicationPackage = FilesApplicationPackage.fromFile(sessionDir); return new LocalSession(tenantName, sessionId, applicationPackage, sessionZKClient); } public Set<Long> getLocalSessionsIdsFromFileSystem() { File[] sessions = tenantFileSystemDirs.sessionsPath().listFiles(sessionApplicationsFilter); if (sessions == null) return Set.of(); Set<Long> sessionIds = new HashSet<>(); for (File session : sessions) { long sessionId = Long.parseLong(session.getName()); sessionIds.add(sessionId); } return sessionIds; } public ConfigChangeActions prepareLocalSession(Session session, DeployLogger logger, PrepareParams params, Instant now) { params.vespaVersion().ifPresent(version -> { if ( ! params.isBootstrap() && ! modelFactoryRegistry.allVersions().contains(version)) throw new UnknownVespaVersionException("Vespa version '" + version + "' not known by this config server"); }); ApplicationId applicationId = params.getApplicationId(); applicationRepo.createApplication(applicationId); logger.log(Level.FINE, "Created application " + applicationId); long sessionId = session.getSessionId(); SessionZooKeeperClient sessionZooKeeperClient = createSessionZooKeeperClient(sessionId); Optional<CompletionWaiter> waiter = params.isDryRun() ? 
Optional.empty() : Optional.of(sessionZooKeeperClient.createPrepareWaiter()); Optional<ApplicationVersions> activeApplicationVersions = activeApplicationVersions(applicationId); try (var transaction = new CuratorTransaction(curator)) { applicationRepo.createWritePrepareTransaction(transaction, applicationId, sessionId, getActiveSessionId(applicationId)) .commit(); } ConfigChangeActions actions = sessionPreparer.prepare(applicationRepo, logger, params, activeApplicationVersions, now, getSessionAppDir(sessionId), session.getApplicationPackage(), sessionZooKeeperClient) .getConfigChangeActions(); setPrepared(session); waiter.ifPresent(w -> w.awaitCompletion(params.getTimeoutBudget().timeLeft())); return actions; } /** * Creates a new deployment session from an already existing session. * * @param existingSession the session to use as base * @param internalRedeploy whether this session is for a system internal redeploy — not an application package change * @param timeoutBudget timeout for creating session and waiting for other servers. * @return a new session */ public LocalSession createSessionFromExisting(Session existingSession, boolean internalRedeploy, TimeoutBudget timeoutBudget, DeployLogger deployLogger) { ApplicationId applicationId = existingSession.getApplicationId(); File existingApp = getSessionAppDir(existingSession.getSessionId()); Instant created = clock.instant(); LocalSession session = createSessionFromApplication(existingApp, applicationId, internalRedeploy, timeoutBudget, deployLogger, created); applicationRepo.createApplication(applicationId); write(existingSession, session, applicationId, created); return session; } /** * Creates a new deployment session from an application package. * * @param applicationDirectory a File pointing to an application. * @param applicationId application id for this new session. * @param timeoutBudget Timeout for creating session and waiting for other servers. * @return a new session */ public LocalSession createSessionFromApplicationPackage(File applicationDirectory, ApplicationId applicationId, TimeoutBudget timeoutBudget, DeployLogger deployLogger) { LocalSession session = createSessionFromApplication(applicationDirectory, applicationId, false, timeoutBudget, deployLogger, clock.instant()); applicationRepo.createApplication(applicationId); return session; } /** * Creates a local session based on a remote session and the distributed application package. * Does not wait for session being created on other servers. 
*/ private void createLocalSession(File applicationFile, ApplicationId applicationId, long sessionId) { try { ApplicationPackage applicationPackage = createApplicationPackage(applicationFile, applicationId, sessionId, false, Optional.empty()); createLocalSession(sessionId, applicationPackage); } catch (Exception e) { throw new RuntimeException("Error creating session " + sessionId, e); } } public void deleteLocalSession(long sessionId) { log.log(Level.FINE, () -> "Deleting local session " + sessionId); SessionStateWatcher watcher = sessionStateWatchers.remove(sessionId); if (watcher != null) watcher.close(); localSessionCache.remove(sessionId); NestedTransaction transaction = new NestedTransaction(); transaction.add(FileTransaction.from(FileOperations.delete(getSessionAppDir(sessionId).getAbsolutePath()))); transaction.commit(); } private void deleteAllSessions() { for (LocalSession session : getLocalSessions()) { deleteLocalSession(session.getSessionId()); } } public RemoteSession getRemoteSession(long sessionId) { return remoteSessionCache.get(sessionId); } /** Returns a copy of remote sessions */ public Collection<RemoteSession> getRemoteSessions() { return List.copyOf(remoteSessionCache.values()); } public List<Long> getRemoteSessionsFromZooKeeper() { return getSessionList(curator.getChildren(sessionsPath)); } public RemoteSession createRemoteSession(long sessionId) { SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); RemoteSession session = new RemoteSession(tenantName, sessionId, sessionZKClient); loadSessionIfActive(session); remoteSessionCache.put(sessionId, session); updateSessionStateWatcher(sessionId); return session; } public int deleteExpiredRemoteSessions(Predicate<Session> sessionIsActiveForApplication) { List<Long> remoteSessionsFromZooKeeper = getRemoteSessionsFromZooKeeper(); log.log(Level.FINE, () -> "Remote sessions for tenant " + tenantName + ": " + remoteSessionsFromZooKeeper); int deleted = 0; int deleteMax = (int) Math.min(1000, Math.max(50, remoteSessionsFromZooKeeper.size() * 0.05)); for (Long sessionId : remoteSessionsFromZooKeeper) { Session session = remoteSessionCache.get(sessionId); if (session == null) session = new RemoteSession(tenantName, sessionId, createSessionZooKeeperClient(sessionId)); if (session.getStatus() == Session.Status.ACTIVATE && sessionIsActiveForApplication.test(session)) continue; if (sessionHasExpired(session.getCreateTime())) { log.log(Level.FINE, () -> "Remote session " + sessionId + " for " + tenantName + " has expired, deleting it"); deleteRemoteSessionFromZooKeeper(session); deleted++; } if (deleted >= deleteMax) break; } return deleted; } public void deactivateSession(long sessionId) { var s = remoteSessionCache.get(sessionId); if (s == null) return; remoteSessionCache.put(sessionId, s.deactivated()); } public void deleteRemoteSessionFromZooKeeper(Session session) { SessionZooKeeperClient sessionZooKeeperClient = createSessionZooKeeperClient(session.getSessionId()); Transaction transaction = sessionZooKeeperClient.deleteTransaction(); transaction.commit(); transaction.close(); } private boolean sessionHasExpired(Instant created) { var expiryTime = Duration.ofSeconds(expiryTimeFlag.value()); return created.plus(expiryTime).isBefore(clock.instant()); } private List<Long> getSessionListFromDirectoryCache(List<ChildData> children) { return getSessionList(children.stream() .map(child -> Path.fromString(child.getPath()).getName()) .toList()); } private List<Long> getSessionList(List<String> children) { 
return children.stream().map(Long::parseLong).toList(); } private void loadRemoteSessions(ExecutorService executor) throws NumberFormatException { Map<Long, Future<?>> futures = new HashMap<>(); for (long sessionId : getRemoteSessionsFromZooKeeper()) { futures.put(sessionId, executor.submit(() -> sessionAdded(sessionId))); } futures.forEach((sessionId, future) -> { try { future.get(); log.log(Level.FINE, () -> "Remote session " + sessionId + " loaded"); } catch (ExecutionException | InterruptedException e) { throw new RuntimeException("Could not load remote session " + sessionId, e); } }); } /** * A session for which we don't have a watcher, i.e. hitherto unknown to us. * * @param sessionId session id for the new session */ public void sessionAdded(long sessionId) { if (hasStatusDeleted(sessionId)) return; log.log(Level.FINE, () -> "Adding remote session " + sessionId); Session session = createRemoteSession(sessionId); if (session.getStatus() == NEW) { log.log(Level.FINE, () -> session.logPre() + "Confirming upload for session " + sessionId); confirmUpload(session); } createLocalSessionFromDistributedApplicationPackage(sessionId); } private boolean hasStatusDeleted(long sessionId) { SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); RemoteSession session = new RemoteSession(tenantName, sessionId, sessionZKClient); return session.getStatus() == Session.Status.DELETE; } void activate(long sessionId) { createLocalSessionFromDistributedApplicationPackage(sessionId); RemoteSession session = remoteSessionCache.get(sessionId); if (session == null) return; CompletionWaiter waiter = createSessionZooKeeperClient(sessionId).getActiveWaiter(); log.log(Level.FINE, () -> session.logPre() + "Activating " + sessionId); applicationRepo.activateApplication(ensureApplicationLoaded(session), sessionId); log.log(Level.FINE, () -> session.logPre() + "Notifying " + waiter); notifyCompletion(waiter); log.log(Level.INFO, session.logPre() + "Session activated: " + sessionId); } private void loadSessionIfActive(RemoteSession session) { for (ApplicationId applicationId : applicationRepo.activeApplications()) { Optional<Long> activeSession = applicationRepo.activeSessionOf(applicationId); if (activeSession.isPresent() && activeSession.get() == session.getSessionId()) { log.log(Level.FINE, () -> "Found active application for session " + session.getSessionId() + " , loading it"); applicationRepo.activateApplication(ensureApplicationLoaded(session), session.getSessionId()); log.log(Level.INFO, session.logPre() + "Application activated successfully: " + applicationId + " (generation " + session.getSessionId() + ")"); return; } } } void prepareRemoteSession(long sessionId) { createLocalSessionFromDistributedApplicationPackage(sessionId); RemoteSession session = remoteSessionCache.get(sessionId); if (session == null) return; SessionZooKeeperClient sessionZooKeeperClient = createSessionZooKeeperClient(sessionId); CompletionWaiter waiter = sessionZooKeeperClient.getPrepareWaiter(); ensureApplicationLoaded(session); notifyCompletion(waiter); } public ApplicationVersions ensureApplicationLoaded(RemoteSession session) { if (session.applicationVersions().isPresent()) { return session.applicationVersions().get(); } Optional<Long> activeSessionId = getActiveSessionId(session.getApplicationId()); Optional<ApplicationVersions> previousActiveApplicationVersions = activeSessionId.filter(session::isNewerThan) .flatMap(this::activeApplicationVersions); ApplicationVersions applicationVersions = 
loadApplication(session, previousActiveApplicationVersions); RemoteSession activated = session.activated(applicationVersions); long sessionId = activated.getSessionId(); remoteSessionCache.put(sessionId, activated); updateSessionStateWatcher(sessionId); return applicationVersions; } void confirmUpload(Session session) { CompletionWaiter waiter = createSessionZooKeeperClient(session.getSessionId()).getUploadWaiter(); long sessionId = session.getSessionId(); log.log(Level.FINE, () -> "Notifying upload waiter for session " + sessionId); notifyCompletion(waiter); log.log(Level.FINE, () -> "Done notifying upload for session " + sessionId); } void notifyCompletion(CompletionWaiter completionWaiter) { try { completionWaiter.notifyCompletion(); } catch (RuntimeException e) { Set<Class<? extends KeeperException>> acceptedExceptions = Set.of(KeeperException.NoNodeException.class, KeeperException.NodeExistsException.class); Class<? extends Throwable> exceptionClass = e.getCause().getClass(); if (acceptedExceptions.contains(exceptionClass)) log.log(Level.FINE, () -> "Not able to notify completion for session (" + completionWaiter + ")," + " node " + (exceptionClass.equals(KeeperException.NoNodeException.class) ? "has been deleted" : "already exists")); else throw e; } } private ApplicationVersions loadApplication(Session session, Optional<ApplicationVersions> previousApplicationVersions) { log.log(Level.FINE, () -> "Loading application for " + session); SessionZooKeeperClient sessionZooKeeperClient = createSessionZooKeeperClient(session.getSessionId()); ActivatedModelsBuilder builder = new ActivatedModelsBuilder(session.getTenantName(), session.getSessionId(), sessionZooKeeperClient, previousApplicationVersions, sessionPreparer.getExecutor(), curator, metrics, flagSource, secretStore, hostProvisionerProvider, configserverConfig, zone, modelFactoryRegistry, configDefinitionRepo, onnxModelCost, endpointCertificateSecretStores); return ApplicationVersions.fromList(builder.buildModels(session.getApplicationId(), session.getDockerImageRepository(), session.getVespaVersion(), sessionZooKeeperClient.loadApplicationPackage(), new AllocatedHostsFromAllModels(), clock.instant())); } private void nodeChanged() { zkWatcherExecutor.execute(() -> { Multiset<Session.Status> sessionMetrics = HashMultiset.create(); getRemoteSessions().forEach(session -> sessionMetrics.add(session.getStatus())); metricUpdater.setNewSessions(sessionMetrics.count(NEW)); metricUpdater.setPreparedSessions(sessionMetrics.count(PREPARE)); metricUpdater.setActivatedSessions(sessionMetrics.count(ACTIVATE)); metricUpdater.setDeactivatedSessions(sessionMetrics.count(DEACTIVATE)); }); } @SuppressWarnings("unused") private void childEvent(CuratorFramework ignored, PathChildrenCacheEvent event) { zkWatcherExecutor.execute(() -> { log.log(Level.FINE, () -> "Got child event: " + event); switch (event.getType()) { case CHILD_ADDED, CHILD_REMOVED, CONNECTION_RECONNECTED -> sessionsChanged(); } }); } private void write(Session existingSession, LocalSession session, ApplicationId applicationId, Instant created) { SessionSerializer sessionSerializer = new SessionSerializer(); sessionSerializer.write(session.getSessionZooKeeperClient(), applicationId, created, existingSession.getApplicationPackageReference(), existingSession.getDockerImageRepository(), existingSession.getVespaVersion(), existingSession.getAthenzDomain(), existingSession.getQuota(), existingSession.getTenantSecretStores(), existingSession.getOperatorCertificates(), 
existingSession.getCloudAccount(), existingSession.getDataplaneTokens(), ActivationTriggers.empty(), writeSessionData); } public SessionData read(Session session) { return new SessionSerializer().read(session.getSessionZooKeeperClient(), readSessionData); } public void deleteExpiredSessions(Predicate<Session> sessionIsActiveForApplication) { log.log(Level.FINE, () -> "Deleting expired local sessions for tenant '" + tenantName + "'"); Set<Long> sessionIdsToDelete = new HashSet<>(); Set<Long> newSessions = findNewSessionsInFileSystem(); try { for (long sessionId : getLocalSessionsIdsFromFileSystem()) { if (newSessions.contains(sessionId)) continue; log.log(Level.FINE, () -> "Candidate local session for deletion: " + sessionId + ", created (on disk): " + created(getSessionAppDir(sessionId))); var sessionZooKeeperClient = createSessionZooKeeperClient(sessionId); Instant createTime = sessionZooKeeperClient.readCreateTime(); Session.Status status = sessionZooKeeperClient.readStatus(); var expired = sessionLifeTimeElapsed(createTime); log.log(Level.FINE, () -> "Candidate local session for deletion: " + sessionId + ", created (in zk): " + createTime + ", status " + status + ", can be deleted: " + canBeDeleted(sessionId, status) + ", hasExpired: " + expired); if (expired && canBeDeleted(sessionId, status)) { log.log(Level.FINE, () -> " expired, can be deleted: " + sessionId); sessionIdsToDelete.add(sessionId); } else if (createTime.plus(Duration.ofDays(1)).isBefore(clock.instant())) { LocalSession session; log.log(Level.FINE, () -> "not expired, but more than 1 day old: " + sessionId); try { session = getSessionFromFile(sessionId); } catch (Exception e) { log.log(Level.FINE, () -> "could not get session from file: " + sessionId + ": " + e.getMessage()); continue; } Optional<ApplicationId> applicationId = session.getOptionalApplicationId(); if (applicationId.isEmpty()) continue; if ( ! sessionIsActiveForApplication.test(session)) { sessionIdsToDelete.add(sessionId); log.log(Level.FINE, () -> "Will delete inactive session " + sessionId + " created " + createTime + " for '" + applicationId + "'"); } } } sessionIdsToDelete.forEach(this::deleteLocalSession); } catch (Throwable e) { log.log(Level.WARNING, "Error when purging old sessions ", e); } log.log(Level.FINE, () -> "Done purging old sessions"); } private boolean sessionLifeTimeElapsed(Instant created) { var sessionLifetime = Duration.ofSeconds(configserverConfig.sessionLifetime()); return created.plus(sessionLifetime).isBefore(clock.instant()); } private Optional<LocalSession> getOptionalSessionFromFileSystem(long sessionId) { try { return Optional.of(getSessionFromFile(sessionId)); } catch (Exception e) { log.log(Level.FINE, () -> "could not get session from file: " + sessionId + ": " + e.getMessage()); } return Optional.empty(); } private boolean isOldAndCanBeDeleted(long sessionId, Instant createTime) { Duration oneDay = Duration.ofDays(1); Duration expiry = Duration.ofSeconds(expiryTimeFlag.value()).compareTo(oneDay) >= 0 ? Duration.ofSeconds(expiryTimeFlag.value()) : oneDay; if (createTime.plus(expiry).isBefore(clock.instant())) { log.log(Level.FINE, () -> "more than 1 day old: " + sessionId); return true; } else { return false; } } private boolean hasExpired(Instant created) { Duration expiryTime = Duration.ofSeconds(expiryTimeFlag.value()); return created.plus(expiryTime).isBefore(clock.instant()); } private boolean canBeDeleted(long sessionId, Session.Status status) { return ( ! 
List.of(UNKNOWN, ACTIVATE).contains(status)) || oldSessionDirWithUnknownStatus(sessionId, status); } private boolean oldSessionDirWithUnknownStatus(long sessionId, Session.Status status) { Duration expiryTime = Duration.ofHours(configserverConfig.keepSessionsWithUnknownStatusHours()); File sessionDir = tenantFileSystemDirs.getUserApplicationDir(sessionId); return sessionDir.exists() && status == UNKNOWN && created(sessionDir).plus(expiryTime).isBefore(clock.instant()); } private Set<Long> findNewSessionsInFileSystem() { File[] sessions = tenantFileSystemDirs.sessionsPath().listFiles(sessionApplicationsFilter); Set<Long> newSessions = new HashSet<>(); if (sessions != null) { for (File session : sessions) { try { if (Files.getLastModifiedTime(session.toPath()).toInstant() .isAfter(clock.instant().minus(Duration.ofSeconds(30)))) newSessions.add(Long.parseLong(session.getName())); } catch (IOException e) { log.log(Level.FINE, "Unable to find last modified time for " + session.toPath()); } } } return newSessions; } private Instant created(File file) { BasicFileAttributes fileAttributes; try { fileAttributes = readAttributes(file.toPath(), BasicFileAttributes.class); return fileAttributes.creationTime().toInstant(); } catch (IOException e) { throw new UncheckedIOException(e); } } private void ensureSessionPathDoesNotExist(long sessionId) { Path sessionPath = getSessionPath(sessionId); if (curator.exists(sessionPath)) { throw new IllegalArgumentException("Path " + sessionPath.getAbsolute() + " already exists in ZooKeeper"); } } private ApplicationPackage createApplication(File userDir, File configApplicationDir, ApplicationId applicationId, long sessionId, Optional<Long> currentlyActiveSessionId, boolean internalRedeploy, Optional<DeployLogger> deployLogger) { long deployTimestamp = System.currentTimeMillis(); DeployData deployData = new DeployData(userDir.getAbsolutePath(), applicationId, deployTimestamp, internalRedeploy, sessionId, currentlyActiveSessionId.orElse(nonExistingActiveSessionId)); FilesApplicationPackage app = FilesApplicationPackage.fromFileWithDeployData(configApplicationDir, deployData); validateFileExtensions(applicationId, deployLogger, app); return app; } private void validateFileExtensions(ApplicationId applicationId, Optional<DeployLogger> deployLogger, FilesApplicationPackage app) { try { app.validateFileExtensions(); } catch (IllegalArgumentException e) { if (configserverConfig.hostedVespa()) { UnboundStringFlag flag = PermanentFlags.APPLICATION_FILES_WITH_UNKNOWN_EXTENSION; String value = flag.bindTo(flagSource).with(INSTANCE_ID, applicationId.serializedForm()).value(); switch (value) { case "FAIL" -> throw new InvalidApplicationException(e); case "LOG" -> deployLogger.ifPresent(logger -> logger.logApplicationPackage(Level.WARNING, e.getMessage())); default -> log.log(Level.WARNING, "Unknown value for flag " + flag.id() + ": " + value); } } else { deployLogger.ifPresent(logger -> logger.logApplicationPackage(Level.WARNING, e.getMessage())); } } } private LocalSession createSessionFromApplication(File applicationDirectory, ApplicationId applicationId, boolean internalRedeploy, TimeoutBudget timeoutBudget, DeployLogger deployLogger, Instant created) { long sessionId = getNextSessionId(); try { ensureSessionPathDoesNotExist(sessionId); ApplicationPackage app = createApplicationPackage(applicationDirectory, applicationId, sessionId, internalRedeploy, Optional.of(deployLogger)); log.log(Level.FINE, () -> TenantRepository.logPre(tenantName) + "Creating session " + sessionId + 
" in ZooKeeper"); SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); sessionZKClient.createNewSession(created); CompletionWaiter waiter = sessionZKClient.getUploadWaiter(); LocalSession session = new LocalSession(tenantName, sessionId, app, sessionZKClient); waiter.awaitCompletion(Duration.ofSeconds(Math.min(120, timeoutBudget.timeLeft().getSeconds()))); addLocalSession(session); return session; } catch (IOException e) { throw new RuntimeException("Error creating session " + sessionId, e); } } private ApplicationPackage createApplicationPackage(File applicationDirectory, ApplicationId applicationId, long sessionId, boolean internalRedeploy, Optional<DeployLogger> deployLogger) throws IOException { synchronized (monitor) { Optional<Long> activeSessionId = getActiveSessionId(applicationId); File userApplicationDir = getSessionAppDir(sessionId); copyApp(applicationDirectory, userApplicationDir); ApplicationPackage applicationPackage = createApplication(applicationDirectory, userApplicationDir, applicationId, sessionId, activeSessionId, internalRedeploy, deployLogger); applicationPackage.writeMetaData(); return applicationPackage; } } public Optional<ApplicationVersions> activeApplicationVersions(ApplicationId appId) { return applicationRepo.activeSessionOf(appId).flatMap(this::activeApplicationVersions); } private Optional<ApplicationVersions> activeApplicationVersions(long sessionId) { try { return Optional.ofNullable(getRemoteSession(sessionId)).map(this::ensureApplicationLoaded); } catch (IllegalArgumentException e) { return Optional.empty(); } } private void copyApp(File sourceDir, File destinationDir) throws IOException { if (destinationDir.exists()) { log.log(Level.INFO, "Destination dir " + destinationDir + " already exists, app has already been copied"); return; } if (! 
sourceDir.isDirectory()) throw new IllegalArgumentException(sourceDir.getAbsolutePath() + " is not a directory"); java.nio.file.Path tempDestinationDir = null; try { tempDestinationDir = Files.createTempDirectory(destinationDir.getParentFile().toPath(), "app-package"); log.log(Level.FINE, "Copying dir " + sourceDir.getAbsolutePath() + " to " + tempDestinationDir.toFile().getAbsolutePath()); IOUtils.copyDirectory(sourceDir, tempDestinationDir.toFile()); moveSearchDefinitionsToSchemasDir(tempDestinationDir); log.log(Level.FINE, "Moving " + tempDestinationDir + " to " + destinationDir.getAbsolutePath()); Files.move(tempDestinationDir, destinationDir.toPath(), StandardCopyOption.ATOMIC_MOVE); } finally { if (tempDestinationDir != null) IOUtils.recursiveDeleteDir(tempDestinationDir.toFile()); } } private void moveSearchDefinitionsToSchemasDir(java.nio.file.Path applicationDir) throws IOException { File schemasDir = applicationDir.resolve(ApplicationPackage.SCHEMAS_DIR.getRelative()).toFile(); File sdDir = applicationDir.resolve(ApplicationPackage.SEARCH_DEFINITIONS_DIR.getRelative()).toFile(); if (sdDir.exists() && sdDir.isDirectory()) { try { File[] sdFiles = sdDir.listFiles(); if (sdFiles != null) { Files.createDirectories(schemasDir.toPath()); List.of(sdFiles).forEach(file -> Exceptions.uncheck( () -> Files.move(file.toPath(), schemasDir.toPath().resolve(file.toPath().getFileName()), StandardCopyOption.REPLACE_EXISTING))); } Files.delete(sdDir.toPath()); } catch (IOException | UncheckedIOException e) { if (schemasDir.exists() && schemasDir.isDirectory()) throw new InvalidApplicationException( "Both " + ApplicationPackage.SCHEMAS_DIR.getRelative() + "/ and " + ApplicationPackage.SEARCH_DEFINITIONS_DIR + "/ exist in application package, please remove " + ApplicationPackage.SEARCH_DEFINITIONS_DIR + "/", e); else throw e; } } } /** * Returns a new session instance for the given session id. */ void createSessionFromId(long sessionId) { File sessionDir = getAndValidateExistingSessionAppDir(sessionId); ApplicationPackage applicationPackage = FilesApplicationPackage.fromFile(sessionDir); createLocalSession(sessionId, applicationPackage); } void createLocalSession(long sessionId, ApplicationPackage applicationPackage) { SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); LocalSession session = new LocalSession(tenantName, sessionId, applicationPackage, sessionZKClient); addLocalSession(session); } /** * Create a new local session for the given session id if it does not already exist and * will add the session to the local session cache. If there is no remote session matching * the session id the remote session will also be created. 
*/ public void createLocalSessionFromDistributedApplicationPackage(long sessionId) { if (applicationRepo.sessionExistsInFileSystem(sessionId)) { log.log(Level.FINE, () -> "Local session for session id " + sessionId + " already exists"); createSessionFromId(sessionId); return; } SessionZooKeeperClient sessionZKClient = createSessionZooKeeperClient(sessionId); var fileReference = sessionZKClient.readApplicationPackageReference(); log.log(Level.FINE, () -> "File reference for session id " + sessionId + ": " + fileReference); if (fileReference.isEmpty()) return; Optional<File> sessionDir = fileDistributionFactory.fileDirectory().getFile(fileReference.get()); if (sessionDir.isEmpty()) return; ApplicationId applicationId = sessionZKClient.readApplicationId(); log.log(Level.FINE, () -> "Creating local session for tenant '" + tenantName + "' with session id " + sessionId); createLocalSession(sessionDir.get(), applicationId, sessionId); } private Optional<Long> getActiveSessionId(ApplicationId applicationId) { return applicationRepo.activeSessionOf(applicationId); } private long getNextSessionId() { return sessionCounter.nextSessionId(); } public Path getSessionPath(long sessionId) { return sessionsPath.append(String.valueOf(sessionId)); } Path getSessionStatePath(long sessionId) { return getSessionPath(sessionId).append(ZKApplication.SESSIONSTATE_ZK_SUBPATH); } public SessionZooKeeperClient createSessionZooKeeperClient(long sessionId) { return new SessionZooKeeperClient(curator, tenantName, sessionId, configserverConfig, fileDistributionFactory.createFileManager(getSessionAppDir(sessionId)), maxNodeSize); } private File getAndValidateExistingSessionAppDir(long sessionId) { File appDir = getSessionAppDir(sessionId); if (!appDir.exists() || !appDir.isDirectory()) { throw new IllegalArgumentException("Unable to find correct application directory for session " + sessionId); } return appDir; } private File getSessionAppDir(long sessionId) { return new TenantFileSystemDirs(configServerDB, tenantName).getUserApplicationDir(sessionId); } private void updateSessionStateWatcher(long sessionId) { sessionStateWatchers.computeIfAbsent(sessionId, (id) -> { Curator.FileCache fileCache = curator.createFileCache(getSessionStatePath(id).getAbsolute(), false); fileCache.addListener(this::nodeChanged); return new SessionStateWatcher(fileCache, id, metricUpdater, zkWatcherExecutor, this); }); } @Override public String toString() { return getLocalSessions().toString(); } public Clock clock() { return clock; } public void close() { deleteAllSessions(); tenantFileSystemDirs.delete(); try { if (directoryCache != null) { directoryCache.close(); } } catch (Exception e) { log.log(Level.WARNING, "Exception when closing path cache", e); } finally { checkForRemovedSessions(new ArrayList<>()); } } private void sessionsChanged() throws NumberFormatException { List<Long> sessions = getSessionListFromDirectoryCache(directoryCache.getCurrentData()); checkForRemovedSessions(sessions); checkForAddedSessions(sessions); } private void checkForRemovedSessions(List<Long> existingSessions) { for (Iterator<RemoteSession> it = remoteSessionCache.values().iterator(); it.hasNext(); ) { long sessionId = it.next().sessionId; if (existingSessions.contains(sessionId)) continue; SessionStateWatcher watcher = sessionStateWatchers.remove(sessionId); if (watcher != null) watcher.close(); it.remove(); metricUpdater.incRemovedSessions(); } } private void checkForAddedSessions(List<Long> sessions) { for (Long sessionId : sessions) if 
(remoteSessionCache.get(sessionId) == null) sessionAdded(sessionId); } public Transaction createActivateTransaction(Session session) { Transaction transaction = createSetStatusTransaction(session, ACTIVATE); transaction.add(applicationRepo.createWriteActiveTransaction(transaction, session.getApplicationId(), session.getSessionId()).operations()); return transaction; } public Transaction createSetStatusTransaction(Session session, Session.Status status) { return session.sessionZooKeeperClient.createWriteStatusTransaction(status); } void setPrepared(Session session) { session.setStatus(PREPARE); } private static class FileTransaction extends AbstractTransaction { public static FileTransaction from(FileOperation operation) { FileTransaction transaction = new FileTransaction(); transaction.add(operation); return transaction; } @Override public void prepare() { } @Override public void commit() { for (Operation operation : operations()) ((FileOperation)operation).commit(); } } /** Factory for file operations */ private static class FileOperations { /** Creates an operation which recursively deletes the given path */ public static DeleteOperation delete(String pathToDelete) { return new DeleteOperation(pathToDelete); } } private interface FileOperation extends Transaction.Operation { void commit(); } /** * Recursively deletes this path and everything below. * Succeeds with no action if the path does not exist. */ private static class DeleteOperation implements FileOperation { private final String pathToDelete; DeleteOperation(String pathToDelete) { this.pathToDelete = pathToDelete; } @Override public void commit() { IOUtils.recursiveDeleteDir(new File(pathToDelete)); } } }
`ELSE NULL` makes more sense. But since it's time-consuming to change the Calcite code, this workaround LGTM.
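For contrast, here is a minimal sketch of the query shape the comment argues for, falling through to NULL instead of a sentinel date. This variant is hypothetical: per the comment it would require Calcite-side changes, which is why the tests below use `DATE '2001-01-01'` as the workaround.

```java
// Hypothetical variant preferred by the comment: yield NULL rather than a
// sentinel date when the input does not look like a yyyyMMdd string.
// Per the comment, this form needs Calcite changes, so it is illustrative only.
String preferredQuery =
    "SELECT f_int, \n"
        + "CASE WHEN CHAR_LENGTH(TRIM(f_string)) = 8 \n"
        + "  THEN CAST(SUBSTRING(TRIM(f_string) FROM 1 FOR 4) \n"
        + "       || '-' || SUBSTRING(TRIM(f_string) FROM 5 FOR 2) \n"
        + "       || '-' || SUBSTRING(TRIM(f_string) FROM 7 FOR 2) AS DATE) \n"
        + "  ELSE NULL \n"
        + "END \n"
        + "FROM PCOLLECTION";
```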
public void testCastToDateWithCase() { PCollection<Row> input = pipeline.apply( Create.of(Row.withSchema(INPUT_ROW_SCHEMA).addValues(1).addValue("20181018").build()) .withSchema( INPUT_ROW_SCHEMA, SerializableFunctions.identity(), SerializableFunctions.identity())); Schema resultType = Schema.builder().addInt32Field("f_int").addDateTimeField("f_date").build(); PCollection<Row> result = input.apply( "sqlQuery", SqlTransform.query( "SELECT f_int, \n" + "CASE WHEN CHAR_LENGTH(TRIM(f_string)) = 8 \n" + " THEN CAST (\n" + " SUBSTRING(TRIM(f_string) FROM 1 FOR 4) \n" + " ||'-' \n" + " ||SUBSTRING(TRIM(f_string) FROM 5 FOR 2) \n" + " ||'-' \n" + " ||SUBSTRING(TRIM(f_string) FROM 7 FOR 2) AS DATE)\n" + " ELSE DATE '2001-01-01'\n" + "END \n" + "FROM PCOLLECTION")); PAssert.that(result) .containsInAnyOrder( Row.withSchema(resultType).addValues(1, new DateTime(2018, 10, 18, 0, 0)).build()); pipeline.run(); }
+ " ELSE DATE '2001-01-01'\n"
public void testCastToDateWithCase() { PCollection<Row> input = pipeline.apply( Create.of(Row.withSchema(INPUT_ROW_SCHEMA).addValues(1).addValue("20181018").build()) .withSchema( INPUT_ROW_SCHEMA, SerializableFunctions.identity(), SerializableFunctions.identity())); Schema resultType = Schema.builder().addInt32Field("f_int").addDateTimeField("f_date").build(); PCollection<Row> result = input.apply( "sqlQuery", SqlTransform.query( "SELECT f_int, \n" + "CASE WHEN CHAR_LENGTH(TRIM(f_string)) = 8 \n" + " THEN CAST (\n" + " SUBSTRING(TRIM(f_string) FROM 1 FOR 4) \n" + " ||'-' \n" + " ||SUBSTRING(TRIM(f_string) FROM 5 FOR 2) \n" + " ||'-' \n" + " ||SUBSTRING(TRIM(f_string) FROM 7 FOR 2) AS DATE)\n" + " ELSE DATE '2001-01-01'\n" + "END \n" + "FROM PCOLLECTION")); PAssert.that(result) .containsInAnyOrder( Row.withSchema(resultType).addValues(1, new DateTime(2018, 10, 18, 0, 0)).build()); pipeline.run(); }
class BeamSqlCastTest { private static final Schema INPUT_ROW_SCHEMA = Schema.builder().addInt32Field("f_int").addStringField("f_string").build(); @Rule public final TestPipeline pipeline = TestPipeline.create(); @Rule public ExpectedException exceptions = ExpectedException.none(); @Test public void testCastToDate() { PCollection<Row> input = pipeline.apply( Create.of(Row.withSchema(INPUT_ROW_SCHEMA).addValues(1).addValue("20181018").build()) .withSchema( INPUT_ROW_SCHEMA, SerializableFunctions.identity(), SerializableFunctions.identity())); Schema resultType = Schema.builder().addInt32Field("f_int").addNullableField("f_date", DATETIME).build(); PCollection<Row> result = input.apply( SqlTransform.query( "SELECT f_int, \n" + " CAST( \n" + " SUBSTRING(TRIM(f_string) FROM 1 FOR 4) \n" + " ||'-' \n" + " ||SUBSTRING(TRIM(f_string) FROM 5 FOR 2) \n" + " ||'-' \n" + " ||SUBSTRING(TRIM(f_string) FROM 7 FOR 2) as DATE) \n" + "FROM PCOLLECTION")); PAssert.that(result) .containsInAnyOrder( Row.withSchema(resultType).addValues(1, new DateTime(2018, 10, 18, 0, 0)).build()); pipeline.run(); } @Test public void testCastToDate2() { PCollection<Row> input = pipeline.apply( Create.of(Row.withSchema(INPUT_ROW_SCHEMA).addValues(1).addValue("20181018").build()) .withSchema( INPUT_ROW_SCHEMA, SerializableFunctions.identity(), SerializableFunctions.identity())); Schema resultType = Schema.builder().addInt32Field("f_int").addNullableField("f_date", DATETIME).build(); PCollection<Row> result = input.apply( SqlTransform.query( "SELECT f_int, \n" + " CAST( \n" + " f_string AS DATE) \n" + "FROM PCOLLECTION")); PAssert.that(result) .containsInAnyOrder( Row.withSchema(resultType).addValues(1, new DateTime(2018, 10, 18, 0, 0)).build()); pipeline.run(); } @Test }
class BeamSqlCastTest { private static final Schema INPUT_ROW_SCHEMA = Schema.builder().addInt32Field("f_int").addStringField("f_string").build(); @Rule public final TestPipeline pipeline = TestPipeline.create(); @Rule public ExpectedException exceptions = ExpectedException.none(); @Test public void testCastToDate() { PCollection<Row> input = pipeline.apply( Create.of(Row.withSchema(INPUT_ROW_SCHEMA).addValues(1).addValue("20181018").build()) .withSchema( INPUT_ROW_SCHEMA, SerializableFunctions.identity(), SerializableFunctions.identity())); Schema resultType = Schema.builder().addInt32Field("f_int").addNullableField("f_date", DATETIME).build(); PCollection<Row> result = input.apply( SqlTransform.query( "SELECT f_int, \n" + " CAST( \n" + " SUBSTRING(TRIM(f_string) FROM 1 FOR 4) \n" + " ||'-' \n" + " ||SUBSTRING(TRIM(f_string) FROM 5 FOR 2) \n" + " ||'-' \n" + " ||SUBSTRING(TRIM(f_string) FROM 7 FOR 2) as DATE) \n" + "FROM PCOLLECTION")); PAssert.that(result) .containsInAnyOrder( Row.withSchema(resultType).addValues(1, new DateTime(2018, 10, 18, 0, 0)).build()); pipeline.run(); } @Test public void testCastToDate2() { PCollection<Row> input = pipeline.apply( Create.of(Row.withSchema(INPUT_ROW_SCHEMA).addValues(1).addValue("20181018").build()) .withSchema( INPUT_ROW_SCHEMA, SerializableFunctions.identity(), SerializableFunctions.identity())); Schema resultType = Schema.builder().addInt32Field("f_int").addNullableField("f_date", DATETIME).build(); PCollection<Row> result = input.apply( SqlTransform.query( "SELECT f_int, \n" + " CAST( \n" + " f_string AS DATE) \n" + "FROM PCOLLECTION")); PAssert.that(result) .containsInAnyOrder( Row.withSchema(resultType).addValues(1, new DateTime(2018, 10, 18, 0, 0)).build()); pipeline.run(); } @Test }
It is enough to verify the format of the savepoint. We verify that savepoints work by restoring from the savepoint (after relocating it) without exceptions.
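An outline of the two checks the comment describes, using the helper names from the test shown below (a sketch only, not the full test): the keyed-state handle type proves which format was written, and a clean restore after renaming the savepoint directory proves the savepoint is usable.

```java
// (1) The savepoint's keyed-state handle type proves the format that was written.
String savepointPath =
        submitJobAndTakeSavepoint(cluster, formatType, checkpointsBeforeSavepoint);
CheckpointMetadata metadata = loadCheckpointMetadata(savepointPath);
metadata.getOperatorStates().stream()
        .filter(hasKeyedState())
        .findFirst()
        .orElseThrow(IllegalStateException::new)
        .getStates()
        .forEach(subtask ->
                subtask.getManagedKeyedState().forEach(stateHandleVerification));

// (2) No assertion needed beyond "no exception": restoring from the renamed
// savepoint directory is itself the check that the savepoint works.
relocateAndVerify(cluster, savepointPath, renamedSavepointDir);
```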
private static Stream<Arguments> parameters() { return Stream.of( Arguments.of( SavepointFormatType.CANONICAL, HEAP, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(SavepointKeyedStateHandle.class))), Arguments.of( SavepointFormatType.NATIVE, HEAP, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(KeyGroupsStateHandle.class))), Arguments.of( SavepointFormatType.CANONICAL, ROCKSDB_FULL_SNAPSHOTS, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(SavepointKeyedStateHandle.class))), Arguments.of( SavepointFormatType.NATIVE, ROCKSDB_FULL_SNAPSHOTS, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(KeyGroupsStateHandle.class))), Arguments.of( SavepointFormatType.CANONICAL, ROCKSDB_INCREMENTAL_SNAPSHOTS, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(SavepointKeyedStateHandle.class))), Arguments.of( SavepointFormatType.NATIVE, ROCKSDB_INCREMENTAL_SNAPSHOTS, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf( IncrementalRemoteKeyedStateHandle.class)))); }
IncrementalRemoteKeyedStateHandle.class))));
private static Stream<Arguments> parameters() { return Stream.of( Arguments.of( SavepointFormatType.CANONICAL, HEAP, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(SavepointKeyedStateHandle.class))), Arguments.of( SavepointFormatType.NATIVE, HEAP, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(KeyGroupsStateHandle.class))), Arguments.of( SavepointFormatType.CANONICAL, ROCKSDB_FULL_SNAPSHOTS, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(SavepointKeyedStateHandle.class))), Arguments.of( SavepointFormatType.NATIVE, ROCKSDB_FULL_SNAPSHOTS, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(KeyGroupsStateHandle.class))), Arguments.of( SavepointFormatType.CANONICAL, ROCKSDB_INCREMENTAL_SNAPSHOTS, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf(SavepointKeyedStateHandle.class))), Arguments.of( SavepointFormatType.NATIVE, ROCKSDB_INCREMENTAL_SNAPSHOTS, (Consumer<KeyedStateHandle>) keyedState -> assertThat( keyedState, instanceOf( IncrementalRemoteKeyedStateHandle.class)))); }
class SavepointFormatITCase { @TempDir Path checkpointsDir; @TempDir Path originalSavepointDir; @TempDir Path renamedSavepointDir; @RegisterExtension LoggerAuditingExtension loggerAuditingExtension = new LoggerAuditingExtension(SavepointFormatITCase.class, Level.INFO); private abstract static class StateBackendConfig { public abstract String getName(); public abstract Configuration getConfiguration(); public int getCheckpointsBeforeSavepoint() { return 0; } @Override public final String toString() { return getName(); } } private static final StateBackendConfig HEAP = new StateBackendConfig() { @Override public String getName() { return "HEAP"; } @Override public Configuration getConfiguration() { Configuration stateBackendConfig = new Configuration(); stateBackendConfig.setString(StateBackendOptions.STATE_BACKEND, "filesystem"); stateBackendConfig.set( CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, MemorySize.ZERO); return stateBackendConfig; } }; private static final StateBackendConfig ROCKSDB_FULL_SNAPSHOTS = new StateBackendConfig() { @Override public String getName() { return "ROCKSDB_FULL_SNAPSHOTS"; } @Override public Configuration getConfiguration() { Configuration stateBackendConfig = new Configuration(); stateBackendConfig.setString(StateBackendOptions.STATE_BACKEND, "rocksdb"); stateBackendConfig.set( CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, MemorySize.ZERO); stateBackendConfig.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, false); return stateBackendConfig; } }; private static final StateBackendConfig ROCKSDB_INCREMENTAL_SNAPSHOTS = new StateBackendConfig() { @Override public String getName() { return "ROCKSDB_INCREMENTAL_SNAPSHOTS"; } @Override public int getCheckpointsBeforeSavepoint() { return 1; } @Override public Configuration getConfiguration() { Configuration stateBackendConfig = new Configuration(); stateBackendConfig.setString(StateBackendOptions.STATE_BACKEND, "rocksdb"); stateBackendConfig.set( CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, MemorySize.ZERO); stateBackendConfig.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true); return stateBackendConfig; } }; @ParameterizedTest(name = "[{index}] {0}, {1}") @MethodSource("parameters") public void testTriggerSavepointAndResumeWithFileBasedCheckpointsAndRelocateBasePath( SavepointFormatType formatType, StateBackendConfig stateBackendConfig, Consumer<KeyedStateHandle> stateHandleVerification) throws Exception { final int numTaskManagers = 2; final int numSlotsPerTaskManager = 2; final Configuration config = stateBackendConfig.getConfiguration(); config.set(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointsDir.toUri().toString()); final MiniClusterWithClientResource miniClusterResource = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(config) .setNumberTaskManagers(numTaskManagers) .setNumberSlotsPerTaskManager(numSlotsPerTaskManager) .build()); miniClusterResource.before(); try { final String savepointPath = submitJobAndTakeSavepoint( miniClusterResource, formatType, stateBackendConfig.getCheckpointsBeforeSavepoint()); final CheckpointMetadata metadata = loadCheckpointMetadata(savepointPath); final OperatorState operatorState = metadata.getOperatorStates().stream().filter(hasKeyedState()).findFirst().get(); operatorState .getStates() .forEach( subtaskState -> { subtaskState .getManagedKeyedState() .forEach(stateHandleVerification); }); relocateAndVerify(miniClusterResource, savepointPath, renamedSavepointDir); } finally { miniClusterResource.after(); } } 
@NotNull private Predicate<OperatorState> hasKeyedState() { return op -> op.hasSubtaskStates() && op.getStates().stream() .findFirst() .map(subtaskState -> subtaskState.getManagedKeyedState().hasState()) .orElse(false); } private CheckpointMetadata loadCheckpointMetadata(String savepointPath) throws IOException { CompletedCheckpointStorageLocation location = AbstractFsCheckpointStorageAccess.resolveCheckpointPointer(savepointPath); try (DataInputStream stream = new DataInputStream(location.getMetadataHandle().openInputStream())) { return Checkpoints.loadCheckpointMetadata( stream, Thread.currentThread().getContextClassLoader(), savepointPath); } } private void relocateAndVerify( MiniClusterWithClientResource cluster, String savepointPath, Path renamedSavepointDir) throws Exception { final org.apache.flink.core.fs.Path oldPath = new org.apache.flink.core.fs.Path(savepointPath); final org.apache.flink.core.fs.Path newPath = new org.apache.flink.core.fs.Path(renamedSavepointDir.toUri().toString()); (new org.apache.flink.core.fs.Path(savepointPath).getFileSystem()).rename(oldPath, newPath); final JobGraph jobGraph = createJobGraph(); jobGraph.setSavepointRestoreSettings( SavepointRestoreSettings.forPath( renamedSavepointDir.toUri().toString(), false, RestoreMode.CLAIM)); final JobID jobId = jobGraph.getJobID(); ClusterClient<?> client = cluster.getClusterClient(); client.submitJob(jobGraph).get(); waitForAllTaskRunning(cluster.getMiniCluster(), jobId, false); } private String submitJobAndTakeSavepoint( MiniClusterWithClientResource cluster, SavepointFormatType formatType, int checkpointBeforeSavepoint) throws Exception { final JobGraph jobGraph = createJobGraph(); final JobID jobId = jobGraph.getJobID(); ClusterClient<?> client = cluster.getClusterClient(); client.submitJob(jobGraph).get(); waitForAllTaskRunning(cluster.getMiniCluster(), jobId, false); for (int i = 0; i < checkpointBeforeSavepoint; i++) { cluster.getMiniCluster().triggerCheckpoint(jobId).get(); } return client.stopWithSavepoint( jobId, false, originalSavepointDir.toUri().toString(), formatType) .get(); } private static JobGraph createJobGraph() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(4); env.setRuntimeMode(RuntimeExecutionMode.STREAMING); env.disableOperatorChaining(); env.fromSequence(Long.MIN_VALUE, Long.MAX_VALUE) .keyBy(i -> i % 1000) .map(new StatefulCounter()) .addSink(new DiscardingSink<>()); return env.getStreamGraph().getJobGraph(); } private static final class StatefulCounter extends RichMapFunction<Long, Long> { private ValueState<Long> counter; @Override public void open(Configuration parameters) throws Exception { counter = getRuntimeContext() .getState( new ValueStateDescriptor<>( "counter", BasicTypeInfo.LONG_TYPE_INFO)); } @Override public Long map(Long value) throws Exception { counter.update(Optional.ofNullable(counter.value()).orElse(0L) + value); return counter.value(); } } }
class SavepointFormatITCase { @TempDir Path checkpointsDir; @TempDir Path originalSavepointDir; @TempDir Path renamedSavepointDir; @RegisterExtension LoggerAuditingExtension loggerAuditingExtension = new LoggerAuditingExtension(SavepointFormatITCase.class, Level.INFO); private abstract static class StateBackendConfig { public abstract String getName(); public abstract Configuration getConfiguration(); public int getCheckpointsBeforeSavepoint() { return 0; } @Override public final String toString() { return getName(); } } private static final StateBackendConfig HEAP = new StateBackendConfig() { @Override public String getName() { return "HEAP"; } @Override public Configuration getConfiguration() { Configuration stateBackendConfig = new Configuration(); stateBackendConfig.setString(StateBackendOptions.STATE_BACKEND, "filesystem"); stateBackendConfig.set( CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, MemorySize.ZERO); return stateBackendConfig; } }; private static final StateBackendConfig ROCKSDB_FULL_SNAPSHOTS = new StateBackendConfig() { @Override public String getName() { return "ROCKSDB_FULL_SNAPSHOTS"; } @Override public Configuration getConfiguration() { Configuration stateBackendConfig = new Configuration(); stateBackendConfig.setString(StateBackendOptions.STATE_BACKEND, "rocksdb"); stateBackendConfig.set( CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, MemorySize.ZERO); stateBackendConfig.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, false); return stateBackendConfig; } }; private static final StateBackendConfig ROCKSDB_INCREMENTAL_SNAPSHOTS = new StateBackendConfig() { @Override public String getName() { return "ROCKSDB_INCREMENTAL_SNAPSHOTS"; } @Override public int getCheckpointsBeforeSavepoint() { return 1; } @Override public Configuration getConfiguration() { Configuration stateBackendConfig = new Configuration(); stateBackendConfig.setString(StateBackendOptions.STATE_BACKEND, "rocksdb"); stateBackendConfig.set( CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, MemorySize.ZERO); stateBackendConfig.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true); return stateBackendConfig; } }; @ParameterizedTest(name = "[{index}] {0}, {1}") @MethodSource("parameters") public void testTriggerSavepointAndResumeWithFileBasedCheckpointsAndRelocateBasePath( SavepointFormatType formatType, StateBackendConfig stateBackendConfig, Consumer<KeyedStateHandle> stateHandleVerification) throws Exception { final int numTaskManagers = 2; final int numSlotsPerTaskManager = 2; final Configuration config = stateBackendConfig.getConfiguration(); config.set(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointsDir.toUri().toString()); final MiniClusterWithClientResource miniClusterResource = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(config) .setNumberTaskManagers(numTaskManagers) .setNumberSlotsPerTaskManager(numSlotsPerTaskManager) .build()); miniClusterResource.before(); try { final String savepointPath = submitJobAndTakeSavepoint( miniClusterResource, formatType, stateBackendConfig.getCheckpointsBeforeSavepoint()); final CheckpointMetadata metadata = loadCheckpointMetadata(savepointPath); final OperatorState operatorState = metadata.getOperatorStates().stream().filter(hasKeyedState()).findFirst().get(); operatorState .getStates() .forEach( subtaskState -> { subtaskState .getManagedKeyedState() .forEach(stateHandleVerification); }); relocateAndVerify(miniClusterResource, savepointPath, renamedSavepointDir); } finally { miniClusterResource.after(); } } 
@NotNull private Predicate<OperatorState> hasKeyedState() { return op -> op.hasSubtaskStates() && op.getStates().stream() .findFirst() .map(subtaskState -> subtaskState.getManagedKeyedState().hasState()) .orElse(false); } private CheckpointMetadata loadCheckpointMetadata(String savepointPath) throws IOException { CompletedCheckpointStorageLocation location = AbstractFsCheckpointStorageAccess.resolveCheckpointPointer(savepointPath); try (DataInputStream stream = new DataInputStream(location.getMetadataHandle().openInputStream())) { return Checkpoints.loadCheckpointMetadata( stream, Thread.currentThread().getContextClassLoader(), savepointPath); } } private void relocateAndVerify( MiniClusterWithClientResource cluster, String savepointPath, Path renamedSavepointDir) throws Exception { final org.apache.flink.core.fs.Path oldPath = new org.apache.flink.core.fs.Path(savepointPath); final org.apache.flink.core.fs.Path newPath = new org.apache.flink.core.fs.Path(renamedSavepointDir.toUri().toString()); (new org.apache.flink.core.fs.Path(savepointPath).getFileSystem()).rename(oldPath, newPath); final JobGraph jobGraph = createJobGraph(); jobGraph.setSavepointRestoreSettings( SavepointRestoreSettings.forPath( renamedSavepointDir.toUri().toString(), false, RestoreMode.CLAIM)); final JobID jobId = jobGraph.getJobID(); ClusterClient<?> client = cluster.getClusterClient(); client.submitJob(jobGraph).get(); waitForAllTaskRunning(cluster.getMiniCluster(), jobId, false); } private String submitJobAndTakeSavepoint( MiniClusterWithClientResource cluster, SavepointFormatType formatType, int checkpointBeforeSavepoint) throws Exception { final JobGraph jobGraph = createJobGraph(); final JobID jobId = jobGraph.getJobID(); ClusterClient<?> client = cluster.getClusterClient(); client.submitJob(jobGraph).get(); waitForAllTaskRunning(cluster.getMiniCluster(), jobId, false); for (int i = 0; i < checkpointBeforeSavepoint; i++) { cluster.getMiniCluster().triggerCheckpoint(jobId).get(); } return client.stopWithSavepoint( jobId, false, originalSavepointDir.toUri().toString(), formatType) .get(); } private static JobGraph createJobGraph() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(4); env.setRuntimeMode(RuntimeExecutionMode.STREAMING); env.disableOperatorChaining(); env.fromSequence(Long.MIN_VALUE, Long.MAX_VALUE) .keyBy(i -> i % 1000) .map(new StatefulCounter()) .addSink(new DiscardingSink<>()); return env.getStreamGraph().getJobGraph(); } private static final class StatefulCounter extends RichMapFunction<Long, Long> { private ValueState<Long> counter; @Override public void open(Configuration parameters) throws Exception { counter = getRuntimeContext() .getState( new ValueStateDescriptor<>( "counter", BasicTypeInfo.LONG_TYPE_INFO)); } @Override public Long map(Long value) throws Exception { counter.update(Optional.ofNullable(counter.value()).orElse(0L) + value); return counter.value(); } } }
We're missing tests for using the delivery state on success and on failure.
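A sketch of what the success-path test can assert, mirroring the captor-based fixtures in the test class shown below (names such as `sender`, `reactorSender`, `message`, and `transactionalState` are the class's mocks and fields; the failure path can instead assert that the error surfaces via `StepVerifier.verifyErrorMessage`).

```java
// Capture the DeliveryState that ReactorSender applies to the Proton-J
// delivery and assert the transactional state was propagated on success.
ArgumentCaptor<DeliveryState> dispositionCaptor =
        ArgumentCaptor.forClass(DeliveryState.class);
Delivery delivery = mock(Delivery.class);
doNothing().when(delivery).disposition(dispositionCaptor.capture());
when(sender.delivery(any(byte[].class))).thenReturn(delivery);

reactorSender.send(message, transactionalState).subscribe();
// ... after the dispatcher-submitted runnable has executed:
Assertions.assertSame(transactionalState, dispositionCaptor.getValue());
```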
public void testSendWithTransaction() { Message message = Proton.message(); message.setMessageId("id"); message.setBody(new AmqpValue("hello")); AmqpTransaction transaction = new AmqpTransaction(ByteBuffer.wrap("1".getBytes())); ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); doReturn(Mono.empty()).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), any(AmqpTransaction.class)); StepVerifier.create(spyReactorSender.send(message, transaction)) .verifyComplete(); StepVerifier.create(spyReactorSender.send(message, transaction)) .verifyComplete(); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender, times(2)).send(any(byte[].class), anyInt(), anyInt(), ArgumentMatchers.same(transaction)); }
ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager,
public void testSendWithTransaction() { Message message = Proton.message(); message.setMessageId("id"); message.setBody(new AmqpValue("hello")); ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); doReturn(Mono.empty()).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), eq(transactionalState)); StepVerifier.create(spyReactorSender.send(message, transactionalState)) .verifyComplete(); StepVerifier.create(spyReactorSender.send(message, transactionalState)) .verifyComplete(); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender, times(2)).send(any(byte[].class), anyInt(), eq(DeliveryImpl.DEFAULT_MESSAGE_FORMAT), eq(transactionalState)); }
class ReactorSenderTest { private String entityPath = "entity-path"; @Mock private Sender sender; @Mock private SendLinkHandler handler; @Mock private ReactorProvider reactorProvider; @Mock private TokenManager tokenManager; @Mock private Reactor reactor; @Mock private Selectable selectable; @Mock private MessageSerializer messageSerializer; @BeforeEach public void setup() throws IOException { MockitoAnnotations.initMocks(this); Delivery delivery = mock(Delivery.class); when(delivery.getRemoteState()).thenReturn(Accepted.getInstance()); when(delivery.getTag()).thenReturn("tag".getBytes()); when(handler.getDeliveredMessages()).thenReturn(Flux.just(delivery)); when(reactor.selectable()).thenReturn(selectable); when(handler.getLinkCredits()).thenReturn(Flux.just(100)); when(handler.getEndpointStates()).thenReturn(Flux.just(EndpointState.ACTIVE)); when(handler.getErrors()).thenReturn(Flux.empty()); when(tokenManager.getAuthorizationResults()).thenReturn(Flux.just(AmqpResponseCode.ACCEPTED)); when(sender.getCredit()).thenReturn(0); doNothing().when(selectable).setChannel(any()); doNothing().when(selectable).onReadable(any()); doNothing().when(selectable).onFree(any()); doNothing().when(selectable).setReading(true); doNothing().when(reactor).update(selectable); ReactorDispatcher reactorDispatcher = new ReactorDispatcher(reactor); when(reactor.attachments()).thenReturn(new Record() { @Override public <T> T get(Object o, Class<T> aClass) { return null; } @Override public <T> void set(Object o, Class<T> aClass, T t) { } @Override public void clear() { } }); when(reactorProvider.getReactorDispatcher()).thenReturn(reactorDispatcher); when(sender.getRemoteMaxMessageSize()).thenReturn(UnsignedLong.valueOf(1000)); } @Test public void testLinkSize() throws IOException { ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); StepVerifier.create(reactorSender.getLinkSize()) .expectNext(1000) .verifyComplete(); StepVerifier.create(reactorSender.getLinkSize()) .expectNext(1000) .verifyComplete(); verify(sender, times(1)).getRemoteMaxMessageSize(); } /** * Testing that we can send message with transaction. 
*/ @Test @Test public void testSend() { Message message = Proton.message(); message.setMessageId("id"); message.setBody(new AmqpValue("hello")); ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); doReturn(Mono.empty()).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), isNull()); StepVerifier.create(spyReactorSender.send(message)) .verifyComplete(); StepVerifier.create(spyReactorSender.send(message)) .verifyComplete(); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender, times(2)).send(any(byte[].class), anyInt(), anyInt(), isNull()); } @Test public void testSendBatch() { Message message = Proton.message(); message.setMessageId("id1"); message.setBody(new AmqpValue("hello")); Message message2 = Proton.message(); message2.setMessageId("id2"); message2.setBody(new AmqpValue("world")); ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); doReturn(Mono.empty()).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), isNull()); StepVerifier.create(spyReactorSender.send(Arrays.asList(message, message2))) .verifyComplete(); StepVerifier.create(spyReactorSender.send(Arrays.asList(message, message2))) .verifyComplete(); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender, times(2)).send(any(byte[].class), anyInt(), anyInt(), isNull()); } @Test public void testLinkSizeSmallerThanMessageSize() { when(sender.getRemoteMaxMessageSize()).thenReturn(UnsignedLong.valueOf(10)); Message message = Proton.message(); message.setMessageId("id"); message.setBody(new AmqpValue("hello")); ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); doReturn(Mono.empty()).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), isNull()); StepVerifier.create(spyReactorSender.send(message)) .verifyErrorSatisfies(throwable -> { Assertions.assertTrue(throwable instanceof AmqpException); Assertions.assertTrue(throwable.getMessage().startsWith("Error sending. Size of the payload exceeded " + "maximum message size")); }); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender, times(0)).send(any(byte[].class), anyInt(), anyInt(), isNull()); } }
class ReactorSenderTest { private String entityPath = "entity-path"; @Mock private Sender sender; @Mock private SendLinkHandler handler; @Mock private ReactorProvider reactorProvider; @Mock private TokenManager tokenManager; @Mock private Reactor reactor; @Mock private Selectable selectable; @Mock private MessageSerializer messageSerializer; @Mock private TransactionalState transactionalState; @Captor private ArgumentCaptor<Runnable> dispatcherCaptor; @Captor private ArgumentCaptor<DeliveryState> deliveryStateArgumentCaptor; @BeforeEach public void setup() throws IOException { MockitoAnnotations.initMocks(this); Delivery delivery = mock(Delivery.class); when(delivery.getRemoteState()).thenReturn(Accepted.getInstance()); when(delivery.getTag()).thenReturn("tag".getBytes()); when(handler.getDeliveredMessages()).thenReturn(Flux.just(delivery)); when(reactor.selectable()).thenReturn(selectable); when(handler.getLinkCredits()).thenReturn(Flux.just(100)); final ReplayProcessor<EndpointState> endpointStateReplayProcessor = ReplayProcessor.cacheLast(); when(handler.getEndpointStates()).thenReturn(endpointStateReplayProcessor); FluxSink<EndpointState> sink1 = endpointStateReplayProcessor.sink(); sink1.next(EndpointState.ACTIVE); when(handler.getErrors()).thenReturn(Flux.empty()); when(tokenManager.getAuthorizationResults()).thenReturn(Flux.just(AmqpResponseCode.ACCEPTED)); when(sender.getCredit()).thenReturn(100); when(sender.advance()).thenReturn(true); doNothing().when(selectable).setChannel(any()); doNothing().when(selectable).onReadable(any()); doNothing().when(selectable).onFree(any()); doNothing().when(selectable).setReading(true); doNothing().when(reactor).update(selectable); ReactorDispatcher reactorDispatcher = new ReactorDispatcher(reactor); when(reactor.attachments()).thenReturn(new Record() { @Override public <T> T get(Object o, Class<T> aClass) { return null; } @Override public <T> void set(Object o, Class<T> aClass, T t) { } @Override public void clear() { } }); when(reactorProvider.getReactorDispatcher()).thenReturn(reactorDispatcher); when(sender.getRemoteMaxMessageSize()).thenReturn(UnsignedLong.valueOf(1000)); } @Test public void testLinkSize() { ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); StepVerifier.create(reactorSender.getLinkSize()) .expectNext(1000) .verifyComplete(); StepVerifier.create(reactorSender.getLinkSize()) .expectNext(1000) .verifyComplete(); verify(sender, times(1)).getRemoteMaxMessageSize(); } @Test public void testSendWithTransactionFailed() { Message message = Proton.message(); message.setMessageId("id"); message.setBody(new AmqpValue("hello")); final String exceptionString = "fake exception"; ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); Throwable exception = new RuntimeException(exceptionString); doReturn(Mono.error(exception)).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), eq(transactionalState)); StepVerifier.create(spyReactorSender.send(message, transactionalState)) .verifyErrorMessage(exceptionString); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender).send(any(byte[].class), anyInt(), eq(DeliveryImpl.DEFAULT_MESSAGE_FORMAT), 
eq(transactionalState)); } /** * Testing that we can send message with transaction. */ @Test /** * Testing that we can send message with transaction. */ @Test public void testSendWithTransactionDeliverySet() throws IOException { Message message = Proton.message(); message.setMessageId("id"); message.setBody(new AmqpValue("hello")); when(sender.send(any(byte[].class), anyInt(), anyInt())).thenReturn(26); ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorDispatcher reactorDispatcherMock = mock(ReactorDispatcher.class); when(reactorProvider.getReactorDispatcher()).thenReturn(reactorDispatcherMock); doNothing().when(reactorDispatcherMock).invoke(any(Runnable.class)); final Delivery deliveryToSend = mock(Delivery.class); doNothing().when(deliveryToSend).setMessageFormat(anyInt()); doNothing().when(deliveryToSend).disposition(deliveryStateArgumentCaptor.capture()); when(sender.delivery(any(byte[].class))).thenReturn(deliveryToSend); reactorSender.send(message, transactionalState).subscribe(); verify(reactorDispatcherMock).invoke(dispatcherCaptor.capture()); List<Runnable> invocations = dispatcherCaptor.getAllValues(); invocations.get(0).run(); DeliveryState deliveryState = deliveryStateArgumentCaptor.getValue(); Assertions.assertSame(transactionalState, deliveryState); verify(sender).getRemoteMaxMessageSize(); verify(sender).advance(); } @Test public void testSend() { Message message = Proton.message(); message.setMessageId("id"); message.setBody(new AmqpValue("hello")); ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); doReturn(Mono.empty()).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), isNull()); StepVerifier.create(spyReactorSender.send(message)) .verifyComplete(); StepVerifier.create(spyReactorSender.send(message)) .verifyComplete(); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender, times(2)).send(any(byte[].class), anyInt(), anyInt(), isNull()); } @Test public void testSendBatch() { Message message = Proton.message(); message.setMessageId("id1"); message.setBody(new AmqpValue("hello")); Message message2 = Proton.message(); message2.setMessageId("id2"); message2.setBody(new AmqpValue("world")); ReactorSender reactorSender = new ReactorSender(entityPath, sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); doReturn(Mono.empty()).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), isNull()); StepVerifier.create(spyReactorSender.send(Arrays.asList(message, message2))) .verifyComplete(); StepVerifier.create(spyReactorSender.send(Arrays.asList(message, message2))) .verifyComplete(); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender, times(2)).send(any(byte[].class), anyInt(), anyInt(), isNull()); } @Test public void testLinkSizeSmallerThanMessageSize() { when(sender.getRemoteMaxMessageSize()).thenReturn(UnsignedLong.valueOf(10)); Message message = Proton.message(); message.setMessageId("id"); message.setBody(new AmqpValue("hello")); ReactorSender reactorSender = new ReactorSender(entityPath, 
sender, handler, reactorProvider, tokenManager, messageSerializer, Duration.ofSeconds(1), new ExponentialAmqpRetryPolicy(new AmqpRetryOptions())); ReactorSender spyReactorSender = spy(reactorSender); doReturn(Mono.empty()).when(spyReactorSender).send(any(byte[].class), anyInt(), anyInt(), isNull()); StepVerifier.create(spyReactorSender.send(message)) .verifyErrorSatisfies(throwable -> { Assertions.assertTrue(throwable instanceof AmqpException); Assertions.assertTrue(throwable.getMessage().startsWith("Error sending. Size of the payload exceeded " + "maximum message size")); }); verify(sender, times(1)).getRemoteMaxMessageSize(); verify(spyReactorSender, times(0)).send(any(byte[].class), anyInt(), anyInt(), isNull()); } }
```suggestion
out.append("Build a Ballerina module(s)/file and produce an executable JAR file(s). \n");
```
public void printLongDesc(StringBuilder out) { out.append("Build Ballerina module(s)/file and produce an executable JAR file(s). \n"); out.append("\n"); out.append("Build a Ballerina project or a specific module in a project. The \n"); out.append("executable \".jar\" files will be created in the <PROJECT-ROOT>/target/bin directory. \n"); out.append("\n"); out.append("Build a single Ballerina file. This creates an executable .jar file in the \n"); out.append("current directory. The name of the executable file will be \n"); out.append("<ballerina-file-name>.jar. \n"); out.append("\n"); out.append("If the output file is specified with the -o flag, the output \n"); out.append("will be written to the given output file name. The -o flag will only \n"); out.append("work for single files. \n"); }
out.append("Build Ballerina module(s)/file and produce an executable JAR file(s). \n");
public void printLongDesc(StringBuilder out) { out.append("Build a Ballerina module(s)/file and produce an executable JAR file(s). \n"); out.append("\n"); out.append("Build a Ballerina project or a specific module in a project. The \n"); out.append("executable \".jar\" files will be created in the <PROJECT-ROOT>/target/bin directory. \n"); out.append("\n"); out.append("Build a single Ballerina file. This creates an executable .jar file in the \n"); out.append("current directory. The name of the executable file will be \n"); out.append("<ballerina-file-name>.jar. \n"); out.append("\n"); out.append("If the output file is specified with the -o flag, the output \n"); out.append("will be written to the given output file name. The -o flag will only \n"); out.append("work for single files. \n"); }
class BuildCommand implements BLauncherCmd { private final PrintStream outStream; private final PrintStream errStream; private Path sourceRootPath; private boolean exitWhenFinish; private boolean skipCopyLibsFromDist; public BuildCommand() { this.sourceRootPath = Paths.get(System.getProperty("user.dir")); this.outStream = System.out; this.errStream = System.err; this.exitWhenFinish = true; this.skipCopyLibsFromDist = false; } public BuildCommand(Path userDir, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist) { this.sourceRootPath = userDir; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; } public BuildCommand(Path userDir, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist, Path executableOutputDir) { this.sourceRootPath = userDir; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; this.output = executableOutputDir.toString(); } @CommandLine.Option(names = {"--sourceroot"}, description = "Path to the directory containing the source files and modules") private String sourceRoot; @CommandLine.Option(names = {"--compile", "-c"}, description = "Compile the source without generating " + "executable(s).") private boolean compile; @CommandLine.Option(names = {"--all", "-a"}, description = "Build or compile all the modules of the project.") private boolean buildAll; @CommandLine.Option(names = {"--output", "-o"}, description = "Write the output to the given file. The provided " + "output file name may or may not contain the '.jar' " + "extension.") private String output; @CommandLine.Option(names = {"--offline"}, description = "Build/Compile offline without downloading " + "dependencies.") private boolean offline; @CommandLine.Option(names = {"--skip-lock"}, description = "Skip using the lock file to resolve dependencies.") private boolean skipLock; @CommandLine.Option(names = {"--skip-tests"}, description = "Skip test compilation and execution.") private boolean skipTests; @CommandLine.Parameters private List<String> argList; @CommandLine.Option(names = {"--native"}, hidden = true, description = "Compile a Ballerina program to a native binary.") private boolean nativeBinary; @CommandLine.Option(names = "--dump-bir", hidden = true) private boolean dumpBIR; @CommandLine.Option(names = "--dump-llvm-ir", hidden = true) private boolean dumpLLVMIR; @CommandLine.Option(names = "--no-optimize-llvm", hidden = true) private boolean noOptimizeLlvm; @CommandLine.Option(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @CommandLine.Option(names = "--experimental", description = "Enable experimental language features.") private boolean experimentalFlag; private static final String buildCmd = "ballerina build [-o <output>] [--sourceroot] [--offline] [--skip-tests]\n" + " [--skip-lock] {<ballerina-file | module-name> | -a | --all} [--] [(--key=value)...]"; public void execute() { if (this.helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(BUILD_COMMAND); this.errStream.println(commandUsageInfo); return; } String[] args = LaunchUtils .initConfigurations(this.argList == null ? 
new String[0] : this.argList.toArray(new String[0])); if (args.length > 1) { CommandUtil.printError(this.errStream, "too many arguments.", buildCmd, false); CommandUtil.exitError(this.exitWhenFinish); return; } if (!this.buildAll && (this.argList == null || this.argList.size() == 0)) { CommandUtil.printError(this.errStream, "'build' command requires a module name or a Ballerina file to build/compile. Use '-a' or " + "'--all' to build/compile all the modules of the project.", "ballerina build {<ballerina-file> | <module-name> | -a | --all}", false); CommandUtil.exitError(this.exitWhenFinish); return; } this.sourceRootPath = null != this.sourceRoot ? Paths.get(this.sourceRoot).toAbsolutePath() : this.sourceRootPath; Path sourcePath = null; Path targetPath; if (this.buildAll) { if (null != this.output) { CommandUtil.printError(this.errStream, "'-o' and '--output' are only supported for building a single Ballerina " + "file.", "ballerina build -o <output-file> <ballerina-file> ", true); CommandUtil.exitError(this.exitWhenFinish); return; } if (!ProjectDirs.isProject(this.sourceRootPath)) { Path findRoot = ProjectDirs.findProjectRoot(this.sourceRootPath); if (null == findRoot) { CommandUtil.printError(this.errStream, "you are trying to build/compile a Ballerina project that does not have a " + "Ballerina.toml file.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } this.sourceRootPath = findRoot; } targetPath = this.sourceRootPath.resolve(ProjectDirConstants.TARGET_DIR_NAME); if (args.length > 0) { CommandUtil.printError(this.errStream, "too many arguments.", buildCmd, false); CommandUtil.exitError(this.exitWhenFinish); return; } } else if (this.argList.get(0).endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX)) { if (this.compile) { CommandUtil.printError(this.errStream, "'-c' or '--compile' can only be used with modules.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } else { if (Paths.get(this.argList.get(0)).isAbsolute()) { sourcePath = Paths.get(this.argList.get(0)); this.sourceRootPath = sourcePath.getParent(); } else { sourcePath = this.sourceRootPath.resolve(this.argList.get(0)); } if (Files.notExists(sourcePath)) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' Ballerina file does not exist.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } if (!Files.isRegularFile(sourcePath)) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' is not a Ballerina file. 
Check if it is a symlink or a shortcut.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } try { targetPath = Files.createTempDirectory("ballerina-build-" + System.nanoTime()); } catch (IOException e) { throw LauncherUtils.createLauncherException("Error occurred when creating executable."); } } } else if (Files.exists( this.sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0))) && Files.isDirectory( this.sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME) .resolve(this.argList.get(0)))) { if (null != this.output) { CommandUtil.printError(this.errStream, "'-o' and '--output' are only supported for building a single Ballerina " + "file.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } if (!RepoUtils.isBallerinaProject(this.sourceRootPath)) { CommandUtil.printError(this.errStream, "you are trying to build/compile a module that is not inside a project.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } if (Paths.get(argList.get(0)).isAbsolute()) { CommandUtil.printError(this.errStream, "you are trying to build/compile a module by giving the absolute path. You " + "only need to give the name of the module.", "ballerina build [-c] <module-name>", true); CommandUtil.exitError(this.exitWhenFinish); return; } String moduleName = argList.get(0); if (moduleName.endsWith("/")) { moduleName = moduleName.substring(0, moduleName.length() - 1); } sourcePath = Paths.get(moduleName); if (Files.notExists(this.sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(sourcePath))) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' module does not exist.", "ballerina build [-c] <module-name>", true); CommandUtil.exitError(this.exitWhenFinish); return; } targetPath = this.sourceRootPath.resolve(ProjectDirConstants.TARGET_DIR_NAME); } else { CommandUtil.printError(this.errStream, "invalid Ballerina source path. It should either be a name of a module in a " + "Ballerina project or a file with a \'" + BLangConstants.BLANG_SRC_FILE_SUFFIX + "\' extension. Use -a or --all " + "to build or compile all modules.", "ballerina build {<ballerina-file> | <module-name> | -a | --all}", true); CommandUtil.exitError(this.exitWhenFinish); return; } this.sourceRootPath = this.sourceRootPath.normalize(); sourcePath = sourcePath == null ? null : sourcePath.normalize(); targetPath = targetPath.normalize(); CompilerContext compilerContext = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(compilerContext); options.put(PROJECT_DIR, this.sourceRootPath.toString()); options.put(OFFLINE, Boolean.toString(this.offline)); options.put(COMPILER_PHASE, CompilerPhase.BIR_GEN.toString()); options.put(LOCK_ENABLED, Boolean.toString(!this.skipLock)); options.put(SKIP_TESTS, Boolean.toString(this.skipTests)); options.put(TEST_ENABLED, "true"); options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(this.experimentalFlag)); options.put(PRESERVE_WHITESPACE, "true"); BuildContext buildContext = new BuildContext(this.sourceRootPath, targetPath, sourcePath, compilerContext); buildContext.setOut(outStream); buildContext.setErr(errStream); boolean isSingleFileBuild = buildContext.getSourceType().equals(SINGLE_BAL_FILE); Path outputPath = null == this.output ? 
Paths.get(System.getProperty("user.dir")) : Paths.get(this.output); TaskExecutor taskExecutor = new TaskExecutor.TaskBuilder() .addTask(new CleanTargetDirTask(), isSingleFileBuild) .addTask(new CreateTargetDirTask()) .addTask(new CompileTask()) .addTask(new CreateLockFileTask(), this.skipLock || isSingleFileBuild) .addTask(new CreateBaloTask(), isSingleFileBuild) .addTask(new CreateBirTask()) .addTask(new CopyNativeLibTask(skipCopyLibsFromDist)) .addTask(new CreateJarTask(this.dumpBIR, skipCopyLibsFromDist, this.nativeBinary, this.dumpLLVMIR, this.noOptimizeLlvm)) .addTask(new CopyModuleJarTask(skipCopyLibsFromDist)) .addTask(new RunTestsTask(), this.skipTests || isSingleFileBuild) .addTask(new CreateExecutableTask(), this.compile) .addTask(new CopyExecutableTask(outputPath), !isSingleFileBuild) .addTask(new PrintExecutablePathTask(), this.compile) .addTask(new RunCompilerPluginTask(), this.compile) .addTask(new CleanTargetDirTask(), !isSingleFileBuild) .build(); taskExecutor.executeTasks(buildContext); if (this.exitWhenFinish) { Runtime.getRuntime().exit(0); } } @Override public String getName() { return BUILD_COMMAND; } @Override @Override public void printUsage(StringBuilder out) { out.append(" ballerina build [-o <output-file>] [--offline] [--skip-tests] [--skip-lock] " + "{<ballerina-file | module-name> | -a | --all} [--] [(--key=value)...]\n"); } @Override public void setParentCmdParser(CommandLine parentCmdParser) { } }
class BuildCommand implements BLauncherCmd { private final PrintStream outStream; private final PrintStream errStream; private Path sourceRootPath; private boolean exitWhenFinish; private boolean skipCopyLibsFromDist; public BuildCommand() { this.sourceRootPath = Paths.get(System.getProperty("user.dir")); this.outStream = System.out; this.errStream = System.err; this.exitWhenFinish = true; this.skipCopyLibsFromDist = false; } public BuildCommand(Path userDir, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist) { this.sourceRootPath = userDir; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; } public BuildCommand(Path userDir, PrintStream outStream, PrintStream errStream, boolean exitWhenFinish, boolean skipCopyLibsFromDist, Path executableOutputDir) { this.sourceRootPath = userDir; this.outStream = outStream; this.errStream = errStream; this.exitWhenFinish = exitWhenFinish; this.skipCopyLibsFromDist = skipCopyLibsFromDist; this.output = executableOutputDir.toString(); } @CommandLine.Option(names = {"--sourceroot"}, description = "Path to the directory containing the source files and modules") private String sourceRoot; @CommandLine.Option(names = {"--compile", "-c"}, description = "Compile the source without generating " + "executable(s).") private boolean compile; @CommandLine.Option(names = {"--all", "-a"}, description = "Build or compile all the modules of the project.") private boolean buildAll; @CommandLine.Option(names = {"--output", "-o"}, description = "Write the output to the given file. The provided " + "output file name may or may not contain the " + "'.jar' extension.") private String output; @CommandLine.Option(names = {"--offline"}, description = "Build/Compile offline without downloading " + "dependencies.") private boolean offline; @CommandLine.Option(names = {"--skip-lock"}, description = "Skip using the lock file to resolve dependencies.") private boolean skipLock; @CommandLine.Option(names = {"--skip-tests"}, description = "Skip test compilation and execution.") private boolean skipTests; @CommandLine.Parameters private List<String> argList; @CommandLine.Option(names = {"--native"}, hidden = true, description = "Compile a Ballerina program to a native binary.") private boolean nativeBinary; @CommandLine.Option(names = "--dump-bir", hidden = true) private boolean dumpBIR; @CommandLine.Option(names = "--dump-llvm-ir", hidden = true) private boolean dumpLLVMIR; @CommandLine.Option(names = "--no-optimize-llvm", hidden = true) private boolean noOptimizeLlvm; @CommandLine.Option(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @CommandLine.Option(names = "--experimental", description = "Enable experimental language features.") private boolean experimentalFlag; private static final String buildCmd = "ballerina build [-o <output>] [--sourceroot] [--offline] [--skip-tests]\n" + " [--skip-lock] {<ballerina-file | module-name> | -a | --all} [--] [(--key=value)...]"; public void execute() { if (this.helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(BUILD_COMMAND); this.errStream.println(commandUsageInfo); return; } String[] args = LaunchUtils .initConfigurations(this.argList == null ? 
new String[0] : this.argList.toArray(new String[0])); if (args.length > 1) { CommandUtil.printError(this.errStream, "too many arguments.", buildCmd, false); CommandUtil.exitError(this.exitWhenFinish); return; } if (!this.buildAll && (this.argList == null || this.argList.size() == 0)) { CommandUtil.printError(this.errStream, "'build' command requires a module name or a Ballerina file to build/compile. Use '-a' or " + "'--all' to build/compile all the modules of the project.", "ballerina build {<ballerina-file> | <module-name> | -a | --all}", false); CommandUtil.exitError(this.exitWhenFinish); return; } this.sourceRootPath = null != this.sourceRoot ? Paths.get(this.sourceRoot).toAbsolutePath() : this.sourceRootPath; Path sourcePath = null; Path targetPath; if (this.buildAll) { if (null != this.output) { CommandUtil.printError(this.errStream, "'-o' and '--output' are only supported when building a single Ballerina " + "file.", "ballerina build -o <output-file> <ballerina-file> ", true); CommandUtil.exitError(this.exitWhenFinish); return; } if (!ProjectDirs.isProject(this.sourceRootPath)) { Path findRoot = ProjectDirs.findProjectRoot(this.sourceRootPath); if (null == findRoot) { CommandUtil.printError(this.errStream, "you are trying to build/compile a Ballerina project that does not have a " + "Ballerina.toml file.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } this.sourceRootPath = findRoot; } targetPath = this.sourceRootPath.resolve(ProjectDirConstants.TARGET_DIR_NAME); if (args.length > 0) { CommandUtil.printError(this.errStream, "too many arguments.", buildCmd, false); CommandUtil.exitError(this.exitWhenFinish); return; } } else if (this.argList.get(0).endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX)) { if (this.compile) { CommandUtil.printError(this.errStream, "'-c' or '--compile' can only be used with modules.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } else { if (Paths.get(this.argList.get(0)).isAbsolute()) { sourcePath = Paths.get(this.argList.get(0)); this.sourceRootPath = sourcePath.getParent(); } else { sourcePath = this.sourceRootPath.resolve(this.argList.get(0)); } if (Files.notExists(sourcePath)) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' Ballerina file does not exist.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } if (!Files.isRegularFile(sourcePath)) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' is not a Ballerina file. 
Check if it is a symlink or a shortcut.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } try { targetPath = Files.createTempDirectory("ballerina-build-" + System.nanoTime()); } catch (IOException e) { throw LauncherUtils.createLauncherException("Error occurred when creating the executable."); } } } else if (Files.exists( this.sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0))) && Files.isDirectory( this.sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME) .resolve(this.argList.get(0)))) { if (null != this.output) { CommandUtil.printError(this.errStream, "'-o' and '--output' are only supported for building a single Ballerina " + "file.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } if (!RepoUtils.isBallerinaProject(this.sourceRootPath)) { CommandUtil.printError(this.errStream, "you are trying to build/compile a module that is not inside a project.", null, false); CommandUtil.exitError(this.exitWhenFinish); return; } if (Paths.get(argList.get(0)).isAbsolute()) { CommandUtil.printError(this.errStream, "you are trying to build/compile a module giving the absolute path. You " + "only need to give the name of the module.", "ballerina build [-c] <module-name>", true); CommandUtil.exitError(this.exitWhenFinish); return; } String moduleName = argList.get(0); if (moduleName.endsWith("/")) { moduleName = moduleName.substring(0, moduleName.length() - 1); } sourcePath = Paths.get(moduleName); if (Files.notExists(this.sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(sourcePath))) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' module does not exist.", "ballerina build [-c] <module-name>", true); CommandUtil.exitError(this.exitWhenFinish); return; } targetPath = this.sourceRootPath.resolve(ProjectDirConstants.TARGET_DIR_NAME); } else { CommandUtil.printError(this.errStream, "invalid Ballerina source path. It should either be a name of a module in a " + "Ballerina project or a file with a \'" + BLangConstants.BLANG_SRC_FILE_SUFFIX + "\' extension. Use -a or --all " + "to build or compile all modules.", "ballerina build {<ballerina-file> | <module-name> | -a | --all}", true); CommandUtil.exitError(this.exitWhenFinish); return; } this.sourceRootPath = this.sourceRootPath.normalize(); sourcePath = sourcePath == null ? null : sourcePath.normalize(); targetPath = targetPath.normalize(); CompilerContext compilerContext = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(compilerContext); options.put(PROJECT_DIR, this.sourceRootPath.toString()); options.put(OFFLINE, Boolean.toString(this.offline)); options.put(COMPILER_PHASE, CompilerPhase.BIR_GEN.toString()); options.put(LOCK_ENABLED, Boolean.toString(!this.skipLock)); options.put(SKIP_TESTS, Boolean.toString(this.skipTests)); options.put(TEST_ENABLED, "true"); options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(this.experimentalFlag)); options.put(PRESERVE_WHITESPACE, "true"); BuildContext buildContext = new BuildContext(this.sourceRootPath, targetPath, sourcePath, compilerContext); buildContext.setOut(outStream); buildContext.setErr(errStream); boolean isSingleFileBuild = buildContext.getSourceType().equals(SINGLE_BAL_FILE); Path outputPath = null == this.output ? 
Paths.get(System.getProperty("user.dir")) : Paths.get(this.output); TaskExecutor taskExecutor = new TaskExecutor.TaskBuilder() .addTask(new CleanTargetDirTask(), isSingleFileBuild) .addTask(new CreateTargetDirTask()) .addTask(new CompileTask()) .addTask(new CreateLockFileTask(), this.skipLock || isSingleFileBuild) .addTask(new CreateBaloTask(), isSingleFileBuild) .addTask(new CreateBirTask()) .addTask(new CopyNativeLibTask(skipCopyLibsFromDist)) .addTask(new CreateJarTask(this.dumpBIR, skipCopyLibsFromDist, this.nativeBinary, this.dumpLLVMIR, this.noOptimizeLlvm)) .addTask(new CopyModuleJarTask(skipCopyLibsFromDist)) .addTask(new RunTestsTask(), this.skipTests || isSingleFileBuild) .addTask(new CreateExecutableTask(), this.compile) .addTask(new CopyExecutableTask(outputPath), !isSingleFileBuild) .addTask(new PrintExecutablePathTask(), this.compile) .addTask(new RunCompilerPluginTask(), this.compile) .addTask(new CleanTargetDirTask(), !isSingleFileBuild) .build(); taskExecutor.executeTasks(buildContext); if (this.exitWhenFinish) { Runtime.getRuntime().exit(0); } } @Override public String getName() { return BUILD_COMMAND; } @Override public void printUsage(StringBuilder out) { out.append(" ballerina build [-o <output-file>] [--offline] [--skip-tests] [--skip-lock] " + "{<ballerina-file | module-name> | -a | --all} [--] [(--key=value)...]\n"); } @Override public void setParentCmdParser(CommandLine parentCmdParser) { } }
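The build pipeline above is assembled through a builder whose two-argument `addTask(task, skip)` overload silently drops a task when the skip flag is true. A minimal, self-contained sketch of that conditional-builder pattern (illustrative names only, not the actual Ballerina CLI types):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

// Illustrative stand-in for TaskExecutor.TaskBuilder: the boolean overload
// of addTask enqueues the task only when the skip flag is false.
final class ConditionalTaskBuilder<C> {
    private final List<Consumer<C>> tasks = new ArrayList<>();

    ConditionalTaskBuilder<C> addTask(Consumer<C> task) {
        tasks.add(task);
        return this;
    }

    ConditionalTaskBuilder<C> addTask(Consumer<C> task, boolean skip) {
        if (!skip) {
            tasks.add(task);
        }
        return this;
    }

    void executeTasks(C context) {
        // Tasks run in registration order, mirroring the pipeline above.
        tasks.forEach(task -> task.accept(context));
    }
}
```

Usage mirrors the snippet above: chain `addTask(...)` calls, passing flags such as `isSingleFileBuild` to skip stages that do not apply, then call `executeTasks(context)` once.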
I like `Closer`. We don't have to change it for this PR, but consider it as a potential improvement in the future.
private void terminateMiniClusterServices() throws Exception { Exception exception = null; synchronized (lock) { if (blobCacheService != null) { try { blobCacheService.close(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } blobCacheService = null; } if (blobServer != null) { try { blobServer.close(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } blobServer = null; } if (haServices != null) { try { haServices.closeAndCleanupAllData(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } haServices = null; } try { rpcSystem.close(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } if (exception != null) { throw exception; } } }
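The `Closer` the reviewer mentions is presumably Guava's `com.google.common.io.Closer`, which gives the same first-failure-wins, suppress-the-rest semantics as the manual chain in `terminateMiniClusterServices` without the per-resource try/catch boilerplate. A standalone sketch, assuming Guava is on the classpath:

```java
import com.google.common.io.Closer;

import java.io.Closeable;
import java.io.IOException;

// Closer closes registered resources in reverse registration order; the
// first close() failure is rethrown and any later failures are attached
// as suppressed exceptions, which is what the manual chain does by hand.
public final class CloserDemo {
    public static void main(String[] args) throws IOException {
        try (Closer closer = Closer.create()) {
            Closeable first = () -> System.out.println("closing first");
            Closeable second = () -> { throw new IOException("second failed"); };
            closer.register(first);
            closer.register(second); // closed before `first`
        } // rethrows "second failed"; "closing first" still runs
    }
}
```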
try {
private void terminateMiniClusterServices() throws Exception { Exception exception = null; synchronized (lock) { if (blobCacheService != null) { try { blobCacheService.close(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } blobCacheService = null; } if (blobServer != null) { try { blobServer.close(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } blobServer = null; } if (haServices != null) { try { haServices.closeAndCleanupAllData(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } haServices = null; } try { rpcSystem.close(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } if (exception != null) { throw exception; } } }
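Both the before and after versions lean on `ExceptionUtils.firstOrSuppressed` to keep shutting services down after a failure while losing no exception. A minimal stand-in showing the contract (a sketch, not necessarily Flink's exact source):

```java
// Keep the first exception as the primary failure and attach any later
// ones as suppressed, so every shutdown error surfaces in one throwable.
static <T extends Throwable> T firstOrSuppressed(T newException, T previous) {
    if (previous == null) {
        return newException;
    }
    if (previous != newException) {
        previous.addSuppressed(newException);
    }
    return previous;
}
```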
class MiniCluster implements AutoCloseableAsync { private static final Logger LOG = LoggerFactory.getLogger(MiniCluster.class); /** The lock to guard startup / shutdown / manipulation methods. */ private final Object lock = new Object(); /** The configuration for this mini cluster. */ private final MiniClusterConfiguration miniClusterConfiguration; private final Time rpcTimeout; @GuardedBy("lock") private final List<TaskExecutor> taskManagers; private final TerminatingFatalErrorHandlerFactory taskManagerTerminatingFatalErrorHandlerFactory = new TerminatingFatalErrorHandlerFactory(); private CompletableFuture<Void> terminationFuture; @GuardedBy("lock") private MetricRegistryImpl metricRegistry; @GuardedBy("lock") private ProcessMetricGroup processMetricGroup; @GuardedBy("lock") private RpcService commonRpcService; @GuardedBy("lock") private ExecutorService ioExecutor; @GuardedBy("lock") private final Collection<RpcService> rpcServices; @GuardedBy("lock") private HighAvailabilityServices haServices; @GuardedBy("lock") private BlobServer blobServer; @GuardedBy("lock") private HeartbeatServices heartbeatServices; @GuardedBy("lock") private BlobCacheService blobCacheService; @GuardedBy("lock") private LeaderRetrievalService resourceManagerLeaderRetriever; @GuardedBy("lock") private LeaderRetrievalService dispatcherLeaderRetriever; @GuardedBy("lock") private LeaderRetrievalService clusterRestEndpointLeaderRetrievalService; @GuardedBy("lock") private Collection<DispatcherResourceManagerComponent> dispatcherResourceManagerComponents; @GuardedBy("lock") private RpcGatewayRetriever<DispatcherId, DispatcherGateway> dispatcherGatewayRetriever; @GuardedBy("lock") private RpcGatewayRetriever<ResourceManagerId, ResourceManagerGateway> resourceManagerGatewayRetriever; @GuardedBy("lock") private LeaderRetriever webMonitorLeaderRetriever; @GuardedBy("lock") private RpcServiceFactory taskManagerRpcServiceFactory; /** Flag marking the mini cluster as started/running. */ private volatile boolean running; @GuardedBy("lock") private RpcSystem rpcSystem; /** * Creates a new Flink mini cluster based on the given configuration. * * @param miniClusterConfiguration The configuration for the mini cluster */ public MiniCluster(MiniClusterConfiguration miniClusterConfiguration) { this.miniClusterConfiguration = checkNotNull(miniClusterConfiguration, "config may not be null"); this.rpcServices = new ArrayList<>( 1 + 2 + miniClusterConfiguration .getNumTaskManagers()); this.dispatcherResourceManagerComponents = new ArrayList<>(1); this.rpcTimeout = miniClusterConfiguration.getRpcTimeout(); this.terminationFuture = CompletableFuture.completedFuture(null); running = false; this.taskManagers = new ArrayList<>(miniClusterConfiguration.getNumTaskManagers()); } public CompletableFuture<URI> getRestAddress() { synchronized (lock) { checkState(running, "MiniCluster is not yet running or has already been shut down."); return webMonitorLeaderRetriever .getLeaderFuture() .thenApply( FunctionUtils.uncheckedFunction( addressLeaderIdTuple -> new URI(addressLeaderIdTuple.f0))); } } public ClusterInformation getClusterInformation() { synchronized (lock) { checkState(running, "MiniCluster is not yet running or has already been shut down."); return new ClusterInformation("localhost", blobServer.getPort()); } } protected Executor getIOExecutor() { return ioExecutor; } /** Checks if the mini cluster was started and is running. */ public boolean isRunning() { return running; } /** * Starts the mini cluster, based on the configured properties. 
* * @throws Exception This method passes on any exception that occurs during the startup of the * mini cluster. */ public void start() throws Exception { synchronized (lock) { checkState(!running, "MiniCluster is already running"); LOG.info("Starting Flink Mini Cluster"); LOG.debug("Using configuration {}", miniClusterConfiguration); final Configuration configuration = miniClusterConfiguration.getConfiguration(); final boolean useSingleRpcService = miniClusterConfiguration.getRpcServiceSharing() == RpcServiceSharing.SHARED; try { initializeIOFormatClasses(configuration); rpcSystem = RpcSystemLoader.load(configuration); LOG.info("Starting Metrics Registry"); metricRegistry = createMetricRegistry( configuration, rpcSystem.getMaximumMessageSizeInBytes(configuration)); LOG.info("Starting RPC Service(s)"); final RpcServiceFactory dispatcherResourceManagerComponentRpcServiceFactory; final RpcService metricQueryServiceRpcService; if (useSingleRpcService) { commonRpcService = createLocalRpcService(configuration, rpcSystem); final CommonRpcServiceFactory commonRpcServiceFactory = new CommonRpcServiceFactory(commonRpcService); taskManagerRpcServiceFactory = commonRpcServiceFactory; dispatcherResourceManagerComponentRpcServiceFactory = commonRpcServiceFactory; metricQueryServiceRpcService = MetricUtils.startLocalMetricsRpcService(configuration, rpcSystem); } else { final String jobManagerExternalAddress = miniClusterConfiguration.getJobManagerExternalAddress(); final String taskManagerExternalAddress = miniClusterConfiguration.getTaskManagerExternalAddress(); final String jobManagerExternalPortRange = miniClusterConfiguration.getJobManagerExternalPortRange(); final String taskManagerExternalPortRange = miniClusterConfiguration.getTaskManagerExternalPortRange(); final String jobManagerBindAddress = miniClusterConfiguration.getJobManagerBindAddress(); final String taskManagerBindAddress = miniClusterConfiguration.getTaskManagerBindAddress(); dispatcherResourceManagerComponentRpcServiceFactory = new DedicatedRpcServiceFactory( configuration, jobManagerExternalAddress, jobManagerExternalPortRange, jobManagerBindAddress, rpcSystem); taskManagerRpcServiceFactory = new DedicatedRpcServiceFactory( configuration, taskManagerExternalAddress, taskManagerExternalPortRange, taskManagerBindAddress, rpcSystem); commonRpcService = createRemoteRpcService( configuration, jobManagerBindAddress, 0, rpcSystem); metricQueryServiceRpcService = MetricUtils.startRemoteMetricsRpcService( configuration, commonRpcService.getAddress(), rpcSystem); } metricRegistry.startQueryService(metricQueryServiceRpcService, null); processMetricGroup = MetricUtils.instantiateProcessMetricGroup( metricRegistry, RpcUtils.getHostname(commonRpcService), ConfigurationUtils.getSystemResourceMetricsProbingInterval( configuration)); ioExecutor = Executors.newFixedThreadPool( ClusterEntrypointUtils.getPoolSize(configuration), new ExecutorThreadFactory("mini-cluster-io")); haServices = createHighAvailabilityServices(configuration, ioExecutor); blobServer = new BlobServer(configuration, haServices.createBlobStore()); blobServer.start(); heartbeatServices = HeartbeatServices.fromConfiguration(configuration); blobCacheService = new BlobCacheService( configuration, haServices.createBlobStore(), new InetSocketAddress( InetAddress.getLocalHost(), blobServer.getPort())); startTaskManagers(); MetricQueryServiceRetriever metricQueryServiceRetriever = new RpcMetricQueryServiceRetriever( metricRegistry.getMetricQueryServiceRpcService()); 
setupDispatcherResourceManagerComponents( configuration, dispatcherResourceManagerComponentRpcServiceFactory, metricQueryServiceRetriever); resourceManagerLeaderRetriever = haServices.getResourceManagerLeaderRetriever(); dispatcherLeaderRetriever = haServices.getDispatcherLeaderRetriever(); clusterRestEndpointLeaderRetrievalService = haServices.getClusterRestEndpointLeaderRetriever(); dispatcherGatewayRetriever = new RpcGatewayRetriever<>( commonRpcService, DispatcherGateway.class, DispatcherId::fromUuid, new ExponentialBackoffRetryStrategy( 21, Duration.ofMillis(5L), Duration.ofMillis(20L))); resourceManagerGatewayRetriever = new RpcGatewayRetriever<>( commonRpcService, ResourceManagerGateway.class, ResourceManagerId::fromUuid, new ExponentialBackoffRetryStrategy( 21, Duration.ofMillis(5L), Duration.ofMillis(20L))); webMonitorLeaderRetriever = new LeaderRetriever(); resourceManagerLeaderRetriever.start(resourceManagerGatewayRetriever); dispatcherLeaderRetriever.start(dispatcherGatewayRetriever); clusterRestEndpointLeaderRetrievalService.start(webMonitorLeaderRetriever); } catch (Exception e) { try { close(); } catch (Exception ee) { e.addSuppressed(ee); } throw e; } terminationFuture = new CompletableFuture<>(); running = true; LOG.info("Flink Mini Cluster started successfully"); } } @GuardedBy("lock") private void setupDispatcherResourceManagerComponents( Configuration configuration, RpcServiceFactory dispatcherResourceManagerComponentRpcServiceFactory, MetricQueryServiceRetriever metricQueryServiceRetriever) throws Exception { dispatcherResourceManagerComponents.addAll( createDispatcherResourceManagerComponents( configuration, dispatcherResourceManagerComponentRpcServiceFactory, haServices, blobServer, heartbeatServices, metricRegistry, metricQueryServiceRetriever, new ShutDownFatalErrorHandler())); final Collection<CompletableFuture<ApplicationStatus>> shutDownFutures = new ArrayList<>(dispatcherResourceManagerComponents.size()); for (DispatcherResourceManagerComponent dispatcherResourceManagerComponent : dispatcherResourceManagerComponents) { final CompletableFuture<ApplicationStatus> shutDownFuture = dispatcherResourceManagerComponent.getShutDownFuture(); FutureUtils.assertNoException( shutDownFuture.thenRun(dispatcherResourceManagerComponent::closeAsync)); shutDownFutures.add(shutDownFuture); } FutureUtils.assertNoException( FutureUtils.completeAll(shutDownFutures).thenRun(this::closeAsync)); } @VisibleForTesting protected Collection<? 
extends DispatcherResourceManagerComponent> createDispatcherResourceManagerComponents( Configuration configuration, RpcServiceFactory rpcServiceFactory, HighAvailabilityServices haServices, BlobServer blobServer, HeartbeatServices heartbeatServices, MetricRegistry metricRegistry, MetricQueryServiceRetriever metricQueryServiceRetriever, FatalErrorHandler fatalErrorHandler) throws Exception { DispatcherResourceManagerComponentFactory dispatcherResourceManagerComponentFactory = createDispatcherResourceManagerComponentFactory(); return Collections.singleton( dispatcherResourceManagerComponentFactory.create( configuration, ioExecutor, rpcServiceFactory.createRpcService(), haServices, blobServer, heartbeatServices, metricRegistry, new MemoryExecutionGraphInfoStore(), metricQueryServiceRetriever, fatalErrorHandler)); } @Nonnull DispatcherResourceManagerComponentFactory createDispatcherResourceManagerComponentFactory() { return DefaultDispatcherResourceManagerComponentFactory.createSessionComponentFactory( StandaloneResourceManagerFactory.getInstance()); } @VisibleForTesting protected HighAvailabilityServices createHighAvailabilityServices( Configuration configuration, Executor executor) throws Exception { LOG.info("Starting high-availability services"); final HaServices haServices = miniClusterConfiguration.getHaServices(); switch (haServices) { case WITH_LEADERSHIP_CONTROL: return new EmbeddedHaServicesWithLeadershipControl(executor); case CONFIGURED: return HighAvailabilityServicesUtils.createAvailableOrEmbeddedServices( configuration, executor); default: throw new IllegalConfigurationException("Unkown HA Services " + haServices); } } /** * Returns {@link HaLeadershipControl} if enabled. * * <p>{@link HaLeadershipControl} allows granting and revoking leadership of HA components, e.g. * JobManager. The method return {@link Optional * {@link MiniClusterConfiguration}. * * <p>Enabling this feature disables {@link HighAvailabilityOptions */ public Optional<HaLeadershipControl> getHaLeadershipControl() { synchronized (lock) { return haServices instanceof HaLeadershipControl ? Optional.of((HaLeadershipControl) haServices) : Optional.empty(); } } /** * Shuts down the mini cluster, failing all currently executing jobs. The mini cluster can be * started again by calling the {@link * * <p>This method shuts down all started services and components, even if an exception occurs in * the process of shutting down some component. 
* * @return Future which is completed once the MiniCluster has been completely shut down */ @Override public CompletableFuture<Void> closeAsync() { synchronized (lock) { if (running) { LOG.info("Shutting down Flink Mini Cluster"); try { final long shutdownTimeoutMillis = miniClusterConfiguration .getConfiguration() .getLong(ClusterOptions.CLUSTER_SERVICES_SHUTDOWN_TIMEOUT); final int numComponents = 2 + miniClusterConfiguration.getNumTaskManagers(); final Collection<CompletableFuture<Void>> componentTerminationFutures = new ArrayList<>(numComponents); componentTerminationFutures.addAll(terminateTaskManagers()); componentTerminationFutures.add(shutDownResourceManagerComponents()); final FutureUtils.ConjunctFuture<Void> componentsTerminationFuture = FutureUtils.completeAll(componentTerminationFutures); final CompletableFuture<Void> metricSystemTerminationFuture = FutureUtils.composeAfterwards( componentsTerminationFuture, this::closeMetricSystem); final CompletableFuture<Void> rpcServicesTerminationFuture = FutureUtils.composeAfterwards( metricSystemTerminationFuture, this::terminateRpcServices); final CompletableFuture<Void> remainingServicesTerminationFuture = FutureUtils.runAfterwards( rpcServicesTerminationFuture, this::terminateMiniClusterServices); final CompletableFuture<Void> executorsTerminationFuture = FutureUtils.composeAfterwards( remainingServicesTerminationFuture, () -> terminateExecutors(shutdownTimeoutMillis)); executorsTerminationFuture.whenComplete( (Void ignored, Throwable throwable) -> { if (throwable != null) { terminationFuture.completeExceptionally( ExceptionUtils.stripCompletionException(throwable)); } else { terminationFuture.complete(null); } }); } finally { running = false; } } return terminationFuture; } } private CompletableFuture<Void> closeMetricSystem() { synchronized (lock) { final ArrayList<CompletableFuture<Void>> terminationFutures = new ArrayList<>(2); if (processMetricGroup != null) { processMetricGroup.close(); processMetricGroup = null; } if (metricRegistry != null) { terminationFutures.add(metricRegistry.shutdown()); metricRegistry = null; } return FutureUtils.completeAll(terminationFutures); } } @GuardedBy("lock") private void startTaskManagers() throws Exception { final int numTaskManagers = miniClusterConfiguration.getNumTaskManagers(); LOG.info("Starting {} TaskManger(s)", numTaskManagers); for (int i = 0; i < numTaskManagers; i++) { startTaskManager(); } } /** * Starts additional TaskManager process. * * <p>When the MiniCluster starts up, it always starts {@link * MiniClusterConfiguration * 0 to the number of TaskManagers, started so far, minus one. This method starts a TaskManager * with the next index which is the number of TaskManagers, started so far. The index always * increases with each new started TaskManager. 
The indices of terminated TaskManagers are not * reused after {@link */ public void startTaskManager() throws Exception { synchronized (lock) { final Configuration configuration = miniClusterConfiguration.getConfiguration(); final TaskExecutor taskExecutor = TaskManagerRunner.startTaskManager( configuration, new ResourceID(UUID.randomUUID().toString()), taskManagerRpcServiceFactory.createRpcService(), haServices, heartbeatServices, metricRegistry, blobCacheService, useLocalCommunication(), ExternalResourceInfoProvider.NO_EXTERNAL_RESOURCES, taskManagerTerminatingFatalErrorHandlerFactory.create( taskManagers.size())); taskExecutor.start(); taskManagers.add(taskExecutor); } } @VisibleForTesting protected boolean useLocalCommunication() { return miniClusterConfiguration.getNumTaskManagers() == 1; } @GuardedBy("lock") private Collection<? extends CompletableFuture<Void>> terminateTaskManagers() { final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(taskManagers.size()); for (int i = 0; i < taskManagers.size(); i++) { terminationFutures.add(terminateTaskManager(i)); } return terminationFutures; } /** * Terminates a TaskManager with the given index. * * <p>See {@link * terminates a TaskManager with a given index but it does not clear the index. The index stays * occupied for the lifetime of the MiniCluster and its TaskManager stays terminated. The index * is not reused if more TaskManagers are started with {@link * * @param index index of the TaskManager to terminate * @return {@link CompletableFuture} of the given TaskManager termination */ public CompletableFuture<Void> terminateTaskManager(int index) { synchronized (lock) { final TaskExecutor taskExecutor = taskManagers.get(index); return taskExecutor.closeAsync(); } } public CompletableFuture<Collection<JobStatusMessage>> listJobs() { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway .requestMultipleJobDetails(rpcTimeout) .thenApply( jobs -> jobs.getJobs().stream() .map( details -> new JobStatusMessage( details.getJobId(), details .getJobName(), details.getStatus(), details .getStartTime())) .collect(Collectors.toList()))); } public CompletableFuture<JobStatus> getJobStatus(JobID jobId) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.requestJobStatus(jobId, rpcTimeout)); } public CompletableFuture<Acknowledge> cancelJob(JobID jobId) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.cancelJob(jobId, rpcTimeout)); } public CompletableFuture<String> triggerSavepoint( JobID jobId, String targetDirectory, boolean cancelJob) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.triggerSavepoint( jobId, targetDirectory, cancelJob, rpcTimeout)); } public CompletableFuture<String> stopWithSavepoint( JobID jobId, String targetDirectory, boolean terminate) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.stopWithSavepoint( jobId, targetDirectory, terminate, rpcTimeout)); } public CompletableFuture<Acknowledge> disposeSavepoint(String savepointPath) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.disposeSavepoint(savepointPath, rpcTimeout)); } public CompletableFuture<? 
extends AccessExecutionGraph> getExecutionGraph(JobID jobId) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.requestJob(jobId, rpcTimeout)); } public CompletableFuture<CoordinationResponse> deliverCoordinationRequestToCoordinator( JobID jobId, OperatorID operatorId, SerializedValue<CoordinationRequest> serializedRequest) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.deliverCoordinationRequestToCoordinator( jobId, operatorId, serializedRequest, rpcTimeout)); } private <T> CompletableFuture<T> runDispatcherCommand( Function<DispatcherGateway, CompletableFuture<T>> dispatcherCommand) { return getDispatcherGatewayFuture() .thenApply(dispatcherCommand) .thenCompose(Function.identity()); } /** * This method executes a job in detached mode. The method returns immediately after the job has * been added to the * * @param job The Flink job to execute * @throws JobExecutionException Thrown if anything went amiss during initial job launch, or if * the job terminally failed. */ public void runDetached(JobGraph job) throws JobExecutionException, InterruptedException { checkNotNull(job, "job is null"); final CompletableFuture<JobSubmissionResult> submissionFuture = submitJob(job); try { submissionFuture.get(); } catch (ExecutionException e) { throw new JobExecutionException( job.getJobID(), ExceptionUtils.stripExecutionException(e)); } } /** * This method runs a job in blocking mode. The method returns only after the job completed * successfully, or after it failed terminally. * * @param job The Flink job to execute * @return The result of the job execution * @throws JobExecutionException Thrown if anything went amiss during initial job launch, or if * the job terminally failed. */ public JobExecutionResult executeJobBlocking(JobGraph job) throws JobExecutionException, InterruptedException { checkNotNull(job, "job is null"); final CompletableFuture<JobSubmissionResult> submissionFuture = submitJob(job); final CompletableFuture<JobResult> jobResultFuture = submissionFuture.thenCompose( (JobSubmissionResult ignored) -> requestJobResult(job.getJobID())); final JobResult jobResult; try { jobResult = jobResultFuture.get(); } catch (ExecutionException e) { throw new JobExecutionException( job.getJobID(), "Could not retrieve JobResult.", ExceptionUtils.stripExecutionException(e)); } try { return jobResult.toJobExecutionResult(Thread.currentThread().getContextClassLoader()); } catch (IOException | ClassNotFoundException e) { throw new JobExecutionException(job.getJobID(), e); } } public CompletableFuture<JobSubmissionResult> submitJob(JobGraph jobGraph) { final CompletableFuture<DispatcherGateway> dispatcherGatewayFuture = getDispatcherGatewayFuture(); final CompletableFuture<InetSocketAddress> blobServerAddressFuture = createBlobServerAddress(dispatcherGatewayFuture); final CompletableFuture<Void> jarUploadFuture = uploadAndSetJobFiles(blobServerAddressFuture, jobGraph); final CompletableFuture<Acknowledge> acknowledgeCompletableFuture = jarUploadFuture .thenCombine( dispatcherGatewayFuture, (Void ack, DispatcherGateway dispatcherGateway) -> dispatcherGateway.submitJob(jobGraph, rpcTimeout)) .thenCompose(Function.identity()); return acknowledgeCompletableFuture.thenApply( (Acknowledge ignored) -> new JobSubmissionResult(jobGraph.getJobID())); } public CompletableFuture<JobResult> requestJobResult(JobID jobId) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.requestJobResult(jobId, RpcUtils.INF_TIMEOUT)); } public 
CompletableFuture<ClusterOverview> requestClusterOverview() { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.requestClusterOverview(RpcUtils.INF_TIMEOUT)); } @VisibleForTesting protected CompletableFuture<DispatcherGateway> getDispatcherGatewayFuture() { synchronized (lock) { checkState(running, "MiniCluster is not yet running or has already been shut down."); return dispatcherGatewayRetriever.getFuture(); } } private CompletableFuture<Void> uploadAndSetJobFiles( final CompletableFuture<InetSocketAddress> blobServerAddressFuture, final JobGraph job) { return blobServerAddressFuture.thenAccept( blobServerAddress -> { try { ClientUtils.extractAndUploadJobGraphFiles( job, () -> new BlobClient( blobServerAddress, miniClusterConfiguration.getConfiguration())); } catch (FlinkException e) { throw new CompletionException(e); } }); } private CompletableFuture<InetSocketAddress> createBlobServerAddress( final CompletableFuture<DispatcherGateway> dispatcherGatewayFuture) { return dispatcherGatewayFuture .thenApply( dispatcherGateway -> dispatcherGateway .getBlobServerPort(rpcTimeout) .thenApply( blobServerPort -> new InetSocketAddress( dispatcherGateway.getHostname(), blobServerPort))) .thenCompose(Function.identity()); } /** * Factory method to create the metric registry for the mini cluster. * * @param config The configuration of the mini cluster * @param maximumMessageSizeInBytes the maximum message size */ protected MetricRegistryImpl createMetricRegistry( Configuration config, long maximumMessageSizeInBytes) { return new MetricRegistryImpl( MetricRegistryConfiguration.fromConfiguration(config, maximumMessageSizeInBytes), ReporterSetup.fromConfiguration(config, null)); } /** * Factory method to instantiate the remote RPC service. * * @param configuration Flink configuration. * @param bindAddress The address to bind the RPC service to. * @param bindPort The port range to bind the RPC service to. * @param rpcSystem * @return The instantiated RPC service */ protected RpcService createRemoteRpcService( Configuration configuration, String bindAddress, int bindPort, RpcSystem rpcSystem) throws Exception { return rpcSystem .remoteServiceBuilder(configuration, bindAddress, String.valueOf(bindPort)) .withBindAddress(bindAddress) .withBindPort(bindPort) .withExecutorConfiguration(RpcUtils.getTestForkJoinExecutorConfiguration()) .createAndStart(); } /** * Factory method to instantiate the remote RPC service. * * @param configuration Flink configuration. * @param externalAddress The external address to access the RPC service. * @param externalPortRange The external port range to access the RPC service. * @param bindAddress The address to bind the RPC service to. * @param rpcSystem * @return The instantiated RPC service */ protected RpcService createRemoteRpcService( Configuration configuration, String externalAddress, String externalPortRange, String bindAddress, RpcSystem rpcSystem) throws Exception { return rpcSystem .remoteServiceBuilder(configuration, externalAddress, externalPortRange) .withBindAddress(bindAddress) .withExecutorConfiguration(RpcUtils.getTestForkJoinExecutorConfiguration()) .createAndStart(); } /** * Factory method to instantiate the local RPC service. * * @param configuration Flink configuration. 
* @param rpcSystem * @return The instantiated RPC service */ protected RpcService createLocalRpcService(Configuration configuration, RpcSystem rpcSystem) throws Exception { return rpcSystem .localServiceBuilder(configuration) .withExecutorConfiguration(RpcUtils.getTestForkJoinExecutorConfiguration()) .createAndStart(); } @GuardedBy("lock") private CompletableFuture<Void> shutDownResourceManagerComponents() { final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(dispatcherResourceManagerComponents.size()); for (DispatcherResourceManagerComponent dispatcherResourceManagerComponent : dispatcherResourceManagerComponents) { terminationFutures.add(dispatcherResourceManagerComponent.closeAsync()); } final FutureUtils.ConjunctFuture<Void> dispatcherTerminationFuture = FutureUtils.completeAll(terminationFutures); return FutureUtils.runAfterwards( dispatcherTerminationFuture, () -> { Exception exception = null; synchronized (lock) { if (resourceManagerLeaderRetriever != null) { try { resourceManagerLeaderRetriever.stop(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } resourceManagerLeaderRetriever = null; } if (dispatcherLeaderRetriever != null) { try { dispatcherLeaderRetriever.stop(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } dispatcherLeaderRetriever = null; } if (clusterRestEndpointLeaderRetrievalService != null) { try { clusterRestEndpointLeaderRetrievalService.stop(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } clusterRestEndpointLeaderRetrievalService = null; } } if (exception != null) { throw exception; } }); } @Nonnull private CompletableFuture<Void> terminateRpcServices() { synchronized (lock) { final int numRpcServices = 1 + rpcServices.size(); final Collection<CompletableFuture<?>> rpcTerminationFutures = new ArrayList<>(numRpcServices); rpcTerminationFutures.add(commonRpcService.stopService()); for (RpcService rpcService : rpcServices) { rpcTerminationFutures.add(rpcService.stopService()); } commonRpcService = null; rpcServices.clear(); return FutureUtils.completeAll(rpcTerminationFutures); } } private CompletableFuture<Void> terminateExecutors(long executorShutdownTimeoutMillis) { synchronized (lock) { if (ioExecutor != null) { return ExecutorUtils.nonBlockingShutdown( executorShutdownTimeoutMillis, TimeUnit.MILLISECONDS, ioExecutor); } else { return CompletableFuture.completedFuture(null); } } } /** Internal factory for {@link RpcService}. */ protected interface RpcServiceFactory { RpcService createRpcService() throws Exception; } /** Factory which returns always the common {@link RpcService}. */ protected static class CommonRpcServiceFactory implements RpcServiceFactory { private final RpcService commonRpcService; CommonRpcServiceFactory(RpcService commonRpcService) { this.commonRpcService = commonRpcService; } @Override public RpcService createRpcService() { return commonRpcService; } } /** Factory which creates and registers new {@link RpcService}. 
*/ protected class DedicatedRpcServiceFactory implements RpcServiceFactory { private final Configuration configuration; private final String externalAddress; private final String externalPortRange; private final String bindAddress; private final RpcSystem rpcSystem; DedicatedRpcServiceFactory( Configuration configuration, String externalAddress, String externalPortRange, String bindAddress, RpcSystem rpcSystem) { this.configuration = configuration; this.externalAddress = externalAddress; this.externalPortRange = externalPortRange; this.bindAddress = bindAddress; this.rpcSystem = rpcSystem; } @Override public RpcService createRpcService() throws Exception { final RpcService rpcService = MiniCluster.this.createRemoteRpcService( configuration, externalAddress, externalPortRange, bindAddress, rpcSystem); synchronized (lock) { rpcServices.add(rpcService); } return rpcService; } } private void initializeIOFormatClasses(Configuration configuration) { FileOutputFormat.initDefaultsFromConfiguration(configuration); } private class TerminatingFatalErrorHandler implements FatalErrorHandler { private final int index; private TerminatingFatalErrorHandler(int index) { this.index = index; } @Override public void onFatalError(Throwable exception) { if (running) { LOG.error("TaskManager synchronized (lock) { taskManagers.get(index).closeAsync(); } } } } private class ShutDownFatalErrorHandler implements FatalErrorHandler { @Override public void onFatalError(Throwable exception) { LOG.warn("Error in MiniCluster. Shutting the MiniCluster down.", exception); closeAsync(); } } private class TerminatingFatalErrorHandlerFactory { /** * Create a new {@link TerminatingFatalErrorHandler} for the {@link TaskExecutor} with the * given index. * * @param index into the {@link * TaskExecutor}. * @return {@link TerminatingFatalErrorHandler} for the given index */ @GuardedBy("lock") private TerminatingFatalErrorHandler create(int index) { return new TerminatingFatalErrorHandler(index); } } /** HA Services to use. */ public enum HaServices { /** Uses the configured HA Services in {@link HighAvailabilityOptions CONFIGURED, /** * Enables or disables {@link HaLeadershipControl} in {@link * MiniCluster * * <p>{@link HaLeadershipControl} allows granting and revoking leadership of HA components. * Enabling this feature disables {@link HighAvailabilityOptions */ WITH_LEADERSHIP_CONTROL } }
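One detail worth calling out from `closeAsync()` in the class above: each shutdown stage is chained with `FutureUtils.composeAfterwards`/`runAfterwards`, so a later stage runs even when an earlier one fails, and the earliest failure still determines the final outcome. A hypothetical helper with those semantics (a sketch under that assumption, not Flink's actual `FutureUtils`):

```java
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

// Run `next` after `first` completes, successfully or not. If both stages
// fail, the first failure wins and the second is attached as suppressed.
static CompletableFuture<Void> composeAfterwards(
        CompletableFuture<?> first, Supplier<CompletableFuture<?>> next) {
    CompletableFuture<Void> result = new CompletableFuture<>();
    first.whenComplete(
            (ignoredValue, firstError) ->
                    next.get()
                            .whenComplete(
                                    (alsoIgnored, nextError) -> {
                                        if (firstError != null) {
                                            if (nextError != null) {
                                                firstError.addSuppressed(nextError);
                                            }
                                            result.completeExceptionally(firstError);
                                        } else if (nextError != null) {
                                            result.completeExceptionally(nextError);
                                        } else {
                                            result.complete(null);
                                        }
                                    }));
    return result;
}
```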
class MiniCluster implements AutoCloseableAsync { private static final Logger LOG = LoggerFactory.getLogger(MiniCluster.class); /** The lock to guard startup / shutdown / manipulation methods. */ private final Object lock = new Object(); /** The configuration for this mini cluster. */ private final MiniClusterConfiguration miniClusterConfiguration; private final Time rpcTimeout; @GuardedBy("lock") private final List<TaskExecutor> taskManagers; private final TerminatingFatalErrorHandlerFactory taskManagerTerminatingFatalErrorHandlerFactory = new TerminatingFatalErrorHandlerFactory(); private CompletableFuture<Void> terminationFuture; @GuardedBy("lock") private MetricRegistryImpl metricRegistry; @GuardedBy("lock") private ProcessMetricGroup processMetricGroup; @GuardedBy("lock") private RpcService commonRpcService; @GuardedBy("lock") private ExecutorService ioExecutor; @GuardedBy("lock") private final Collection<RpcService> rpcServices; @GuardedBy("lock") private HighAvailabilityServices haServices; @GuardedBy("lock") private BlobServer blobServer; @GuardedBy("lock") private HeartbeatServices heartbeatServices; @GuardedBy("lock") private BlobCacheService blobCacheService; @GuardedBy("lock") private LeaderRetrievalService resourceManagerLeaderRetriever; @GuardedBy("lock") private LeaderRetrievalService dispatcherLeaderRetriever; @GuardedBy("lock") private LeaderRetrievalService clusterRestEndpointLeaderRetrievalService; @GuardedBy("lock") private Collection<DispatcherResourceManagerComponent> dispatcherResourceManagerComponents; @GuardedBy("lock") private RpcGatewayRetriever<DispatcherId, DispatcherGateway> dispatcherGatewayRetriever; @GuardedBy("lock") private RpcGatewayRetriever<ResourceManagerId, ResourceManagerGateway> resourceManagerGatewayRetriever; @GuardedBy("lock") private LeaderRetriever webMonitorLeaderRetriever; @GuardedBy("lock") private RpcServiceFactory taskManagerRpcServiceFactory; /** Flag marking the mini cluster as started/running. */ private volatile boolean running; @GuardedBy("lock") private RpcSystem rpcSystem; /** * Creates a new Flink mini cluster based on the given configuration. * * @param miniClusterConfiguration The configuration for the mini cluster */ public MiniCluster(MiniClusterConfiguration miniClusterConfiguration) { this.miniClusterConfiguration = checkNotNull(miniClusterConfiguration, "config may not be null"); this.rpcServices = new ArrayList<>( 1 + 2 + miniClusterConfiguration .getNumTaskManagers()); this.dispatcherResourceManagerComponents = new ArrayList<>(1); this.rpcTimeout = miniClusterConfiguration.getRpcTimeout(); this.terminationFuture = CompletableFuture.completedFuture(null); running = false; this.taskManagers = new ArrayList<>(miniClusterConfiguration.getNumTaskManagers()); } public CompletableFuture<URI> getRestAddress() { synchronized (lock) { checkState(running, "MiniCluster is not yet running or has already been shut down."); return webMonitorLeaderRetriever .getLeaderFuture() .thenApply( FunctionUtils.uncheckedFunction( addressLeaderIdTuple -> new URI(addressLeaderIdTuple.f0))); } } public ClusterInformation getClusterInformation() { synchronized (lock) { checkState(running, "MiniCluster is not yet running or has already been shut down."); return new ClusterInformation("localhost", blobServer.getPort()); } } protected Executor getIOExecutor() { return ioExecutor; } /** Checks if the mini cluster was started and is running. */ public boolean isRunning() { return running; } /** * Starts the mini cluster, based on the configured properties. 
* * @throws Exception This method passes on any exception that occurs during the startup of the * mini cluster. */ public void start() throws Exception { synchronized (lock) { checkState(!running, "MiniCluster is already running"); LOG.info("Starting Flink Mini Cluster"); LOG.debug("Using configuration {}", miniClusterConfiguration); final Configuration configuration = miniClusterConfiguration.getConfiguration(); final boolean useSingleRpcService = miniClusterConfiguration.getRpcServiceSharing() == RpcServiceSharing.SHARED; try { initializeIOFormatClasses(configuration); rpcSystem = RpcSystem.load(configuration); LOG.info("Starting Metrics Registry"); metricRegistry = createMetricRegistry( configuration, rpcSystem.getMaximumMessageSizeInBytes(configuration)); LOG.info("Starting RPC Service(s)"); final RpcServiceFactory dispatcherResourceManagerComponentRpcServiceFactory; final RpcService metricQueryServiceRpcService; if (useSingleRpcService) { commonRpcService = createLocalRpcService(configuration, rpcSystem); final CommonRpcServiceFactory commonRpcServiceFactory = new CommonRpcServiceFactory(commonRpcService); taskManagerRpcServiceFactory = commonRpcServiceFactory; dispatcherResourceManagerComponentRpcServiceFactory = commonRpcServiceFactory; metricQueryServiceRpcService = MetricUtils.startLocalMetricsRpcService(configuration, rpcSystem); } else { final String jobManagerExternalAddress = miniClusterConfiguration.getJobManagerExternalAddress(); final String taskManagerExternalAddress = miniClusterConfiguration.getTaskManagerExternalAddress(); final String jobManagerExternalPortRange = miniClusterConfiguration.getJobManagerExternalPortRange(); final String taskManagerExternalPortRange = miniClusterConfiguration.getTaskManagerExternalPortRange(); final String jobManagerBindAddress = miniClusterConfiguration.getJobManagerBindAddress(); final String taskManagerBindAddress = miniClusterConfiguration.getTaskManagerBindAddress(); dispatcherResourceManagerComponentRpcServiceFactory = new DedicatedRpcServiceFactory( configuration, jobManagerExternalAddress, jobManagerExternalPortRange, jobManagerBindAddress, rpcSystem); taskManagerRpcServiceFactory = new DedicatedRpcServiceFactory( configuration, taskManagerExternalAddress, taskManagerExternalPortRange, taskManagerBindAddress, rpcSystem); commonRpcService = createRemoteRpcService( configuration, jobManagerBindAddress, 0, rpcSystem); metricQueryServiceRpcService = MetricUtils.startRemoteMetricsRpcService( configuration, commonRpcService.getAddress(), rpcSystem); } metricRegistry.startQueryService(metricQueryServiceRpcService, null); processMetricGroup = MetricUtils.instantiateProcessMetricGroup( metricRegistry, RpcUtils.getHostname(commonRpcService), ConfigurationUtils.getSystemResourceMetricsProbingInterval( configuration)); ioExecutor = Executors.newFixedThreadPool( ClusterEntrypointUtils.getPoolSize(configuration), new ExecutorThreadFactory("mini-cluster-io")); haServices = createHighAvailabilityServices(configuration, ioExecutor); blobServer = new BlobServer(configuration, haServices.createBlobStore()); blobServer.start(); heartbeatServices = HeartbeatServices.fromConfiguration(configuration); blobCacheService = new BlobCacheService( configuration, haServices.createBlobStore(), new InetSocketAddress( InetAddress.getLocalHost(), blobServer.getPort())); startTaskManagers(); MetricQueryServiceRetriever metricQueryServiceRetriever = new RpcMetricQueryServiceRetriever( metricRegistry.getMetricQueryServiceRpcService()); 
setupDispatcherResourceManagerComponents( configuration, dispatcherResourceManagerComponentRpcServiceFactory, metricQueryServiceRetriever); resourceManagerLeaderRetriever = haServices.getResourceManagerLeaderRetriever(); dispatcherLeaderRetriever = haServices.getDispatcherLeaderRetriever(); clusterRestEndpointLeaderRetrievalService = haServices.getClusterRestEndpointLeaderRetriever(); dispatcherGatewayRetriever = new RpcGatewayRetriever<>( commonRpcService, DispatcherGateway.class, DispatcherId::fromUuid, new ExponentialBackoffRetryStrategy( 21, Duration.ofMillis(5L), Duration.ofMillis(20L))); resourceManagerGatewayRetriever = new RpcGatewayRetriever<>( commonRpcService, ResourceManagerGateway.class, ResourceManagerId::fromUuid, new ExponentialBackoffRetryStrategy( 21, Duration.ofMillis(5L), Duration.ofMillis(20L))); webMonitorLeaderRetriever = new LeaderRetriever(); resourceManagerLeaderRetriever.start(resourceManagerGatewayRetriever); dispatcherLeaderRetriever.start(dispatcherGatewayRetriever); clusterRestEndpointLeaderRetrievalService.start(webMonitorLeaderRetriever); } catch (Exception e) { try { close(); } catch (Exception ee) { e.addSuppressed(ee); } throw e; } terminationFuture = new CompletableFuture<>(); running = true; LOG.info("Flink Mini Cluster started successfully"); } } @GuardedBy("lock") private void setupDispatcherResourceManagerComponents( Configuration configuration, RpcServiceFactory dispatcherResourceManagerComponentRpcServiceFactory, MetricQueryServiceRetriever metricQueryServiceRetriever) throws Exception { dispatcherResourceManagerComponents.addAll( createDispatcherResourceManagerComponents( configuration, dispatcherResourceManagerComponentRpcServiceFactory, haServices, blobServer, heartbeatServices, metricRegistry, metricQueryServiceRetriever, new ShutDownFatalErrorHandler())); final Collection<CompletableFuture<ApplicationStatus>> shutDownFutures = new ArrayList<>(dispatcherResourceManagerComponents.size()); for (DispatcherResourceManagerComponent dispatcherResourceManagerComponent : dispatcherResourceManagerComponents) { final CompletableFuture<ApplicationStatus> shutDownFuture = dispatcherResourceManagerComponent.getShutDownFuture(); FutureUtils.assertNoException( shutDownFuture.thenRun(dispatcherResourceManagerComponent::closeAsync)); shutDownFutures.add(shutDownFuture); } FutureUtils.assertNoException( FutureUtils.completeAll(shutDownFutures).thenRun(this::closeAsync)); } @VisibleForTesting protected Collection<? 
extends DispatcherResourceManagerComponent> createDispatcherResourceManagerComponents( Configuration configuration, RpcServiceFactory rpcServiceFactory, HighAvailabilityServices haServices, BlobServer blobServer, HeartbeatServices heartbeatServices, MetricRegistry metricRegistry, MetricQueryServiceRetriever metricQueryServiceRetriever, FatalErrorHandler fatalErrorHandler) throws Exception { DispatcherResourceManagerComponentFactory dispatcherResourceManagerComponentFactory = createDispatcherResourceManagerComponentFactory(); return Collections.singleton( dispatcherResourceManagerComponentFactory.create( configuration, ioExecutor, rpcServiceFactory.createRpcService(), haServices, blobServer, heartbeatServices, metricRegistry, new MemoryExecutionGraphInfoStore(), metricQueryServiceRetriever, fatalErrorHandler)); } @Nonnull DispatcherResourceManagerComponentFactory createDispatcherResourceManagerComponentFactory() { return DefaultDispatcherResourceManagerComponentFactory.createSessionComponentFactory( StandaloneResourceManagerFactory.getInstance()); } @VisibleForTesting protected HighAvailabilityServices createHighAvailabilityServices( Configuration configuration, Executor executor) throws Exception { LOG.info("Starting high-availability services"); final HaServices haServices = miniClusterConfiguration.getHaServices(); switch (haServices) { case WITH_LEADERSHIP_CONTROL: return new EmbeddedHaServicesWithLeadershipControl(executor); case CONFIGURED: return HighAvailabilityServicesUtils.createAvailableOrEmbeddedServices( configuration, executor); default: throw new IllegalConfigurationException("Unkown HA Services " + haServices); } } /** * Returns {@link HaLeadershipControl} if enabled. * * <p>{@link HaLeadershipControl} allows granting and revoking leadership of HA components, e.g. * JobManager. The method return {@link Optional * {@link MiniClusterConfiguration}. * * <p>Enabling this feature disables {@link HighAvailabilityOptions */ public Optional<HaLeadershipControl> getHaLeadershipControl() { synchronized (lock) { return haServices instanceof HaLeadershipControl ? Optional.of((HaLeadershipControl) haServices) : Optional.empty(); } } /** * Shuts down the mini cluster, failing all currently executing jobs. The mini cluster can be * started again by calling the {@link * * <p>This method shuts down all started services and components, even if an exception occurs in * the process of shutting down some component. 
* * @return Future which is completed once the MiniCluster has been completely shut down */ @Override public CompletableFuture<Void> closeAsync() { synchronized (lock) { if (running) { LOG.info("Shutting down Flink Mini Cluster"); try { final long shutdownTimeoutMillis = miniClusterConfiguration .getConfiguration() .getLong(ClusterOptions.CLUSTER_SERVICES_SHUTDOWN_TIMEOUT); final int numComponents = 2 + miniClusterConfiguration.getNumTaskManagers(); final Collection<CompletableFuture<Void>> componentTerminationFutures = new ArrayList<>(numComponents); componentTerminationFutures.addAll(terminateTaskManagers()); componentTerminationFutures.add(shutDownResourceManagerComponents()); final FutureUtils.ConjunctFuture<Void> componentsTerminationFuture = FutureUtils.completeAll(componentTerminationFutures); final CompletableFuture<Void> metricSystemTerminationFuture = FutureUtils.composeAfterwards( componentsTerminationFuture, this::closeMetricSystem); final CompletableFuture<Void> rpcServicesTerminationFuture = FutureUtils.composeAfterwards( metricSystemTerminationFuture, this::terminateRpcServices); final CompletableFuture<Void> remainingServicesTerminationFuture = FutureUtils.runAfterwards( rpcServicesTerminationFuture, this::terminateMiniClusterServices); final CompletableFuture<Void> executorsTerminationFuture = FutureUtils.composeAfterwards( remainingServicesTerminationFuture, () -> terminateExecutors(shutdownTimeoutMillis)); executorsTerminationFuture.whenComplete( (Void ignored, Throwable throwable) -> { if (throwable != null) { terminationFuture.completeExceptionally( ExceptionUtils.stripCompletionException(throwable)); } else { terminationFuture.complete(null); } }); } finally { running = false; } } return terminationFuture; } } private CompletableFuture<Void> closeMetricSystem() { synchronized (lock) { final ArrayList<CompletableFuture<Void>> terminationFutures = new ArrayList<>(2); if (processMetricGroup != null) { processMetricGroup.close(); processMetricGroup = null; } if (metricRegistry != null) { terminationFutures.add(metricRegistry.shutdown()); metricRegistry = null; } return FutureUtils.completeAll(terminationFutures); } } @GuardedBy("lock") private void startTaskManagers() throws Exception { final int numTaskManagers = miniClusterConfiguration.getNumTaskManagers(); LOG.info("Starting {} TaskManger(s)", numTaskManagers); for (int i = 0; i < numTaskManagers; i++) { startTaskManager(); } } /** * Starts additional TaskManager process. * * <p>When the MiniCluster starts up, it always starts {@link * MiniClusterConfiguration * 0 to the number of TaskManagers, started so far, minus one. This method starts a TaskManager * with the next index which is the number of TaskManagers, started so far. The index always * increases with each new started TaskManager. 
The indices of terminated TaskManagers are not * reused after {@link */ public void startTaskManager() throws Exception { synchronized (lock) { final Configuration configuration = miniClusterConfiguration.getConfiguration(); final TaskExecutor taskExecutor = TaskManagerRunner.startTaskManager( configuration, new ResourceID(UUID.randomUUID().toString()), taskManagerRpcServiceFactory.createRpcService(), haServices, heartbeatServices, metricRegistry, blobCacheService, useLocalCommunication(), ExternalResourceInfoProvider.NO_EXTERNAL_RESOURCES, taskManagerTerminatingFatalErrorHandlerFactory.create( taskManagers.size())); taskExecutor.start(); taskManagers.add(taskExecutor); } } @VisibleForTesting protected boolean useLocalCommunication() { return miniClusterConfiguration.getNumTaskManagers() == 1; } @GuardedBy("lock") private Collection<? extends CompletableFuture<Void>> terminateTaskManagers() { final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(taskManagers.size()); for (int i = 0; i < taskManagers.size(); i++) { terminationFutures.add(terminateTaskManager(i)); } return terminationFutures; } /** * Terminates a TaskManager with the given index. * * <p>See {@link * terminates a TaskManager with a given index but it does not clear the index. The index stays * occupied for the lifetime of the MiniCluster and its TaskManager stays terminated. The index * is not reused if more TaskManagers are started with {@link * * @param index index of the TaskManager to terminate * @return {@link CompletableFuture} of the given TaskManager termination */ public CompletableFuture<Void> terminateTaskManager(int index) { synchronized (lock) { final TaskExecutor taskExecutor = taskManagers.get(index); return taskExecutor.closeAsync(); } } public CompletableFuture<Collection<JobStatusMessage>> listJobs() { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway .requestMultipleJobDetails(rpcTimeout) .thenApply( jobs -> jobs.getJobs().stream() .map( details -> new JobStatusMessage( details.getJobId(), details .getJobName(), details.getStatus(), details .getStartTime())) .collect(Collectors.toList()))); } public CompletableFuture<JobStatus> getJobStatus(JobID jobId) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.requestJobStatus(jobId, rpcTimeout)); } public CompletableFuture<Acknowledge> cancelJob(JobID jobId) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.cancelJob(jobId, rpcTimeout)); } public CompletableFuture<String> triggerSavepoint( JobID jobId, String targetDirectory, boolean cancelJob) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.triggerSavepoint( jobId, targetDirectory, cancelJob, rpcTimeout)); } public CompletableFuture<String> stopWithSavepoint( JobID jobId, String targetDirectory, boolean terminate) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.stopWithSavepoint( jobId, targetDirectory, terminate, rpcTimeout)); } public CompletableFuture<Acknowledge> disposeSavepoint(String savepointPath) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.disposeSavepoint(savepointPath, rpcTimeout)); } public CompletableFuture<? 
extends AccessExecutionGraph> getExecutionGraph(JobID jobId) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.requestJob(jobId, rpcTimeout)); } public CompletableFuture<CoordinationResponse> deliverCoordinationRequestToCoordinator( JobID jobId, OperatorID operatorId, SerializedValue<CoordinationRequest> serializedRequest) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.deliverCoordinationRequestToCoordinator( jobId, operatorId, serializedRequest, rpcTimeout)); } private <T> CompletableFuture<T> runDispatcherCommand( Function<DispatcherGateway, CompletableFuture<T>> dispatcherCommand) { return getDispatcherGatewayFuture() .thenApply(dispatcherCommand) .thenCompose(Function.identity()); } /** * This method executes a job in detached mode. The method returns immediately after the job has * been added to the * * @param job The Flink job to execute * @throws JobExecutionException Thrown if anything went amiss during initial job launch, or if * the job terminally failed. */ public void runDetached(JobGraph job) throws JobExecutionException, InterruptedException { checkNotNull(job, "job is null"); final CompletableFuture<JobSubmissionResult> submissionFuture = submitJob(job); try { submissionFuture.get(); } catch (ExecutionException e) { throw new JobExecutionException( job.getJobID(), ExceptionUtils.stripExecutionException(e)); } } /** * This method runs a job in blocking mode. The method returns only after the job completed * successfully, or after it failed terminally. * * @param job The Flink job to execute * @return The result of the job execution * @throws JobExecutionException Thrown if anything went amiss during initial job launch, or if * the job terminally failed. */ public JobExecutionResult executeJobBlocking(JobGraph job) throws JobExecutionException, InterruptedException { checkNotNull(job, "job is null"); final CompletableFuture<JobSubmissionResult> submissionFuture = submitJob(job); final CompletableFuture<JobResult> jobResultFuture = submissionFuture.thenCompose( (JobSubmissionResult ignored) -> requestJobResult(job.getJobID())); final JobResult jobResult; try { jobResult = jobResultFuture.get(); } catch (ExecutionException e) { throw new JobExecutionException( job.getJobID(), "Could not retrieve JobResult.", ExceptionUtils.stripExecutionException(e)); } try { return jobResult.toJobExecutionResult(Thread.currentThread().getContextClassLoader()); } catch (IOException | ClassNotFoundException e) { throw new JobExecutionException(job.getJobID(), e); } } public CompletableFuture<JobSubmissionResult> submitJob(JobGraph jobGraph) { final CompletableFuture<DispatcherGateway> dispatcherGatewayFuture = getDispatcherGatewayFuture(); final CompletableFuture<InetSocketAddress> blobServerAddressFuture = createBlobServerAddress(dispatcherGatewayFuture); final CompletableFuture<Void> jarUploadFuture = uploadAndSetJobFiles(blobServerAddressFuture, jobGraph); final CompletableFuture<Acknowledge> acknowledgeCompletableFuture = jarUploadFuture .thenCombine( dispatcherGatewayFuture, (Void ack, DispatcherGateway dispatcherGateway) -> dispatcherGateway.submitJob(jobGraph, rpcTimeout)) .thenCompose(Function.identity()); return acknowledgeCompletableFuture.thenApply( (Acknowledge ignored) -> new JobSubmissionResult(jobGraph.getJobID())); } public CompletableFuture<JobResult> requestJobResult(JobID jobId) { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.requestJobResult(jobId, RpcUtils.INF_TIMEOUT)); } public 
CompletableFuture<ClusterOverview> requestClusterOverview() { return runDispatcherCommand( dispatcherGateway -> dispatcherGateway.requestClusterOverview(RpcUtils.INF_TIMEOUT)); } @VisibleForTesting protected CompletableFuture<DispatcherGateway> getDispatcherGatewayFuture() { synchronized (lock) { checkState(running, "MiniCluster is not yet running or has already been shut down."); return dispatcherGatewayRetriever.getFuture(); } } private CompletableFuture<Void> uploadAndSetJobFiles( final CompletableFuture<InetSocketAddress> blobServerAddressFuture, final JobGraph job) { return blobServerAddressFuture.thenAccept( blobServerAddress -> { try { ClientUtils.extractAndUploadJobGraphFiles( job, () -> new BlobClient( blobServerAddress, miniClusterConfiguration.getConfiguration())); } catch (FlinkException e) { throw new CompletionException(e); } }); } private CompletableFuture<InetSocketAddress> createBlobServerAddress( final CompletableFuture<DispatcherGateway> dispatcherGatewayFuture) { return dispatcherGatewayFuture .thenApply( dispatcherGateway -> dispatcherGateway .getBlobServerPort(rpcTimeout) .thenApply( blobServerPort -> new InetSocketAddress( dispatcherGateway.getHostname(), blobServerPort))) .thenCompose(Function.identity()); } /** * Factory method to create the metric registry for the mini cluster. * * @param config The configuration of the mini cluster * @param maximumMessageSizeInBytes the maximum message size */ protected MetricRegistryImpl createMetricRegistry( Configuration config, long maximumMessageSizeInBytes) { return new MetricRegistryImpl( MetricRegistryConfiguration.fromConfiguration(config, maximumMessageSizeInBytes), ReporterSetup.fromConfiguration(config, null)); } /** * Factory method to instantiate the remote RPC service. * * @param configuration Flink configuration. * @param bindAddress The address to bind the RPC service to. * @param bindPort The port range to bind the RPC service to. * @param rpcSystem * @return The instantiated RPC service */ protected RpcService createRemoteRpcService( Configuration configuration, String bindAddress, int bindPort, RpcSystem rpcSystem) throws Exception { return rpcSystem .remoteServiceBuilder(configuration, bindAddress, String.valueOf(bindPort)) .withBindAddress(bindAddress) .withBindPort(bindPort) .withExecutorConfiguration(RpcUtils.getTestForkJoinExecutorConfiguration()) .createAndStart(); } /** * Factory method to instantiate the remote RPC service. * * @param configuration Flink configuration. * @param externalAddress The external address to access the RPC service. * @param externalPortRange The external port range to access the RPC service. * @param bindAddress The address to bind the RPC service to. * @param rpcSystem * @return The instantiated RPC service */ protected RpcService createRemoteRpcService( Configuration configuration, String externalAddress, String externalPortRange, String bindAddress, RpcSystem rpcSystem) throws Exception { return rpcSystem .remoteServiceBuilder(configuration, externalAddress, externalPortRange) .withBindAddress(bindAddress) .withExecutorConfiguration(RpcUtils.getTestForkJoinExecutorConfiguration()) .createAndStart(); } /** * Factory method to instantiate the local RPC service. * * @param configuration Flink configuration. 
* @param rpcSystem * @return The instantiated RPC service */ protected RpcService createLocalRpcService(Configuration configuration, RpcSystem rpcSystem) throws Exception { return rpcSystem .localServiceBuilder(configuration) .withExecutorConfiguration(RpcUtils.getTestForkJoinExecutorConfiguration()) .createAndStart(); } @GuardedBy("lock") private CompletableFuture<Void> shutDownResourceManagerComponents() { final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(dispatcherResourceManagerComponents.size()); for (DispatcherResourceManagerComponent dispatcherResourceManagerComponent : dispatcherResourceManagerComponents) { terminationFutures.add(dispatcherResourceManagerComponent.closeAsync()); } final FutureUtils.ConjunctFuture<Void> dispatcherTerminationFuture = FutureUtils.completeAll(terminationFutures); return FutureUtils.runAfterwards( dispatcherTerminationFuture, () -> { Exception exception = null; synchronized (lock) { if (resourceManagerLeaderRetriever != null) { try { resourceManagerLeaderRetriever.stop(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } resourceManagerLeaderRetriever = null; } if (dispatcherLeaderRetriever != null) { try { dispatcherLeaderRetriever.stop(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } dispatcherLeaderRetriever = null; } if (clusterRestEndpointLeaderRetrievalService != null) { try { clusterRestEndpointLeaderRetrievalService.stop(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } clusterRestEndpointLeaderRetrievalService = null; } } if (exception != null) { throw exception; } }); } @Nonnull private CompletableFuture<Void> terminateRpcServices() { synchronized (lock) { final int numRpcServices = 1 + rpcServices.size(); final Collection<CompletableFuture<?>> rpcTerminationFutures = new ArrayList<>(numRpcServices); rpcTerminationFutures.add(commonRpcService.stopService()); for (RpcService rpcService : rpcServices) { rpcTerminationFutures.add(rpcService.stopService()); } commonRpcService = null; rpcServices.clear(); return FutureUtils.completeAll(rpcTerminationFutures); } } private CompletableFuture<Void> terminateExecutors(long executorShutdownTimeoutMillis) { synchronized (lock) { if (ioExecutor != null) { return ExecutorUtils.nonBlockingShutdown( executorShutdownTimeoutMillis, TimeUnit.MILLISECONDS, ioExecutor); } else { return CompletableFuture.completedFuture(null); } } } /** Internal factory for {@link RpcService}. */ protected interface RpcServiceFactory { RpcService createRpcService() throws Exception; } /** Factory which returns always the common {@link RpcService}. */ protected static class CommonRpcServiceFactory implements RpcServiceFactory { private final RpcService commonRpcService; CommonRpcServiceFactory(RpcService commonRpcService) { this.commonRpcService = commonRpcService; } @Override public RpcService createRpcService() { return commonRpcService; } } /** Factory which creates and registers new {@link RpcService}. 
*/ protected class DedicatedRpcServiceFactory implements RpcServiceFactory { private final Configuration configuration; private final String externalAddress; private final String externalPortRange; private final String bindAddress; private final RpcSystem rpcSystem; DedicatedRpcServiceFactory( Configuration configuration, String externalAddress, String externalPortRange, String bindAddress, RpcSystem rpcSystem) { this.configuration = configuration; this.externalAddress = externalAddress; this.externalPortRange = externalPortRange; this.bindAddress = bindAddress; this.rpcSystem = rpcSystem; } @Override public RpcService createRpcService() throws Exception { final RpcService rpcService = MiniCluster.this.createRemoteRpcService( configuration, externalAddress, externalPortRange, bindAddress, rpcSystem); synchronized (lock) { rpcServices.add(rpcService); } return rpcService; } } private void initializeIOFormatClasses(Configuration configuration) { FileOutputFormat.initDefaultsFromConfiguration(configuration); } private class TerminatingFatalErrorHandler implements FatalErrorHandler { private final int index; private TerminatingFatalErrorHandler(int index) { this.index = index; } @Override public void onFatalError(Throwable exception) { if (running) { LOG.error("TaskManager #{} failed.", index, exception); synchronized (lock) { taskManagers.get(index).closeAsync(); } } } } private class ShutDownFatalErrorHandler implements FatalErrorHandler { @Override public void onFatalError(Throwable exception) { LOG.warn("Error in MiniCluster. Shutting the MiniCluster down.", exception); closeAsync(); } } private class TerminatingFatalErrorHandlerFactory { /** * Create a new {@link TerminatingFatalErrorHandler} for the {@link TaskExecutor} with the * given index. * * @param index into the {@link #taskManagers} collection to identify the correct {@link TaskExecutor}. * @return {@link TerminatingFatalErrorHandler} for the given index */ @GuardedBy("lock") private TerminatingFatalErrorHandler create(int index) { return new TerminatingFatalErrorHandler(index); } } /** HA Services to use. */ public enum HaServices { /** Uses the configured HA Services in {@link HighAvailabilityOptions#HA_MODE}. */ CONFIGURED, /** * Enables or disables {@link HaLeadershipControl} in {@link MiniCluster#getHaLeadershipControl()}. * * <p>{@link HaLeadershipControl} allows granting and revoking leadership of HA components. * Enabling this feature disables {@link HighAvailabilityOptions#HA_MODE} option. */ WITH_LEADERSHIP_CONTROL } }
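The `runDispatcherCommand` helper in the MiniCluster code above leans on the `thenApply(...).thenCompose(Function.identity())` idiom to flatten a nested future. A minimal, self-contained sketch of that idiom (the `lookup` function and names are hypothetical):

```java
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;

public class FlattenFutureDemo {
    // A command that itself returns a future, mirroring the dispatcherCommand shape.
    static CompletableFuture<String> lookup(int id) {
        return CompletableFuture.supplyAsync(() -> "result-" + id);
    }

    public static void main(String[] args) {
        CompletableFuture<Integer> idFuture = CompletableFuture.supplyAsync(() -> 42);

        // thenApply alone would yield CompletableFuture<CompletableFuture<String>>;
        // thenCompose(Function.identity()) flattens it back to CompletableFuture<String>.
        CompletableFuture<String> flattened =
                idFuture.thenApply(FlattenFutureDemo::lookup)
                        .thenCompose(Function.identity());

        System.out.println(flattened.join()); // prints "result-42"
    }
}
```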
Let me confirm: this line of code should revert to its state before the modification, so that the test will be skipped instead of failing in a CI environment without k8s, right?
public static void checkEnv() { final String kubeConfigEnv = System.getenv("ITCASE_KUBECONFIG"); assertThat(StringUtils.isNullOrWhitespaceOnly(kubeConfigEnv)) .withFailMessage("ITCASE_KUBECONFIG environment is not set.") .isFalse(); kubeConfigFile = kubeConfigEnv; }
assertThat(StringUtils.isNullOrWhitespaceOnly(kubeConfigEnv))
public static void checkEnv() { final String kubeConfigEnv = System.getenv("ITCASE_KUBECONFIG"); assumeThat(kubeConfigEnv) .withFailMessage("ITCASE_KUBECONFIG environment is not set.") .isNotBlank(); kubeConfigFile = kubeConfigEnv; }
class KubernetesExtension implements BeforeAllCallback, AfterAllCallback { private static final String CLUSTER_ID = "flink-itcase-cluster"; private static final int KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES = 100; private static String kubeConfigFile; private Configuration configuration; private FlinkKubeClient flinkKubeClient; @Override public void beforeAll(ExtensionContext extensionContext) throws Exception { checkEnv(); configuration = new Configuration(); configuration.set(KubernetesConfigOptions.KUBE_CONFIG_FILE, kubeConfigFile); configuration.setString(KubernetesConfigOptions.CLUSTER_ID, CLUSTER_ID); configuration.set( KubernetesConfigOptions.KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES, KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES); final FlinkKubeClientFactory kubeClientFactory = new FlinkKubeClientFactory(); flinkKubeClient = kubeClientFactory.fromConfiguration(configuration, "testing"); } @Override public void afterAll(ExtensionContext extensionContext) throws Exception { flinkKubeClient.close(); } public Configuration getConfiguration() { return configuration; } public FlinkKubeClient getFlinkKubeClient() { return flinkKubeClient; } }
class KubernetesExtension implements BeforeAllCallback, AfterAllCallback { private static final String CLUSTER_ID = "flink-itcase-cluster"; private static final int KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES = 100; private static String kubeConfigFile; private Configuration configuration; private FlinkKubeClient flinkKubeClient; @Override public void beforeAll(ExtensionContext extensionContext) throws Exception { checkEnv(); configuration = new Configuration(); configuration.set(KubernetesConfigOptions.KUBE_CONFIG_FILE, kubeConfigFile); configuration.setString(KubernetesConfigOptions.CLUSTER_ID, CLUSTER_ID); configuration.set( KubernetesConfigOptions.KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES, KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES); final FlinkKubeClientFactory kubeClientFactory = new FlinkKubeClientFactory(); flinkKubeClient = kubeClientFactory.fromConfiguration(configuration, "testing"); } @Override public void afterAll(ExtensionContext extensionContext) throws Exception { if (flinkKubeClient != null) { flinkKubeClient.close(); } } public Configuration getConfiguration() { return configuration; } public FlinkKubeClient getFlinkKubeClient() { return flinkKubeClient; } }
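The change in this record swaps a JUnit assertion for an AssertJ assumption so the test is skipped, rather than failed, when the environment variable is absent. A minimal sketch of that distinction, using a hypothetical `MY_SERVICE_ENDPOINT` variable:

```java
import static org.assertj.core.api.Assumptions.assumeThat;

import org.junit.jupiter.api.Test;

class EnvGatedTest {

    @Test
    void runsOnlyWhenEnvIsPresent() {
        // Hypothetical variable name; any required external resource works the same way.
        String endpoint = System.getenv("MY_SERVICE_ENDPOINT");

        // A failed assumption aborts the test as "skipped" rather than "failed",
        // which is what you want on CI machines without the external dependency.
        assumeThat(endpoint).isNotBlank();

        // ... test body that talks to the endpoint ...
    }
}
```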
```suggestion ``` nit: We can remove this check here, since `recordActiveMessageInProcessingTimesMap` already has a null check inside.
public Closeable enterState(ExecutionState newState) { Closeable baseCloseable = super.enterState(newState); final boolean isDataflowProcessElementState = newState.isProcessElementState && newState instanceof DataflowExecutionState; if (isDataflowProcessElementState) { DataflowExecutionState newDFState = (DataflowExecutionState) newState; if (newDFState.getStepName() != null && newDFState.getStepName().userName() != null) { synchronized (this) { if (this.activeMessageMetadata != null) { recordActiveMessageInProcessingTimesMap(); } this.activeMessageMetadata = ActiveMessageMetadata.create( newDFState.getStepName().userName(), clock.getMillis()); } } elementExecutionTracker.enter(newDFState.getStepName()); } return () -> { if (isDataflowProcessElementState) { synchronized (this) { if (this.activeMessageMetadata != null) { recordActiveMessageInProcessingTimesMap(); } } elementExecutionTracker.exit(); } baseCloseable.close(); }; }
if (this.activeMessageMetadata != null) {
public Closeable enterState(ExecutionState newState) { Closeable baseCloseable = super.enterState(newState); final boolean isDataflowProcessElementState = newState.isProcessElementState && newState instanceof DataflowExecutionState; if (isDataflowProcessElementState) { DataflowExecutionState newDFState = (DataflowExecutionState) newState; if (newDFState.getStepName() != null && newDFState.getStepName().userName() != null) { recordActiveMessageInProcessingTimesMap(); synchronized (this) { this.activeMessageMetadata = ActiveMessageMetadata.create( newDFState.getStepName().userName(), clock.getMillis()); } } elementExecutionTracker.enter(newDFState.getStepName()); } return () -> { if (isDataflowProcessElementState) { recordActiveMessageInProcessingTimesMap(); elementExecutionTracker.exit(); } baseCloseable.close(); }; }
class DataflowExecutionStateTracker extends ExecutionStateTracker { private final ElementExecutionTracker elementExecutionTracker; private final DataflowOperationContext.DataflowExecutionState otherState; private final ContextActivationObserverRegistry contextActivationObserverRegistry; private final String workItemId; /** * Metadata on the message whose processing is currently being managed by this tracker. If no * message is actively being processed, activeMessageMetadata will be null. */ @GuardedBy("this") @Nullable private ActiveMessageMetadata activeMessageMetadata = null; private final MillisProvider clock = System::currentTimeMillis; @GuardedBy("this") private final Map<String, IntSummaryStatistics> processingTimesByStep = new HashMap<>(); public DataflowExecutionStateTracker( ExecutionStateSampler sampler, DataflowOperationContext.DataflowExecutionState otherState, CounterFactory counterFactory, PipelineOptions options, String workItemId) { super(sampler); this.elementExecutionTracker = DataflowElementExecutionTracker.create(counterFactory, options); this.otherState = otherState; this.workItemId = workItemId; this.contextActivationObserverRegistry = ContextActivationObserverRegistry.createDefault(); } @Override public Closeable activate() { Closer closer = Closer.create(); try { closer.register(super.activate()); for (ContextActivationObserver p : contextActivationObserverRegistry.getContextActivationObservers()) { closer.register(p.activate(this)); } closer.register(enterState(otherState)); return closer; } catch (Exception e) { try { closer.close(); } catch (IOException suppressed) { e.addSuppressed(suppressed); } throw e; } } @Override protected void takeSampleOnce(long millisSinceLastSample) { elementExecutionTracker.takeSample(millisSinceLastSample); super.takeSampleOnce(millisSinceLastSample); } /** * Enter a new state on the tracker. If the new state is a Dataflow processing state, tracks the * activeMessageMetadata with the start time of the new state. */ @Override public String getWorkItemId() { return this.workItemId; } public synchronized Optional<ActiveMessageMetadata> getActiveMessageMetadata() { return Optional.ofNullable(activeMessageMetadata); } public synchronized Map<String, IntSummaryStatistics> getProcessingTimesByStepCopy() { Map<String, IntSummaryStatistics> processingTimesCopy = processingTimesByStep.entrySet().stream() .collect( Collectors.toMap( e -> e.getKey(), e -> { IntSummaryStatistics clone = new IntSummaryStatistics(); clone.combine(e.getValue()); return clone; })); return processingTimesCopy; } /** * Transitions the metadata for the currently active message to an entry in the completed * processing times map. Sets the activeMessageMetadata to null after the entry has been * recorded. */ private synchronized void recordActiveMessageInProcessingTimesMap() { if (this.activeMessageMetadata == null) { return; } this.processingTimesByStep.compute( this.activeMessageMetadata.userStepName(), (k, v) -> { if (v == null) { v = new IntSummaryStatistics(); } synchronized (this) { v.accept((int) (System.currentTimeMillis() - this.activeMessageMetadata.startTime())); } return v; }); this.activeMessageMetadata = null; } }
class DataflowExecutionStateTracker extends ExecutionStateTracker { private final ElementExecutionTracker elementExecutionTracker; private final DataflowOperationContext.DataflowExecutionState otherState; private final ContextActivationObserverRegistry contextActivationObserverRegistry; private final String workItemId; /** * Metadata on the message whose processing is currently being managed by this tracker. If no * message is actively being processed, activeMessageMetadata will be null. */ @GuardedBy("this") @Nullable private ActiveMessageMetadata activeMessageMetadata = null; private final MillisProvider clock = System::currentTimeMillis; @GuardedBy("this") private final Map<String, IntSummaryStatistics> processingTimesByStep = new HashMap<>(); public DataflowExecutionStateTracker( ExecutionStateSampler sampler, DataflowOperationContext.DataflowExecutionState otherState, CounterFactory counterFactory, PipelineOptions options, String workItemId) { super(sampler); this.elementExecutionTracker = DataflowElementExecutionTracker.create(counterFactory, options); this.otherState = otherState; this.workItemId = workItemId; this.contextActivationObserverRegistry = ContextActivationObserverRegistry.createDefault(); } @Override public Closeable activate() { Closer closer = Closer.create(); try { closer.register(super.activate()); for (ContextActivationObserver p : contextActivationObserverRegistry.getContextActivationObservers()) { closer.register(p.activate(this)); } closer.register(enterState(otherState)); return closer; } catch (Exception e) { try { closer.close(); } catch (IOException suppressed) { e.addSuppressed(suppressed); } throw e; } } @Override protected void takeSampleOnce(long millisSinceLastSample) { elementExecutionTracker.takeSample(millisSinceLastSample); super.takeSampleOnce(millisSinceLastSample); } /** * Enter a new state on the tracker. If the new state is a Dataflow processing state, tracks the * activeMessageMetadata with the start time of the new state. */ @Override public String getWorkItemId() { return this.workItemId; } public synchronized Optional<ActiveMessageMetadata> getActiveMessageMetadata() { return Optional.ofNullable(activeMessageMetadata); } public synchronized Map<String, IntSummaryStatistics> getProcessingTimesByStepCopy() { Map<String, IntSummaryStatistics> processingTimesCopy = processingTimesByStep.entrySet().stream() .collect( Collectors.toMap( e -> e.getKey(), e -> { IntSummaryStatistics clone = new IntSummaryStatistics(); clone.combine(e.getValue()); return clone; })); return processingTimesCopy; } /** * Transitions the metadata for the currently active message to an entry in the completed * processing times map. Sets the activeMessageMetadata to null after the entry has been * recorded. */ private synchronized void recordActiveMessageInProcessingTimesMap() { if (this.activeMessageMetadata == null) { return; } int processingTime = (int) (System.currentTimeMillis() - this.activeMessageMetadata.startTime()); this.processingTimesByStep.compute( this.activeMessageMetadata.userStepName(), (k, v) -> { if (v == null) { v = new IntSummaryStatistics(); } v.accept(processingTime); return v; }); this.activeMessageMetadata = null; } }
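The nit above works because the callee already guards against `null`. A standalone sketch of that pattern (the `FlusherDemo` class is hypothetical): when the guard lives inside the method, call sites can drop their own checks and simply invoke it unconditionally.

```java
public class FlusherDemo {
    private String pending; // null when no message is active

    private synchronized void flushPending() {
        if (pending == null) {
            return; // nothing to record; calling this is a safe no-op
        }
        System.out.println("flushing " + pending);
        pending = null;
    }

    synchronized void replace(String next) {
        flushPending(); // no outer null check needed: the guard is inside
        pending = next;
    }

    public static void main(String[] args) {
        FlusherDemo demo = new FlusherDemo();
        demo.replace("a"); // flushPending() is a no-op here (nothing pending)
        demo.replace("b"); // prints "flushing a"
    }
}
```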
Should these say `from integer`, given that the value was originally set as an `integer` and is now being accessed as various other types?
public static Collection<TestSpec> getSpecs() { return Arrays.asList( TestSpec.whenAccessed(conf -> conf.getInteger("int", 0)).expect(5), TestSpec.whenAccessed(conf -> conf.getLong("int", 0)).expect(5L), TestSpec.whenAccessed(conf -> conf.getFloat("int", 0)).expect(5f), TestSpec.whenAccessed(conf -> conf.getDouble("int", 0)).expect(5.0), TestSpec.whenAccessed(conf -> conf.getBoolean("int", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("int", "0")).expect("5"), TestSpec.whenAccessed(conf -> conf.getBytes("int", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("long", 0)).expect(15), TestSpec.whenAccessed(conf -> conf.getLong("long", 0)).expect(15L), TestSpec.whenAccessed(conf -> conf.getFloat("long", 0)).expect(15f), TestSpec.whenAccessed(conf -> conf.getDouble("long", 0)).expect(15.0), TestSpec.whenAccessed(conf -> conf.getBoolean("long", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("long", "0")).expect("15"), TestSpec.whenAccessed(conf -> conf.getBytes("long", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("too_long", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getLong("too_long", 0)).expect(TOO_LONG), TestSpec.whenAccessed(conf -> conf.getFloat("too_long", 0)).expect((float) TOO_LONG), TestSpec.whenAccessed(conf -> conf.getDouble("too_long", 0)).expect((double) TOO_LONG), TestSpec.whenAccessed(conf -> conf.getBoolean("too_long", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("too_long", "0")).expect(String.valueOf(TOO_LONG)), TestSpec.whenAccessed(conf -> conf.getBytes("too_long", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("float", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getLong("float", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getFloat("float", 0)).expect(2.1456775f), TestSpec.whenAccessed(conf -> conf.getDouble("float", 0)).expect(closeTo(2.1456775, 0.0000001)), TestSpec.whenAccessed(conf -> conf.getBoolean("float", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("float", "0")).expect(startsWith("2.145677")), TestSpec.whenAccessed(conf -> conf.getBytes("float", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("double", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getLong("double", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getFloat("double", 0)).expect(new IsCloseTo(3.141592f, 0.000001f)), TestSpec.whenAccessed(conf -> conf.getDouble("double", 0)).expect(Math.PI), TestSpec.whenAccessed(conf -> conf.getBoolean("double", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("double", "0")).expect(startsWith("3.1415926535")), TestSpec.whenAccessed(conf -> conf.getBytes("double", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("negative_double", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getLong("negative_double", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getFloat("negative_double", 0)) .expect(new IsCloseTo(-1f, 0.000001f)), TestSpec.whenAccessed(conf -> conf.getDouble("negative_double", 0)).expect(-1D), TestSpec.whenAccessed(conf -> conf.getBoolean("negative_double", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("negative_double", "0")).expect(startsWith("-1")), TestSpec.whenAccessed(conf -> conf.getBytes("negative_double", 
EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("zero", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getLong("zero", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getFloat("zero", 0)).expect(new IsCloseTo(0f, 0.000001f)), TestSpec.whenAccessed(conf -> conf.getDouble("zero", 0)).expect(0D), TestSpec.whenAccessed(conf -> conf.getBoolean("zero", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("zero", "0")).expect(startsWith("0")), TestSpec.whenAccessed(conf -> conf.getBytes("zero", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("too_long_double", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getLong("too_long_double", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getFloat("too_long_double", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getDouble("too_long_double", 0)).expect(TOO_LONG_DOUBLE), TestSpec.whenAccessed(conf -> conf.getBoolean("too_long_double", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("too_long_double", "0")) .expect(String.valueOf(TOO_LONG_DOUBLE)), TestSpec.whenAccessed(conf -> conf.getBytes("too_long_double", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("string", 0)).expect(42), TestSpec.whenAccessed(conf -> conf.getLong("string", 0)).expect(42L), TestSpec.whenAccessed(conf -> conf.getFloat("string", 0)).expect(42f), TestSpec.whenAccessed(conf -> conf.getDouble("string", 0)).expect(42.0), TestSpec.whenAccessed(conf -> conf.getBoolean("string", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("string", "0")).expect("42"), TestSpec.whenAccessed(conf -> conf.getBytes("string", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("non_convertible_string", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getLong("non_convertible_string", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getFloat("non_convertible_string", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getDouble("non_convertible_string", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getBoolean("non_convertible_string", true)).expectException(""), TestSpec.whenAccessed(conf -> conf.getString("non_convertible_string", "0")).expect("bcdefg&&"), TestSpec.whenAccessed(conf -> conf.getBytes("non_convertible_string", EMPTY_BYTES)).expect(EMPTY_BYTES), TestSpec.whenAccessed(conf -> conf.getInteger("boolean", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getLong("boolean", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getFloat("boolean", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getDouble("boolean", 0)).expectException(""), TestSpec.whenAccessed(conf -> conf.getBoolean("boolean", false)).expect(true), TestSpec.whenAccessed(conf -> conf.getString("boolean", "0")).expect("true"), TestSpec.whenAccessed(conf -> conf.getBytes("boolean", EMPTY_BYTES)).expect(EMPTY_BYTES) ); }
public static Collection<TestSpec> getSpecs() { return Arrays.asList( TestSpec.whenAccessed(conf -> conf.getInteger("int", 0)).expect(5), TestSpec.whenAccessed(conf -> conf.getLong("int", 0)).expect(5L), TestSpec.whenAccessed(conf -> conf.getFloat("int", 0)).expect(5f), TestSpec.whenAccessed(conf -> conf.getDouble("int", 0)).expect(5.0), TestSpec.whenAccessed(conf -> conf.getBoolean("int", true)) .expectException("Unrecognized option for boolean: 5. Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("int", "0")).expect("5"), TestSpec.whenAccessed(conf -> conf.getBytes("int", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value 5 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "int", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("Configuration cannot evaluate object of class class java.lang.Integer as a class name"), TestSpec.whenAccessed(conf -> conf.getInteger("long", 0)).expect(15), TestSpec.whenAccessed(conf -> conf.getLong("long", 0)).expect(15L), TestSpec.whenAccessed(conf -> conf.getFloat("long", 0)).expect(15f), TestSpec.whenAccessed(conf -> conf.getDouble("long", 0)).expect(15.0), TestSpec.whenAccessed(conf -> conf.getBoolean("long", true)) .expectException("Unrecognized option for boolean: 15. Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("long", "0")).expect("15"), TestSpec.whenAccessed(conf -> conf.getBytes("long", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value 15 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "long", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("Configuration cannot evaluate object of class class java.lang.Long as a class name"), TestSpec.whenAccessed(conf -> conf.getInteger("too_long", 0)) .expectException("Configuration value 2147483657 overflows/underflows the integer type"), TestSpec.whenAccessed(conf -> conf.getLong("too_long", 0)).expect(TOO_LONG), TestSpec.whenAccessed(conf -> conf.getFloat("too_long", 0)).expect((float) TOO_LONG), TestSpec.whenAccessed(conf -> conf.getDouble("too_long", 0)).expect((double) TOO_LONG), TestSpec.whenAccessed(conf -> conf.getBoolean("too_long", true)) .expectException( "Unrecognized option for boolean: 2147483657. Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("too_long", "0")).expect(String.valueOf(TOO_LONG)), TestSpec.whenAccessed(conf -> conf.getBytes("too_long", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value 2147483657 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "too_long", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("Configuration cannot evaluate object of class class java.lang.Long as a class name"), TestSpec.whenAccessed(conf -> conf.getInteger("float", 0)) .expectException("For input string: \"2.1456776\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getLong("float", 0)) .expectException("For input string: \"2.1456776\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getFloat("float", 0)) .expect(2.1456775f), TestSpec.whenAccessed(conf -> conf.getDouble("float", 0)).expect(closeTo(2.1456775, 0.0000001)), TestSpec.whenAccessed(conf -> conf.getBoolean("float", true)) .expectException( "Unrecognized option for boolean: 2.1456776. 
Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("float", "0")).expect(startsWith("2.145677")), TestSpec.whenAccessed(conf -> conf.getBytes("float", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value 2.1456776 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "float", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("onfiguration cannot evaluate object of class class java.lang.Float as a class name"), TestSpec.whenAccessed(conf -> conf.getInteger("double", 0)) .expectException("For input string: \"3.141592653589793\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getLong("double", 0)) .expectException("For input string: \"3.141592653589793\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getFloat("double", 0)).expect(new IsCloseTo(3.141592f, 0.000001f)), TestSpec.whenAccessed(conf -> conf.getDouble("double", 0)).expect(Math.PI), TestSpec.whenAccessed(conf -> conf.getBoolean("double", true)) .expectException( "Unrecognized option for boolean: 3.141592653589793. Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("double", "0")).expect(startsWith("3.1415926535")), TestSpec.whenAccessed(conf -> conf.getBytes("double", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value 3.141592653589793 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "double", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("onfiguration cannot evaluate object of class class java.lang.Double as a class name"), TestSpec.whenAccessed(conf -> conf.getInteger("negative_double", 0)) .expectException("For input string: \"-1.0\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getLong("negative_double", 0)) .expectException("For input string: \"-1.0\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getFloat("negative_double", 0)) .expect(new IsCloseTo(-1f, 0.000001f)), TestSpec.whenAccessed(conf -> conf.getDouble("negative_double", 0)).expect(-1D), TestSpec.whenAccessed(conf -> conf.getBoolean("negative_double", true)) .expectException("Unrecognized option for boolean: -1.0. Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("negative_double", "0")).expect(startsWith("-1")), TestSpec.whenAccessed(conf -> conf.getBytes("negative_double", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value -1.0 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "negative_double", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("Configuration cannot evaluate object of class class java.lang.Double as a class name"), TestSpec.whenAccessed(conf -> conf.getInteger("zero", 0)) .expectException("For input string: \"0.0\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getLong("zero", 0)) .expectException("For input string: \"0.0\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getFloat("zero", 0)).expect(new IsCloseTo(0f, 0.000001f)), TestSpec.whenAccessed(conf -> conf.getDouble("zero", 0)).expect(0D), TestSpec.whenAccessed(conf -> conf.getBoolean("zero", true)) .expectException("Unrecognized option for boolean: 0.0. 
Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("zero", "0")).expect(startsWith("0")), TestSpec.whenAccessed(conf -> conf.getBytes("zero", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value 0.0 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "zero", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("Configuration cannot evaluate object of class class java.lang.Double as a class name"), TestSpec.whenAccessed(conf -> conf.getInteger("too_long_double", 0)) .expectException("For input string: \"1.7976931348623157E308\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getLong("too_long_double", 0)) .expectException("For input string: \"1.7976931348623157E308\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getFloat("too_long_double", 0)) .expectException("Configuration value 1.7976931348623157E308 overflows/underflows the float type."), TestSpec.whenAccessed(conf -> conf.getDouble("too_long_double", 0)).expect(TOO_LONG_DOUBLE), TestSpec.whenAccessed(conf -> conf.getBoolean("too_long_double", true)) .expectException("Unrecognized option for boolean: 1.7976931348623157E308. Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("too_long_double", "0")) .expect(String.valueOf(TOO_LONG_DOUBLE)), TestSpec.whenAccessed(conf -> conf.getBytes("too_long_double", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value 1.7976931348623157E308 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "too_long_double", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("Configuration cannot evaluate object of class class java.lang.Double as a class name"), TestSpec.whenAccessed(conf -> conf.getInteger("string", 0)).expect(42), TestSpec.whenAccessed(conf -> conf.getLong("string", 0)).expect(42L), TestSpec.whenAccessed(conf -> conf.getFloat("string", 0)).expect(42f), TestSpec.whenAccessed(conf -> conf.getDouble("string", 0)).expect(42.0), TestSpec.whenAccessed(conf -> conf.getBoolean("string", true)) .expectException("Unrecognized option for boolean: 42. Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("string", "0")).expect("42"), TestSpec.whenAccessed(conf -> conf.getBytes("string", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value 42 as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "string", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("42", ClassNotFoundException.class), TestSpec.whenAccessed(conf -> conf.getInteger("non_convertible_string", 0)) .expectException("For input string: \"bcdefg&&\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getLong("non_convertible_string", 0)) .expectException("For input string: \"bcdefg&&\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getFloat("non_convertible_string", 0)) .expectException("For input string: \"bcdefg&&\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getDouble("non_convertible_string", 0)) .expectException("For input string: \"bcdefg&&\"", NumberFormatException.class), TestSpec.whenAccessed(conf -> conf.getBoolean("non_convertible_string", true)) .expectException("Unrecognized option for boolean: bcdefg&&. 
Expected either true or false(case insensitive)"), TestSpec.whenAccessed(conf -> conf.getString("non_convertible_string", "0")).expect("bcdefg&&"), TestSpec.whenAccessed(conf -> conf.getBytes("non_convertible_string", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value bcdefg&& as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "non_convertible_string", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("bcdefg&&", ClassNotFoundException.class), TestSpec.whenAccessed(conf -> conf.getInteger("boolean", 0)) .expectException("For input string: \"true\""), TestSpec.whenAccessed(conf -> conf.getLong("boolean", 0)) .expectException("For input string: \"true\""), TestSpec.whenAccessed(conf -> conf.getFloat("boolean", 0)) .expectException("For input string: \"true\""), TestSpec.whenAccessed(conf -> conf.getDouble("boolean", 0)) .expectException("For input string: \"true\""), TestSpec.whenAccessed(conf -> conf.getBoolean("boolean", false)).expect(true), TestSpec.whenAccessed(conf -> conf.getString("boolean", "0")).expect("true"), TestSpec.whenAccessed(conf -> conf.getBytes("boolean", EMPTY_BYTES)) .expectException("Configuration cannot evaluate value true as a byte[] value"), TestSpec.whenAccessed(conf -> conf.getClass( "boolean", ConfigurationConversionsTest.class, ConfigurationConversionsTest.class.getClassLoader())) .expectException("Configuration cannot evaluate object of class class java.lang.Boolean as a class name") ); }
class ConfigurationConversionsTest { private static final byte[] EMPTY_BYTES = new byte[0]; private static final long TOO_LONG = Integer.MAX_VALUE + 10L; private static final double TOO_LONG_DOUBLE = Double.MAX_VALUE; private Configuration pc; @Before public void init() { pc = new Configuration(); pc.setInteger("int", 5); pc.setLong("long", 15); pc.setLong("too_long", TOO_LONG); pc.setFloat("float", 2.1456775f); pc.setDouble("double", Math.PI); pc.setDouble("negative_double", -1.0); pc.setDouble("zero", 0.0); pc.setDouble("too_long_double", TOO_LONG_DOUBLE); pc.setString("string", "42"); pc.setString("non_convertible_string", "bcdefg&&"); pc.setBoolean("boolean", true); } @Rule public ExpectedException thrown = ExpectedException.none(); @Parameterized.Parameters @Parameterized.Parameter public TestSpec<?> testSpec; @Test public void testConversions() { testSpec.getExpectedException().ifPresent(exception -> { thrown.expectMessage(exception); } ); testSpec.assertConfiguration(pc); } private static class IsCloseTo extends TypeSafeMatcher<Float> { private final float delta; private final float value; public IsCloseTo(float value, float error) { this.delta = error; this.value = value; } public boolean matchesSafely(Float item) { return this.actualDelta(item) <= 0.0D; } public void describeMismatchSafely(Float item, Description mismatchDescription) { mismatchDescription.appendValue(item).appendText(" differed by ").appendValue(this.actualDelta(item)); } public void describeTo(Description description) { description.appendText("a numeric value within ") .appendValue(this.delta) .appendText(" of ") .appendValue(this.value); } private double actualDelta(Float item) { return Math.abs(item - this.value) - this.delta; } } private static class TestSpec<T> { private final Function<Configuration, T> accessor; private Matcher<T> matcher; @Nullable private String expectedException = null; private TestSpec(Function<Configuration, T> accessor) { this.accessor = accessor; } public static <T> TestSpec<T> whenAccessed(Function<Configuration, T> accessor) { return new TestSpec<T>(accessor); } public TestSpec<T> expect(Matcher<T> expected) { this.matcher = expected; return this; } public TestSpec<T> expect(T expected) { this.matcher = equalTo(expected); return this; } public TestSpec<T> expectException(String message) { this.expectedException = message; return this; } public Optional<String> getExpectedException() { return Optional.ofNullable(expectedException); } void assertConfiguration(Configuration conf) { assertThat(accessor.apply(conf), matcher); } } }
class ConfigurationConversionsTest { private static final byte[] EMPTY_BYTES = new byte[0]; private static final long TOO_LONG = Integer.MAX_VALUE + 10L; private static final double TOO_LONG_DOUBLE = Double.MAX_VALUE; private Configuration pc; @Before public void init() { pc = new Configuration(); pc.setInteger("int", 5); pc.setLong("long", 15); pc.setLong("too_long", TOO_LONG); pc.setFloat("float", 2.1456775f); pc.setDouble("double", Math.PI); pc.setDouble("negative_double", -1.0); pc.setDouble("zero", 0.0); pc.setDouble("too_long_double", TOO_LONG_DOUBLE); pc.setString("string", "42"); pc.setString("non_convertible_string", "bcdefg&&"); pc.setBoolean("boolean", true); } @Rule public ExpectedException thrown = ExpectedException.none(); @Parameterized.Parameters @Parameterized.Parameter public TestSpec<?> testSpec; @Test public void testConversions() throws Exception { testSpec.getExpectedException().ifPresent(exception -> { thrown.expect(testSpec.getExceptionClass()); thrown.expectMessage(exception); } ); testSpec.assertConfiguration(pc); } private static class IsCloseTo extends TypeSafeMatcher<Float> { private final float delta; private final float value; public IsCloseTo(float value, float error) { this.delta = error; this.value = value; } public boolean matchesSafely(Float item) { return this.actualDelta(item) <= 0.0D; } public void describeMismatchSafely(Float item, Description mismatchDescription) { mismatchDescription.appendValue(item).appendText(" differed by ").appendValue(this.actualDelta(item)); } public void describeTo(Description description) { description.appendText("a numeric value within ") .appendValue(this.delta) .appendText(" of ") .appendValue(this.value); } private double actualDelta(Float item) { return Math.abs(item - this.value) - this.delta; } } private static class TestSpec<T> { private final ConfigurationAccessor<T> configurationAccessor; private Matcher<T> matcher; @Nullable private String expectedException = null; @Nullable private Class<? extends Exception> exceptionClass; @FunctionalInterface private interface ConfigurationAccessor<T> { T access(Configuration configuration) throws Exception; } private TestSpec(ConfigurationAccessor<T> configurationAccessor) { this.configurationAccessor = configurationAccessor; } public static <T> TestSpec<T> whenAccessed(ConfigurationAccessor<T> configurationAccessor) { return new TestSpec<T>(configurationAccessor); } public TestSpec<T> expect(Matcher<T> expected) { this.matcher = expected; return this; } public TestSpec<T> expect(T expected) { this.matcher = equalTo(expected); return this; } public TestSpec<T> expectException(String message) { this.expectedException = message; this.exceptionClass = IllegalArgumentException.class; return this; } public TestSpec<T> expectException(String message, Class<? extends Exception> exceptionClass) { this.expectedException = message; this.exceptionClass = exceptionClass; return this; } public Optional<String> getExpectedException() { return Optional.ofNullable(expectedException); } @Nullable public Class<? extends Exception> getExceptionClass() { return exceptionClass; } void assertConfiguration(Configuration conf) throws Exception { assertThat(configurationAccessor.access(conf), matcher); } } }
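The exception messages asserted in this record ultimately come from Java's numeric parsing rules: a value stored as a float cannot be read back as an integer, because `Integer.parseInt` rejects the decimal point. A small demonstration of that underlying behavior:

```java
public class ConversionDemo {
    public static void main(String[] args) {
        try {
            Integer.parseInt("2.1456776"); // int access fails on a float-shaped string
        } catch (NumberFormatException e) {
            // prints: int access fails: For input string: "2.1456776"
            System.out.println("int access fails: " + e.getMessage());
        }

        // Reading the same string as a double succeeds.
        System.out.println(Double.parseDouble("2.1456776")); // 2.1456776
    }
}
```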
Isn't this a no-op unless you put maps separated by `forward()` into different slot sharing groups?
private long calculateExpectedResultBeforeSavepoint() { long expectedBeforeSavepointResult = 0; for (int i = 0; i < TOTAL_RECORDS; i++) { expectedBeforeSavepointResult += i; } return PARALLELISM * expectedBeforeSavepointResult; }
private long calculateExpectedResultBeforeSavepoint() { long expectedBeforeSavepointResult = 0; for (int i = 0; i < TOTAL_RECORDS; i++) { expectedBeforeSavepointResult += i; } return PARALLELISM * expectedBeforeSavepointResult; }
class RestoreUpgradedJobITCase extends TestLogger { private static final int PARALLELISM = 3; private static final int TOTAL_RECORDS = 100; @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder(); @Parameterized.Parameter public TestCheckpointType checkpointType; @ClassRule public static final MiniClusterWithClientResource CLUSTER = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(new Configuration()) .setNumberTaskManagers(2) .setNumberSlotsPerTaskManager(10) .build()); @Rule public final SharedObjects sharedObjects = SharedObjects.create(); private SharedReference<OneShotLatch> allDataEmittedLatch; private SharedReference<AtomicLong> result; public void setupSharedObjects() { allDataEmittedLatch = sharedObjects.add(new OneShotLatch()); result = sharedObjects.add(new AtomicLong()); } @Parameterized.Parameters(name = "Savepoint type[{0}]") public static Object[][] parameters() { return new Object[][] { {ALIGNED_CHECKPOINT}, {CANONICAL_SAVEPOINT}, {NATIVE_SAVEPOINT}, }; } enum TestCheckpointType { ALIGNED_CHECKPOINT, CANONICAL_SAVEPOINT, NATIVE_SAVEPOINT } enum MapName { MAP_1, MAP_2, MAP_3, MAP_4, MAP_5, MAP_6; int id() { return ordinal() + 1; } } @Test public void testRestoreUpgradedJob() throws Exception { setupSharedObjects(); StreamExecutionEnvironment env = originalStream(); JobClient jobClient = env.executeAsync("Total sum"); waitForAllTaskRunning(CLUSTER.getMiniCluster(), jobClient.getJobID(), false); allDataEmittedLatch.get().await(); allDataEmittedLatch.get().reset(); String savepointPath = stopWithCheckpointOrSavepoint(jobClient); assertThat(result.get().longValue(), is(calculateExpectedResultBeforeSavepoint())); result.get().set(0); env = upgradedStream(savepointPath); jobClient = env.executeAsync("Total sum"); waitForAllTaskRunning(CLUSTER.getMiniCluster(), jobClient.getJobID(), false); allDataEmittedLatch.get().await(); jobClient .stopWithSavepoint( true, temporaryFolder.getRoot().getAbsolutePath(), SavepointFormatType.CANONICAL) .get(); assertThat(result.get().longValue(), is(calculateExpectedResultAfterSavepoint())); } private long calculateExpectedResultAfterSavepoint() { long totalStates = 0; for (int i = 1; i <= MapName.values().length; i++) { totalStates += (long) i * i; } long expectedAfterSavepointResult = 0; for (int i = 0; i < TOTAL_RECORDS; i++) { expectedAfterSavepointResult += i + totalStates; } return PARALLELISM * expectedAfterSavepointResult; } @NotNull private StreamExecutionEnvironment originalStream() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.getCheckpointConfig() .setExternalizedCheckpointCleanup( CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); env.getCheckpointConfig() .setCheckpointStorage("file: env.setParallelism(PARALLELISM); env.enableCheckpointing(Integer.MAX_VALUE); env.addSource(new IntSource(allDataEmittedLatch)) .map(new IntMap(MAP_5.id())) .uid(MAP_5.name()) .forward() .map(new IntMap(MAP_1.id())) .uid(MAP_1.name()) .keyBy((key) -> key) .map(new IntMap(MAP_6.id())) .uid(MAP_6.name()) .rebalance() .map(new IntMap(MAP_4.id())) .uid(MAP_4.name()) .broadcast() .map(new IntMap(MAP_2.id())) .uid(MAP_2.name()) .rescale() .map(new IntMap(MAP_3.id())) .uid(MAP_3.name()) .addSink(new IntSink(result)) .setParallelism(1); return env; } @NotNull private StreamExecutionEnvironment upgradedStream(String savepointPath) { StreamExecutionEnvironment env; Configuration conf = new Configuration(); 
conf.set(SavepointConfigOptions.SAVEPOINT_PATH, savepointPath); env = StreamExecutionEnvironment.getExecutionEnvironment(conf); env.setParallelism(PARALLELISM); env.addSource(new StringSource(allDataEmittedLatch)) .map(new StringMap(MAP_1.id())) .uid(MAP_1.name()) .forward() .map(new StringMap(MAP_2.id())) .uid(MAP_2.name()) .keyBy((key) -> key) .map(new StringMap(MAP_3.id())) .uid(MAP_3.name()) .rebalance() .map(new StringMap(MapName.values().length + 1)) .uid("new_map") .map(new StringMap(MAP_4.id())) .uid(MAP_4.name()) .rescale() .map(new StringMap(MAP_5.id())) .uid(MAP_5.name()) .broadcast() .map(new StringMap(MAP_6.id())) .uid(MAP_6.name()) .addSink(new StringSink(result)) .setParallelism(1); return env; } private String stopWithCheckpointOrSavepoint(JobClient jobClient) throws InterruptedException, ExecutionException { String savepointPath; if (checkpointType == ALIGNED_CHECKPOINT) { savepointPath = CLUSTER.getMiniCluster().triggerCheckpoint(jobClient.getJobID()).get(); jobClient.cancel(); } else if (checkpointType == CANONICAL_SAVEPOINT) { savepointPath = jobClient .stopWithSavepoint( true, temporaryFolder.getRoot().getAbsolutePath(), SavepointFormatType.CANONICAL) .get(); } else if (checkpointType == NATIVE_SAVEPOINT) { savepointPath = jobClient .stopWithSavepoint( true, temporaryFolder.getRoot().getAbsolutePath(), SavepointFormatType.NATIVE) .get(); } else { throw new IllegalArgumentException("Unknown checkpoint type: " + checkpointType); } return savepointPath; } private static class IntSink implements SinkFunction<Integer> { private final SharedReference<AtomicLong> result; public IntSink(SharedReference<AtomicLong> result) { this.result = result; } @Override public void invoke(Integer value, Context context) throws Exception { result.get().addAndGet(value); } } private static class StringSink implements SinkFunction<String> { private final SharedReference<AtomicLong> result; public StringSink(SharedReference<AtomicLong> result) { this.result = result; } @Override public void invoke(String value, Context context) throws Exception { result.get().addAndGet(Integer.parseInt(value)); } } private static class IntMap extends AbstractMap<Integer> { private IntMap(int id) { super(id); } @Override public Integer map(Integer value) throws Exception { return calculate(value); } } private static class StringMap extends AbstractMap<String> { private StringMap(int id) { super(id); } @Override public String map(String value) throws Exception { return String.valueOf(calculate(Integer.parseInt(value))); } } private abstract static class AbstractMap<T extends Object> extends RichMapFunction<T, T> implements CheckpointedFunction { private ListState<Integer> valueState; private final int id; private int state; private AbstractMap(int id) { this.id = id; } protected int calculate(int value) throws Exception { return value + state; } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { valueState.add(id); } @Override public void initializeState(FunctionInitializationContext context) throws Exception { this.valueState = context.getOperatorStateStore() .getListState(new ListStateDescriptor<>("state", Types.INT)); Iterator<Integer> iterator = valueState.get().iterator(); if (iterator.hasNext()) { state = id * iterator.next(); } } } private static class IntSource extends TestSource<Integer> { public IntSource(SharedReference<OneShotLatch> dataEmitted) { super(dataEmitted); } @Override void collect(SourceContext<Integer> ctx, int index) { ctx.collect(index); } } private 
static class StringSource extends TestSource<String> { public StringSource(SharedReference<OneShotLatch> dataEmitted) { super(dataEmitted); } @Override void collect(SourceContext<String> ctx, int index) { ctx.collect(String.valueOf(index)); } } private abstract static class TestSource<T> implements SourceFunction<T> { private static final long serialVersionUID = 1L; private final SharedReference<OneShotLatch> dataEmitted; private volatile boolean isRunning = true; public TestSource(SharedReference<OneShotLatch> dataEmitted) { this.dataEmitted = dataEmitted; } @Override public void run(SourceContext<T> ctx) throws Exception { int i = TOTAL_RECORDS; while (i-- > 0) { synchronized (ctx.getCheckpointLock()) { collect(ctx, i); } } dataEmitted.get().trigger(); while (isRunning) { LockSupport.parkNanos(100000); } } abstract void collect(SourceContext<T> ctx, int index); @Override public void cancel() { isRunning = false; } } }
class RestoreUpgradedJobITCase extends TestLogger { private static final int PARALLELISM = 4; private static final int TOTAL_RECORDS = 100; @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder(); @Parameterized.Parameter public TestCheckpointType checkpointType; @ClassRule public static final MiniClusterWithClientResource CLUSTER = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(new Configuration()) .setNumberTaskManagers(2) .setNumberSlotsPerTaskManager(4) .build()); @Rule public final SharedObjects sharedObjects = SharedObjects.create(); private SharedReference<OneShotLatch> allDataEmittedLatch; private SharedReference<AtomicLong> result; public void setupSharedObjects() { allDataEmittedLatch = sharedObjects.add(new OneShotLatch()); result = sharedObjects.add(new AtomicLong()); } @Parameterized.Parameters(name = "Savepoint type[{0}]") public static Object[][] parameters() { return new Object[][] { {ALIGNED_CHECKPOINT}, {CANONICAL_SAVEPOINT}, {NATIVE_SAVEPOINT}, }; } enum TestCheckpointType { ALIGNED_CHECKPOINT, CANONICAL_SAVEPOINT, NATIVE_SAVEPOINT } enum MapName { MAP_1, MAP_2, MAP_3, MAP_4, MAP_5, MAP_6; int id() { return ordinal() + 1; } } @Test public void testRestoreUpgradedJob() throws Exception { setupSharedObjects(); String snapshotPath = runOriginalJob(); assertThat(result.get().longValue(), is(calculateExpectedResultBeforeSavepoint())); result.get().set(0); runUpgradedJob(snapshotPath); assertThat(result.get().longValue(), is(calculateExpectedResultBeforeSavepoint())); } private long calculateExpectedResultAfterSavepoint() { long totalStates = 0; for (int i = 1; i <= MapName.values().length; i++) { totalStates += (long) i * i; } long expectedAfterSavepointResult = 0; for (int i = 0; i < TOTAL_RECORDS; i++) { expectedAfterSavepointResult += i + totalStates; } return PARALLELISM * expectedAfterSavepointResult; } @NotNull private String runOriginalJob() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.getCheckpointConfig() .setExternalizedCheckpointCleanup( CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); env.getCheckpointConfig().enableUnalignedCheckpoints(false); env.getCheckpointConfig() .setCheckpointStorage("file: env.setParallelism(PARALLELISM); env.enableCheckpointing(Integer.MAX_VALUE); env.addSource(new IntSource(allDataEmittedLatch)) .map(new IntMap(MAP_5.id())) .uid(MAP_5.name()) .forward() .map(new IntMap(MAP_1.id())) .uid(MAP_1.name()) .slotSharingGroup("anotherSharingGroup") .keyBy((key) -> key) .map(new IntMap(MAP_6.id())) .uid(MAP_6.name()) .rebalance() .map(new IntMap(MAP_4.id())) .uid(MAP_4.name()) .broadcast() .map(new IntMap(MAP_2.id())) .uid(MAP_2.name()) .rescale() .map(new IntMap(MAP_3.id())) .uid(MAP_3.name()) .addSink(new IntSink(result)) .setParallelism(1); JobClient jobClient = env.executeAsync("Total sum"); waitForAllTaskRunning(CLUSTER.getMiniCluster(), jobClient.getJobID(), false); allDataEmittedLatch.get().await(); allDataEmittedLatch.get().reset(); return stopWithSnapshot(jobClient); } private void runUpgradedJob(String snapshotPath) throws Exception { StreamExecutionEnvironment env; Configuration conf = new Configuration(); conf.set(SavepointConfigOptions.SAVEPOINT_PATH, snapshotPath); env = StreamExecutionEnvironment.getExecutionEnvironment(conf); env.setParallelism(PARALLELISM); env.addSource(new StringSource(allDataEmittedLatch)) .map(new StringMap(MAP_1.id())) .uid(MAP_1.name()) 
.forward() .map(new StringMap(MAP_2.id())) .uid(MAP_2.name()) .slotSharingGroup("anotherSharingGroup") .keyBy((key) -> key) .map(new StringMap(MAP_3.id())) .uid(MAP_3.name()) .map(new StringMap(-1)) .uid("new_chained_map") .rebalance() .map(new StringMap(-2)) .uid("new_map2") .map(new StringMap(MAP_4.id())) .uid(MAP_4.name()) .rescale() .map(new StringMap(MAP_5.id())) .uid(MAP_5.name()) .broadcast() .map(new StringMap(MAP_6.id())) .uid(MAP_6.name()) .addSink(new StringSink(result)) .setParallelism(1); JobClient jobClient = env.executeAsync("Total sum"); waitForAllTaskRunning(CLUSTER.getMiniCluster(), jobClient.getJobID(), false); allDataEmittedLatch.get().await(); jobClient .stopWithSavepoint( true, temporaryFolder.getRoot().getAbsolutePath(), SavepointFormatType.CANONICAL) .get(); } private String stopWithSnapshot(JobClient jobClient) throws InterruptedException, ExecutionException { String snapshotPath; if (checkpointType == ALIGNED_CHECKPOINT) { snapshotPath = CLUSTER.getMiniCluster().triggerCheckpoint(jobClient.getJobID()).get(); jobClient.cancel().get(); } else if (checkpointType == CANONICAL_SAVEPOINT) { snapshotPath = jobClient .stopWithSavepoint( true, temporaryFolder.getRoot().getAbsolutePath(), SavepointFormatType.CANONICAL) .get(); } else if (checkpointType == NATIVE_SAVEPOINT) { snapshotPath = jobClient .stopWithSavepoint( true, temporaryFolder.getRoot().getAbsolutePath(), SavepointFormatType.NATIVE) .get(); } else { throw new IllegalArgumentException("Unknown checkpoint type: " + checkpointType); } return snapshotPath; } private static class IntSink implements SinkFunction<Integer> { private final SharedReference<AtomicLong> result; public IntSink(SharedReference<AtomicLong> result) { this.result = result; } @Override public void invoke(Integer value, Context context) throws Exception { result.get().addAndGet(value); } } private static class StringSink implements SinkFunction<String> { private final SharedReference<AtomicLong> result; public StringSink(SharedReference<AtomicLong> result) { this.result = result; } @Override public void invoke(String value, Context context) throws Exception { result.get().addAndGet(Integer.parseInt(value)); } } private static class IntMap extends AbstractMap<Integer> { private IntMap(int id) { super(id); } @Override public Integer map(Integer value) throws Exception { return calculate(value); } } private static class StringMap extends AbstractMap<String> { private StringMap(int id) { super(id); } @Override public String map(String value) throws Exception { return String.valueOf(calculate(Integer.parseInt(value))); } @Override public void initializeState(FunctionInitializationContext context) throws Exception { super.initializeState(context); Iterator<Integer> iterator = valueState.get().iterator(); if (id > 0) { checkState(iterator.hasNext(), "Value state can not be empty."); Integer state = iterator.next(); checkState( id == state, String.format("Value state(%s) should be equal to id(%s).", state, id)); } checkState(!iterator.hasNext(), "Value state should be empty."); } } private abstract static class AbstractMap<T> extends RichMapFunction<T, T> implements CheckpointedFunction { protected ListState<Integer> valueState; protected final int id; private AbstractMap(int id) { this.id = id; } protected int calculate(int value) throws Exception { return value; } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { valueState.add(id); } @Override public void initializeState(FunctionInitializationContext context) 
throws Exception { this.valueState = context.getOperatorStateStore() .getListState(new ListStateDescriptor<>("state", Types.INT)); } } private static class IntSource extends TestSource<Integer> { public IntSource(SharedReference<OneShotLatch> dataEmitted) { super(dataEmitted); } @Override void collect(SourceContext<Integer> ctx, int index) { ctx.collect(index); } } private static class StringSource extends TestSource<String> { public StringSource(SharedReference<OneShotLatch> dataEmitted) { super(dataEmitted); } @Override void collect(SourceContext<String> ctx, int index) { ctx.collect(String.valueOf(index)); } } private abstract static class TestSource<T> implements SourceFunction<T> { private static final long serialVersionUID = 1L; private final SharedReference<OneShotLatch> dataEmitted; private volatile boolean isRunning = true; public TestSource(SharedReference<OneShotLatch> dataEmitted) { this.dataEmitted = dataEmitted; } @Override public void run(SourceContext<T> ctx) throws Exception { int i = TOTAL_RECORDS; while (i-- > 0) { synchronized (ctx.getCheckpointLock()) { collect(ctx, i); } } dataEmitted.get().trigger(); while (isRunning) { LockSupport.parkNanos(100000); } } abstract void collect(SourceContext<T> ctx, int index); @Override public void cancel() { isRunning = false; } } }
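The test record above stops the job with one of three snapshot flavors before restoring the upgraded topology. A minimal sketch of dispatching on the parameterized snapshot type, with the actual Flink trigger calls stubbed out as supplier assumptions:

```java
import java.util.function.Supplier;

final class SnapshotDispatchSketch {
    enum TestCheckpointType { ALIGNED_CHECKPOINT, CANONICAL_SAVEPOINT, NATIVE_SAVEPOINT }

    /** Returns the snapshot path produced by the mechanism matching the test parameter. */
    static String stopWithSnapshot(TestCheckpointType type,
                                   Supplier<String> triggerCheckpointThenCancel,
                                   Supplier<String> canonicalSavepoint,
                                   Supplier<String> nativeSavepoint) {
        switch (type) {
            case ALIGNED_CHECKPOINT:
                return triggerCheckpointThenCancel.get(); // retained checkpoint, then cancel
            case CANONICAL_SAVEPOINT:
                return canonicalSavepoint.get();
            case NATIVE_SAVEPOINT:
                return nativeSavepoint.get();
            default:
                throw new IllegalArgumentException("Unknown checkpoint type: " + type);
        }
    }
}
```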
Is there a reason why we need to propagate individual parameters here and not just pass on the full system properties map?
private void propagateUserProperties() { final String mavenCmdLine = BootstrapMavenOptions.getMavenCmdLine(); if (mavenCmdLine == null || mavenCmdLine.isEmpty()) { return; } int i = mavenCmdLine.indexOf("-D"); if (i < 0) { return; } final StringBuilder buf = new StringBuilder(); buf.append("-D"); i += 2; while (i < mavenCmdLine.length()) { final char ch = mavenCmdLine.charAt(i++); if (!Character.isWhitespace(ch)) { buf.append(ch); } else if (buf.length() > 2) { args.add(buf.toString()); buf.setLength(2); i = mavenCmdLine.indexOf("-D", i); if (i < 0) { break; } i += 2; } } if (buf.length() > 2) { args.add(buf.toString()); } }
i = mavenCmdLine.indexOf("-D", i);
private void propagateUserProperties() { final String mavenCmdLine = BootstrapMavenOptions.getMavenCmdLine(); if (mavenCmdLine == null || mavenCmdLine.isEmpty()) { return; } int i = mavenCmdLine.indexOf("-D"); if (i < 0) { return; } final StringBuilder buf = new StringBuilder(); buf.append("-D"); i += 2; while (i < mavenCmdLine.length()) { final char ch = mavenCmdLine.charAt(i++); if (!Character.isWhitespace(ch)) { buf.append(ch); } else if (buf.length() > 2) { args.add(buf.toString()); buf.setLength(2); i = mavenCmdLine.indexOf("-D", i); if (i < 0) { break; } i += 2; } } if (buf.length() > 2) { args.add(buf.toString()); } }
class loader File wiringClassesDirectory = new File(buildDir, "wiring-devmode"); wiringClassesDirectory.mkdirs(); addToClassPaths(classPathManifest, devModeContext, wiringClassesDirectory); for (Artifact appDep : project.getArtifacts()) { addToClassPaths(classPathManifest, devModeContext, appDep.getFile()); }
class loader File wiringClassesDirectory = new File(buildDir, "wiring-devmode"); wiringClassesDirectory.mkdirs(); addToClassPaths(classPathManifest, devModeContext, wiringClassesDirectory); for (Artifact appDep : project.getArtifacts()) { addToClassPaths(classPathManifest, devModeContext, appDep.getFile()); }
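The reviewer asks why individual `-D` parameters are re-parsed from the raw Maven command line instead of forwarding the full system properties map. A hedged standalone sketch of the same token extraction; the class and method names here are illustrative, not from the row:

```java
import java.util.ArrayList;
import java.util.List;

final class UserPropertyExtractor {
    /** Extracts every whitespace-delimited "-Dkey=value" token from a raw command line. */
    static List<String> extract(String cmdLine) {
        List<String> args = new ArrayList<>();
        int i = cmdLine.indexOf("-D");
        while (i >= 0) {
            int end = i;
            while (end < cmdLine.length() && !Character.isWhitespace(cmdLine.charAt(end))) {
                end++; // consume the token up to the next whitespace
            }
            args.add(cmdLine.substring(i, end));
            i = cmdLine.indexOf("-D", end); // jump straight to the next -D token
        }
        return args;
    }
}
```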
```suggestion if (terminated) { return false; } if (terminate.get()) { ``` `trySplit` may have already set `terminated` to true, which means that future `tryClaim` calls should fail.
public boolean tryClaim(Integer newPosition) { checkArgument(newPosition >= position); if (terminate.get()) { terminated = true; return false; } position = newPosition; return true; }
if (terminate.get()) {
public boolean tryClaim(Integer newPosition) { checkArgument(newPosition >= position); if (terminated) { return false; } if (terminate.get()) { terminated = true; return false; } position = newPosition; return true; }
class GeneratorFn extends DoFn<byte[], SubscriptionPartition> { @ProcessElement public ProcessContinuation processElement( RestrictionTracker<Integer, Integer> restrictionTracker, OutputReceiver<SubscriptionPartition> output, ManualWatermarkEstimator<Instant> estimator) { int previousCount = restrictionTracker.currentRestriction(); int newCount = getPartitionCount.apply(topic); if (!restrictionTracker.tryClaim(newCount)) { return ProcessContinuation.stop(); } if (newCount > previousCount) { for (int i = previousCount; i < newCount; ++i) { output.outputWithTimestamp( SubscriptionPartition.of(subscription, Partition.of(i)), estimator.currentWatermark()); } } estimator.setWatermark(getWatermark()); return ProcessContinuation.resume().withResumeDelay(pollDuration); } @GetInitialWatermarkEstimatorState public Instant getInitialWatermarkEstimatorState(@Timestamp Instant initial) { checkArgument(initial.equals(BoundedWindow.TIMESTAMP_MIN_VALUE)); return initial; } @GetInitialRestriction public Integer getInitialRestriction() { return 0; } @NewTracker public RestrictionTracker<Integer, Integer> newTracker(@Restriction Integer input) { return new RestrictionTracker<Integer, Integer>() { private boolean terminated = false; private int position = input; @Override @Override public Integer currentRestriction() { return position; } @Override public @Nullable SplitResult<Integer> trySplit(double fractionOfRemainder) { if (fractionOfRemainder != 0) { return null; } if (terminated) { return null; } terminated = true; return SplitResult.of(position, position); } @Override public void checkDone() throws IllegalStateException { checkState(terminated); } @Override public IsBounded isBounded() { return IsBounded.UNBOUNDED; } }; } @NewWatermarkEstimator public ManualWatermarkEstimator<Instant> newWatermarkEstimator( @WatermarkEstimatorState Instant state) { return new WatermarkEstimators.Manual(state); } private Instant getWatermark() { return Instant.now().minus(watermarkDelay()); } private Duration watermarkDelay() { return pollDuration.multipliedBy(3).dividedBy(2); } }
class GeneratorFn extends DoFn<byte[], SubscriptionPartition> { @ProcessElement public ProcessContinuation processElement( RestrictionTracker<Integer, Integer> restrictionTracker, OutputReceiver<SubscriptionPartition> output, ManualWatermarkEstimator<Instant> estimator) { int previousCount = restrictionTracker.currentRestriction(); int newCount = getPartitionCount.apply(topic); if (!restrictionTracker.tryClaim(newCount)) { return ProcessContinuation.stop(); } if (newCount > previousCount) { for (int i = previousCount; i < newCount; ++i) { output.outputWithTimestamp( SubscriptionPartition.of(subscription, Partition.of(i)), estimator.currentWatermark()); } } estimator.setWatermark(getWatermark()); return ProcessContinuation.resume().withResumeDelay(pollDuration); } @GetInitialWatermarkEstimatorState public Instant getInitialWatermarkEstimatorState(@Timestamp Instant initial) { checkArgument(initial.equals(BoundedWindow.TIMESTAMP_MIN_VALUE)); return initial; } @GetInitialRestriction public Integer getInitialRestriction() { return 0; } @NewTracker public RestrictionTracker<Integer, Integer> newTracker(@Restriction Integer input) { return new RestrictionTracker<Integer, Integer>() { private boolean terminated = false; private int position = input; @Override @Override public Integer currentRestriction() { return position; } @Override public @Nullable SplitResult<Integer> trySplit(double fractionOfRemainder) { if (fractionOfRemainder != 0) { return null; } if (terminated) { return null; } terminated = true; return SplitResult.of(position, position); } @Override public void checkDone() throws IllegalStateException { checkState(terminated); } @Override public IsBounded isBounded() { return IsBounded.UNBOUNDED; } }; } @NewWatermarkEstimator public ManualWatermarkEstimator<Instant> newWatermarkEstimator( @WatermarkEstimatorState Instant state) { return new WatermarkEstimators.Manual(state); } private Instant getWatermark() { return Instant.now().minus(watermarkDelay()); } private Duration watermarkDelay() { return pollDuration.multipliedBy(3).dividedBy(2); } }
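The fix makes `tryClaim` check the `terminated` flag before anything else, because a zero-fraction `trySplit` may already have truncated the restriction. A minimal plain-Java sketch of that ordering, with the Beam tracker types simplified away as assumptions:

```java
import java.util.concurrent.atomic.AtomicBoolean;

final class OffsetTrackerSketch {
    private final AtomicBoolean terminate = new AtomicBoolean(); // external stop signal
    private boolean terminated = false;                          // set by trySplit or tryClaim
    private int position;

    OffsetTrackerSketch(int start) { this.position = start; }

    boolean tryClaim(int newPosition) {
        if (terminated) {          // trySplit may already have truncated the restriction
            return false;
        }
        if (terminate.get()) {     // external signal: truncate now and refuse the claim
            terminated = true;
            return false;
        }
        position = newPosition;
        return true;
    }

    /** Zero-fraction split: keep nothing, hand back the current position, and terminate. */
    Integer trySplit(double fractionOfRemainder) {
        if (fractionOfRemainder != 0 || terminated) {
            return null;
        }
        terminated = true;
        return position;
    }
}
```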
Consider including the iteration in the message, to make it clearer to the reader that it is actually running more often than it logs.
protected boolean maintain() { if (iteration % 10 == 0) log.log(LogLevel.INFO, () -> "Running " + SessionsMaintainer.class.getSimpleName()); applicationRepository.deleteExpiredLocalSessions(); if (hostedVespa) { Duration expiryTime = Duration.ofMinutes(90); int deleted = applicationRepository.deleteExpiredRemoteSessions(expiryTime); log.log(LogLevel.FINE, () -> "Deleted " + deleted + " expired remote sessions older than " + expiryTime); } iteration++; return true; }
log.log(LogLevel.INFO, () -> "Running " + SessionsMaintainer.class.getSimpleName());
protected boolean maintain() { if (iteration % 10 == 0) log.log(LogLevel.INFO, () -> "Running " + SessionsMaintainer.class.getSimpleName() + ", iteration " + iteration); applicationRepository.deleteExpiredLocalSessions(); if (hostedVespa) { Duration expiryTime = Duration.ofMinutes(90); int deleted = applicationRepository.deleteExpiredRemoteSessions(expiryTime); log.log(LogLevel.FINE, () -> "Deleted " + deleted + " expired remote sessions older than " + expiryTime); } iteration++; return true; }
class SessionsMaintainer extends ConfigServerMaintainer { private final boolean hostedVespa; private int iteration = 0; SessionsMaintainer(ApplicationRepository applicationRepository, Curator curator, Duration interval, FlagSource flagSource) { super(applicationRepository, curator, flagSource, Duration.ofMinutes(1), interval); this.hostedVespa = applicationRepository.configserverConfig().hostedVespa(); } @Override }
class SessionsMaintainer extends ConfigServerMaintainer { private final boolean hostedVespa; private int iteration = 0; SessionsMaintainer(ApplicationRepository applicationRepository, Curator curator, Duration interval, FlagSource flagSource) { super(applicationRepository, curator, flagSource, Duration.ofMinutes(1), interval); this.hostedVespa = applicationRepository.configserverConfig().hostedVespa(); } @Override }
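The accepted change simply appends the counter to the throttled log line, so a reader can tell the maintainer runs every iteration even though it logs only every tenth. A small sketch of that throttling pattern; the logger hookup and message text are illustrative:

```java
import java.util.function.Consumer;

final class ThrottledLogSketch {
    private int iteration = 0;

    /** Runs every cycle, but emits the INFO line only on every tenth iteration. */
    void maintain(Consumer<String> infoLogger) {
        if (iteration % 10 == 0) {
            infoLogger.accept("Running SessionsMaintainer, iteration " + iteration);
        }
        iteration++; // the counter advances on every run, logged or not
    }
}
```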
Why does this line have to repeat all the time? Can't we resolve the URL at init and then re-use it?
public void onError(Throwable throwable) { logger.error("Kafka Ballerina server connector retrieved exception: " + throwable.getMessage(), throwable); String url = ((Properties) listener.getNativeData(NATIVE_CONSUMER_CONFIG)).getProperty(BOOTSTRAP_SERVERS); KafkaMetricsUtil.reportConsumerError(url, KafkaObservabilityConstants.ERROR_TYPE_MSG_RECEIVED); }
String url = ((Properties) listener.getNativeData(NATIVE_CONSUMER_CONFIG)).getProperty(BOOTSTRAP_SERVERS);
public void onError(Throwable throwable) { logger.error("Kafka Ballerina server connector retrieved exception: " + throwable.getMessage(), throwable); KafkaMetricsUtil.reportConsumerError(listener, KafkaObservabilityConstants.ERROR_TYPE_MSG_RECEIVED); }
class KafkaListenerImpl implements KafkaListener { private static final Logger logger = LoggerFactory.getLogger(KafkaListenerImpl.class); private Scheduler scheduler; private ObjectValue service; private ObjectValue listener; private ResponseCallback callback; public KafkaListenerImpl(Strand strand, ObjectValue listener, ObjectValue service) { this.scheduler = strand.scheduler; this.listener = listener; this.service = service; callback = new ResponseCallback(); } /** * {@inheritDoc} */ @Override public void onRecordsReceived(ConsumerRecords records, KafkaConsumer kafkaConsumer, String groupId) { listener.addNativeData(NATIVE_CONSUMER, kafkaConsumer); String url = ((Properties) listener.getNativeData(NATIVE_CONSUMER_CONFIG)).getProperty(BOOTSTRAP_SERVERS); executeResource(url, records, groupId); KafkaMetricsUtil.reportConsume(url, records); } /** * {@inheritDoc} */ @Override public void onRecordsReceived(ConsumerRecords records, KafkaConsumer kafkaConsumer, String groupId, KafkaPollCycleFutureListener consumer) { listener.addNativeData(NATIVE_CONSUMER, kafkaConsumer); String url = ((Properties) listener.getNativeData(NATIVE_CONSUMER_CONFIG)).getProperty(BOOTSTRAP_SERVERS); executeResource(url, consumer, records, groupId); KafkaMetricsUtil.reportConsume(url, records); } /** * {@inheritDoc} */ @Override private void executeResource(String url, ConsumerRecords records, String groupId) { if (ObserveUtils.isTracingEnabled()) { Map<String, Object> properties = new HashMap<>(); KafkaObserverContext observerContext = new KafkaObserverContext( KafkaObservabilityConstants.CONTEXT_CONSUMER, url); properties.put(ObservabilityConstants.KEY_OBSERVER_CONTEXT, observerContext); Executor.submit(this.scheduler, service, KAFKA_RESOURCE_ON_MESSAGE, callback, properties, getResourceParameters(service, this.listener, records, groupId)); } else { Executor.submit(this.scheduler, service, KAFKA_RESOURCE_ON_MESSAGE, callback, null, getResourceParameters(service, this.listener, records, groupId)); } } private void executeResource(String url, KafkaPollCycleFutureListener consumer, ConsumerRecords records, String groupId) { if (ObserveUtils.isTracingEnabled()) { Map<String, Object> properties = new HashMap<>(); KafkaObserverContext observerContext = new KafkaObserverContext( KafkaObservabilityConstants.CONTEXT_CONSUMER, url); properties.put(ObservabilityConstants.KEY_OBSERVER_CONTEXT, observerContext); Executor.submit(this.scheduler, service, KAFKA_RESOURCE_ON_MESSAGE, consumer, properties, getResourceParameters(service, this.listener, records, groupId)); } else { Executor.submit(this.scheduler, service, KAFKA_RESOURCE_ON_MESSAGE, consumer, null, getResourceParameters(service, this.listener, records, groupId)); } } private static class ResponseCallback implements CallableUnitCallback { @Override public void notifySuccess() { } @Override public void notifyFailure(ErrorValue error) { } } }
class KafkaListenerImpl implements KafkaListener { private static final Logger logger = LoggerFactory.getLogger(KafkaListenerImpl.class); private Scheduler scheduler; private ObjectValue service; private ObjectValue listener; private ResponseCallback callback; public KafkaListenerImpl(Strand strand, ObjectValue listener, ObjectValue service) { this.scheduler = strand.scheduler; this.listener = listener; this.service = service; callback = new ResponseCallback(); } /** * {@inheritDoc} */ @Override public void onRecordsReceived(ConsumerRecords records, KafkaConsumer kafkaConsumer, String groupId) { listener.addNativeData(NATIVE_CONSUMER, kafkaConsumer); executeResource(listener, records, groupId); KafkaMetricsUtil.reportConsume(listener, records); } /** * {@inheritDoc} */ @Override public void onRecordsReceived(ConsumerRecords records, KafkaConsumer kafkaConsumer, String groupId, KafkaPollCycleFutureListener consumer) { listener.addNativeData(NATIVE_CONSUMER, kafkaConsumer); executeResource(listener, consumer, records, groupId); KafkaMetricsUtil.reportConsume(listener, records); } /** * {@inheritDoc} */ @Override private void executeResource(ObjectValue listener, ConsumerRecords records, String groupId) { if (ObserveUtils.isTracingEnabled()) { Map<String, Object> properties = getNewObserverContextInProperties(listener); Executor.submit(this.scheduler, service, KAFKA_RESOURCE_ON_MESSAGE, callback, properties, getResourceParameters(service, this.listener, records, groupId)); } else { Executor.submit(this.scheduler, service, KAFKA_RESOURCE_ON_MESSAGE, callback, null, getResourceParameters(service, this.listener, records, groupId)); } } private void executeResource(ObjectValue listener, KafkaPollCycleFutureListener consumer, ConsumerRecords records, String groupId) { if (ObserveUtils.isTracingEnabled()) { Map<String, Object> properties = getNewObserverContextInProperties(listener); Executor.submit(this.scheduler, service, KAFKA_RESOURCE_ON_MESSAGE, consumer, properties, getResourceParameters(service, this.listener, records, groupId)); } else { Executor.submit(this.scheduler, service, KAFKA_RESOURCE_ON_MESSAGE, consumer, null, getResourceParameters(service, this.listener, records, groupId)); } } private Map<String, Object> getNewObserverContextInProperties(ObjectValue listener) { Map<String, Object> properties = new HashMap<>(); KafkaObserverContext observerContext = new KafkaObserverContext( KafkaObservabilityConstants.CONTEXT_CONSUMER, KafkaUtils.getClientId(listener), KafkaUtils.getBootstrapServers(listener)); properties.put(ObservabilityConstants.KEY_OBSERVER_CONTEXT, observerContext); return properties; } private static class ResponseCallback implements CallableUnitCallback { @Override public void notifySuccess() { } @Override public void notifyFailure(ErrorValue error) { } } }
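The reviewer's point is that the bootstrap-servers value can be resolved once at construction and reused, instead of being re-read from native data on every callback. A hedged sketch of caching it up front; the class name and error reporting are illustrative stand-ins:

```java
import java.util.Properties;

final class CachedUrlListenerSketch {
    private static final String BOOTSTRAP_SERVERS = "bootstrap.servers";
    private final String url; // resolved once, reused by every callback

    CachedUrlListenerSketch(Properties consumerConfig) {
        this.url = consumerConfig.getProperty(BOOTSTRAP_SERVERS);
    }

    void onError(Throwable t) {
        // report against the cached endpoint instead of re-reading config each time
        System.err.println("consumer error at " + url + ": " + t.getMessage());
    }
}
```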
Not a problem at all, it's me who did it without consulting! ;) Done.
private static void setupLocalstack() throws Exception { System.setProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY, "true"); System.setProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY, "true"); now = Instant.ofEpochMilli(Long.divideUnsigned(Instant.now().getMillis(), 1000)); localstackContainer = new LocalStackContainer("0.11.3") .withServices(LocalStackContainer.Service.KINESIS) .withEnv("USE_SSL", "true") .withStartupAttempts(3); localstackContainer.start(); options.setAwsServiceEndpoint( localstackContainer .getEndpointConfiguration(LocalStackContainer.Service.KINESIS) .getServiceEndpoint() .replace("http", "https")); options.setAwsKinesisRegion( localstackContainer .getEndpointConfiguration(LocalStackContainer.Service.KINESIS) .getSigningRegion()); options.setAwsAccessKey( localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSAccessKeyId()); options.setAwsSecretKey( localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSSecretKey()); options.setNumberOfRecords(1000); options.setNumberOfShards(1); options.setAwsKinesisStream("beam_kinesis_test"); options.setAwsVerifyCertificate(false); createStream(options.getAwsKinesisStream()); }
new LocalStackContainer("0.11.3")
private static void setupLocalstack() { now = Instant.ofEpochMilli(Long.divideUnsigned(now.getMillis(), 1000L)); System.setProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY, "true"); System.setProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY, "true"); localstackContainer = new LocalStackContainer(LOCALSTACK_VERSION) .withServices(LocalStackContainer.Service.KINESIS) .withEnv("USE_SSL", "true") .withStartupAttempts(3); localstackContainer.start(); options.setAwsServiceEndpoint( localstackContainer .getEndpointConfiguration(LocalStackContainer.Service.KINESIS) .getServiceEndpoint() .replace("http", "https")); options.setAwsKinesisRegion( localstackContainer .getEndpointConfiguration(LocalStackContainer.Service.KINESIS) .getSigningRegion()); options.setAwsAccessKey( localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSAccessKeyId()); options.setAwsSecretKey( localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSSecretKey()); options.setNumberOfRecords(1000); options.setNumberOfShards(1); options.setAwsKinesisStream("beam_kinesis_test"); options.setAwsVerifyCertificate(false); }
class KinesisIOIT implements Serializable { @Rule public TestPipeline pipelineWrite = TestPipeline.create(); @Rule public TestPipeline pipelineRead = TestPipeline.create(); private static LocalStackContainer localstackContainer; private static KinesisTestOptions options; private static Instant now = Instant.now(); @BeforeClass public static void setup() throws Exception { PipelineOptionsFactory.register(KinesisTestOptions.class); options = TestPipeline.testingPipelineOptions().as(KinesisTestOptions.class); if (doUseLocalstack()) { setupLocalstack(); } } @AfterClass public static void teardown() { if (doUseLocalstack()) { System.clearProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY); System.clearProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY); localstackContainer.stop(); } } /** Test which write and then read data for a Kinesis stream. */ @Test public void testWriteThenRead() { runWrite(); runRead(); } /** Write test dataset into Kinesis stream. */ private void runWrite() { pipelineWrite .apply("Generate Sequence", GenerateSequence.from(0).to(options.getNumberOfRecords())) .apply("Prepare TestRows", ParDo.of(new TestRow.DeterministicallyConstructTestRowFn())) .apply("Prepare Kinesis input records", ParDo.of(new ConvertToBytes())) .apply( "Write to Kinesis", KinesisIO.write() .withStreamName(options.getAwsKinesisStream()) .withPartitioner(new RandomPartitioner()) .withAWSClientsProvider( options.getAwsAccessKey(), options.getAwsSecretKey(), Regions.fromName(options.getAwsKinesisRegion()), options.getAwsServiceEndpoint(), options.getAwsVerifyCertificate())); pipelineWrite.run().waitUntilFinish(); } /** Read test dataset from Kinesis stream. */ private void runRead() { PCollection<KinesisRecord> output = pipelineRead.apply( KinesisIO.read() .withStreamName(options.getAwsKinesisStream()) .withAWSClientsProvider( options.getAwsAccessKey(), options.getAwsSecretKey(), Regions.fromName(options.getAwsKinesisRegion()), options.getAwsServiceEndpoint(), options.getAwsVerifyCertificate()) .withMaxNumRecords(options.getNumberOfRecords()) .withMaxReadTime(Duration.standardMinutes(10L)) .withInitialPositionInStream(InitialPositionInStream.AT_TIMESTAMP) .withInitialTimestampInStream(now) .withRequestRecordsLimit(1000)); PAssert.thatSingleton(output.apply("Count All", Count.globally())) .isEqualTo((long) options.getNumberOfRecords()); PCollection<String> consolidatedHashcode = output .apply(ParDo.of(new ExtractDataValues())) .apply("Hash row contents", Combine.globally(new HashingFn()).withoutDefaults()); PAssert.that(consolidatedHashcode) .containsInAnyOrder(TestRow.getExpectedHashForRowCount(options.getNumberOfRecords())); pipelineRead.run().waitUntilFinish(); } /** Necessary setup for localstack environment. 
*/ private static void createStream(String streamName) throws Exception { AmazonKinesisClientBuilder clientBuilder = AmazonKinesisClientBuilder.standard(); clientBuilder.setCredentials(localstackContainer.getDefaultCredentialsProvider()); clientBuilder.setEndpointConfiguration( localstackContainer.getEndpointConfiguration(LocalStackContainer.Service.KINESIS)); AmazonKinesis client = clientBuilder.build(); client.createStream(streamName, 1); int repeats = 10; for (int i = 0; i <= repeats; ++i) { String streamStatus = client.describeStream(streamName).getStreamDescription().getStreamStatus(); if ("ACTIVE".equals(streamStatus)) { break; } if (i == repeats) { throw new RuntimeException("Unable to initialize stream"); } Thread.sleep(1000L); } } /** Check whether pipeline options were provided. If not, use localstack container. */ private static boolean doUseLocalstack() { return "aws-access-key".equals(options.getAwsAccessKey()) && "aws-secret-key".equals(options.getAwsSecretKey()) && "aws-kinesis-stream".equals(options.getAwsKinesisStream()) && "aws-kinesis-region".equals(options.getAwsKinesisRegion()) && options.getNumberOfShards() == 2 && options.getNumberOfRecords() == 1000 && options.getAwsServiceEndpoint() == null && options.getAwsVerifyCertificate(); } /** Produces test rows. */ private static class ConvertToBytes extends DoFn<TestRow, byte[]> { @ProcessElement public void processElement(ProcessContext c) { c.output(String.valueOf(c.element().name()).getBytes(StandardCharsets.UTF_8)); } } /** Read rows from Table. */ private static class ExtractDataValues extends DoFn<KinesisRecord, String> { @ProcessElement public void processElement(ProcessContext c) { c.output(new String(c.element().getDataAsBytes(), StandardCharsets.UTF_8)); } } private static final class RandomPartitioner implements KinesisPartitioner { @Override public String getPartitionKey(byte[] value) { Random rand = new Random(); int n = rand.nextInt(options.getNumberOfShards()) + 1; return String.valueOf(n); } @Override public String getExplicitHashKey(byte[] value) { return null; } } }
class KinesisIOIT implements Serializable { private static final String LOCALSTACK_VERSION = "0.11.3"; @Rule public TestPipeline pipelineWrite = TestPipeline.create(); @Rule public TestPipeline pipelineRead = TestPipeline.create(); private static KinesisTestOptions options; private static AmazonKinesis kinesisClient; private static LocalStackContainer localstackContainer; private static Instant now = Instant.now(); @BeforeClass public static void setup() throws Exception { PipelineOptionsFactory.register(KinesisTestOptions.class); options = TestPipeline.testingPipelineOptions().as(KinesisTestOptions.class); if (options.getUseLocalstack()) { setupLocalstack(); kinesisClient = createKinesisClient(); createStream(options.getAwsKinesisStream()); } } @AfterClass public static void teardown() { if (options.getUseLocalstack()) { kinesisClient.deleteStream(options.getAwsKinesisStream()); System.clearProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY); System.clearProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY); localstackContainer.stop(); } } /** Test which write and then read data for a Kinesis stream. */ @Test public void testWriteThenRead() { runWrite(); runRead(); } /** Write test dataset into Kinesis stream. */ private void runWrite() { pipelineWrite .apply("Generate Sequence", GenerateSequence.from(0).to(options.getNumberOfRecords())) .apply("Prepare TestRows", ParDo.of(new TestRow.DeterministicallyConstructTestRowFn())) .apply("Prepare Kinesis input records", ParDo.of(new ConvertToBytes())) .apply( "Write to Kinesis", KinesisIO.write() .withStreamName(options.getAwsKinesisStream()) .withPartitioner(new RandomPartitioner()) .withAWSClientsProvider( options.getAwsAccessKey(), options.getAwsSecretKey(), Regions.fromName(options.getAwsKinesisRegion()), options.getAwsServiceEndpoint(), options.getAwsVerifyCertificate())); pipelineWrite.run().waitUntilFinish(); } /** Read test dataset from Kinesis stream. */ private void runRead() { PCollection<KinesisRecord> output = pipelineRead.apply( KinesisIO.read() .withStreamName(options.getAwsKinesisStream()) .withAWSClientsProvider( options.getAwsAccessKey(), options.getAwsSecretKey(), Regions.fromName(options.getAwsKinesisRegion()), options.getAwsServiceEndpoint(), options.getAwsVerifyCertificate()) .withMaxNumRecords(options.getNumberOfRecords()) .withMaxReadTime(Duration.standardMinutes(10L)) .withInitialPositionInStream(InitialPositionInStream.AT_TIMESTAMP) .withInitialTimestampInStream(now) .withRequestRecordsLimit(1000)); PAssert.thatSingleton(output.apply("Count All", Count.globally())) .isEqualTo((long) options.getNumberOfRecords()); PCollection<String> consolidatedHashcode = output .apply(ParDo.of(new ExtractDataValues())) .apply("Hash row contents", Combine.globally(new HashingFn()).withoutDefaults()); PAssert.that(consolidatedHashcode) .containsInAnyOrder(TestRow.getExpectedHashForRowCount(options.getNumberOfRecords())); pipelineRead.run().waitUntilFinish(); } /** Necessary setup for localstack environment. 
*/ private static AmazonKinesis createKinesisClient() { AmazonKinesisClientBuilder clientBuilder = AmazonKinesisClientBuilder.standard(); AWSCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider( new BasicAWSCredentials(options.getAwsAccessKey(), options.getAwsSecretKey())); clientBuilder.setCredentials(credentialsProvider); if (options.getAwsServiceEndpoint() != null) { AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder.EndpointConfiguration( options.getAwsServiceEndpoint(), options.getAwsKinesisRegion()); clientBuilder.setEndpointConfiguration(endpointConfiguration); } else { clientBuilder.setRegion(options.getAwsKinesisRegion()); } return clientBuilder.build(); } private static void createStream(String streamName) throws Exception { kinesisClient.createStream(streamName, 1); int repeats = 10; for (int i = 0; i <= repeats; ++i) { String streamStatus = kinesisClient.describeStream(streamName).getStreamDescription().getStreamStatus(); if ("ACTIVE".equals(streamStatus)) { break; } if (i == repeats) { throw new RuntimeException("Unable to initialize stream"); } Thread.sleep(1000L); } } /** Produces test rows. */ private static class ConvertToBytes extends DoFn<TestRow, byte[]> { @ProcessElement public void processElement(ProcessContext c) { c.output(String.valueOf(c.element().name()).getBytes(StandardCharsets.UTF_8)); } } /** Read rows from Table. */ private static class ExtractDataValues extends DoFn<KinesisRecord, String> { @ProcessElement public void processElement(ProcessContext c) { c.output(new String(c.element().getDataAsBytes(), StandardCharsets.UTF_8)); } } private static final class RandomPartitioner implements KinesisPartitioner { @Override public String getPartitionKey(byte[] value) { Random rand = new Random(); int n = rand.nextInt(options.getNumberOfShards()) + 1; return String.valueOf(n); } @Override public String getExplicitHashKey(byte[] value) { return null; } } }
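The refactored test pins the container tag behind a `LOCALSTACK_VERSION` constant and polls the stream until it reports ACTIVE. A minimal standalone sketch of that polling loop, where the status supplier is an assumption standing in for the Kinesis `describeStream` call:

```java
import java.util.function.Supplier;

final class StreamReadinessSketch {
    /** Polls the status supplier once per second, failing after the given number of attempts. */
    static void awaitActive(Supplier<String> statusSupplier, int attempts) throws InterruptedException {
        for (int i = 0; i <= attempts; i++) {
            if ("ACTIVE".equals(statusSupplier.get())) {
                return; // stream is ready for writes and reads
            }
            if (i == attempts) {
                throw new RuntimeException("Unable to initialize stream");
            }
            Thread.sleep(1000L);
        }
    }
}
```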
We need to infer predicates later to process `a join b on a.id = b.id join c on b.id = c.id`; it could be transformed to `a join c on a.id = c.id join b on b.id = c.id`.
public boolean initJoinOnCondition() { List<Expression> topJoinOnClauseConjuncts = ExpressionUtils.extractConjunction(topJoinOnClause); for (Expression topJoinOnClauseConjunct : topJoinOnClauseConjuncts) { if (ExpressionUtils.isIntersecting( topJoinOnClauseConjunct.collect(SlotReference.class::isInstance), aOutputSlots) && ExpressionUtils.isIntersecting( topJoinOnClauseConjunct.collect(SlotReference.class::isInstance), bOutputSlots) && ExpressionUtils.isIntersecting( topJoinOnClauseConjunct.collect(SlotReference.class::isInstance), cOutputSlots) ) { return false; } } List<Expression> bottomJoinOnClauseConjuncts = ExpressionUtils.extractConjunction( bottomJoinOnClause); List<Expression> allOnCondition = Lists.newArrayList(); allOnCondition.addAll(topJoinOnClauseConjuncts); allOnCondition.addAll(bottomJoinOnClauseConjuncts); List<SlotReference> newBottomJoinSlots = Lists.newArrayList(); newBottomJoinSlots.addAll(aOutputSlots); newBottomJoinSlots.addAll(cOutputSlots); for (Expression onCondition : allOnCondition) { List<SlotReference> slots = onCondition.collect(SlotReference.class::isInstance); if (new HashSet<>(newBottomJoinSlots).containsAll(slots)) { newBottomJoinOnCondition.add(onCondition); } else { newTopJoinOnCondition.add(onCondition); } } if (newBottomJoinOnCondition.isEmpty() || newTopJoinOnCondition.isEmpty()) { return false; } return true; }
if (newBottomJoinOnCondition.isEmpty() || newTopJoinOnCondition.isEmpty()) {
public boolean initJoinOnCondition() { List<Expression> topJoinOnClauseConjuncts = ExpressionUtils.extractConjunction(topJoinOnClause); for (Expression topJoinOnClauseConjunct : topJoinOnClauseConjuncts) { List<SlotReference> topJoinUsedSlot = topJoinOnClauseConjunct.collect(SlotReference.class::isInstance); if (ExpressionUtils.isIntersecting(topJoinUsedSlot, aOutputSlots) && ExpressionUtils.isIntersecting(topJoinUsedSlot, bOutputSlots) && ExpressionUtils.isIntersecting(topJoinUsedSlot, cOutputSlots) ) { return false; } } List<Expression> allOnCondition = Lists.newArrayList(); allOnCondition.addAll(topJoinOnClauseConjuncts); allOnCondition.addAll(ExpressionUtils.extractConjunction(bottomJoinOnClause)); HashSet<SlotReference> newBottomJoinSlots = new HashSet<>(aOutputSlots); newBottomJoinSlots.addAll(cOutputSlots); for (Expression onCondition : allOnCondition) { List<SlotReference> slots = onCondition.collect(SlotReference.class::isInstance); if (newBottomJoinSlots.containsAll(slots)) { newBottomJoinOnCondition.add(onCondition); } else { newTopJoinOnCondition.add(onCondition); } } if (newBottomJoinOnCondition.isEmpty() || newTopJoinOnCondition.isEmpty()) { return false; } return true; }
class JoinLAsscomHelper { private final LogicalJoin topJoin; private final LogicalJoin<GroupPlan, GroupPlan> bottomJoin; private final Plan a; private final Plan b; private final Plan c; private final Expression topJoinOnClause; private final Expression bottomJoinOnClause; private final List<SlotReference> aOutputSlots; private final List<SlotReference> bOutputSlots; private final List<SlotReference> cOutputSlots; private final List<Expression> newBottomJoinOnCondition = Lists.newArrayList(); private final List<Expression> newTopJoinOnCondition = Lists.newArrayList(); /** * Init plan and output. */ public JoinLAsscomHelper(LogicalJoin topJoin, LogicalJoin<GroupPlan, GroupPlan> bottomJoin) { this.topJoin = topJoin; this.bottomJoin = bottomJoin; a = bottomJoin.left(); b = bottomJoin.right(); c = (Plan) topJoin.right(); Preconditions.checkArgument(topJoin.getCondition().isPresent(), "topJoin onClause must be present."); topJoinOnClause = (Expression) topJoin.getCondition().get(); Preconditions.checkArgument(bottomJoin.getCondition().isPresent(), "bottomJoin onClause must be present."); bottomJoinOnClause = (Expression) bottomJoin.getCondition().get(); aOutputSlots = Utils.getOutputSlotReference(a); bOutputSlots = Utils.getOutputSlotReference(b); cOutputSlots = Utils.getOutputSlotReference(c); } public static JoinLAsscomHelper of(LogicalJoin topJoin, LogicalJoin<GroupPlan, GroupPlan> bottomJoin) { return new JoinLAsscomHelper(topJoin, bottomJoin); } /** * Get the onCondition of newTopJoin and newBottomJoin. */ /** * Get projectExpr of left and right. * Just for project-inside. */ private Pair<List<NamedExpression>, List<NamedExpression>> getProjectExprs() { Preconditions.checkArgument(topJoin.left() instanceof LogicalProject); LogicalProject project = (LogicalProject) topJoin.left(); List<NamedExpression> projectExprs = project.getProjects(); List<NamedExpression> newRightProjectExprs = Lists.newArrayList(); List<NamedExpression> newLeftProjectExpr = Lists.newArrayList(); for (NamedExpression projectExpr : projectExprs) { List<SlotReference> usedSlotRefs = projectExpr.collect(SlotReference.class::isInstance); if (new HashSet<>(bOutputSlots).containsAll(usedSlotRefs)) { newRightProjectExprs.add(projectExpr); } else { newLeftProjectExpr.add(projectExpr); } } return new Pair<>(newLeftProjectExpr, newRightProjectExprs); } private LogicalJoin<GroupPlan, GroupPlan> newBottomJoin() { return new LogicalJoin( bottomJoin.getJoinType(), Optional.of(ExpressionUtils.and(newBottomJoinOnCondition)), a, c); } /** * Create topJoin for project-inside. */ public LogicalJoin newProjectTopJoin() { List<NamedExpression> newLeftProjectExpr = getProjectExprs().first; List<NamedExpression> newRightProjectExprs = getProjectExprs().second; if (newRightProjectExprs.size() == 0) { return new LogicalJoin( topJoin.getJoinType(), Optional.of(ExpressionUtils.and(newTopJoinOnCondition)), newBottomJoin(), b); } LogicalProject newRightProject = new LogicalProject<>(newRightProjectExprs, b); if (newLeftProjectExpr.size() == 0) { return new LogicalJoin( topJoin.getJoinType(), Optional.of(ExpressionUtils.and(newTopJoinOnCondition)), newBottomJoin(), newRightProject); } else { LogicalProject newLeftProject = new LogicalProject<>(newLeftProjectExpr, newBottomJoin()); return new LogicalJoin( topJoin.getJoinType(), Optional.of(ExpressionUtils.and(newTopJoinOnCondition)), newLeftProject, newRightProject); } } /** * Create topJoin for no-project-inside. 
*/ public LogicalJoin newTopJoin() { return new LogicalJoin( topJoin.getJoinType(), Optional.of(ExpressionUtils.and(newTopJoinOnCondition)), newBottomJoin(), b); } public static boolean check(LogicalJoin topJoin) { if (topJoin.getJoinReorderContext().hasCommute()) { return false; } return true; } }
class JoinLAsscomHelper { /* * topJoin newTopJoin * / \ / \ * bottomJoin C --> newBottomJoin B * / \ / \ * A B A C */ private final LogicalJoin topJoin; private final LogicalJoin<GroupPlan, GroupPlan> bottomJoin; private final Plan a; private final Plan b; private final Plan c; private final Expression topJoinOnClause; private final Expression bottomJoinOnClause; private final List<SlotReference> aOutputSlots; private final List<SlotReference> bOutputSlots; private final List<SlotReference> cOutputSlots; private final List<Expression> newBottomJoinOnCondition = Lists.newArrayList(); private final List<Expression> newTopJoinOnCondition = Lists.newArrayList(); /** * Init plan and output. */ public JoinLAsscomHelper(LogicalJoin<? extends Plan, GroupPlan> topJoin, LogicalJoin<GroupPlan, GroupPlan> bottomJoin) { this.topJoin = topJoin; this.bottomJoin = bottomJoin; a = bottomJoin.left(); b = bottomJoin.right(); c = topJoin.right(); Preconditions.checkArgument(topJoin.getCondition().isPresent(), "topJoin onClause must be present."); topJoinOnClause = topJoin.getCondition().get(); Preconditions.checkArgument(bottomJoin.getCondition().isPresent(), "bottomJoin onClause must be present."); bottomJoinOnClause = bottomJoin.getCondition().get(); aOutputSlots = Utils.getOutputSlotReference(a); bOutputSlots = Utils.getOutputSlotReference(b); cOutputSlots = Utils.getOutputSlotReference(c); } public static JoinLAsscomHelper of(LogicalJoin<? extends Plan, GroupPlan> topJoin, LogicalJoin<GroupPlan, GroupPlan> bottomJoin) { return new JoinLAsscomHelper(topJoin, bottomJoin); } /** * Get the onCondition of newTopJoin and newBottomJoin. */ /** * Get projectExpr of left and right. * Just for project-inside. */ private Pair<List<NamedExpression>, List<NamedExpression>> getProjectExprs() { Preconditions.checkArgument(topJoin.left() instanceof LogicalProject); LogicalProject project = (LogicalProject) topJoin.left(); List<NamedExpression> projectExprs = project.getProjects(); List<NamedExpression> newRightProjectExprs = Lists.newArrayList(); List<NamedExpression> newLeftProjectExpr = Lists.newArrayList(); HashSet<SlotReference> bOutputSlotsSet = new HashSet<>(bOutputSlots); for (NamedExpression projectExpr : projectExprs) { List<SlotReference> usedSlotRefs = projectExpr.collect(SlotReference.class::isInstance); if (bOutputSlotsSet.containsAll(usedSlotRefs)) { newRightProjectExprs.add(projectExpr); } else { newLeftProjectExpr.add(projectExpr); } } return new Pair<>(newLeftProjectExpr, newRightProjectExprs); } private LogicalJoin<GroupPlan, GroupPlan> newBottomJoin() { return new LogicalJoin( bottomJoin.getJoinType(), Optional.of(ExpressionUtils.and(newBottomJoinOnCondition)), a, c); } /** * Create topJoin for project-inside. */ public LogicalJoin newProjectTopJoin() { Plan left; Plan right; List<NamedExpression> newLeftProjectExpr = getProjectExprs().first; List<NamedExpression> newRightProjectExprs = getProjectExprs().second; if (!newLeftProjectExpr.isEmpty()) { left = new LogicalProject<>(newLeftProjectExpr, newBottomJoin()); } else { left = newBottomJoin(); } if (!newRightProjectExprs.isEmpty()) { right = new LogicalProject<>(newRightProjectExprs, b); } else { right = b; } return new LogicalJoin<>( topJoin.getJoinType(), Optional.of(ExpressionUtils.and(newTopJoinOnCondition)), left, right); } /** * Create topJoin for no-project-inside. 
*/ public LogicalJoin newTopJoin() { return new LogicalJoin( topJoin.getJoinType(), Optional.of(ExpressionUtils.and(newTopJoinOnCondition)), newBottomJoin(), b); } public static boolean check(LogicalJoin topJoin) { if (topJoin.getJoinReorderContext().hasCommute()) { return false; } return true; } }
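The heart of the rewrite is partitioning on-condition conjuncts: a condition lands in the new bottom join only when every slot it references is produced by A or C, which is why the after-code builds a `HashSet` once and calls `containsAll` per conjunct. A hedged sketch of that subset test over plain strings; the real code works on `SlotReference` and `Expression` types:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

final class ConjunctPartitionSketch {
    /** Splits conditions into (bottom, top) by whether all referenced slots are available below. */
    static Map<String, List<String>> partition(Map<String, Set<String>> conditionSlots,
                                               Set<String> bottomSlots) {
        List<String> bottom = new ArrayList<>();
        List<String> top = new ArrayList<>();
        for (Map.Entry<String, Set<String>> e : conditionSlots.entrySet()) {
            if (bottomSlots.containsAll(e.getValue())) {
                bottom.add(e.getKey()); // every slot comes from A or C: push below
            } else {
                top.add(e.getKey());    // touches B: must stay in the top join
            }
        }
        return Map.of("bottom", bottom, "top", top);
    }
}
```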
Okay, thanks for the hint. Changed
void assertFind() { ShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration(); ShardingTableRuleConfiguration shardingTableRuleConfiguration = getShardingTableRuleConfiguration(); Map<String, AlgorithmConfiguration> allAlgorithms = getAlgorithms(); ruleConfig.getTables().add(shardingTableRuleConfiguration); ruleConfig.getShardingAlgorithms().putAll(allAlgorithms); ruleConfig.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_DATABASE_SHARDING_DEFAULT_ALGORITHM)); ruleConfig.setDefaultTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_TABLE_SHARDING_DEFAULT_ALGORITHM)); Collection<String> unusedAlgorithmsCollection = UnusedAlgorithmFinder.find(ruleConfig); assertNotNull(unusedAlgorithmsCollection); assertThat(unusedAlgorithmsCollection.size(), is(1)); assertTrue(unusedAlgorithmsCollection.contains(UNUSED_ALGORITHM)); }
Map<String, AlgorithmConfiguration> allAlgorithms = getAlgorithms();
void assertFind() { ShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration(); ShardingTableRuleConfiguration shardingTableRuleConfig = getShardingTableRuleConfiguration(); ruleConfig.getTables().add(shardingTableRuleConfig); ruleConfig.getShardingAlgorithms().putAll(getAlgorithms()); ruleConfig.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_DATABASE_SHARDING_DEFAULT_ALGORITHM)); ruleConfig.setDefaultTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_TABLE_SHARDING_DEFAULT_ALGORITHM)); Collection<String> actual = UnusedAlgorithmFinder.find(ruleConfig); assertNotNull(actual); assertThat(actual.size(), is(1)); assertTrue(actual.contains(UNUSED_ALGORITHM)); }
class UnusedAlgorithmFinderTest { private static final String USED_TABLE_SHARDING_ALGORITHM = "used_table_sharding_algorithm"; private static final String USED_TABLE_SHARDING_DEFAULT_ALGORITHM = "used_table_sharding_default_algorithm"; private static final String USED_DATABASE_SHARDING_ALGORITHM = "used_database_sharding_algorithm"; private static final String USED_DATABASE_SHARDING_DEFAULT_ALGORITHM = "used_database_sharding_default_algorithm"; private static final String UNUSED_ALGORITHM = "unused_algorithm"; @Test private ShardingTableRuleConfiguration getShardingTableRuleConfiguration() { ShardingTableRuleConfiguration shardingTableRuleConfiguration = new ShardingTableRuleConfiguration("t_order", null); shardingTableRuleConfiguration.setTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_TABLE_SHARDING_ALGORITHM)); shardingTableRuleConfiguration.setDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_DATABASE_SHARDING_ALGORITHM)); return shardingTableRuleConfiguration; } private Map<String, AlgorithmConfiguration> getAlgorithms() { return ImmutableMap.of( USED_DATABASE_SHARDING_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()), USED_DATABASE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()), USED_TABLE_SHARDING_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()), USED_TABLE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()), UNUSED_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties())); } }
class UnusedAlgorithmFinderTest { private static final String USED_TABLE_SHARDING_ALGORITHM = "used_table_sharding_algorithm"; private static final String USED_TABLE_SHARDING_DEFAULT_ALGORITHM = "used_table_sharding_default_algorithm"; private static final String USED_DATABASE_SHARDING_ALGORITHM = "used_database_sharding_algorithm"; private static final String USED_DATABASE_SHARDING_DEFAULT_ALGORITHM = "used_database_sharding_default_algorithm"; private static final String UNUSED_ALGORITHM = "unused_algorithm"; @Test private ShardingTableRuleConfiguration getShardingTableRuleConfiguration() { ShardingTableRuleConfiguration result = new ShardingTableRuleConfiguration("t_order", null); result.setTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_TABLE_SHARDING_ALGORITHM)); result.setDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_DATABASE_SHARDING_ALGORITHM)); return result; } private Map<String, AlgorithmConfiguration> getAlgorithms() { return ImmutableMap.of( USED_DATABASE_SHARDING_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()), USED_DATABASE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()), USED_TABLE_SHARDING_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()), USED_TABLE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()), UNUSED_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties())); } }
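Behind this test is a simple set difference: the finder reports every configured algorithm name that no sharding strategy references. A minimal sketch of that computation; the names and types are illustrative, not the ShardingSphere API:

```java
import java.util.HashSet;
import java.util.Set;

final class UnusedAlgorithmSketch {
    /** Returns configured algorithm names that appear in no sharding strategy. */
    static Set<String> findUnused(Set<String> configured, Set<String> referenced) {
        Set<String> unused = new HashSet<>(configured);
        unused.removeAll(referenced); // whatever remains is never used
        return unused;
    }
}
```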
Should this be `StorageInfoV2`?
private void getNewImage(HostInfo helperNode) throws IOException { long localImageVersion = 0; Storage storage = new Storage(this.imageDir); localImageVersion = storage.getLatestImageSeq(); try { String hostPort = NetUtils.getHostPortInAccessibleFormat(helperNode.getHost(), Config.http_port); String infoUrl = "http: ResponseBody<StorageInfo> responseBody = MetaHelper .doGet(infoUrl, HTTP_TIMEOUT_SECOND * 1000, StorageInfo.class); if (responseBody.getCode() != RestApiStatusCode.OK.code) { LOG.warn("get image failed,responseBody:{}", responseBody); throw new IOException(responseBody.toString()); } StorageInfo info = responseBody.getData(); long version = info.getImageSeq(); if (version > localImageVersion) { String url = "http: String filename = Storage.IMAGE + "." + version; File dir = new File(this.imageDir); MetaHelper.getRemoteFile(url, HTTP_TIMEOUT_SECOND * 1000, MetaHelper.getFile(filename, dir)); MetaHelper.complete(filename, dir); } else { LOG.warn("get an image with a lower version, localImageVersion: {}, got version: {}", localImageVersion, version); } } catch (Exception e) { throw new IOException(e); } }
.doGet(infoUrl, HTTP_TIMEOUT_SECOND * 1000, StorageInfo.class);
private void getNewImage(HostInfo helperNode) throws IOException { long localImageVersion = 0; Storage storage = new Storage(this.imageDir); localImageVersion = storage.getLatestImageSeq(); try { String hostPort = NetUtils.getHostPortInAccessibleFormat(helperNode.getHost(), Config.http_port); String infoUrl = "http: ResponseBody<StorageInfo> responseBody = MetaHelper .doGet(infoUrl, HTTP_TIMEOUT_SECOND * 1000, StorageInfo.class); if (responseBody.getCode() != RestApiStatusCode.OK.code) { LOG.warn("get image failed,responseBody:{}", responseBody); throw new IOException(responseBody.toString()); } StorageInfo info = responseBody.getData(); long version = info.getImageSeq(); if (version > localImageVersion) { String url = "http: String filename = Storage.IMAGE + "." + version; File dir = new File(this.imageDir); MetaHelper.getRemoteFile(url, HTTP_TIMEOUT_SECOND * 1000, MetaHelper.getFile(filename, dir)); MetaHelper.complete(filename, dir); } else { LOG.warn("get an image with a lower version, localImageVersion: {}, got version: {}", localImageVersion, version); } } catch (Exception e) { throw new IOException(e); } }
class SingletonHolder { private static final Env INSTANCE = new Env(); }
class SingletonHolder { private static final Env INSTANCE = new Env(); }
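Whatever the response type ends up being called, the method's shape is a compare-and-fetch: download the remote image only when its sequence number is strictly newer than the local one. A hedged sketch of that guard, where the fetcher callback is an illustrative stand-in for `MetaHelper.getRemoteFile`:

```java
import java.util.function.LongConsumer;

final class ImageSyncSketch {
    /** Fetches the remote image only when its sequence number is strictly newer. */
    static boolean syncIfNewer(long localSeq, long remoteSeq, LongConsumer fetcher) {
        if (remoteSeq > localSeq) {
            fetcher.accept(remoteSeq); // e.g. download "image.<remoteSeq>" into the image dir
            return true;
        }
        return false; // local copy is already up to date
    }
}
```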
The issue was preexisting, but let's fix the typo: s/is does/is done/
public void createRegistries(BeanContainer container) { log.info("Creating registries"); MetricRegistries.get(MetricRegistry.Type.APPLICATION); MetricRegistries.get(MetricRegistry.Type.BASE); MetricRegistries.get(MetricRegistry.Type.VENDOR); container.instance(MetricRegistries.class).getApplicationRegistry(); }
public void createRegistries(BeanContainer container) { MetricRegistries.get(MetricRegistry.Type.APPLICATION); MetricRegistries.get(MetricRegistry.Type.BASE); MetricRegistries.get(MetricRegistry.Type.VENDOR); container.instance(MetricRegistries.class).getApplicationRegistry(); }
class loading private static final String CURRENT_LOADED_CLASS_COUNT = "classloader.currentLoadedClass.count"; private static final String TOTAL_LOADED_CLASS_COUNT = "classloader.totalLoadedClass.count"; private static final String TOTAL_UNLOADED_CLASS_COUNT = "classloader.totalUnloadedClass.count"; private static final String JVM_UPTIME = "jvm.uptime"; private static final String SYSTEM_LOAD_AVERAGE = "cpu.systemLoadAverage"; private static final String CPU_AVAILABLE_PROCESSORS = "cpu.availableProcessors"; private static final String MEMORY_COMMITTED_NON_HEAP = "memory.committedNonHeap"; private static final String MEMORY_COMMITTED_HEAP = "memory.committedHeap"; private static final String MEMORY_MAX_HEAP = "memory.maxHeap"; private static final String MEMORY_MAX_NON_HEAP = "memory.maxNonHeap"; private static final String MEMORY_USED_HEAP = "memory.usedHeap"; private static final String MEMORY_USED_NON_HEAP = "memory.usedNonHeap"; public void registerVendorMetrics(ShutdownContext shutdown) { MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.VENDOR); List<String> names = new ArrayList<>(); memoryPoolMetrics(registry, names); vendorSpecificMemoryMetrics(registry, names); if (!names.isEmpty()) { shutdown.addShutdownTask(() -> { for (String i : names) { registry.remove(i); } }); } }
class loading private static final String CURRENT_LOADED_CLASS_COUNT = "classloader.currentLoadedClass.count"; private static final String TOTAL_LOADED_CLASS_COUNT = "classloader.totalLoadedClass.count"; private static final String TOTAL_UNLOADED_CLASS_COUNT = "classloader.totalUnloadedClass.count"; private static final String JVM_UPTIME = "jvm.uptime"; private static final String SYSTEM_LOAD_AVERAGE = "cpu.systemLoadAverage"; private static final String CPU_AVAILABLE_PROCESSORS = "cpu.availableProcessors"; private static final String MEMORY_COMMITTED_NON_HEAP = "memory.committedNonHeap"; private static final String MEMORY_COMMITTED_HEAP = "memory.committedHeap"; private static final String MEMORY_MAX_HEAP = "memory.maxHeap"; private static final String MEMORY_MAX_NON_HEAP = "memory.maxNonHeap"; private static final String MEMORY_USED_HEAP = "memory.usedHeap"; private static final String MEMORY_USED_NON_HEAP = "memory.usedNonHeap"; public void registerVendorMetrics(ShutdownContext shutdown) { MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.VENDOR); List<String> names = new ArrayList<>(); memoryPoolMetrics(registry, names); vendorSpecificMemoryMetrics(registry, names); if (!names.isEmpty()) { shutdown.addShutdownTask(() -> { for (String i : names) { registry.remove(i); } }); } }
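The surrounding context registers vendor metrics by name and queues their removal as a shutdown task so nothing leaks across restarts. A hedged sketch of that register-then-unregister pairing, with the registry interface simplified as an assumption:

```java
import java.util.ArrayList;
import java.util.List;

final class MetricLifecycleSketch {
    interface Registry { void register(String name); void remove(String name); }

    /** Registers metric names and returns a task that removes them all on shutdown. */
    static Runnable registerWithCleanup(Registry registry, List<String> names) {
        List<String> registered = new ArrayList<>(names); // snapshot the names we own
        for (String name : registered) {
            registry.register(name);
        }
        return () -> registered.forEach(registry::remove);
    }
}
```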
Maybe we should directly access `identifier.getPhoneNumber().getValue()` here, as the value is used in both places.
public static CommunicationIdentifier convert(CommunicationIdentifierModel identifier) { if (identifier == null) { return null; } assertSingleType(identifier); String rawId = identifier.getRawId(); CommunicationIdentifierModelKind kind = identifier.getKind(); if (kind != null) { if (kind == CommunicationIdentifierModelKind.COMMUNICATION_USER && identifier.getCommunicationUser() != null) { Objects.requireNonNull(identifier.getCommunicationUser().getId()); return new CommunicationUserIdentifier(identifier.getCommunicationUser().getId()); } if (kind == CommunicationIdentifierModelKind.PHONE_NUMBER && identifier.getPhoneNumber() != null) { PhoneNumberIdentifierModel phoneNumberModel = identifier.getPhoneNumber(); Objects.requireNonNull(phoneNumberModel.getValue()); return new PhoneNumberIdentifier(phoneNumberModel.getValue()).setRawId(rawId); } if (kind == CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER && identifier.getMicrosoftTeamsUser() != null) { MicrosoftTeamsUserIdentifierModel teamsUserIdentifierModel = identifier.getMicrosoftTeamsUser(); Objects.requireNonNull(teamsUserIdentifierModel.getUserId()); Objects.requireNonNull(teamsUserIdentifierModel.getCloud()); Objects.requireNonNull(rawId); return new MicrosoftTeamsUserIdentifier(teamsUserIdentifierModel.getUserId(), teamsUserIdentifierModel.isAnonymous()) .setRawId(rawId) .setCloudEnvironment(CommunicationCloudEnvironment .fromString(teamsUserIdentifierModel.getCloud().toString())); } Objects.requireNonNull(rawId); return new UnknownIdentifier(rawId); } if (identifier.getCommunicationUser() != null) { Objects.requireNonNull(identifier.getCommunicationUser().getId()); return new CommunicationUserIdentifier(identifier.getCommunicationUser().getId()); } if (identifier.getPhoneNumber() != null) { PhoneNumberIdentifierModel phoneNumberModel = identifier.getPhoneNumber(); Objects.requireNonNull(phoneNumberModel.getValue()); return new PhoneNumberIdentifier(phoneNumberModel.getValue()).setRawId(rawId); } if (identifier.getMicrosoftTeamsUser() != null) { MicrosoftTeamsUserIdentifierModel teamsUserIdentifierModel = identifier.getMicrosoftTeamsUser(); Objects.requireNonNull(teamsUserIdentifierModel.getUserId()); Objects.requireNonNull(teamsUserIdentifierModel.getCloud()); Objects.requireNonNull(rawId); return new MicrosoftTeamsUserIdentifier(teamsUserIdentifierModel.getUserId(), teamsUserIdentifierModel.isAnonymous()) .setRawId(rawId) .setCloudEnvironment(CommunicationCloudEnvironment .fromString(teamsUserIdentifierModel.getCloud().toString())); } Objects.requireNonNull(rawId); return new UnknownIdentifier(rawId); }
PhoneNumberIdentifierModel phoneNumberModel = identifier.getPhoneNumber();
public static CommunicationIdentifier convert(CommunicationIdentifierModel identifier) { if (identifier == null) { return null; } assertSingleType(identifier); String rawId = identifier.getRawId(); CommunicationIdentifierModelKind kind = (identifier.getKind() != null) ? identifier.getKind() : extractKind(identifier); if (kind == CommunicationIdentifierModelKind.COMMUNICATION_USER && identifier.getCommunicationUser() != null) { Objects.requireNonNull(identifier.getCommunicationUser().getId(), "'ID' of the CommunicationIdentifierModel cannot be null."); return new CommunicationUserIdentifier(identifier.getCommunicationUser().getId()); } if (kind == CommunicationIdentifierModelKind.PHONE_NUMBER && identifier.getPhoneNumber() != null) { String phoneNumber = identifier.getPhoneNumber().getValue(); Objects.requireNonNull(phoneNumber, "'PhoneNumber' of the CommunicationIdentifierModel cannot be null."); Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null."); return new PhoneNumberIdentifier(phoneNumber).setRawId(rawId); } if (kind == CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER && identifier.getMicrosoftTeamsUser() != null) { MicrosoftTeamsUserIdentifierModel teamsUserIdentifierModel = identifier.getMicrosoftTeamsUser(); Objects.requireNonNull(teamsUserIdentifierModel.getUserId(), "'UserID' of the CommunicationIdentifierModel cannot be null."); Objects.requireNonNull(teamsUserIdentifierModel.getCloud(), "'Cloud' of the CommunicationIdentifierModel cannot be null."); Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null."); return new MicrosoftTeamsUserIdentifier(teamsUserIdentifierModel.getUserId(), teamsUserIdentifierModel.isAnonymous()) .setRawId(rawId) .setCloudEnvironment(CommunicationCloudEnvironment .fromString(teamsUserIdentifierModel.getCloud().toString())); } Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null."); return new UnknownIdentifier(rawId); }
class CommunicationIdentifierConverter { /** * Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}. */ /** * Maps from {@link CommunicationIdentifier} to {@link CommunicationIdentifierModel}. */ public static CommunicationIdentifierModel convert(CommunicationIdentifier identifier) throws IllegalArgumentException { if (identifier == null) { return null; } if (identifier instanceof CommunicationUserIdentifier) { CommunicationUserIdentifier communicationUserIdentifier = (CommunicationUserIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(communicationUserIdentifier.getRawId()) .setCommunicationUser( new CommunicationUserIdentifierModel().setId(communicationUserIdentifier.getId())); } if (identifier instanceof PhoneNumberIdentifier) { PhoneNumberIdentifier phoneNumberIdentifier = (PhoneNumberIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(phoneNumberIdentifier.getRawId()) .setPhoneNumber(new PhoneNumberIdentifierModel().setValue(phoneNumberIdentifier.getPhoneNumber())); } if (identifier instanceof MicrosoftTeamsUserIdentifier) { MicrosoftTeamsUserIdentifier teamsUserIdentifier = (MicrosoftTeamsUserIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(teamsUserIdentifier.getRawId()) .setMicrosoftTeamsUser(new MicrosoftTeamsUserIdentifierModel() .setIsAnonymous(teamsUserIdentifier.isAnonymous()) .setUserId(teamsUserIdentifier.getUserId()) .setCloud(CommunicationCloudEnvironmentModel.fromString( teamsUserIdentifier.getCloudEnvironment().toString()))); } if (identifier instanceof UnknownIdentifier) { UnknownIdentifier unknownIdentifier = (UnknownIdentifier) identifier; return new CommunicationIdentifierModel().setRawId(unknownIdentifier.getId()); } throw new IllegalArgumentException(String.format("Unknown identifier class '%s'", identifier.getClass().getName())); } private static void assertSingleType(CommunicationIdentifierModel identifier) { CommunicationUserIdentifierModel communicationUser = identifier.getCommunicationUser(); PhoneNumberIdentifierModel phoneNumber = identifier.getPhoneNumber(); MicrosoftTeamsUserIdentifierModel microsoftTeamsUser = identifier.getMicrosoftTeamsUser(); ArrayList<String> presentProperties = new ArrayList<>(); if (communicationUser != null) { presentProperties.add(communicationUser.getClass().getName()); } if (phoneNumber != null) { presentProperties.add(phoneNumber.getClass().getName()); } if (microsoftTeamsUser != null) { presentProperties.add(microsoftTeamsUser.getClass().getName()); } if (presentProperties.size() > 1) { throw new IllegalArgumentException( String.format( "Only one of the identifier models in %s should be present.", String.join(", ", presentProperties))); } } }
class CommunicationIdentifierConverter { /** * Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}. */ /** * Maps from {@link CommunicationIdentifier} to {@link CommunicationIdentifierModel}. */ public static CommunicationIdentifierModel convert(CommunicationIdentifier identifier) throws IllegalArgumentException { if (identifier == null) { return null; } if (identifier instanceof CommunicationUserIdentifier) { CommunicationUserIdentifier communicationUserIdentifier = (CommunicationUserIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(communicationUserIdentifier.getRawId()) .setCommunicationUser( new CommunicationUserIdentifierModel().setId(communicationUserIdentifier.getId())); } if (identifier instanceof PhoneNumberIdentifier) { PhoneNumberIdentifier phoneNumberIdentifier = (PhoneNumberIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(phoneNumberIdentifier.getRawId()) .setPhoneNumber(new PhoneNumberIdentifierModel().setValue(phoneNumberIdentifier.getPhoneNumber())); } if (identifier instanceof MicrosoftTeamsUserIdentifier) { MicrosoftTeamsUserIdentifier teamsUserIdentifier = (MicrosoftTeamsUserIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(teamsUserIdentifier.getRawId()) .setMicrosoftTeamsUser(new MicrosoftTeamsUserIdentifierModel() .setIsAnonymous(teamsUserIdentifier.isAnonymous()) .setUserId(teamsUserIdentifier.getUserId()) .setCloud(CommunicationCloudEnvironmentModel.fromString( teamsUserIdentifier.getCloudEnvironment().toString()))); } if (identifier instanceof UnknownIdentifier) { UnknownIdentifier unknownIdentifier = (UnknownIdentifier) identifier; return new CommunicationIdentifierModel().setRawId(unknownIdentifier.getId()); } throw new IllegalArgumentException(String.format("Unknown identifier class '%s'", identifier.getClass().getName())); } private static void assertSingleType(CommunicationIdentifierModel identifier) { CommunicationUserIdentifierModel communicationUser = identifier.getCommunicationUser(); PhoneNumberIdentifierModel phoneNumber = identifier.getPhoneNumber(); MicrosoftTeamsUserIdentifierModel microsoftTeamsUser = identifier.getMicrosoftTeamsUser(); ArrayList<String> presentProperties = new ArrayList<>(); if (communicationUser != null) { presentProperties.add(communicationUser.getClass().getName()); } if (phoneNumber != null) { presentProperties.add(phoneNumber.getClass().getName()); } if (microsoftTeamsUser != null) { presentProperties.add(microsoftTeamsUser.getClass().getName()); } if (presentProperties.size() > 1) { throw new IllegalArgumentException( String.format( "Only one of the identifier models in %s should be present.", String.join(", ", presentProperties))); } } private static CommunicationIdentifierModelKind extractKind(CommunicationIdentifierModel identifier) { Objects.requireNonNull(identifier, "CommunicationIdentifierModel cannot be null."); if (identifier.getCommunicationUser() != null) { return CommunicationIdentifierModelKind.COMMUNICATION_USER; } if (identifier.getPhoneNumber() != null) { return CommunicationIdentifierModelKind.PHONE_NUMBER; } if (identifier.getMicrosoftTeamsUser() != null) { return CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER; } return CommunicationIdentifierModelKind.UNKNOWN; } }
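A quick round-trip sketch of the converter pair above, assuming the generated CommunicationIdentifierModel classes are on the classpath; their package is service-specific, so treat the model import and setup as illustrative rather than authoritative:

import com.azure.communication.common.CommunicationIdentifier;
import com.azure.communication.common.PhoneNumberIdentifier;

public class ConverterRoundTripSketch {
    public static void main(String[] args) {
        // Identifier -> model -> identifier should preserve the raw id and phone number.
        PhoneNumberIdentifier phone = new PhoneNumberIdentifier("+14255550123");
        CommunicationIdentifierModel model = CommunicationIdentifierConverter.convert(phone);
        CommunicationIdentifier back = CommunicationIdentifierConverter.convert(model);
        System.out.println(back instanceof PhoneNumberIdentifier); // expected: true
    }
}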
This is a known issue tracked via https://github.com/AzureAD/azure-activedirectory-library-for-java/blob/dev/src/main/java/com/microsoft/aad/adal4j/AuthenticationContext.java.
protected RestClient buildRestClient(AzureTokenCredentials credentials, AzureEnvironment.Endpoint endpoint) { RestClient client = restClientBuilder .withBaseUrl(credentials.environment(), endpoint) .withCredentials(credentials) .withInterceptor(new ProviderRegistrationInterceptor(credentials)) .build(); if (client.httpClient().proxy() != null) { credentials.withProxy(client.httpClient().proxy()); } return client; }
credentials.withProxy(client.httpClient().proxy());
protected RestClient buildRestClient(AzureTokenCredentials credentials, AzureEnvironment.Endpoint endpoint) { RestClient client = restClientBuilder .withBaseUrl(credentials.environment(), endpoint) .withCredentials(credentials) .withInterceptor(new ProviderRegistrationInterceptor(credentials)) .build(); if (client.httpClient().proxy() != null) { credentials.withProxy(client.httpClient().proxy()); } return client; }
class AzureConfigurableImpl<T extends AzureConfigurable<T>> implements AzureConfigurable<T> { protected RestClient.Builder restClientBuilder; protected AzureConfigurableImpl() { this.restClientBuilder = new RestClient.Builder() .withSerializerAdapter(new AzureJacksonAdapter()) .withResponseBuilderFactory(new AzureResponseBuilder.Factory()); } @SuppressWarnings("unchecked") @Override public T withLogLevel(LogLevel level) { this.restClientBuilder = this.restClientBuilder.withLogLevel(level); return (T) this; } @SuppressWarnings("unchecked") @Override public T withInterceptor(Interceptor interceptor) { this.restClientBuilder = this.restClientBuilder.withInterceptor(interceptor); return (T) this; } @SuppressWarnings("unchecked") @Override public T withUserAgent(String userAgent) { this.restClientBuilder = this.restClientBuilder.withUserAgent(userAgent); return (T) this; } @SuppressWarnings("unchecked") @Override public T withReadTimeout(long timeout, TimeUnit unit) { this.restClientBuilder = restClientBuilder.withReadTimeout(timeout, unit); return (T) this; } @SuppressWarnings("unchecked") @Override public T withConnectionTimeout(long timeout, TimeUnit unit) { this.restClientBuilder = restClientBuilder.withConnectionTimeout(timeout, unit); return (T) this; } @SuppressWarnings("unchecked") @Override public T withMaxIdleConnections(int maxIdleConnections) { this.restClientBuilder = restClientBuilder.withMaxIdleConnections(maxIdleConnections); return (T) this; } @SuppressWarnings("unchecked") @Override public T withCallbackExecutor(Executor executor) { this.restClientBuilder = restClientBuilder.withCallbackExecutor(executor); return (T) this; } @SuppressWarnings("unchecked") @Override public T withProxy(Proxy proxy) { this.restClientBuilder = restClientBuilder.withProxy(proxy); return (T) this; } @SuppressWarnings("unchecked") @Override public T withProxyAuthenticator(Authenticator proxyAuthenticator) { this.restClientBuilder = restClientBuilder.withProxyAuthenticator(proxyAuthenticator); return (T) this; } protected RestClient buildRestClient(AzureTokenCredentials credentials) { return buildRestClient(credentials, AzureEnvironment.Endpoint.RESOURCE_MANAGER); } }
class AzureConfigurableImpl<T extends AzureConfigurable<T>> implements AzureConfigurable<T> { protected RestClient.Builder restClientBuilder; protected AzureConfigurableImpl() { this.restClientBuilder = new RestClient.Builder() .withSerializerAdapter(new AzureJacksonAdapter()) .withResponseBuilderFactory(new AzureResponseBuilder.Factory()); } @SuppressWarnings("unchecked") @Override public T withLogLevel(LogLevel level) { this.restClientBuilder = this.restClientBuilder.withLogLevel(level); return (T) this; } @SuppressWarnings("unchecked") @Override public T withInterceptor(Interceptor interceptor) { this.restClientBuilder = this.restClientBuilder.withInterceptor(interceptor); return (T) this; } @SuppressWarnings("unchecked") @Override public T withUserAgent(String userAgent) { this.restClientBuilder = this.restClientBuilder.withUserAgent(userAgent); return (T) this; } @SuppressWarnings("unchecked") @Override public T withReadTimeout(long timeout, TimeUnit unit) { this.restClientBuilder = restClientBuilder.withReadTimeout(timeout, unit); return (T) this; } @SuppressWarnings("unchecked") @Override public T withConnectionTimeout(long timeout, TimeUnit unit) { this.restClientBuilder = restClientBuilder.withConnectionTimeout(timeout, unit); return (T) this; } @SuppressWarnings("unchecked") @Override public T withMaxIdleConnections(int maxIdleConnections) { this.restClientBuilder = restClientBuilder.withMaxIdleConnections(maxIdleConnections); return (T) this; } @SuppressWarnings("unchecked") @Override public T withCallbackExecutor(Executor executor) { this.restClientBuilder = restClientBuilder.withCallbackExecutor(executor); return (T) this; } @SuppressWarnings("unchecked") @Override public T withProxy(Proxy proxy) { this.restClientBuilder = restClientBuilder.withProxy(proxy); return (T) this; } @SuppressWarnings("unchecked") @Override public T withProxyAuthenticator(Authenticator proxyAuthenticator) { this.restClientBuilder = restClientBuilder.withProxyAuthenticator(proxyAuthenticator); return (T) this; } protected RestClient buildRestClient(AzureTokenCredentials credentials) { return buildRestClient(credentials, AzureEnvironment.Endpoint.RESOURCE_MANAGER); } }
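The review note above points at ADAL4J: token acquisition inside AzureTokenCredentials does not go through the RestClient's OkHttp client, so the proxy has to be copied onto the credentials explicitly. A minimal sketch of that idea, with a made-up proxy address:

import java.net.InetSocketAddress;
import java.net.Proxy;

public class ProxyPropagationSketch {
    public static void main(String[] args) {
        // Proxy configured on the HTTP client; ARM REST calls would use it automatically.
        Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("proxy.example.com", 8080));
        // Token requests issued by ADAL4J would bypass it unless the credentials
        // are told about it, which is what buildRestClient() above does:
        // credentials.withProxy(proxy);
        System.out.println("Proxy to hand to the credentials: " + proxy);
    }
}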
Besides, can you add a unit test for this logic?
public static Optional<SqlNode> convert(final LiteralExpressionSegment segment) { if (null == segment.getLiterals()) { return Optional.of(SqlLiteral.createNull(SqlParserPos.ZERO)); } String literalValue = String.valueOf(segment.getLiterals()); if (TRIM_FUNCTION_FLAGS.contains(literalValue)) { return Optional.of(SqlLiteral.createSymbol(Flag.valueOf(literalValue), SqlParserPos.ZERO)); } if (TIME_UNIT_NAMES.contains(literalValue)) { return Optional.of(new SqlIntervalQualifier(TimeUnit.valueOf(literalValue), null, SqlParserPos.ZERO)); } if (segment.getLiterals() instanceof Integer || segment.getLiterals() instanceof BigDecimal || segment.getLiterals() instanceof Long) { return Optional.of(SqlLiteral.createExactNumeric(literalValue, SqlParserPos.ZERO)); } if (segment.getLiterals() instanceof String) { return Optional.of(SqlLiteral.createCharString(literalValue, SqlParserPos.ZERO)); } return Optional.empty(); }
if (segment.getLiterals() instanceof Integer || segment.getLiterals() instanceof BigDecimal || segment.getLiterals() instanceof Long) {
public static Optional<SqlNode> convert(final LiteralExpressionSegment segment) { if (null == segment.getLiterals()) { return Optional.of(SqlLiteral.createNull(SqlParserPos.ZERO)); } String literalValue = String.valueOf(segment.getLiterals()); if (TRIM_FUNCTION_FLAGS.contains(literalValue)) { return Optional.of(SqlLiteral.createSymbol(Flag.valueOf(literalValue), SqlParserPos.ZERO)); } if (TIME_UNIT_NAMES.contains(literalValue)) { return Optional.of(new SqlIntervalQualifier(TimeUnit.valueOf(literalValue), null, SqlParserPos.ZERO)); } if (segment.getLiterals() instanceof Integer || segment.getLiterals() instanceof BigDecimal || segment.getLiterals() instanceof Number) { return Optional.of(SqlLiteral.createExactNumeric(literalValue, SqlParserPos.ZERO)); } if (segment.getLiterals() instanceof String) { return Optional.of(SqlLiteral.createCharString(literalValue, SqlParserPos.ZERO)); } return Optional.empty(); }
class LiteralExpressionConverter { private static final Collection<String> TRIM_FUNCTION_FLAGS = new HashSet<>(3, 1F); private static final Collection<String> TIME_UNIT_NAMES = new HashSet<>(6, 1F); static { TRIM_FUNCTION_FLAGS.add("BOTH"); TRIM_FUNCTION_FLAGS.add("LEADING"); TRIM_FUNCTION_FLAGS.add("TRAILING"); TIME_UNIT_NAMES.add("YEAR"); TIME_UNIT_NAMES.add("MONTH"); TIME_UNIT_NAMES.add("DAY"); TIME_UNIT_NAMES.add("HOUR"); TIME_UNIT_NAMES.add("MINUTE"); TIME_UNIT_NAMES.add("SECOND"); } /** * Convert literal expression segment to sql node. * * @param segment literal expression segment * @return sql node */ }
class LiteralExpressionConverter { private static final Collection<String> TRIM_FUNCTION_FLAGS = new HashSet<>(3, 1F); private static final Collection<String> TIME_UNIT_NAMES = new HashSet<>(6, 1F); static { TRIM_FUNCTION_FLAGS.add("BOTH"); TRIM_FUNCTION_FLAGS.add("LEADING"); TRIM_FUNCTION_FLAGS.add("TRAILING"); TIME_UNIT_NAMES.add("YEAR"); TIME_UNIT_NAMES.add("MONTH"); TIME_UNIT_NAMES.add("DAY"); TIME_UNIT_NAMES.add("HOUR"); TIME_UNIT_NAMES.add("MINUTE"); TIME_UNIT_NAMES.add("SECOND"); } /** * Convert literal expression segment to sql node. * * @param segment literal expression segment * @return sql node */ }
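In the spirit of the review request, a minimal JUnit 5 sketch for the converter; the LiteralExpressionSegment(startIndex, stopIndex, literals) constructor is assumed from the surrounding ShardingSphere code and should be treated as illustrative:

import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.Optional;
import org.apache.calcite.sql.SqlCharStringLiteral;
import org.apache.calcite.sql.SqlNode;
import org.junit.jupiter.api.Test;

class LiteralExpressionConverterTest {

    @Test
    void assertConvertStringLiteral() {
        // A plain string literal should map to a Calcite character-string literal.
        Optional<SqlNode> actual = LiteralExpressionConverter.convert(new LiteralExpressionSegment(0, 0, "foo"));
        assertTrue(actual.isPresent());
        assertTrue(actual.get() instanceof SqlCharStringLiteral);
    }

    @Test
    void assertConvertNumericLiteral() {
        // Long values take the exact-numeric branch of the converter.
        Optional<SqlNode> actual = LiteralExpressionConverter.convert(new LiteralExpressionSegment(0, 0, 1L));
        assertTrue(actual.isPresent());
    }
}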
It seems that only the boolean vector does not need a dictionary. The byte vector needs a dictionary for storage; the dictionaryIds use run-length coding, so it will be very small. When reading, we need to decode it.
public byte getByte(int i) { if (dictionary == null) { return vector[i]; } else { return (byte) dictionary.decodeToInt(dictionaryIds.vector[i]); } }
return (byte) dictionary.decodeToInt(dictionaryIds.vector[i]);
public byte getByte(int i) { if (dictionary == null) { return vector[i]; } else { return (byte) dictionary.decodeToInt(dictionaryIds.vector[i]); } }
class HeapByteVector extends AbstractHeapVector implements ByteColumnVector { private static final long serialVersionUID = 7216045902943789034L; public byte[] vector; /** * Don't use this except for testing purposes. * * @param len the number of rows */ public HeapByteVector(int len) { super(len); vector = new byte[len]; } @Override }
class HeapByteVector extends AbstractHeapVector implements ByteColumnVector { private static final long serialVersionUID = 7216045902943789034L; public byte[] vector; /** * Don't use this except for testing purposes. * * @param len the number of rows */ public HeapByteVector(int len) { super(len); vector = new byte[len]; } @Override }
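A toy sketch of the read path the comment describes: with a dictionary present, getByte() becomes an indirection through dictionary ids that compress well under run-length coding. The Dictionary interface name mirrors the snippet; the implementation here is invented for illustration:

public class DictionaryDecodeSketch {

    interface Dictionary {
        int decodeToInt(int id);
    }

    public static void main(String[] args) {
        // Many repeated rows, few distinct values: the ids compress well under RLE.
        int[] dictionaryIds = {0, 0, 0, 1, 1};
        byte[] distinctValues = {42, 7};
        Dictionary dictionary = id -> distinctValues[id];
        for (int id : dictionaryIds) {
            byte value = (byte) dictionary.decodeToInt(id); // mirrors getByte() above
            System.out.print(value + " ");                  // prints: 42 42 42 7 7
        }
    }
}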
Ah. So we probably end up with custom parser code for this anyway, so we don't need those. See the following PR.
static String toEndpointsConfig(TestConfig config) throws IOException { Cursor root = new Slime().setObject(); Cursor endpointsArray = root.setArray("endpoints"); config.deployments().get(config.zone()).forEach((cluster, url) -> { Cursor endpointObject = endpointsArray.addObject(); endpointObject.setString("cluster", cluster); endpointObject.setString("url", url.toString()); }); return new String(SlimeUtils.toJsonBytes(root), UTF_8); }
config.deployments().get(config.zone()).forEach((cluster, url) -> {
static String toEndpointsConfig(TestConfig config) throws IOException { Cursor root = new Slime().setObject(); Cursor endpointsArray = root.setArray("endpoints"); config.deployments().get(config.zone()).forEach((cluster, url) -> { Cursor endpointObject = endpointsArray.addObject(); endpointObject.setString("cluster", cluster); endpointObject.setString("url", url.toString()); }); return new String(SlimeUtils.toJsonBytes(root), UTF_8); }
class VespaCliTestRunner implements TestRunner { private static final Logger logger = Logger.getLogger(VespaCliTestRunner.class.getName()); private final SortedMap<Long, LogRecord> log = new ConcurrentSkipListMap<>(); private final Path artifactsPath; private AtomicReference<Status> status = new AtomicReference<>(Status.NOT_STARTED); @Inject public VespaCliTestRunner(VespaCliTestRunnerConfig config) { this(config.artifactsPath().resolve("artifacts")); } VespaCliTestRunner(Path artifactsPath) { this.artifactsPath = artifactsPath; } @Override public Collection<LogRecord> getLog(long after) { return log.tailMap(after + 1).values(); } @Override public Status getStatus() { return status.get(); } @Override public CompletableFuture<?> test(Suite suite, byte[] config) { if (status.getAndSet(RUNNING) == RUNNING) throw new IllegalStateException("Tests already running, not supposed to be started now"); return CompletableFuture.runAsync(() -> runTests(suite, config)); } @Override public boolean isSupported() { return getChildDirectory(artifactsPath, "tests").isPresent(); } void runTests(Suite suite, byte[] config) { Process process = null; try { TestConfig testConfig = TestConfig.fromJson(config); process = testRunProcessBuilder(suite, testConfig).start(); BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream())); in.lines().forEach(line -> { if (line.length() > 1 << 13) line = line.substring(0, 1 << 13) + " ... (this log entry was truncated due to size)"; log(Level.INFO, line, null); }); status.set(process.waitFor() == 0 ? SUCCESS : process.waitFor() == 3 ? FAILURE : ERROR); } catch (Exception e) { if (process != null) process.destroyForcibly(); log(Level.SEVERE, "Failed running tests", e); status.set(ERROR); } } ProcessBuilder testRunProcessBuilder(Suite suite, TestConfig config) throws IOException { Path suitePath = getChildDirectory(artifactsPath, "tests") .flatMap(testsPath -> getChildDirectory(testsPath, toSuiteDirectoryName(suite))) .orElseThrow(() -> new IllegalStateException("No tests found, for suite '" + suite + "'")); ProcessBuilder builder = new ProcessBuilder("vespa", "test", suitePath.toAbsolutePath().toString(), "--application", config.application().toFullString(), "--endpoints", toEndpointsConfig(config), "--data-plane-public-cert", artifactsPath.resolve("cert").toAbsolutePath().toString(), "--data-plane-private-key", artifactsPath.resolve("key").toAbsolutePath().toString()); builder.redirectErrorStream(true); return builder; } private static String toSuiteDirectoryName(Suite suite) { switch (suite) { case SYSTEM_TEST: return "system-test"; case STAGING_SETUP_TEST: return "staging-setup"; case STAGING_TEST: return "staging-test"; default: throw new IllegalArgumentException("Unsupported test suite '" + suite + "'"); } } private void log(Level level, String message, Throwable thrown) { LogRecord record = new LogRecord(level, message); record.setThrown(thrown); logger.log(record); log.put(record.getSequenceNumber(), record); } private static Optional<Path> getChildDirectory(Path parent, String name) { try (Stream<Path> children = Files.list(parent)) { return children.filter(Files::isDirectory) .filter(path -> path.endsWith(name)) .findAny(); } catch (IOException e) { throw new UncheckedIOException("Failed to list files under " + parent, e); } } }
class VespaCliTestRunner implements TestRunner { private static final Logger logger = Logger.getLogger(VespaCliTestRunner.class.getName()); private final SortedMap<Long, LogRecord> log = new ConcurrentSkipListMap<>(); private final Path artifactsPath; private AtomicReference<Status> status = new AtomicReference<>(Status.NOT_STARTED); @Inject public VespaCliTestRunner(VespaCliTestRunnerConfig config) { this(config.artifactsPath().resolve("artifacts")); } VespaCliTestRunner(Path artifactsPath) { this.artifactsPath = artifactsPath; } @Override public Collection<LogRecord> getLog(long after) { return log.tailMap(after + 1).values(); } @Override public Status getStatus() { return status.get(); } @Override public CompletableFuture<?> test(Suite suite, byte[] config) { if (status.getAndSet(RUNNING) == RUNNING) throw new IllegalStateException("Tests already running, not supposed to be started now"); return CompletableFuture.runAsync(() -> runTests(suite, config)); } @Override public boolean isSupported() { return getChildDirectory(artifactsPath, "tests").isPresent(); } void runTests(Suite suite, byte[] config) { Process process = null; try { TestConfig testConfig = TestConfig.fromJson(config); process = testRunProcessBuilder(suite, testConfig).start(); BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream())); in.lines().forEach(line -> { if (line.length() > 1 << 13) line = line.substring(0, 1 << 13) + " ... (this log entry was truncated due to size)"; log(Level.INFO, line, null); }); status.set(process.waitFor() == 0 ? SUCCESS : process.waitFor() == 3 ? FAILURE : ERROR); } catch (Exception e) { if (process != null) process.destroyForcibly(); log(Level.SEVERE, "Failed running tests", e); status.set(ERROR); } } ProcessBuilder testRunProcessBuilder(Suite suite, TestConfig config) throws IOException { Path suitePath = getChildDirectory(artifactsPath, "tests") .flatMap(testsPath -> getChildDirectory(testsPath, toSuiteDirectoryName(suite))) .orElseThrow(() -> new IllegalStateException("No tests found, for suite '" + suite + "'")); ProcessBuilder builder = new ProcessBuilder("vespa", "test", suitePath.toAbsolutePath().toString(), "--application", config.application().toFullString(), "--endpoints", toEndpointsConfig(config), "--data-plane-public-cert", artifactsPath.resolve("cert").toAbsolutePath().toString(), "--data-plane-private-key", artifactsPath.resolve("key").toAbsolutePath().toString()); builder.redirectErrorStream(true); return builder; } private static String toSuiteDirectoryName(Suite suite) { switch (suite) { case SYSTEM_TEST: return "system-test"; case STAGING_SETUP_TEST: return "staging-setup"; case STAGING_TEST: return "staging-test"; default: throw new IllegalArgumentException("Unsupported test suite '" + suite + "'"); } } private void log(Level level, String message, Throwable thrown) { LogRecord record = new LogRecord(level, message); record.setThrown(thrown); logger.log(record); log.put(record.getSequenceNumber(), record); } private static Optional<Path> getChildDirectory(Path parent, String name) { try (Stream<Path> children = Files.list(parent)) { return children.filter(Files::isDirectory) .filter(path -> path.endsWith(name)) .findAny(); } catch (IOException e) { throw new UncheckedIOException("Failed to list files under " + parent, e); } } }
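For reference, the shape toEndpointsConfig() above hands to `vespa test --endpoints`; the cluster name and URL are made up. A sketch that just prints the expected JSON:

public class EndpointsConfigShapeSketch {
    public static void main(String[] args) {
        // One object per (cluster, url) pair in the zone's deployment.
        String expected = "{\"endpoints\":[{\"cluster\":\"default\",\"url\":\"https://container.endpoint.example.com/\"}]}";
        System.out.println(expected);
    }
}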
consider checking whether the validity of the nixes is the same (we should also update this in C++)
public boolean equalTo(Inspector that) { boolean equal = type() == that.type(); if (equal) { switch (type()) { case NIX: break; case BOOL: equal = asBool() == that.asBool(); break; case LONG: equal = asLong() == that.asLong(); break; case DOUBLE: equal = Double.compare(asDouble(), that.asDouble()) == 0; break; case STRING: equal = asString().equals(that.asString()); break; case DATA: equal = Arrays.equals(asData(), that.asData()); break; case ARRAY: { var traverser = new EqualArray(that); traverse(traverser); equal = traverser.isEqual() && (entries() == that.entries()); } break; case OBJECT: { var traverser = new EqualObject(that); traverse(traverser); equal = traverser.isEqual() && (fields() == that.fields()); } break; default: assert(false); break; } } return equal; }
break;
public boolean equalTo(Inspector that) { boolean equal = type() == that.type(); if (equal) { switch (type()) { case NIX: equal = valid() == that.valid(); break; case BOOL: equal = asBool() == that.asBool(); break; case LONG: equal = asLong() == that.asLong(); break; case DOUBLE: equal = Double.compare(asDouble(), that.asDouble()) == 0; break; case STRING: equal = asString().equals(that.asString()); break; case DATA: equal = Arrays.equals(asData(), that.asData()); break; case ARRAY: { var traverser = new EqualArray(that); traverse(traverser); equal = traverser.isEqual() && (entries() == that.entries()); } break; case OBJECT: { var traverser = new EqualObject(that); traverse(traverser); equal = traverser.isEqual() && (fields() == that.fields()); } break; default: assert(false); break; } } return equal; }
class EqualObject extends Equal implements ObjectTraverser { public EqualObject(Inspector rhsInspector) { super(rhsInspector); } @Override public void field(String name, Inspector inspector) { if (equal) { equal = inspector.equalTo(rhsInspector.field(name)); } } }
class EqualObject extends Equal implements ObjectTraverser { public EqualObject(Inspector rhsInspector) { super(rhsInspector); } @Override public void field(String name, Inspector inspector) { if (equal) { equal = inspector.equalTo(rhsInspector.field(name)); } } }
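A sketch of the behavioral change in the 'after' version: two NIX values should now compare equal only when their validity agrees. The Slime factory calls here are from memory and should be double-checked against the actual API:

import com.yahoo.slime.Inspector;
import com.yahoo.slime.Slime;

public class NixValidityEqualitySketch {
    public static void main(String[] args) {
        Inspector validNix = new Slime().setNix();                 // a valid NIX root value
        Inspector invalidNix = new Slime().get().field("missing"); // lookup miss: an invalid NIX
        // Before the change both were NIX and compared equal; afterwards the
        // valid() comparison in the NIX case makes this false.
        System.out.println(validNix.equalTo(invalidNix));
    }
}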
It looks like it would be OK to create a similar failed graph deconstruction task, providing the current graph as both current and failed graph arguments?
public ComponentGraphResult waitForNextGraphGeneration(ComponentGraph oldGraph, Injector fallbackInjector, boolean isInitializing) { try { ComponentGraph newGraph; Collection<Bundle> obsoleteBundles = new HashSet<>(); try { newGraph = waitForNewConfigGenAndCreateGraph(oldGraph, fallbackInjector, isInitializing, obsoleteBundles); newGraph.reuseNodes(oldGraph); } catch (Throwable t) { log.warning("Failed to set up component graph - uninstalling latest bundles. Bootstrap generation: " + getBootstrapGeneration()); osgi.completeBundleGeneration(Osgi.GenerationStatus.FAILURE); throw t; } try { constructComponents(newGraph); } catch (Throwable e) { log.warning("Failed to construct components for generation '" + newGraph.generation() + "' - scheduling partial graph for deconstruction"); Collection<Bundle> newBundlesFromFailedGen = osgi.completeBundleGeneration(Osgi.GenerationStatus.FAILURE); deconstructFailedGraph(oldGraph, newGraph, newBundlesFromFailedGen); throw e; } osgi.completeBundleGeneration(Osgi.GenerationStatus.SUCCESS); Runnable cleanupTask = createPreviousGraphDeconstructionTask(oldGraph, newGraph, obsoleteBundles); return new ComponentGraphResult(newGraph, cleanupTask); } catch (Throwable t) { invalidateGeneration(oldGraph.generation(), t); throw t; } }
}
public ComponentGraphResult waitForNextGraphGeneration(ComponentGraph oldGraph, Injector fallbackInjector, boolean isInitializing) { try { ComponentGraph newGraph; Collection<Bundle> obsoleteBundles = new HashSet<>(); try { newGraph = waitForNewConfigGenAndCreateGraph(oldGraph, fallbackInjector, isInitializing, obsoleteBundles); newGraph.reuseNodes(oldGraph); } catch (Throwable t) { log.warning("Failed to set up component graph - uninstalling latest bundles. Bootstrap generation: " + getBootstrapGeneration()); Collection<Bundle> newBundlesFromFailedGen = osgi.completeBundleGeneration(Osgi.GenerationStatus.FAILURE); deconstructComponentsAndBundles(getBootstrapGeneration(), newBundlesFromFailedGen, List.of()); throw t; } try { constructComponents(newGraph); } catch (Throwable e) { log.warning("Failed to construct components for generation '" + newGraph.generation() + "' - scheduling partial graph for deconstruction"); Collection<Bundle> newBundlesFromFailedGen = osgi.completeBundleGeneration(Osgi.GenerationStatus.FAILURE); deconstructFailedGraph(oldGraph, newGraph, newBundlesFromFailedGen); throw e; } osgi.completeBundleGeneration(Osgi.GenerationStatus.SUCCESS); Runnable cleanupTask = createPreviousGraphDeconstructionTask(oldGraph, newGraph, obsoleteBundles); return new ComponentGraphResult(newGraph, cleanupTask); } catch (Throwable t) { invalidateGeneration(oldGraph.generation(), t); throw t; } }
class Container { private static final Logger log = Logger.getLogger(Container.class.getName()); private final SubscriberFactory subscriberFactory; private final ConfigKey<ApplicationBundlesConfig> applicationBundlesConfigKey; private final ConfigKey<PlatformBundlesConfig> platformBundlesConfigKey; private final ConfigKey<ComponentsConfig> componentsConfigKey; private final ComponentDeconstructor destructor; private final Osgi osgi; private final ConfigRetriever retriever; private List<String> platformBundles; private long previousConfigGeneration = -1L; private long leastGeneration = -1L; public Container(SubscriberFactory subscriberFactory, String configId, ComponentDeconstructor destructor, Osgi osgi) { this.subscriberFactory = subscriberFactory; this.destructor = destructor; this.osgi = osgi; applicationBundlesConfigKey = new ConfigKey<>(ApplicationBundlesConfig.class, configId); platformBundlesConfigKey = new ConfigKey<>(PlatformBundlesConfig.class, configId); componentsConfigKey = new ConfigKey<>(ComponentsConfig.class, configId); var bootstrapKeys = Set.of(applicationBundlesConfigKey, platformBundlesConfigKey, componentsConfigKey); this.retriever = new ConfigRetriever(bootstrapKeys, subscriberFactory); } private void constructComponents(ComponentGraph graph) { graph.nodes().forEach(n -> { if (Thread.interrupted()) throw new UncheckedInterruptedException("Interrupted while constructing component graph", true); n.constructInstance(); }); } private ComponentGraph waitForNewConfigGenAndCreateGraph( ComponentGraph graph, Injector fallbackInjector, boolean isInitializing, Collection<Bundle> obsoleteBundles) { ConfigSnapshot snapshot; while (true) { snapshot = retriever.getConfigs(graph.configKeys(), leastGeneration, isInitializing); if (log.isLoggable(FINE)) log.log(FINE, String.format("getConfigAndCreateGraph:\n" + "graph.configKeys = %s\n" + "graph.generation = %s\n" + "snapshot = %s\n", graph.configKeys(), graph.generation(), snapshot)); if (snapshot instanceof BootstrapConfigs) { if (getBootstrapGeneration() <= previousConfigGeneration) { throw new IllegalStateException(String.format( "Got bootstrap configs out of sequence for old config generation %d.\n" + "Previous config generation is %d", getBootstrapGeneration(), previousConfigGeneration)); } log.log(FINE, () -> "Got new bootstrap generation\n" + configGenerationsString()); if (graph.generation() == 0) { platformBundles = getConfig(platformBundlesConfigKey, snapshot.configs()).bundlePaths(); osgi.installPlatformBundles(platformBundles); } else { throwIfPlatformBundlesChanged(snapshot); } Collection<Bundle> bundlesToRemove = installApplicationBundles(snapshot.configs()); obsoleteBundles.addAll(bundlesToRemove); graph = createComponentGraph(snapshot.configs(), getBootstrapGeneration(), fallbackInjector); } else if (snapshot instanceof ComponentsConfigs) { break; } } log.log(FINE, () -> "Got components configs,\n" + configGenerationsString()); return createAndConfigureComponentGraph(snapshot.configs(), fallbackInjector); } private long getBootstrapGeneration() { return retriever.getBootstrapGeneration(); } private long getComponentsGeneration() { return retriever.getComponentsGeneration(); } private String configGenerationsString() { return String.format("bootstrap generation = %d\n" + "components generation: %d\n" + "previous generation: %d", getBootstrapGeneration(), getComponentsGeneration(), previousConfigGeneration); } private void throwIfPlatformBundlesChanged(ConfigSnapshot snapshot) { var checkPlatformBundles = getConfig(platformBundlesConfigKey, snapshot.configs()).bundlePaths(); if (! checkPlatformBundles.equals(platformBundles)) throw new RuntimeException("Platform bundles are not allowed to change!\nOld: " + platformBundles + "\nNew: " + checkPlatformBundles); } private ComponentGraph createAndConfigureComponentGraph(Map<ConfigKey<? extends ConfigInstance>, ConfigInstance> componentsConfigs, Injector fallbackInjector) { ComponentGraph componentGraph = createComponentGraph(componentsConfigs, getComponentsGeneration(), fallbackInjector); componentGraph.setAvailableConfigs(componentsConfigs); return componentGraph; } private void deconstructFailedGraph(ComponentGraph currentGraph, ComponentGraph failedGraph, Collection<Bundle> bundlesFromFailedGraph) { Set<Object> currentComponents = Collections.newSetFromMap(new IdentityHashMap<>(currentGraph.size())); currentComponents.addAll(currentGraph.allConstructedComponentsAndProviders()); List<Object> unusedComponents = new ArrayList<>(); for (Object component : failedGraph.allConstructedComponentsAndProviders()) { if (!currentComponents.contains(component)) unusedComponents.add(component); } destructor.deconstruct(failedGraph.generation(), unusedComponents, bundlesFromFailedGraph); } private Runnable createPreviousGraphDeconstructionTask(ComponentGraph oldGraph, ComponentGraph newGraph, Collection<Bundle> obsoleteBundles) { Map<Object, ?> newComponents = new IdentityHashMap<>(newGraph.size()); for (Object component : newGraph.allConstructedComponentsAndProviders()) newComponents.put(component, null); List<Object> obsoleteComponents = new ArrayList<>(); for (Object component : oldGraph.allConstructedComponentsAndProviders()) if ( ! newComponents.containsKey(component)) obsoleteComponents.add(component); return () -> destructor.deconstruct(oldGraph.generation(), obsoleteComponents, obsoleteBundles); } private Set<Bundle> installApplicationBundles(Map<ConfigKey<? extends ConfigInstance>, ConfigInstance> configsIncludingBootstrapConfigs) { ApplicationBundlesConfig applicationBundlesConfig = getConfig(applicationBundlesConfigKey, configsIncludingBootstrapConfigs); return osgi.useApplicationBundles(applicationBundlesConfig.bundles(), getBootstrapGeneration()); } private ComponentGraph createComponentGraph(Map<ConfigKey<? extends ConfigInstance>, ConfigInstance> configsIncludingBootstrapConfigs, long generation, Injector fallbackInjector) { previousConfigGeneration = generation; ComponentGraph graph = new ComponentGraph(generation); ComponentsConfig componentsConfig = getConfig(componentsConfigKey, configsIncludingBootstrapConfigs); if (componentsConfig == null) { throw new ConfigurationRuntimeException("The set of all configs does not include a valid 'components' config. Config set: " + configsIncludingBootstrapConfigs.keySet()); } addNodes(componentsConfig, graph); injectNodes(componentsConfig, graph); graph.complete(fallbackInjector); return graph; } private void addNodes(ComponentsConfig componentsConfig, ComponentGraph graph) { for (ComponentsConfig.Components config : componentsConfig.components()) { BundleInstantiationSpecification specification = bundleInstantiationSpecification(config); Class<?> componentClass = osgi.resolveClass(specification); Node componentNode = new ComponentNode(specification.id, config.configId(), componentClass, null); graph.add(componentNode); } } private void injectNodes(ComponentsConfig config, ComponentGraph graph) { for (ComponentsConfig.Components component : config.components()) { Node componentNode = ComponentGraph.getNode(graph, component.id()); for (ComponentsConfig.Components.Inject inject : component.inject()) { componentNode.inject(ComponentGraph.getNode(graph, inject.id())); } } } private void invalidateGeneration(long generation, Throwable cause) { leastGeneration = Math.max(retriever.getComponentsGeneration(), retriever.getBootstrapGeneration()) + 1; if (!(cause instanceof InterruptedException) && !(cause instanceof ConfigInterruptedException) && !(cause instanceof SubscriberClosedException)) { log.log(Level.WARNING, newGraphErrorMessage(generation, cause), cause); } } private static String newGraphErrorMessage(long generation, Throwable cause) { String failedFirstMessage = "Failed to set up first component graph"; String failedNewMessage = "Failed to set up new component graph"; String constructMessage = " due to error when constructing one of the components"; String retainMessage = ". Retaining previous component generation."; if (generation == 0) { if (cause instanceof ComponentNode.ComponentConstructorException) { return failedFirstMessage + constructMessage; } else { return failedFirstMessage; } } else { if (cause instanceof ComponentNode.ComponentConstructorException) { return failedNewMessage + constructMessage + retainMessage; } else { return failedNewMessage + retainMessage; } } } public void shutdown(ComponentGraph graph) { shutdownConfigRetriever(); if (graph != null) { scheduleGraphForDeconstruction(graph); destructor.shutdown(); } } public void shutdownConfigRetriever() { retriever.shutdown(); } public void reloadConfig(long generation) { subscriberFactory.reloadActiveSubscribers(generation); } private void scheduleGraphForDeconstruction(ComponentGraph graph) { destructor.deconstruct(graph.generation(), graph.allConstructedComponentsAndProviders(), List.of()); } public static <T extends ConfigInstance> T getConfig(ConfigKey<T> key, Map<ConfigKey<? extends ConfigInstance>, ConfigInstance> configs) { ConfigInstance inst = configs.get(key); if (inst == null || key.getConfigClass() == null) { throw new RuntimeException("Missing config " + key); } return key.getConfigClass().cast(inst); } private static BundleInstantiationSpecification bundleInstantiationSpecification(ComponentsConfig.Components config) { return BundleInstantiationSpecification.fromStrings(config.id(), config.classId(), config.bundle()); } public static class ComponentGraphResult { private final ComponentGraph newGraph; private final Runnable oldComponentsCleanupTask; public ComponentGraphResult(ComponentGraph newGraph, Runnable oldComponentsCleanupTask) { this.newGraph = newGraph; this.oldComponentsCleanupTask = oldComponentsCleanupTask; } public ComponentGraph newGraph() { return newGraph; } public Runnable oldComponentsCleanupTask() { return oldComponentsCleanupTask; } } }
class Container { private static final Logger log = Logger.getLogger(Container.class.getName()); private final SubscriberFactory subscriberFactory; private final ConfigKey<ApplicationBundlesConfig> applicationBundlesConfigKey; private final ConfigKey<PlatformBundlesConfig> platformBundlesConfigKey; private final ConfigKey<ComponentsConfig> componentsConfigKey; private final ComponentDeconstructor destructor; private final Osgi osgi; private final ConfigRetriever retriever; private List<String> platformBundles; private long previousConfigGeneration = -1L; private long leastGeneration = -1L; public Container(SubscriberFactory subscriberFactory, String configId, ComponentDeconstructor destructor, Osgi osgi) { this.subscriberFactory = subscriberFactory; this.destructor = destructor; this.osgi = osgi; applicationBundlesConfigKey = new ConfigKey<>(ApplicationBundlesConfig.class, configId); platformBundlesConfigKey = new ConfigKey<>(PlatformBundlesConfig.class, configId); componentsConfigKey = new ConfigKey<>(ComponentsConfig.class, configId); var bootstrapKeys = Set.of(applicationBundlesConfigKey, platformBundlesConfigKey, componentsConfigKey); this.retriever = new ConfigRetriever(bootstrapKeys, subscriberFactory); } private void constructComponents(ComponentGraph graph) { graph.nodes().forEach(n -> { if (Thread.interrupted()) throw new UncheckedInterruptedException("Interrupted while constructing component graph", true); n.constructInstance(); }); } private ComponentGraph waitForNewConfigGenAndCreateGraph( ComponentGraph graph, Injector fallbackInjector, boolean isInitializing, Collection<Bundle> obsoleteBundles) { ConfigSnapshot snapshot; while (true) { snapshot = retriever.getConfigs(graph.configKeys(), leastGeneration, isInitializing); if (log.isLoggable(FINE)) log.log(FINE, String.format("getConfigAndCreateGraph:\n" + "graph.configKeys = %s\n" + "graph.generation = %s\n" + "snapshot = %s\n", graph.configKeys(), graph.generation(), snapshot)); if (snapshot instanceof BootstrapConfigs) { if (getBootstrapGeneration() <= previousConfigGeneration) { throw new IllegalStateException(String.format( "Got bootstrap configs out of sequence for old config generation %d.\n" + "Previous config generation is %d", getBootstrapGeneration(), previousConfigGeneration)); } log.log(FINE, () -> "Got new bootstrap generation\n" + configGenerationsString()); if (graph.generation() == 0) { platformBundles = getConfig(platformBundlesConfigKey, snapshot.configs()).bundlePaths(); osgi.installPlatformBundles(platformBundles); } else { throwIfPlatformBundlesChanged(snapshot); } Collection<Bundle> bundlesToRemove = installApplicationBundles(snapshot.configs()); obsoleteBundles.addAll(bundlesToRemove); graph = createComponentGraph(snapshot.configs(), getBootstrapGeneration(), fallbackInjector); } else if (snapshot instanceof ComponentsConfigs) { break; } } log.log(FINE, () -> "Got components configs,\n" + configGenerationsString()); return createAndConfigureComponentGraph(snapshot.configs(), fallbackInjector); } private long getBootstrapGeneration() { return retriever.getBootstrapGeneration(); } private long getComponentsGeneration() { return retriever.getComponentsGeneration(); } private String configGenerationsString() { return String.format("bootstrap generation = %d\n" + "components generation: %d\n" + "previous generation: %d", getBootstrapGeneration(), getComponentsGeneration(), previousConfigGeneration); } private void throwIfPlatformBundlesChanged(ConfigSnapshot snapshot) { var checkPlatformBundles = getConfig(platformBundlesConfigKey, snapshot.configs()).bundlePaths(); if (! checkPlatformBundles.equals(platformBundles)) throw new RuntimeException("Platform bundles are not allowed to change!\nOld: " + platformBundles + "\nNew: " + checkPlatformBundles); } private ComponentGraph createAndConfigureComponentGraph(Map<ConfigKey<? extends ConfigInstance>, ConfigInstance> componentsConfigs, Injector fallbackInjector) { ComponentGraph componentGraph = createComponentGraph(componentsConfigs, getComponentsGeneration(), fallbackInjector); componentGraph.setAvailableConfigs(componentsConfigs); return componentGraph; } private void deconstructFailedGraph(ComponentGraph currentGraph, ComponentGraph failedGraph, Collection<Bundle> bundlesFromFailedGraph) { Set<Object> currentComponents = Collections.newSetFromMap(new IdentityHashMap<>(currentGraph.size())); currentComponents.addAll(currentGraph.allConstructedComponentsAndProviders()); List<Object> unusedComponents = new ArrayList<>(); for (Object component : failedGraph.allConstructedComponentsAndProviders()) { if (!currentComponents.contains(component)) unusedComponents.add(component); } deconstructComponentsAndBundles(failedGraph.generation(), bundlesFromFailedGraph, unusedComponents); } private void deconstructComponentsAndBundles(long generation, Collection<Bundle> bundlesFromFailedGraph, List<Object> unusedComponents) { destructor.deconstruct(generation, unusedComponents, bundlesFromFailedGraph); } private Runnable createPreviousGraphDeconstructionTask(ComponentGraph oldGraph, ComponentGraph newGraph, Collection<Bundle> obsoleteBundles) { Map<Object, ?> newComponents = new IdentityHashMap<>(newGraph.size()); for (Object component : newGraph.allConstructedComponentsAndProviders()) newComponents.put(component, null); List<Object> obsoleteComponents = new ArrayList<>(); for (Object component : oldGraph.allConstructedComponentsAndProviders()) if ( ! newComponents.containsKey(component)) obsoleteComponents.add(component); return () -> destructor.deconstruct(oldGraph.generation(), obsoleteComponents, obsoleteBundles); } private Set<Bundle> installApplicationBundles(Map<ConfigKey<? extends ConfigInstance>, ConfigInstance> configsIncludingBootstrapConfigs) { ApplicationBundlesConfig applicationBundlesConfig = getConfig(applicationBundlesConfigKey, configsIncludingBootstrapConfigs); return osgi.useApplicationBundles(applicationBundlesConfig.bundles(), getBootstrapGeneration()); } private ComponentGraph createComponentGraph(Map<ConfigKey<? extends ConfigInstance>, ConfigInstance> configsIncludingBootstrapConfigs, long generation, Injector fallbackInjector) { previousConfigGeneration = generation; ComponentGraph graph = new ComponentGraph(generation); ComponentsConfig componentsConfig = getConfig(componentsConfigKey, configsIncludingBootstrapConfigs); if (componentsConfig == null) { throw new ConfigurationRuntimeException("The set of all configs does not include a valid 'components' config. Config set: " + configsIncludingBootstrapConfigs.keySet()); } addNodes(componentsConfig, graph); injectNodes(componentsConfig, graph); graph.complete(fallbackInjector); return graph; } private void addNodes(ComponentsConfig componentsConfig, ComponentGraph graph) { for (ComponentsConfig.Components config : componentsConfig.components()) { BundleInstantiationSpecification specification = bundleInstantiationSpecification(config); Class<?> componentClass = osgi.resolveClass(specification); Node componentNode = new ComponentNode(specification.id, config.configId(), componentClass, null); graph.add(componentNode); } } private void injectNodes(ComponentsConfig config, ComponentGraph graph) { for (ComponentsConfig.Components component : config.components()) { Node componentNode = ComponentGraph.getNode(graph, component.id()); for (ComponentsConfig.Components.Inject inject : component.inject()) { componentNode.inject(ComponentGraph.getNode(graph, inject.id())); } } } private void invalidateGeneration(long generation, Throwable cause) { leastGeneration = Math.max(retriever.getComponentsGeneration(), retriever.getBootstrapGeneration()) + 1; if (!(cause instanceof InterruptedException) && !(cause instanceof ConfigInterruptedException) && !(cause instanceof SubscriberClosedException)) { log.log(Level.WARNING, newGraphErrorMessage(generation, cause), cause); } } private static String newGraphErrorMessage(long generation, Throwable cause) { String failedFirstMessage = "Failed to set up first component graph"; String failedNewMessage = "Failed to set up new component graph"; String constructMessage = " due to error when constructing one of the components"; String retainMessage = ". Retaining previous component generation."; if (generation == 0) { if (cause instanceof ComponentNode.ComponentConstructorException) { return failedFirstMessage + constructMessage; } else { return failedFirstMessage; } } else { if (cause instanceof ComponentNode.ComponentConstructorException) { return failedNewMessage + constructMessage + retainMessage; } else { return failedNewMessage + retainMessage; } } } public void shutdown(ComponentGraph graph) { shutdownConfigRetriever(); if (graph != null) { deconstructComponentsAndBundles(graph.generation(), List.of(), graph.allConstructedComponentsAndProviders()); destructor.shutdown(); } } public void shutdownConfigRetriever() { retriever.shutdown(); } public void reloadConfig(long generation) { subscriberFactory.reloadActiveSubscribers(generation); } public static <T extends ConfigInstance> T getConfig(ConfigKey<T> key, Map<ConfigKey<? extends ConfigInstance>, ConfigInstance> configs) { ConfigInstance inst = configs.get(key); if (inst == null || key.getConfigClass() == null) { throw new RuntimeException("Missing config " + key); } return key.getConfigClass().cast(inst); } private static BundleInstantiationSpecification bundleInstantiationSpecification(ComponentsConfig.Components config) { return BundleInstantiationSpecification.fromStrings(config.id(), config.classId(), config.bundle()); } public static class ComponentGraphResult { private final ComponentGraph newGraph; private final Runnable oldComponentsCleanupTask; public ComponentGraphResult(ComponentGraph newGraph, Runnable oldComponentsCleanupTask) { this.newGraph = newGraph; this.oldComponentsCleanupTask = oldComponentsCleanupTask; } public ComponentGraph newGraph() { return newGraph; } public Runnable oldComponentsCleanupTask() { return oldComponentsCleanupTask; } } }