comment
stringlengths 16
8.84k
| method_body
stringlengths 37
239k
| target_code
stringlengths 0
242
| method_body_after
stringlengths 29
239k
| context_before
stringlengths 14
424k
| context_after
stringlengths 14
284k
|
---|---|---|---|---|---|
We should be break if we met a floating point type column. The following case will get wrong result in your logic: `k1 int, k2 float, k3 int` | private void analyzeOrderByClause() throws AnalysisException {
if (selectStmt.getOrderByElements() == null) {
/**
* The keys type of Materialized view is aggregation.
* All of group by columns are keys of materialized view.
*/
if (mvKeysType == KeysType.AGG_KEYS) {
for (MVColumnItem mvColumnItem : mvColumnItemList) {
if (mvColumnItem.getAggregationType() != null) {
break;
}
if (mvColumnItem.getType().isFloatingPointType()) {
throw new AnalysisException("Float or double can not used as a key, use decimal instead.");
}
mvColumnItem.setIsKey(true);
}
return;
}
/**
* There is no aggregation function in materialized view.
* Supplement key of MV columns
* For example: select k1, k2 ... kn from t1
* The default key columns are first 36 bytes of the columns in define order.
* If the number of columns in the first 36 is less than 3, the first 3 columns will be used.
* column: k1, k2, k3... km. The key is true.
* Supplement non-key of MV columns
* column: km... kn. The key is false, aggregation type is none, isAggregationTypeImplicit is true.
*/
int keyStorageLayoutBytes = 0;
for (int i = 0; i < selectStmt.getResultExprs().size(); i++) {
MVColumnItem mvColumnItem = mvColumnItemList.get(i);
Expr resultColumn = selectStmt.getResultExprs().get(i);
keyStorageLayoutBytes += resultColumn.getType().getStorageLayoutBytes();
if ((!mvColumnItem.getType().isFloatingPointType())
&& ((i + 1) <= FeConstants.shortkey_max_column_count
|| keyStorageLayoutBytes < FeConstants.shortkey_maxsize_bytes)) {
mvColumnItem.setIsKey(true);
} else {
if (i == 0) {
throw new AnalysisException("The first column could not be float or double, "
+ "use decimal instead.");
}
mvColumnItem.setAggregationType(AggregateType.NONE, true);
}
}
return;
}
List<OrderByElement> orderByElements = selectStmt.getOrderByElements();
if (orderByElements.size() > mvColumnItemList.size()) {
throw new AnalysisException("The number of columns in order clause must be less then "
+ "the number of columns in select clause");
}
if (beginIndexOfAggregation != -1 && (orderByElements.size() != (beginIndexOfAggregation))) {
throw new AnalysisException("The key of columns in mv must be all of group by columns");
}
for (int i = 0; i < orderByElements.size(); i++) {
Expr orderByElement = orderByElements.get(i).getExpr();
if (!(orderByElement instanceof SlotRef)) {
throw new AnalysisException("The column in order clause must be original column without calculation. "
+ "Error column: " + orderByElement.toSql());
}
MVColumnItem mvColumnItem = mvColumnItemList.get(i);
SlotRef slotRef = (SlotRef) orderByElement;
if (!mvColumnItem.getName().equalsIgnoreCase(slotRef.getColumnName())) {
throw new AnalysisException("The order of columns in order by clause must be same as "
+ "the order of columns in select list");
}
Preconditions.checkState(mvColumnItem.getAggregationType() == null);
mvColumnItem.setIsKey(true);
}
for (MVColumnItem mvColumnItem : mvColumnItemList) {
if (mvColumnItem.isKey()) {
continue;
}
if (mvColumnItem.getAggregationType() != null) {
break;
}
mvColumnItem.setAggregationType(AggregateType.NONE, true);
}
} | if ((!mvColumnItem.getType().isFloatingPointType()) | private void analyzeOrderByClause() throws AnalysisException {
if (selectStmt.getOrderByElements() == null) {
supplyOrderColumn();
return;
}
List<OrderByElement> orderByElements = selectStmt.getOrderByElements();
if (orderByElements.size() > mvColumnItemList.size()) {
throw new AnalysisException("The number of columns in order clause must be less then " + "the number of "
+ "columns in select clause");
}
if (beginIndexOfAggregation != -1 && (orderByElements.size() != (beginIndexOfAggregation))) {
throw new AnalysisException("The key of columns in mv must be all of group by columns");
}
for (int i = 0; i < orderByElements.size(); i++) {
Expr orderByElement = orderByElements.get(i).getExpr();
if (!(orderByElement instanceof SlotRef)) {
throw new AnalysisException("The column in order clause must be original column without calculation. "
+ "Error column: " + orderByElement.toSql());
}
MVColumnItem mvColumnItem = mvColumnItemList.get(i);
SlotRef slotRef = (SlotRef) orderByElement;
if (!mvColumnItem.getName().equalsIgnoreCase(slotRef.getColumnName())) {
throw new AnalysisException("The order of columns in order by clause must be same as "
+ "the order of columns in select list");
}
Preconditions.checkState(mvColumnItem.getAggregationType() == null);
mvColumnItem.setIsKey(true);
}
for (MVColumnItem mvColumnItem : mvColumnItemList) {
if (mvColumnItem.isKey()) {
continue;
}
if (mvColumnItem.getAggregationType() != null) {
break;
}
mvColumnItem.setAggregationType(AggregateType.NONE, true);
}
} | class CreateMaterializedViewStmt extends DdlStmt {
public static final String MATERIALIZED_VIEW_NAME_PRFIX = "__doris_materialized_view_";
private String mvName;
private SelectStmt selectStmt;
private Map<String, String> properties;
private int beginIndexOfAggregation = -1;
/**
* origin stmt: select k1, k2, v1, sum(v2) from base_table group by k1, k2, v1
* mvColumnItemList: [k1: {name: k1, isKey: true, aggType: null, isAggregationTypeImplicit: false},
* k2: {name: k2, isKey: true, aggType: null, isAggregationTypeImplicit: false},
* v1: {name: v1, isKey: true, aggType: null, isAggregationTypeImplicit: false},
* v2: {name: v2, isKey: false, aggType: sum, isAggregationTypeImplicit: false}]
* This order of mvColumnItemList is meaningful.
*/
private List<MVColumnItem> mvColumnItemList = Lists.newArrayList();
private String baseIndexName;
private String dbName;
private KeysType mvKeysType = KeysType.DUP_KEYS;
public CreateMaterializedViewStmt(String mvName, SelectStmt selectStmt,
Map<String, String> properties) {
this.mvName = mvName;
this.selectStmt = selectStmt;
this.properties = properties;
}
public String getMVName() {
return mvName;
}
public List<MVColumnItem> getMVColumnItemList() {
return mvColumnItemList;
}
public String getBaseIndexName() {
return baseIndexName;
}
public Map<String, String> getProperties() {
return properties;
}
public String getDBName() {
return dbName;
}
public KeysType getMVKeysType() {
return mvKeysType;
}
@Override
public void analyze(Analyzer analyzer) throws UserException {
if (!Config.enable_materialized_view) {
throw new AnalysisException("The materialized view is disabled");
}
super.analyze(analyzer);
FeNameFormat.checkTableName(mvName);
selectStmt.analyze(analyzer);
if (selectStmt.getAggInfo() != null) {
mvKeysType = KeysType.AGG_KEYS;
}
analyzeSelectClause();
analyzeFromClause();
if (selectStmt.getWhereClause() != null) {
throw new AnalysisException("The where clause is not supported in add materialized view clause, expr:"
+ selectStmt.getWhereClause().toSql());
}
if (selectStmt.getHavingPred() != null) {
throw new AnalysisException("The having clause is not supported in add materialized view clause, expr:"
+ selectStmt.getHavingPred().toSql());
}
analyzeOrderByClause();
if (selectStmt.getLimit() != -1) {
throw new AnalysisException("The limit clause is not supported in add materialized view clause, expr:"
+ " limit " + selectStmt.getLimit());
}
}
public void analyzeSelectClause() throws AnalysisException {
SelectList selectList = selectStmt.getSelectList();
if (selectList.getItems().isEmpty()) {
throw new AnalysisException("The materialized view must contain at least one column");
}
boolean meetAggregate = false;
Set<String> mvColumnNameSet = Sets.newHashSet();
/**
* 1. The columns of mv must be a single column or a aggregate column without any calculate.
* Also the children of aggregate column must be a single column without any calculate.
* For example:
* a, sum(b) is legal.
* a+b, sum(a+b) is illegal.
* 2. The SUM, MIN, MAX function is supported. The other function will be supported in the future.
* 3. The aggregate column must be declared after the single column.
*/
for (int i = 0; i < selectList.getItems().size(); i++) {
SelectListItem selectListItem = selectList.getItems().get(i);
Expr selectListItemExpr = selectListItem.getExpr();
if (!(selectListItemExpr instanceof SlotRef) && !(selectListItemExpr instanceof FunctionCallExpr)) {
throw new AnalysisException("The materialized view only support the single column or function expr. "
+ "Error column: " + selectListItemExpr.toSql());
}
if (selectListItem.getExpr() instanceof SlotRef) {
if (meetAggregate) {
throw new AnalysisException("The aggregate column should be after the single column");
}
SlotRef slotRef = (SlotRef) selectListItem.getExpr();
String columnName = slotRef.getColumnName().toLowerCase();
if (!mvColumnNameSet.add(columnName)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, columnName);
}
MVColumnItem mvColumnItem = new MVColumnItem(columnName);
mvColumnItem.setType(slotRef.getType().getPrimitiveType());
mvColumnItemList.add(mvColumnItem);
} else if (selectListItem.getExpr() instanceof FunctionCallExpr) {
FunctionCallExpr functionCallExpr = (FunctionCallExpr) selectListItem.getExpr();
String functionName = functionCallExpr.getFnName().getFunction();
Expr defineExpr = null;
if (!functionName.equalsIgnoreCase("sum")
&& !functionName.equalsIgnoreCase("min")
&& !functionName.equalsIgnoreCase("max")) {
throw new AnalysisException("The materialized view only support the sum, min and max aggregate "
+ "function. Error function: " + functionCallExpr.toSqlImpl());
}
Preconditions.checkState(functionCallExpr.getChildren().size() == 1);
Expr functionChild0 = functionCallExpr.getChild(0);
SlotRef slotRef;
if (functionChild0 instanceof SlotRef) {
slotRef = (SlotRef) functionChild0;
}
else if (functionChild0 instanceof CastExpr
&& (functionChild0.getChild(0) instanceof SlotRef)) {
slotRef = (SlotRef) functionChild0.getChild(0);
} else {
throw new AnalysisException("The children of aggregate function only support one original column. "
+ "Error function: " + functionCallExpr.toSqlImpl());
}
meetAggregate = true;
String columnName = slotRef.getColumnName().toLowerCase();
if (!mvColumnNameSet.add(columnName)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, columnName);
}
if (beginIndexOfAggregation == -1) {
beginIndexOfAggregation = i;
}
MVColumnItem mvColumnItem = new MVColumnItem(columnName);
mvColumnItem.setAggregationType(AggregateType.valueOf(functionName.toUpperCase()), false);
mvColumnItem.setDefineExpr(defineExpr);
mvColumnItemList.add(mvColumnItem);
}
}
if (beginIndexOfAggregation == 0) {
throw new AnalysisException("The materialized view must contain at least one key column");
}
}
private void analyzeFromClause() throws AnalysisException {
List<TableRef> tableRefList = selectStmt.getTableRefs();
if (tableRefList.size() != 1) {
throw new AnalysisException("The materialized view only support one table in from clause.");
}
TableName tableName = tableRefList.get(0).getName();
baseIndexName = tableName.getTbl();
dbName = tableName.getDb();
}
@Override
public String toSql() {
return null;
}
} | class CreateMaterializedViewStmt extends DdlStmt {
public static final String MATERIALIZED_VIEW_NAME_PRFIX = "__doris_materialized_view_";
private String mvName;
private SelectStmt selectStmt;
private Map<String, String> properties;
private int beginIndexOfAggregation = -1;
/**
* origin stmt: select k1, k2, v1, sum(v2) from base_table group by k1, k2, v1
* mvColumnItemList: [k1: {name: k1, isKey: true, aggType: null, isAggregationTypeImplicit: false},
* k2: {name: k2, isKey: true, aggType: null, isAggregationTypeImplicit: false},
* v1: {name: v1, isKey: true, aggType: null, isAggregationTypeImplicit: false},
* v2: {name: v2, isKey: false, aggType: sum, isAggregationTypeImplicit: false}]
* This order of mvColumnItemList is meaningful.
*/
private List<MVColumnItem> mvColumnItemList = Lists.newArrayList();
private String baseIndexName;
private String dbName;
private KeysType mvKeysType = KeysType.DUP_KEYS;
public CreateMaterializedViewStmt(String mvName, SelectStmt selectStmt, Map<String, String> properties) {
this.mvName = mvName;
this.selectStmt = selectStmt;
this.properties = properties;
}
public String getMVName() {
return mvName;
}
public List<MVColumnItem> getMVColumnItemList() {
return mvColumnItemList;
}
public String getBaseIndexName() {
return baseIndexName;
}
public Map<String, String> getProperties() {
return properties;
}
public String getDBName() {
return dbName;
}
public KeysType getMVKeysType() {
return mvKeysType;
}
@Override
public void analyze(Analyzer analyzer) throws UserException {
if (!Config.enable_materialized_view) {
throw new AnalysisException("The materialized view is disabled");
}
super.analyze(analyzer);
FeNameFormat.checkTableName(mvName);
selectStmt.analyze(analyzer);
if (selectStmt.getAggInfo() != null) {
mvKeysType = KeysType.AGG_KEYS;
}
analyzeSelectClause();
analyzeFromClause();
if (selectStmt.getWhereClause() != null) {
throw new AnalysisException("The where clause is not supported in add materialized view clause, expr:"
+ selectStmt.getWhereClause().toSql());
}
if (selectStmt.getHavingPred() != null) {
throw new AnalysisException("The having clause is not supported in add materialized view clause, expr:"
+ selectStmt.getHavingPred().toSql());
}
analyzeOrderByClause();
if (selectStmt.getLimit() != -1) {
throw new AnalysisException("The limit clause is not supported in add materialized view clause, expr:"
+ " limit " + selectStmt.getLimit());
}
}
public void analyzeSelectClause() throws AnalysisException {
SelectList selectList = selectStmt.getSelectList();
if (selectList.getItems().isEmpty()) {
throw new AnalysisException("The materialized view must contain at least one column");
}
boolean meetAggregate = false;
Set<String> mvColumnNameSet = Sets.newHashSet();
/**
* 1. The columns of mv must be a single column or a aggregate column without any calculate.
* Also the children of aggregate column must be a single column without any calculate.
* For example:
* a, sum(b) is legal.
* a+b, sum(a+b) is illegal.
* 2. The SUM, MIN, MAX function is supported. The other function will be supported in the future.
* 3. The aggregate column must be declared after the single column.
*/
for (int i = 0; i < selectList.getItems().size(); i++) {
SelectListItem selectListItem = selectList.getItems().get(i);
Expr selectListItemExpr = selectListItem.getExpr();
if (!(selectListItemExpr instanceof SlotRef) && !(selectListItemExpr instanceof FunctionCallExpr)) {
throw new AnalysisException("The materialized view only support the single column or function expr. "
+ "Error column: " + selectListItemExpr.toSql());
}
if (selectListItem.getExpr() instanceof SlotRef) {
if (meetAggregate) {
throw new AnalysisException("The aggregate column should be after the single column");
}
SlotRef slotRef = (SlotRef) selectListItem.getExpr();
String columnName = slotRef.getColumnName().toLowerCase();
if (!mvColumnNameSet.add(columnName)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, columnName);
}
MVColumnItem mvColumnItem = new MVColumnItem(columnName);
mvColumnItem.setType(slotRef.getType());
mvColumnItemList.add(mvColumnItem);
} else if (selectListItem.getExpr() instanceof FunctionCallExpr) {
FunctionCallExpr functionCallExpr = (FunctionCallExpr) selectListItem.getExpr();
String functionName = functionCallExpr.getFnName().getFunction();
Expr defineExpr = null;
if (!functionName.equalsIgnoreCase("sum")
&& !functionName.equalsIgnoreCase("min")
&& !functionName.equalsIgnoreCase("max")) {
throw new AnalysisException("The materialized view only support the sum, min and max aggregate "
+ "function. Error function: " + functionCallExpr.toSqlImpl());
}
Preconditions.checkState(functionCallExpr.getChildren().size() == 1);
Expr functionChild0 = functionCallExpr.getChild(0);
SlotRef slotRef;
if (functionChild0 instanceof SlotRef) {
slotRef = (SlotRef) functionChild0;
} else if (functionChild0 instanceof CastExpr && (functionChild0.getChild(0) instanceof SlotRef)) {
slotRef = (SlotRef) functionChild0.getChild(0);
} else {
throw new AnalysisException("The children of aggregate function only support one original column. "
+ "Error function: " + functionCallExpr.toSqlImpl());
}
meetAggregate = true;
String columnName = slotRef.getColumnName().toLowerCase();
if (!mvColumnNameSet.add(columnName)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, columnName);
}
if (beginIndexOfAggregation == -1) {
beginIndexOfAggregation = i;
}
MVColumnItem mvColumnItem = new MVColumnItem(columnName);
mvColumnItem.setAggregationType(AggregateType.valueOf(functionName.toUpperCase()), false);
mvColumnItem.setDefineExpr(defineExpr);
mvColumnItemList.add(mvColumnItem);
}
}
if (beginIndexOfAggregation == 0) {
throw new AnalysisException("The materialized view must contain at least one key column");
}
}
private void analyzeFromClause() throws AnalysisException {
List<TableRef> tableRefList = selectStmt.getTableRefs();
if (tableRefList.size() != 1) {
throw new AnalysisException("The materialized view only support one table in from clause.");
}
TableName tableName = tableRefList.get(0).getName();
baseIndexName = tableName.getTbl();
dbName = tableName.getDb();
}
/*
This function is used to supply order by columns and calculate short key count
*/
private void supplyOrderColumn() throws AnalysisException {
/**
* The keys type of Materialized view is aggregation.
* All of group by columns are keys of materialized view.
*/
if (mvKeysType == KeysType.AGG_KEYS) {
for (MVColumnItem mvColumnItem : mvColumnItemList) {
if (mvColumnItem.getAggregationType() != null) {
break;
}
mvColumnItem.setIsKey(true);
}
} else if (mvKeysType == KeysType.DUP_KEYS) {
/**
* There is no aggregation function in materialized view.
* Supplement key of MV columns
* The key is same as the short key in duplicate table
* For example: select k1, k2 ... kn from t1
* The default key columns are first 36 bytes of the columns in define order.
* If the number of columns in the first 36 is more than 3, the first 3 columns will be used.
* column: k1, k2, k3. The key is true.
* Supplement non-key of MV columns
* column: k4... kn. The key is false, aggregation type is none, isAggregationTypeImplicit is true.
*/
int theBeginIndexOfValue = 0;
int keySizeByte = 0;
for (; theBeginIndexOfValue < mvColumnItemList.size(); theBeginIndexOfValue++) {
MVColumnItem column = mvColumnItemList.get(theBeginIndexOfValue);
keySizeByte += column.getType().getIndexSize();
if (theBeginIndexOfValue + 1 > FeConstants.shortkey_max_column_count
|| keySizeByte > FeConstants.shortkey_maxsize_bytes) {
if (theBeginIndexOfValue == 0 && column.getType().getPrimitiveType().isCharFamily()) {
column.setIsKey(true);
theBeginIndexOfValue++;
}
break;
}
if (column.getType().isFloatingPointType()) {
break;
}
if (column.getType().getPrimitiveType() == PrimitiveType.VARCHAR) {
column.setIsKey(true);
theBeginIndexOfValue++;
break;
}
column.setIsKey(true);
}
if (theBeginIndexOfValue == 0) {
throw new AnalysisException("The first column could not be float or double type, use decimal instead");
}
for (; theBeginIndexOfValue < mvColumnItemList.size(); theBeginIndexOfValue++) {
MVColumnItem mvColumnItem = mvColumnItemList.get(theBeginIndexOfValue);
mvColumnItem.setAggregationType(AggregateType.NONE, true);
}
}
}
@Override
public String toSql() {
return null;
}
} |
I had to add these classes in whitelist. Details in [BEAM-8758](https://issues.apache.org/jira/browse/BEAM-8758?focusedCommentId=17029981&page=com.atlassian.jira.plugin.system.issuetabpanels%3Acomment-tabpanel#comment-17029981). I hope they are acceptable. | public void testGcpApiSurface() throws Exception {
final Package thisPackage = this.getClass().getPackage();
final ClassLoader thisClassLoader = getClass().getClassLoader();
final ApiSurface apiSurface =
ApiSurface.ofPackage(thisPackage, thisClassLoader)
.pruningPattern(BigqueryMatcher.class.getName())
.pruningPattern(BigqueryClient.class.getName())
.pruningPattern("org[.]apache[.]beam[.].*Test.*")
.pruningPattern("org[.]apache[.]beam[.].*IT")
.pruningPattern("java[.]lang.*")
.pruningPattern("java[.]util.*");
@SuppressWarnings("unchecked")
final Set<Matcher<Class<?>>> allowedClasses =
ImmutableSet.of(
classesInPackage("com.google.api.core"),
classesInPackage("com.google.api.client.googleapis"),
classesInPackage("com.google.api.client.http"),
classesInPackage("com.google.api.client.json"),
classesInPackage("com.google.api.client.util"),
classesInPackage("com.google.api.services.bigquery.model"),
classesInPackage("com.google.auth"),
classesInPackage("com.google.bigtable.v2"),
classesInPackage("com.google.cloud.bigquery.storage.v1beta1"),
classesInPackage("com.google.cloud.bigtable.config"),
classesInPackage("com.google.spanner.v1"),
classesInPackage("com.google.pubsub.v1"),
Matchers.equalTo(com.google.api.gax.rpc.ApiException.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.longrunning.OperationFuture.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.longrunning.OperationSnapshot.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.paging.Page.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.retrying.RetryingFuture.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.retrying.RetrySettings.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.retrying.RetrySettings.Builder.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.retrying.TimedAttemptSettings.class),
Matchers.<Class<?>>equalTo(
com.google.api.gax.retrying.TimedAttemptSettings.Builder.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.rpc.StatusCode.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.rpc.StatusCode.Code.class),
Matchers.<Class<?>>equalTo(com.google.cloud.bigtable.grpc.BigtableClusterName.class),
Matchers.<Class<?>>equalTo(com.google.cloud.bigtable.grpc.BigtableInstanceName.class),
Matchers.<Class<?>>equalTo(com.google.cloud.bigtable.grpc.BigtableTableName.class),
Matchers.<Class<?>>equalTo(com.google.cloud.BaseServiceException.class),
Matchers.<Class<?>>equalTo(com.google.cloud.BaseServiceException.Error.class),
Matchers.<Class<?>>equalTo(com.google.cloud.BaseServiceException.ExceptionData.class),
Matchers.<Class<?>>equalTo(
com.google.cloud.BaseServiceException.ExceptionData.Builder.class),
Matchers.<Class<?>>equalTo(com.google.cloud.RetryHelper.RetryHelperException.class),
Matchers.<Class<?>>equalTo(com.google.cloud.grpc.BaseGrpcServiceException.class),
Matchers.<Class<?>>equalTo(com.google.cloud.ByteArray.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Date.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Timestamp.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Identity.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Identity.Type.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Policy.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Policy.Builder.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Policy.Marshaller.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Policy.DefaultMarshaller.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Role.class),
Matchers.<Class<?>>equalTo(com.google.cloud.StringEnumValue.class),
Matchers.<Class<?>>equalTo(com.google.iam.v1.Binding.class),
Matchers.<Class<?>>equalTo(com.google.iam.v1.Binding.Builder.class),
Matchers.<Class<?>>equalTo(com.google.iam.v1.Policy.class),
Matchers.<Class<?>>equalTo(com.google.iam.v1.Policy.Builder.class),
Matchers.<Class<?>>equalTo(com.google.iam.v1.BindingOrBuilder.class),
Matchers.<Class<?>>equalTo(com.google.iam.v1.PolicyOrBuilder.class),
classesInPackage("com.google.cloud.spanner"),
classesInPackage("com.google.spanner.admin.database.v1"),
classesInPackage("com.google.datastore.v1"),
classesInPackage("com.google.protobuf"),
classesInPackage("com.google.type"),
classesInPackage("com.fasterxml.jackson.annotation"),
classesInPackage("com.fasterxml.jackson.core"),
classesInPackage("com.fasterxml.jackson.databind"),
classesInPackage("io.grpc"),
classesInPackage("java"),
classesInPackage("javax"),
classesInPackage("org.apache.avro"),
classesInPackage("org.apache.beam"),
classesInPackage("org.apache.commons.logging"),
classesInPackage("org.codehaus.jackson"),
classesInPackage("org.joda.time"),
classesInPackage("org.threeten.bp"));
assertThat(apiSurface, containsOnlyClassesMatching(allowedClasses));
} | Matchers.<Class<?>>equalTo(com.google.iam.v1.PolicyOrBuilder.class), | public void testGcpApiSurface() throws Exception {
final Package thisPackage = this.getClass().getPackage();
final ClassLoader thisClassLoader = getClass().getClassLoader();
final ApiSurface apiSurface =
ApiSurface.ofPackage(thisPackage, thisClassLoader)
.pruningPattern(BigqueryMatcher.class.getName())
.pruningPattern(BigqueryClient.class.getName())
.pruningPattern("org[.]apache[.]beam[.].*Test.*")
.pruningPattern("org[.]apache[.]beam[.].*IT")
.pruningPattern("java[.]lang.*")
.pruningPattern("java[.]util.*");
@SuppressWarnings("unchecked")
final Set<Matcher<Class<?>>> allowedClasses =
ImmutableSet.of(
classesInPackage("com.google.api.core"),
classesInPackage("com.google.api.client.googleapis"),
classesInPackage("com.google.api.client.http"),
classesInPackage("com.google.api.client.json"),
classesInPackage("com.google.api.client.util"),
classesInPackage("com.google.api.services.bigquery.model"),
classesInPackage("com.google.auth"),
classesInPackage("com.google.bigtable.v2"),
classesInPackage("com.google.cloud.bigquery.storage.v1beta1"),
classesInPackage("com.google.cloud.bigtable.config"),
classesInPackage("com.google.spanner.v1"),
classesInPackage("com.google.pubsub.v1"),
Matchers.equalTo(com.google.api.gax.rpc.ApiException.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.rpc.StatusCode.class),
Matchers.<Class<?>>equalTo(com.google.api.gax.rpc.StatusCode.Code.class),
Matchers.<Class<?>>equalTo(com.google.cloud.bigtable.grpc.BigtableClusterName.class),
Matchers.<Class<?>>equalTo(com.google.cloud.bigtable.grpc.BigtableInstanceName.class),
Matchers.<Class<?>>equalTo(com.google.cloud.bigtable.grpc.BigtableTableName.class),
Matchers.<Class<?>>equalTo(com.google.cloud.BaseServiceException.class),
Matchers.<Class<?>>equalTo(com.google.cloud.BaseServiceException.Error.class),
Matchers.<Class<?>>equalTo(com.google.cloud.BaseServiceException.ExceptionData.class),
Matchers.<Class<?>>equalTo(
com.google.cloud.BaseServiceException.ExceptionData.Builder.class),
Matchers.<Class<?>>equalTo(com.google.cloud.RetryHelper.RetryHelperException.class),
Matchers.<Class<?>>equalTo(com.google.cloud.grpc.BaseGrpcServiceException.class),
Matchers.<Class<?>>equalTo(com.google.cloud.ByteArray.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Date.class),
Matchers.<Class<?>>equalTo(com.google.cloud.Timestamp.class),
classesInPackage("com.google.cloud.spanner"),
classesInPackage("com.google.datastore.v1"),
classesInPackage("com.google.protobuf"),
classesInPackage("com.google.type"),
classesInPackage("com.fasterxml.jackson.annotation"),
classesInPackage("com.fasterxml.jackson.core"),
classesInPackage("com.fasterxml.jackson.databind"),
classesInPackage("io.grpc"),
classesInPackage("java"),
classesInPackage("javax"),
classesInPackage("org.apache.avro"),
classesInPackage("org.apache.beam"),
classesInPackage("org.apache.commons.logging"),
classesInPackage("org.codehaus.jackson"),
classesInPackage("org.joda.time"));
assertThat(apiSurface, containsOnlyClassesMatching(allowedClasses));
} | class GcpApiSurfaceTest {
@Test
} | class GcpApiSurfaceTest {
@Test
} |
Unless `entry.getValue()` is pre-sanitized we might want to add some escaping here to ensure our label values are always edible: > `label_value` can be any sequence of UTF-8 characters, but the backslash (`\`), double-quote (`"`), and line feed (`\n`) characters have to be escaped as `\\`, `\"`, and `\n`, respectively | private String toPrometheusDimensions(MetricDimensions dimensions) {
if (dimensions == null) return "";
StringBuilder builder = new StringBuilder();
dimensions.forEach(entry -> {
var sanitized = prometheusSanitizedName(entry.getKey()) + "=\"" + entry.getValue() + "\",";
builder.append(sanitized);
});
return builder.toString();
} | var sanitized = prometheusSanitizedName(entry.getKey()) + "=\"" + entry.getValue() + "\","; | private String toPrometheusDimensions(MetricDimensions dimensions) {
if (dimensions == null) return "";
StringBuilder builder = new StringBuilder();
dimensions.forEach(entry -> {
var sanitized = prometheusSanitizedName(entry.getKey()) + "=\"" + entry.getValue() + "\",";
builder.append(sanitized);
});
return builder.toString();
} | class MyContentChannel implements ContentChannel {
private final List<ByteBuffer> buffers;
private final Runnable trigger;
@Override
public void write(ByteBuffer buf, CompletionHandler handler) {
buffers.add(buf);
if (handler != null) handler.completed();
}
@Override
public void close(CompletionHandler handler) {
trigger.run();
if (handler != null) handler.completed();
}
MyContentChannel(List<ByteBuffer> buffers, Runnable trigger) {
this.buffers = buffers;
this.trigger = trigger;
}
} | class MyContentChannel implements ContentChannel {
private final List<ByteBuffer> buffers;
private final Runnable trigger;
@Override
public void write(ByteBuffer buf, CompletionHandler handler) {
buffers.add(buf);
if (handler != null) handler.completed();
}
@Override
public void close(CompletionHandler handler) {
trigger.run();
if (handler != null) handler.completed();
}
MyContentChannel(List<ByteBuffer> buffers, Runnable trigger) {
this.buffers = buffers;
this.trigger = trigger;
}
} |
nit: add comments just like getProperties. ditto: isStatisticsEnabled. | public Collection<TickerType> getMonitorTickerTypes() {
return Collections.unmodifiableCollection(monitorTickerTypes);
} | return Collections.unmodifiableCollection(monitorTickerTypes); | public Collection<TickerType> getMonitorTickerTypes() {
return Collections.unmodifiableCollection(monitorTickerTypes);
} | class RocksDBNativeMetricOptions implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Config key controlling whether the column family name is exposed as a metric variable. */
    public static final String METRICS_COLUMN_FAMILY_AS_VARIABLE_KEY =
            "state.backend.rocksdb.metrics" + ".column-family-as-variable";
    // --- Property-based metrics: each flag below samples one RocksDB property as a gauge. ---
    // All options default to false so that no native metrics are collected unless requested.
    public static final ConfigOption<Boolean> MONITOR_NUM_IMMUTABLE_MEM_TABLES =
            ConfigOptions.key(RocksDBProperty.NumImmutableMemTable.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the number of immutable memtables in RocksDB.");
    public static final ConfigOption<Boolean> MONITOR_MEM_TABLE_FLUSH_PENDING =
            ConfigOptions.key(RocksDBProperty.MemTableFlushPending.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the number of pending memtable flushes in RocksDB.");
    public static final ConfigOption<Boolean> TRACK_COMPACTION_PENDING =
            ConfigOptions.key(RocksDBProperty.CompactionPending.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Track pending compactions in RocksDB. Returns 1 if a compaction is pending, 0 otherwise.");
    public static final ConfigOption<Boolean> MONITOR_BACKGROUND_ERRORS =
            ConfigOptions.key(RocksDBProperty.BackgroundErrors.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the number of background errors in RocksDB.");
    public static final ConfigOption<Boolean> MONITOR_CUR_SIZE_ACTIVE_MEM_TABLE =
            ConfigOptions.key(RocksDBProperty.CurSizeActiveMemTable.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the approximate size of the active memtable in bytes.");
    public static final ConfigOption<Boolean> MONITOR_CUR_SIZE_ALL_MEM_TABLE =
            ConfigOptions.key(RocksDBProperty.CurSizeAllMemTables.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the approximate size of the active and unflushed immutable memtables"
                                    + " in bytes.");
    public static final ConfigOption<Boolean> MONITOR_SIZE_ALL_MEM_TABLES =
            ConfigOptions.key(RocksDBProperty.SizeAllMemTables.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the approximate size of the active, unflushed immutable, "
                                    + "and pinned immutable memtables in bytes.");
    public static final ConfigOption<Boolean> MONITOR_NUM_ENTRIES_ACTIVE_MEM_TABLE =
            ConfigOptions.key(RocksDBProperty.NumEntriesActiveMemTable.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the total number of entries in the active memtable.");
    public static final ConfigOption<Boolean> MONITOR_NUM_ENTRIES_IMM_MEM_TABLES =
            ConfigOptions.key(RocksDBProperty.NumEntriesImmMemTables.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the total number of entries in the unflushed immutable memtables.");
    public static final ConfigOption<Boolean> MONITOR_NUM_DELETES_ACTIVE_MEM_TABLE =
            ConfigOptions.key(RocksDBProperty.NumDeletesActiveMemTable.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the total number of delete entries in the active memtable.");
    public static final ConfigOption<Boolean> MONITOR_NUM_DELETES_IMM_MEM_TABLE =
            ConfigOptions.key(RocksDBProperty.NumDeletesImmMemTables.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the total number of delete entries in the unflushed immutable memtables.");
    public static final ConfigOption<Boolean> ESTIMATE_NUM_KEYS =
            ConfigOptions.key(RocksDBProperty.EstimateNumKeys.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Estimate the number of keys in RocksDB.");
    public static final ConfigOption<Boolean> ESTIMATE_TABLE_READERS_MEM =
            ConfigOptions.key(RocksDBProperty.EstimateTableReadersMem.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Estimate the memory used for reading SST tables, excluding memory"
                                    + " used in block cache (e.g.,filter and index blocks) in bytes.");
    public static final ConfigOption<Boolean> MONITOR_NUM_SNAPSHOTS =
            ConfigOptions.key(RocksDBProperty.NumSnapshots.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the number of unreleased snapshots of the database.");
    public static final ConfigOption<Boolean> MONITOR_NUM_LIVE_VERSIONS =
            ConfigOptions.key(RocksDBProperty.NumLiveVersions.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor number of live versions. Version is an internal data structure. "
                                    + "See RocksDB file version_set.h for details. More live versions often mean more SST files are held "
                                    + "from being deleted, by iterators or unfinished compactions.");
    public static final ConfigOption<Boolean> ESTIMATE_LIVE_DATA_SIZE =
            ConfigOptions.key(RocksDBProperty.EstimateLiveDataSize.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Estimate of the amount of live data in bytes (usually smaller than sst files size due to space amplification).");
    public static final ConfigOption<Boolean> MONITOR_TOTAL_SST_FILES_SIZE =
            ConfigOptions.key(RocksDBProperty.TotalSstFilesSize.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the total size (bytes) of all SST files of all versions."
                                    + "WARNING: may slow down online queries if there are too many files.");
    public static final ConfigOption<Boolean> MONITOR_LIVE_SST_FILES_SIZE =
            ConfigOptions.key(RocksDBProperty.LiveSstFilesSize.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the total size (bytes) of all SST files belonging to the latest version."
                                    + "WARNING: may slow down online queries if there are too many files.");
    public static final ConfigOption<Boolean> ESTIMATE_PENDING_COMPACTION_BYTES =
            ConfigOptions.key(RocksDBProperty.EstimatePendingCompactionBytes.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Estimated total number of bytes compaction needs to rewrite to get all levels "
                                    + "down to under target size. Not valid for other compactions than level-based.");
    public static final ConfigOption<Boolean> MONITOR_NUM_RUNNING_COMPACTIONS =
            ConfigOptions.key(RocksDBProperty.NumRunningCompactions.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the number of currently running compactions.");
    public static final ConfigOption<Boolean> MONITOR_NUM_RUNNING_FLUSHES =
            ConfigOptions.key(RocksDBProperty.NumRunningFlushes.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the number of currently running flushes.");
    public static final ConfigOption<Boolean> MONITOR_ACTUAL_DELAYED_WRITE_RATE =
            ConfigOptions.key(RocksDBProperty.ActualDelayedWriteRate.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the current actual delayed write rate. 0 means no delay.");
    public static final ConfigOption<Boolean> IS_WRITE_STOPPED =
            ConfigOptions.key(RocksDBProperty.IsWriteStopped.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Track whether write has been stopped in RocksDB. Returns 1 if write has been stopped, 0 otherwise.");
    public static final ConfigOption<Boolean> BLOCK_CACHE_CAPACITY =
            ConfigOptions.key(RocksDBProperty.BlockCacheCapacity.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor block cache capacity.");
    public static final ConfigOption<Boolean> BLOCK_CACHE_USAGE =
            ConfigOptions.key(RocksDBProperty.BlockCacheUsage.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the memory size for the entries residing in block cache.");
    public static final ConfigOption<Boolean> BLOCK_CACHE_PINNED_USAGE =
            ConfigOptions.key(RocksDBProperty.BlockCachePinnedUsage.getConfigKey())
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the memory size for the entries being pinned in block cache.");
    // Not a metric itself: controls how property-based metrics are scoped/reported.
    public static final ConfigOption<Boolean> COLUMN_FAMILY_AS_VARIABLE =
            ConfigOptions.key(METRICS_COLUMN_FAMILY_AS_VARIABLE_KEY)
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Whether to expose the column family as a variable for RocksDB property based metrics.");
    // --- Statistics-based metrics: each flag below monitors one RocksDB statistics ticker
    // type (see tickerTypeMapping for the option -> TickerType association). ---
    public static final ConfigOption<Boolean> MONITOR_BLOCK_CACHE_HIT =
            ConfigOptions.key("state.backend.rocksdb.metrics.block-cache-hit")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the total count of block cache hit in RocksDB (BLOCK_CACHE_HIT == BLOCK_CACHE_INDEX_HIT + BLOCK_CACHE_FILTER_HIT + BLOCK_CACHE_DATA_HIT).");
    public static final ConfigOption<Boolean> MONITOR_BLOCK_CACHE_MISS =
            ConfigOptions.key("state.backend.rocksdb.metrics.block-cache-miss")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the total count of block cache misses in RocksDB (BLOCK_CACHE_MISS == BLOCK_CACHE_INDEX_MISS + BLOCK_CACHE_FILTER_MISS + BLOCK_CACHE_DATA_MISS).");
    public static final ConfigOption<Boolean> MONITOR_BYTES_READ =
            ConfigOptions.key("state.backend.rocksdb.metrics.bytes-read")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the number of uncompressed bytes read (from memtables/cache/sst) from Get() operation in RocksDB.");
    public static final ConfigOption<Boolean> MONITOR_ITER_BYTES_READ =
            ConfigOptions.key("state.backend.rocksdb.metrics.iter-bytes-read")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the number of uncompressed bytes read (from memtables/cache/sst) from an iterator operation in RocksDB.");
    public static final ConfigOption<Boolean> MONITOR_BYTES_WRITTEN =
            ConfigOptions.key("state.backend.rocksdb.metrics.bytes-written")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the number of uncompressed bytes written by DB::{Put(), Delete(), Merge(), Write()} operations, which does not include the compaction written bytes, in RocksDB.");
    public static final ConfigOption<Boolean> MONITOR_COMPACTION_READ_BYTES =
            ConfigOptions.key("state.backend.rocksdb.metrics.compaction-read-bytes")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the bytes read during compaction in RocksDB.");
    public static final ConfigOption<Boolean> MONITOR_COMPACTION_WRITE_BYTES =
            ConfigOptions.key("state.backend.rocksdb.metrics.compaction-write-bytes")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Monitor the bytes written during compaction in RocksDB.");
    public static final ConfigOption<Boolean> MONITOR_STALL_MICROS =
            ConfigOptions.key("state.backend.rocksdb.metrics.stall-micros")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Monitor the duration of writer requiring to wait for compaction or flush to finish in RocksDB.");
/** Creates a {@link RocksDBNativeMetricOptions} based on an external configuration. */
public static RocksDBNativeMetricOptions fromConfig(ReadableConfig config) {
RocksDBNativeMetricOptions options = new RocksDBNativeMetricOptions();
configurePropertyMetrics(options, config);
configureStatisticsMetrics(options, config);
return options;
}
    /**
     * Enables every property-based metric whose corresponding config option evaluates to true,
     * and forwards the column-family-as-variable reporting flag. Mechanical 1:1 mapping between
     * MONITOR_*/ESTIMATE_*/TRACK_* options and their enableXxx() counterparts.
     */
    private static void configurePropertyMetrics(
            RocksDBNativeMetricOptions options, ReadableConfig config) {
        if (config.get(MONITOR_NUM_IMMUTABLE_MEM_TABLES)) {
            options.enableNumImmutableMemTable();
        }
        if (config.get(MONITOR_MEM_TABLE_FLUSH_PENDING)) {
            options.enableMemTableFlushPending();
        }
        if (config.get(TRACK_COMPACTION_PENDING)) {
            options.enableCompactionPending();
        }
        if (config.get(MONITOR_BACKGROUND_ERRORS)) {
            options.enableBackgroundErrors();
        }
        if (config.get(MONITOR_CUR_SIZE_ACTIVE_MEM_TABLE)) {
            options.enableCurSizeActiveMemTable();
        }
        if (config.get(MONITOR_CUR_SIZE_ALL_MEM_TABLE)) {
            options.enableCurSizeAllMemTables();
        }
        if (config.get(MONITOR_SIZE_ALL_MEM_TABLES)) {
            options.enableSizeAllMemTables();
        }
        if (config.get(MONITOR_NUM_ENTRIES_ACTIVE_MEM_TABLE)) {
            options.enableNumEntriesActiveMemTable();
        }
        if (config.get(MONITOR_NUM_ENTRIES_IMM_MEM_TABLES)) {
            options.enableNumEntriesImmMemTables();
        }
        if (config.get(MONITOR_NUM_DELETES_ACTIVE_MEM_TABLE)) {
            options.enableNumDeletesActiveMemTable();
        }
        if (config.get(MONITOR_NUM_DELETES_IMM_MEM_TABLE)) {
            options.enableNumDeletesImmMemTables();
        }
        if (config.get(ESTIMATE_NUM_KEYS)) {
            options.enableEstimateNumKeys();
        }
        if (config.get(ESTIMATE_TABLE_READERS_MEM)) {
            options.enableEstimateTableReadersMem();
        }
        if (config.get(MONITOR_NUM_SNAPSHOTS)) {
            options.enableNumSnapshots();
        }
        if (config.get(MONITOR_NUM_LIVE_VERSIONS)) {
            options.enableNumLiveVersions();
        }
        if (config.get(ESTIMATE_LIVE_DATA_SIZE)) {
            options.enableEstimateLiveDataSize();
        }
        if (config.get(MONITOR_TOTAL_SST_FILES_SIZE)) {
            options.enableTotalSstFilesSize();
        }
        if (config.get(MONITOR_LIVE_SST_FILES_SIZE)) {
            options.enableLiveSstFilesSize();
        }
        if (config.get(ESTIMATE_PENDING_COMPACTION_BYTES)) {
            options.enableEstimatePendingCompactionBytes();
        }
        if (config.get(MONITOR_NUM_RUNNING_COMPACTIONS)) {
            options.enableNumRunningCompactions();
        }
        if (config.get(MONITOR_NUM_RUNNING_FLUSHES)) {
            options.enableNumRunningFlushes();
        }
        if (config.get(MONITOR_ACTUAL_DELAYED_WRITE_RATE)) {
            options.enableActualDelayedWriteRate();
        }
        if (config.get(IS_WRITE_STOPPED)) {
            options.enableIsWriteStopped();
        }
        if (config.get(BLOCK_CACHE_CAPACITY)) {
            options.enableBlockCacheCapacity();
        }
        if (config.get(BLOCK_CACHE_USAGE)) {
            options.enableBlockCacheUsage();
        }
        if (config.get(BLOCK_CACHE_PINNED_USAGE)) {
            options.enableBlockCachePinnedUsage();
        }
        // Unconditional: a plain boolean flag rather than an on/off metric.
        options.setColumnFamilyAsVariable(config.get(COLUMN_FAMILY_AS_VARIABLE));
    }
private static void configureStatisticsMetrics(
RocksDBNativeMetricOptions options, ReadableConfig config) {
for (Map.Entry<ConfigOption<Boolean>, TickerType> entry : tickerTypeMapping.entrySet()) {
if (config.get(entry.getKey())) {
options.monitorTickerTypes.add(entry.getValue());
}
}
}
private static final Map<ConfigOption<Boolean>, TickerType> tickerTypeMapping =
new HashMap<ConfigOption<Boolean>, TickerType>() {
private static final long serialVersionUID = 1L;
{
put(MONITOR_BLOCK_CACHE_HIT, TickerType.BLOCK_CACHE_HIT);
put(MONITOR_BLOCK_CACHE_MISS, TickerType.BLOCK_CACHE_MISS);
put(MONITOR_BYTES_READ, TickerType.BYTES_READ);
put(MONITOR_ITER_BYTES_READ, TickerType.ITER_BYTES_READ);
put(MONITOR_BYTES_WRITTEN, TickerType.BYTES_WRITTEN);
put(MONITOR_COMPACTION_READ_BYTES, TickerType.COMPACT_READ_BYTES);
put(MONITOR_COMPACTION_WRITE_BYTES, TickerType.COMPACT_WRITE_BYTES);
put(MONITOR_STALL_MICROS, TickerType.STALL_MICROS);
}
};
    /** RocksDB property names ("rocksdb.*") to expose as property-based gauges. */
    private final Set<String> properties;
    /** RocksDB statistics ticker types to expose as statistics-based metrics. */
    private final Set<TickerType> monitorTickerTypes;
    /** Whether the column family name is reported as a metric variable. */
    private boolean columnFamilyAsVariable = COLUMN_FAMILY_AS_VARIABLE.defaultValue();
public RocksDBNativeMetricOptions() {
this.properties = new HashSet<>();
this.monitorTickerTypes = new HashSet<>();
}
@VisibleForTesting
public void enableNativeStatistics(ConfigOption<Boolean> nativeStatisticsOption) {
TickerType tickerType = tickerTypeMapping.get(nativeStatisticsOption);
if (tickerType != null) {
monitorTickerTypes.add(tickerType);
} else {
throw new IllegalArgumentException(
"Unknown configurable native statistics option " + nativeStatisticsOption);
}
}
/** Returns number of immutable memtables that have not yet been flushed. */
public void enableNumImmutableMemTable() {
this.properties.add(RocksDBProperty.NumImmutableMemTable.getRocksDBProperty());
}
/** Returns 1 if a memtable flush is pending; otherwise, returns 0. */
public void enableMemTableFlushPending() {
this.properties.add(RocksDBProperty.MemTableFlushPending.getRocksDBProperty());
}
/** Returns 1 if at least one compaction is pending; otherwise, returns 0. */
public void enableCompactionPending() {
this.properties.add(RocksDBProperty.CompactionPending.getRocksDBProperty());
}
/** Returns accumulated number of background errors. */
public void enableBackgroundErrors() {
this.properties.add(RocksDBProperty.BackgroundErrors.getRocksDBProperty());
}
/** Returns approximate size of active memtable (bytes). */
public void enableCurSizeActiveMemTable() {
this.properties.add(RocksDBProperty.CurSizeActiveMemTable.getRocksDBProperty());
}
/** Returns approximate size of active and unflushed immutable memtables (bytes). */
public void enableCurSizeAllMemTables() {
this.properties.add(RocksDBProperty.CurSizeAllMemTables.getRocksDBProperty());
}
/**
* Returns approximate size of active, unflushed immutable, and pinned immutable memtables
* (bytes).
*/
public void enableSizeAllMemTables() {
this.properties.add(RocksDBProperty.SizeAllMemTables.getRocksDBProperty());
}
/** Returns total number of entries in the active memtable. */
public void enableNumEntriesActiveMemTable() {
this.properties.add(RocksDBProperty.NumEntriesActiveMemTable.getRocksDBProperty());
}
/** Returns total number of entries in the unflushed immutable memtables. */
public void enableNumEntriesImmMemTables() {
this.properties.add(RocksDBProperty.NumEntriesImmMemTables.getRocksDBProperty());
}
/** Returns total number of delete entries in the active memtable. */
public void enableNumDeletesActiveMemTable() {
this.properties.add(RocksDBProperty.NumDeletesActiveMemTable.getRocksDBProperty());
}
/** Returns total number of delete entries in the unflushed immutable memtables. */
public void enableNumDeletesImmMemTables() {
this.properties.add(RocksDBProperty.NumDeletesImmMemTables.getRocksDBProperty());
}
/**
* Returns estimated number of total keys in the active and unflushed immutable memtables and
* storage.
*/
public void enableEstimateNumKeys() {
this.properties.add(RocksDBProperty.EstimateNumKeys.getRocksDBProperty());
}
/**
* Returns estimated memory used for reading SST tables, excluding memory used in block cache
* (e.g.,filter and index blocks).
*/
public void enableEstimateTableReadersMem() {
this.properties.add(RocksDBProperty.EstimateTableReadersMem.getRocksDBProperty());
}
/** Returns number of unreleased snapshots of the database. */
public void enableNumSnapshots() {
this.properties.add(RocksDBProperty.NumSnapshots.getRocksDBProperty());
}
/**
* Returns number of live versions. `Version` is an internal data structure. See version_set.h
* for details. More live versions often mean more SST files are held from being deleted, by
* iterators or unfinished compactions.
*/
public void enableNumLiveVersions() {
this.properties.add(RocksDBProperty.NumLiveVersions.getRocksDBProperty());
}
/** Returns an estimate of the amount of live data in bytes. */
public void enableEstimateLiveDataSize() {
this.properties.add(RocksDBProperty.EstimateLiveDataSize.getRocksDBProperty());
}
/**
* Returns total size (bytes) of all SST files. <strong>WARNING</strong>: may slow down online
* queries if there are too many files.
*/
public void enableTotalSstFilesSize() {
this.properties.add(RocksDBProperty.TotalSstFilesSize.getRocksDBProperty());
}
public void enableLiveSstFilesSize() {
this.properties.add(RocksDBProperty.LiveSstFilesSize.getRocksDBProperty());
}
/**
* Returns estimated total number of bytes compaction needs to rewrite to get all levels down to
* under target size. Not valid for other compactions than level-based.
*/
public void enableEstimatePendingCompactionBytes() {
this.properties.add(RocksDBProperty.EstimatePendingCompactionBytes.getRocksDBProperty());
}
/** Returns the number of currently running compactions. */
public void enableNumRunningCompactions() {
this.properties.add(RocksDBProperty.NumRunningCompactions.getRocksDBProperty());
}
/** Returns the number of currently running flushes. */
public void enableNumRunningFlushes() {
this.properties.add(RocksDBProperty.NumRunningFlushes.getRocksDBProperty());
}
/** Returns the current actual delayed write rate. 0 means no delay. */
public void enableActualDelayedWriteRate() {
this.properties.add(RocksDBProperty.ActualDelayedWriteRate.getRocksDBProperty());
}
/** Returns 1 if write has been stopped. */
public void enableIsWriteStopped() {
this.properties.add(RocksDBProperty.IsWriteStopped.getRocksDBProperty());
}
/** Returns block cache capacity. */
public void enableBlockCacheCapacity() {
this.properties.add(RocksDBProperty.BlockCacheCapacity.getRocksDBProperty());
}
/** Returns the memory size for the entries residing in block cache. */
public void enableBlockCacheUsage() {
this.properties.add(RocksDBProperty.BlockCacheUsage.getRocksDBProperty());
}
/** Returns the memory size for the entries being pinned in block cache. */
public void enableBlockCachePinnedUsage() {
this.properties.add(RocksDBProperty.BlockCachePinnedUsage.getRocksDBProperty());
}
    /** Sets whether to expose the column family name as a metric variable. */
    public void setColumnFamilyAsVariable(boolean columnFamilyAsVariable) {
        this.columnFamilyAsVariable = columnFamilyAsVariable;
    }
/** @return the enabled RocksDB metrics */
public Collection<String> getProperties() {
return Collections.unmodifiableCollection(properties);
}
/**
* {{@link RocksDBNativeMetricMonitor}} is enabled is any property or ticker type is set.
*
* @return true if {{RocksDBNativeMetricMonitor}} should be enabled, false otherwise.
*/
public boolean isEnabled() {
return !properties.isEmpty() || isStatisticsEnabled();
}
public boolean isStatisticsEnabled() {
return !monitorTickerTypes.isEmpty();
}
/**
* {{@link RocksDBNativeMetricMonitor}} Whether to expose the column family as a variable..
*
* @return true is column family to expose variable, false otherwise.
*/
public boolean isColumnFamilyAsVariable() {
return this.columnFamilyAsVariable;
}
} | class RocksDBNativeMetricOptions implements Serializable {
private static final long serialVersionUID = 1L;
public static final String METRICS_COLUMN_FAMILY_AS_VARIABLE_KEY =
"state.backend.rocksdb.metrics" + ".column-family-as-variable";
public static final ConfigOption<Boolean> MONITOR_NUM_IMMUTABLE_MEM_TABLES =
ConfigOptions.key(RocksDBProperty.NumImmutableMemTable.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Monitor the number of immutable memtables in RocksDB.");
public static final ConfigOption<Boolean> MONITOR_MEM_TABLE_FLUSH_PENDING =
ConfigOptions.key(RocksDBProperty.MemTableFlushPending.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Monitor the number of pending memtable flushes in RocksDB.");
public static final ConfigOption<Boolean> TRACK_COMPACTION_PENDING =
ConfigOptions.key(RocksDBProperty.CompactionPending.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Track pending compactions in RocksDB. Returns 1 if a compaction is pending, 0 otherwise.");
public static final ConfigOption<Boolean> MONITOR_BACKGROUND_ERRORS =
ConfigOptions.key(RocksDBProperty.BackgroundErrors.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Monitor the number of background errors in RocksDB.");
public static final ConfigOption<Boolean> MONITOR_CUR_SIZE_ACTIVE_MEM_TABLE =
ConfigOptions.key(RocksDBProperty.CurSizeActiveMemTable.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the approximate size of the active memtable in bytes.");
public static final ConfigOption<Boolean> MONITOR_CUR_SIZE_ALL_MEM_TABLE =
ConfigOptions.key(RocksDBProperty.CurSizeAllMemTables.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the approximate size of the active and unflushed immutable memtables"
+ " in bytes.");
public static final ConfigOption<Boolean> MONITOR_SIZE_ALL_MEM_TABLES =
ConfigOptions.key(RocksDBProperty.SizeAllMemTables.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the approximate size of the active, unflushed immutable, "
+ "and pinned immutable memtables in bytes.");
public static final ConfigOption<Boolean> MONITOR_NUM_ENTRIES_ACTIVE_MEM_TABLE =
ConfigOptions.key(RocksDBProperty.NumEntriesActiveMemTable.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Monitor the total number of entries in the active memtable.");
public static final ConfigOption<Boolean> MONITOR_NUM_ENTRIES_IMM_MEM_TABLES =
ConfigOptions.key(RocksDBProperty.NumEntriesImmMemTables.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the total number of entries in the unflushed immutable memtables.");
public static final ConfigOption<Boolean> MONITOR_NUM_DELETES_ACTIVE_MEM_TABLE =
ConfigOptions.key(RocksDBProperty.NumDeletesActiveMemTable.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the total number of delete entries in the active memtable.");
public static final ConfigOption<Boolean> MONITOR_NUM_DELETES_IMM_MEM_TABLE =
ConfigOptions.key(RocksDBProperty.NumDeletesImmMemTables.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the total number of delete entries in the unflushed immutable memtables.");
public static final ConfigOption<Boolean> ESTIMATE_NUM_KEYS =
ConfigOptions.key(RocksDBProperty.EstimateNumKeys.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Estimate the number of keys in RocksDB.");
public static final ConfigOption<Boolean> ESTIMATE_TABLE_READERS_MEM =
ConfigOptions.key(RocksDBProperty.EstimateTableReadersMem.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Estimate the memory used for reading SST tables, excluding memory"
+ " used in block cache (e.g.,filter and index blocks) in bytes.");
public static final ConfigOption<Boolean> MONITOR_NUM_SNAPSHOTS =
ConfigOptions.key(RocksDBProperty.NumSnapshots.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Monitor the number of unreleased snapshots of the database.");
public static final ConfigOption<Boolean> MONITOR_NUM_LIVE_VERSIONS =
ConfigOptions.key(RocksDBProperty.NumLiveVersions.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor number of live versions. Version is an internal data structure. "
+ "See RocksDB file version_set.h for details. More live versions often mean more SST files are held "
+ "from being deleted, by iterators or unfinished compactions.");
public static final ConfigOption<Boolean> ESTIMATE_LIVE_DATA_SIZE =
ConfigOptions.key(RocksDBProperty.EstimateLiveDataSize.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Estimate of the amount of live data in bytes (usually smaller than sst files size due to space amplification).");
public static final ConfigOption<Boolean> MONITOR_TOTAL_SST_FILES_SIZE =
ConfigOptions.key(RocksDBProperty.TotalSstFilesSize.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the total size (bytes) of all SST files of all versions."
+ "WARNING: may slow down online queries if there are too many files.");
public static final ConfigOption<Boolean> MONITOR_LIVE_SST_FILES_SIZE =
ConfigOptions.key(RocksDBProperty.LiveSstFilesSize.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the total size (bytes) of all SST files belonging to the latest version."
+ "WARNING: may slow down online queries if there are too many files.");
public static final ConfigOption<Boolean> ESTIMATE_PENDING_COMPACTION_BYTES =
ConfigOptions.key(RocksDBProperty.EstimatePendingCompactionBytes.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Estimated total number of bytes compaction needs to rewrite to get all levels "
+ "down to under target size. Not valid for other compactions than level-based.");
public static final ConfigOption<Boolean> MONITOR_NUM_RUNNING_COMPACTIONS =
ConfigOptions.key(RocksDBProperty.NumRunningCompactions.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Monitor the number of currently running compactions.");
public static final ConfigOption<Boolean> MONITOR_NUM_RUNNING_FLUSHES =
ConfigOptions.key(RocksDBProperty.NumRunningFlushes.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Monitor the number of currently running flushes.");
public static final ConfigOption<Boolean> MONITOR_ACTUAL_DELAYED_WRITE_RATE =
ConfigOptions.key(RocksDBProperty.ActualDelayedWriteRate.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the current actual delayed write rate. 0 means no delay.");
public static final ConfigOption<Boolean> IS_WRITE_STOPPED =
ConfigOptions.key(RocksDBProperty.IsWriteStopped.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Track whether write has been stopped in RocksDB. Returns 1 if write has been stopped, 0 otherwise.");
public static final ConfigOption<Boolean> BLOCK_CACHE_CAPACITY =
ConfigOptions.key(RocksDBProperty.BlockCacheCapacity.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription("Monitor block cache capacity.");
public static final ConfigOption<Boolean> BLOCK_CACHE_USAGE =
ConfigOptions.key(RocksDBProperty.BlockCacheUsage.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the memory size for the entries residing in block cache.");
public static final ConfigOption<Boolean> BLOCK_CACHE_PINNED_USAGE =
ConfigOptions.key(RocksDBProperty.BlockCachePinnedUsage.getConfigKey())
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the memory size for the entries being pinned in block cache.");
public static final ConfigOption<Boolean> COLUMN_FAMILY_AS_VARIABLE =
ConfigOptions.key(METRICS_COLUMN_FAMILY_AS_VARIABLE_KEY)
.booleanType()
.defaultValue(false)
.withDescription(
"Whether to expose the column family as a variable for RocksDB property based metrics.");
public static final ConfigOption<Boolean> MONITOR_BLOCK_CACHE_HIT =
ConfigOptions.key("state.backend.rocksdb.metrics.block-cache-hit")
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the total count of block cache hit in RocksDB (BLOCK_CACHE_HIT == BLOCK_CACHE_INDEX_HIT + BLOCK_CACHE_FILTER_HIT + BLOCK_CACHE_DATA_HIT).");
public static final ConfigOption<Boolean> MONITOR_BLOCK_CACHE_MISS =
ConfigOptions.key("state.backend.rocksdb.metrics.block-cache-miss")
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the total count of block cache misses in RocksDB (BLOCK_CACHE_MISS == BLOCK_CACHE_INDEX_MISS + BLOCK_CACHE_FILTER_MISS + BLOCK_CACHE_DATA_MISS).");
public static final ConfigOption<Boolean> MONITOR_BYTES_READ =
ConfigOptions.key("state.backend.rocksdb.metrics.bytes-read")
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the number of uncompressed bytes read (from memtables/cache/sst) from Get() operation in RocksDB.");
public static final ConfigOption<Boolean> MONITOR_ITER_BYTES_READ =
ConfigOptions.key("state.backend.rocksdb.metrics.iter-bytes-read")
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the number of uncompressed bytes read (from memtables/cache/sst) from an iterator operation in RocksDB.");
public static final ConfigOption<Boolean> MONITOR_BYTES_WRITTEN =
ConfigOptions.key("state.backend.rocksdb.metrics.bytes-written")
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the number of uncompressed bytes written by DB::{Put(), Delete(), Merge(), Write()} operations, which does not include the compaction written bytes, in RocksDB.");
public static final ConfigOption<Boolean> MONITOR_COMPACTION_READ_BYTES =
ConfigOptions.key("state.backend.rocksdb.metrics.compaction-read-bytes")
.booleanType()
.defaultValue(false)
.withDescription("Monitor the bytes read during compaction in RocksDB.");
public static final ConfigOption<Boolean> MONITOR_COMPACTION_WRITE_BYTES =
ConfigOptions.key("state.backend.rocksdb.metrics.compaction-write-bytes")
.booleanType()
.defaultValue(false)
.withDescription("Monitor the bytes written during compaction in RocksDB.");
public static final ConfigOption<Boolean> MONITOR_STALL_MICROS =
ConfigOptions.key("state.backend.rocksdb.metrics.stall-micros")
.booleanType()
.defaultValue(false)
.withDescription(
"Monitor the duration of writer requiring to wait for compaction or flush to finish in RocksDB.");
/** Creates a {@link RocksDBNativeMetricOptions} based on an external configuration. */
public static RocksDBNativeMetricOptions fromConfig(ReadableConfig config) {
RocksDBNativeMetricOptions options = new RocksDBNativeMetricOptions();
configurePropertyMetrics(options, config);
configureStatisticsMetrics(options, config);
return options;
}
private static void configurePropertyMetrics(
RocksDBNativeMetricOptions options, ReadableConfig config) {
if (config.get(MONITOR_NUM_IMMUTABLE_MEM_TABLES)) {
options.enableNumImmutableMemTable();
}
if (config.get(MONITOR_MEM_TABLE_FLUSH_PENDING)) {
options.enableMemTableFlushPending();
}
if (config.get(TRACK_COMPACTION_PENDING)) {
options.enableCompactionPending();
}
if (config.get(MONITOR_BACKGROUND_ERRORS)) {
options.enableBackgroundErrors();
}
if (config.get(MONITOR_CUR_SIZE_ACTIVE_MEM_TABLE)) {
options.enableCurSizeActiveMemTable();
}
if (config.get(MONITOR_CUR_SIZE_ALL_MEM_TABLE)) {
options.enableCurSizeAllMemTables();
}
if (config.get(MONITOR_SIZE_ALL_MEM_TABLES)) {
options.enableSizeAllMemTables();
}
if (config.get(MONITOR_NUM_ENTRIES_ACTIVE_MEM_TABLE)) {
options.enableNumEntriesActiveMemTable();
}
if (config.get(MONITOR_NUM_ENTRIES_IMM_MEM_TABLES)) {
options.enableNumEntriesImmMemTables();
}
if (config.get(MONITOR_NUM_DELETES_ACTIVE_MEM_TABLE)) {
options.enableNumDeletesActiveMemTable();
}
if (config.get(MONITOR_NUM_DELETES_IMM_MEM_TABLE)) {
options.enableNumDeletesImmMemTables();
}
if (config.get(ESTIMATE_NUM_KEYS)) {
options.enableEstimateNumKeys();
}
if (config.get(ESTIMATE_TABLE_READERS_MEM)) {
options.enableEstimateTableReadersMem();
}
if (config.get(MONITOR_NUM_SNAPSHOTS)) {
options.enableNumSnapshots();
}
if (config.get(MONITOR_NUM_LIVE_VERSIONS)) {
options.enableNumLiveVersions();
}
if (config.get(ESTIMATE_LIVE_DATA_SIZE)) {
options.enableEstimateLiveDataSize();
}
if (config.get(MONITOR_TOTAL_SST_FILES_SIZE)) {
options.enableTotalSstFilesSize();
}
if (config.get(MONITOR_LIVE_SST_FILES_SIZE)) {
options.enableLiveSstFilesSize();
}
if (config.get(ESTIMATE_PENDING_COMPACTION_BYTES)) {
options.enableEstimatePendingCompactionBytes();
}
if (config.get(MONITOR_NUM_RUNNING_COMPACTIONS)) {
options.enableNumRunningCompactions();
}
if (config.get(MONITOR_NUM_RUNNING_FLUSHES)) {
options.enableNumRunningFlushes();
}
if (config.get(MONITOR_ACTUAL_DELAYED_WRITE_RATE)) {
options.enableActualDelayedWriteRate();
}
if (config.get(IS_WRITE_STOPPED)) {
options.enableIsWriteStopped();
}
if (config.get(BLOCK_CACHE_CAPACITY)) {
options.enableBlockCacheCapacity();
}
if (config.get(BLOCK_CACHE_USAGE)) {
options.enableBlockCacheUsage();
}
if (config.get(BLOCK_CACHE_PINNED_USAGE)) {
options.enableBlockCachePinnedUsage();
}
options.setColumnFamilyAsVariable(config.get(COLUMN_FAMILY_AS_VARIABLE));
}
/**
 * Enables, on the given options, every RocksDB statistics (ticker) metric whose config switch
 * is turned on in {@code config}.
 */
private static void configureStatisticsMetrics(
        RocksDBNativeMetricOptions options, ReadableConfig config) {
    // Walk the option -> ticker mapping once; order is irrelevant since we only add to a set.
    tickerTypeMapping.forEach(
            (option, tickerType) -> {
                if (config.get(option)) {
                    options.monitorTickerTypes.add(tickerType);
                }
            });
}
private static final Map<ConfigOption<Boolean>, TickerType> tickerTypeMapping =
new HashMap<ConfigOption<Boolean>, TickerType>() {
private static final long serialVersionUID = 1L;
{
put(MONITOR_BLOCK_CACHE_HIT, TickerType.BLOCK_CACHE_HIT);
put(MONITOR_BLOCK_CACHE_MISS, TickerType.BLOCK_CACHE_MISS);
put(MONITOR_BYTES_READ, TickerType.BYTES_READ);
put(MONITOR_ITER_BYTES_READ, TickerType.ITER_BYTES_READ);
put(MONITOR_BYTES_WRITTEN, TickerType.BYTES_WRITTEN);
put(MONITOR_COMPACTION_READ_BYTES, TickerType.COMPACT_READ_BYTES);
put(MONITOR_COMPACTION_WRITE_BYTES, TickerType.COMPACT_WRITE_BYTES);
put(MONITOR_STALL_MICROS, TickerType.STALL_MICROS);
}
};
// RocksDB property names (property-based metrics) that have been enabled.
private final Set<String> properties;
// RocksDB statistics ticker types that have been enabled for monitoring.
private final Set<TickerType> monitorTickerTypes;
// Whether the column family name is exposed as a metrics variable; starts at the config default.
private boolean columnFamilyAsVariable = COLUMN_FAMILY_AS_VARIABLE.defaultValue();
/** Creates an options holder with no property-based or statistics metrics enabled. */
public RocksDBNativeMetricOptions() {
this.properties = new HashSet<>();
this.monitorTickerTypes = new HashSet<>();
}
/**
 * Enables the native statistics (ticker) metric behind the given config option.
 *
 * @param nativeStatisticsOption one of the configurable native statistics options
 * @throws IllegalArgumentException if the option does not map to a known ticker type
 */
@VisibleForTesting
public void enableNativeStatistics(ConfigOption<Boolean> nativeStatisticsOption) {
    final TickerType mappedTicker = tickerTypeMapping.get(nativeStatisticsOption);
    // Guard clause: reject unknown options up front, then record the ticker type.
    if (mappedTicker == null) {
        throw new IllegalArgumentException(
                "Unknown configurable native statistics option " + nativeStatisticsOption);
    }
    monitorTickerTypes.add(mappedTicker);
}
/** Returns number of immutable memtables that have not yet been flushed. */
public void enableNumImmutableMemTable() {
this.properties.add(RocksDBProperty.NumImmutableMemTable.getRocksDBProperty());
}
/** Returns 1 if a memtable flush is pending; otherwise, returns 0. */
public void enableMemTableFlushPending() {
this.properties.add(RocksDBProperty.MemTableFlushPending.getRocksDBProperty());
}
/** Returns 1 if at least one compaction is pending; otherwise, returns 0. */
public void enableCompactionPending() {
this.properties.add(RocksDBProperty.CompactionPending.getRocksDBProperty());
}
/** Returns accumulated number of background errors. */
public void enableBackgroundErrors() {
this.properties.add(RocksDBProperty.BackgroundErrors.getRocksDBProperty());
}
/** Returns approximate size of active memtable (bytes). */
public void enableCurSizeActiveMemTable() {
this.properties.add(RocksDBProperty.CurSizeActiveMemTable.getRocksDBProperty());
}
/** Returns approximate size of active and unflushed immutable memtables (bytes). */
public void enableCurSizeAllMemTables() {
this.properties.add(RocksDBProperty.CurSizeAllMemTables.getRocksDBProperty());
}
/**
* Returns approximate size of active, unflushed immutable, and pinned immutable memtables
* (bytes).
*/
public void enableSizeAllMemTables() {
this.properties.add(RocksDBProperty.SizeAllMemTables.getRocksDBProperty());
}
/** Returns total number of entries in the active memtable. */
public void enableNumEntriesActiveMemTable() {
this.properties.add(RocksDBProperty.NumEntriesActiveMemTable.getRocksDBProperty());
}
/** Returns total number of entries in the unflushed immutable memtables. */
public void enableNumEntriesImmMemTables() {
this.properties.add(RocksDBProperty.NumEntriesImmMemTables.getRocksDBProperty());
}
/** Returns total number of delete entries in the active memtable. */
public void enableNumDeletesActiveMemTable() {
this.properties.add(RocksDBProperty.NumDeletesActiveMemTable.getRocksDBProperty());
}
/** Returns total number of delete entries in the unflushed immutable memtables. */
public void enableNumDeletesImmMemTables() {
this.properties.add(RocksDBProperty.NumDeletesImmMemTables.getRocksDBProperty());
}
/**
* Returns estimated number of total keys in the active and unflushed immutable memtables and
* storage.
*/
public void enableEstimateNumKeys() {
this.properties.add(RocksDBProperty.EstimateNumKeys.getRocksDBProperty());
}
/**
* Returns estimated memory used for reading SST tables, excluding memory used in block cache
* (e.g.,filter and index blocks).
*/
public void enableEstimateTableReadersMem() {
this.properties.add(RocksDBProperty.EstimateTableReadersMem.getRocksDBProperty());
}
/** Returns number of unreleased snapshots of the database. */
public void enableNumSnapshots() {
this.properties.add(RocksDBProperty.NumSnapshots.getRocksDBProperty());
}
/**
* Returns number of live versions. `Version` is an internal data structure. See version_set.h
* for details. More live versions often mean more SST files are held from being deleted, by
* iterators or unfinished compactions.
*/
public void enableNumLiveVersions() {
this.properties.add(RocksDBProperty.NumLiveVersions.getRocksDBProperty());
}
/** Returns an estimate of the amount of live data in bytes. */
public void enableEstimateLiveDataSize() {
this.properties.add(RocksDBProperty.EstimateLiveDataSize.getRocksDBProperty());
}
/**
* Returns total size (bytes) of all SST files. <strong>WARNING</strong>: may slow down online
* queries if there are too many files.
*/
public void enableTotalSstFilesSize() {
this.properties.add(RocksDBProperty.TotalSstFilesSize.getRocksDBProperty());
}
/** Returns total size (bytes) of live SST files. */
public void enableLiveSstFilesSize() {
this.properties.add(RocksDBProperty.LiveSstFilesSize.getRocksDBProperty());
}
/**
* Returns estimated total number of bytes compaction needs to rewrite to get all levels down to
* under target size. Not valid for other compactions than level-based.
*/
public void enableEstimatePendingCompactionBytes() {
this.properties.add(RocksDBProperty.EstimatePendingCompactionBytes.getRocksDBProperty());
}
/** Returns the number of currently running compactions. */
public void enableNumRunningCompactions() {
this.properties.add(RocksDBProperty.NumRunningCompactions.getRocksDBProperty());
}
/** Returns the number of currently running flushes. */
public void enableNumRunningFlushes() {
this.properties.add(RocksDBProperty.NumRunningFlushes.getRocksDBProperty());
}
/** Returns the current actual delayed write rate. 0 means no delay. */
public void enableActualDelayedWriteRate() {
this.properties.add(RocksDBProperty.ActualDelayedWriteRate.getRocksDBProperty());
}
/** Returns 1 if write has been stopped. */
public void enableIsWriteStopped() {
this.properties.add(RocksDBProperty.IsWriteStopped.getRocksDBProperty());
}
/** Returns block cache capacity. */
public void enableBlockCacheCapacity() {
this.properties.add(RocksDBProperty.BlockCacheCapacity.getRocksDBProperty());
}
/** Returns the memory size for the entries residing in block cache. */
public void enableBlockCacheUsage() {
this.properties.add(RocksDBProperty.BlockCacheUsage.getRocksDBProperty());
}
/** Returns the memory size for the entries being pinned in block cache. */
public void enableBlockCachePinnedUsage() {
this.properties.add(RocksDBProperty.BlockCachePinnedUsage.getRocksDBProperty());
}
/** Sets whether the column family name is exposed as a metrics variable. */
public void setColumnFamilyAsVariable(boolean columnFamilyAsVariable) {
this.columnFamilyAsVariable = columnFamilyAsVariable;
}
/** @return the enabled RocksDB property-based metrics */
public Collection<String> getProperties() {
return Collections.unmodifiableCollection(properties);
}
/** @return the enabled RocksDB statistics metrics. */
/**
* {{@link RocksDBNativeMetricMonitor}} is enabled if any property or ticker type is set.
*
* @return true if {{RocksDBNativeMetricMonitor}} should be enabled, false otherwise.
*/
public boolean isEnabled() {
return !properties.isEmpty() || isStatisticsEnabled();
}
/** @return true if RocksDB statistics metrics are enabled, false otherwise. */
public boolean isStatisticsEnabled() {
return !monitorTickerTypes.isEmpty();
}
/**
 * Whether {@link RocksDBNativeMetricMonitor} exposes the column family name as a metrics
 * variable.
 *
 * @return true if the column family is exposed as a variable, false otherwise.
 */
public boolean isColumnFamilyAsVariable() {
return this.columnFamilyAsVariable;
}
} |
Yes. It's inevitable IMO. We need to go through all elements in a JsonArray object. The first `break` ends the inner loop as soon as we have found the matching element. The second `break` ends the outer loop early: if at least one element is non-existent there is no point in continuing. | private boolean isSubArray(JsonArray subArray, JsonArray array) {
boolean isSubList = true;
for (JsonElement exp : subArray) {
isSubList = false;
for (JsonElement res : array) {
if (exp.equals(res)) {
isSubList = true;
break;
}
}
if (!isSubList) {
break;
}
}
return isSubList;
} | if (!isSubList) { | private boolean isSubArray(JsonArray subArray, JsonArray array) {
boolean isSubList = true;
for (JsonElement exp : subArray) {
isSubList = false;
for (JsonElement res : array) {
if (exp.equals(res)) {
isSubList = true;
break;
}
}
if (!isSubList) {
break;
}
}
return isSubList;
} | class RenameTest {
private Endpoint serviceEndpoint;
private JsonParser parser = new JsonParser();
private Path sourcesPath = FileUtils.RES_DIR.resolve("rename");
@BeforeClass
public void init() {
this.serviceEndpoint = TestUtil.initializeLanguageSever();
String indexDumpPath = Paths.get("target/lang-server-index.sql").toAbsolutePath().toString();
LSIndexImpl.getInstance().initFromIndexDump(indexDumpPath);
}
/**
 * Runs one rename test case: sends a rename request for the given source file and checks that
 * every expected edit appears in the language server's response.
 *
 * @param config name of the test config JSON (holds position, new name and expected edits)
 * @param source source file path, relative to the rename sources directory
 * @throws IOException if the config or source file cannot be read
 */
@Test(dataProvider = "rename-data-provider")
public void test(String config, String source)
throws IOException {
String configJsonPath = "rename" + File.separator + config;
Path sourcePath = sourcesPath.resolve("source").resolve(source);
JsonObject configJsonObject = FileUtils.fileContentAsObject(configJsonPath);
JsonArray expectedJson = configJsonObject.get("expected").getAsJsonArray();
// Open the document, issue the rename request, then close the document again.
TestUtil.openDocument(this.serviceEndpoint, sourcePath);
String response = getRenameResponse(configJsonObject, sourcePath);
TestUtil.closeDocument(this.serviceEndpoint, sourcePath);
JsonObject json = parser.parse(response).getAsJsonObject();
JsonArray responseJson = new JsonArray();
// Flatten the edits of all document changes into a single array for comparison.
JsonArray changes = json.getAsJsonObject("result").getAsJsonArray("documentChanges");
for (JsonElement change : changes) {
responseJson.addAll(change.getAsJsonObject().getAsJsonArray("edits"));
}
// Every expected edit must be contained in the response (the response may contain more).
Assert.assertTrue(isSubArray(expectedJson, responseJson));
}
@DataProvider(name = "rename-data-provider")
public Object[][] dataProvider() {
return new Object[][] {
{"renameMultiPackagesObjFunc.json", "renameMultiPackagesObj/main.bal"},
{"renameMultiPackagesObjType.json", "renameMultiPackagesObj/main.bal"},
{"renameMultiPackagesVar.json", "renameMultiPackagesVar/main.bal"},
{"renameService2.json", "renameService2/renameService2.bal"},
{"renameResource.json", "renameService/renameService.bal"},
{"renameResource2.json", "renameService/renameService.bal"},
{"renameVar.json", "renameVar/renameVar.bal"},
{"renameXml.json", "renameXml/renameXml.bal"},
{"renameJson.json", "renameService/renameService.bal"},
{"renameClient.json", "renameClient/renameClient.bal"},
{"renameHttpResponse.json", "renameService/renameService.bal"},
{"renameEndpoint.json", "renameService/renameService.bal"},
{"renameService.json", "renameService/renameService.bal"},
{"renameObjectAttribute.json", "renameObjectAttribute/renameObjectAttribute.bal"},
{"renameFunction.json", "renameFunction/renameFunction.bal"},
{"renameObjectFunction.json", "renameObject/renameObject.bal"},
{"renameObjectInstance.json", "renameObject/renameObject.bal"},
{"renameObjectType.json", "renameObject/renameObject.bal"},
{"renameArray.json", "renameArray/renameArray.bal"},
{"renameSimpleVariable.json", "renameSimpleVariable/renameSimpleVariable.bal"},
{"renameMap.json", "renameMap/renameMap.bal"},
{"renameWithinTransaction.json", "renameWithinTransaction/renameWithinTransaction.bal"},
{"renameWithinTryCatch.json", "renameWithinTryCatch/renameWithinTryCatch.bal"},
{"renameWithinWhile.json", "renameWithinWhile/renameWithinWhile.bal"},
};
}
/**
 * Issues a rename request at the position described in the test config and returns the raw
 * response from the language server.
 */
private String getRenameResponse(JsonObject config, Path sourcePath) throws IOException {
    JsonObject posJson = config.getAsJsonObject("position");
    Position cursor = new Position();
    cursor.setLine(posJson.get("line").getAsInt());
    cursor.setCharacter(posJson.get("character").getAsInt());
    String renameTo = config.get("newName").getAsString();
    return TestUtil.getRenameResponse(sourcePath.toString(), cursor, renameTo, this.serviceEndpoint);
}
@AfterClass
public void cleanupLanguageServer() {
TestUtil.shutdownLanguageServer(this.serviceEndpoint);
}
} | class RenameTest {
private Endpoint serviceEndpoint;
private JsonParser parser = new JsonParser();
private Path sourcesPath = FileUtils.RES_DIR.resolve("rename");
@BeforeClass
public void init() {
this.serviceEndpoint = TestUtil.initializeLanguageSever();
String indexDumpPath = Paths.get("target/lang-server-index.sql").toAbsolutePath().toString();
LSIndexImpl.getInstance().initFromIndexDump(indexDumpPath);
}
@Test(dataProvider = "rename-data-provider")
public void test(String config, String source)
throws IOException {
String configJsonPath = "rename" + File.separator + config;
Path sourcePath = sourcesPath.resolve("source").resolve(source);
JsonObject configJsonObject = FileUtils.fileContentAsObject(configJsonPath);
JsonArray expectedJson = configJsonObject.get("expected").getAsJsonArray();
TestUtil.openDocument(this.serviceEndpoint, sourcePath);
String response = getRenameResponse(configJsonObject, sourcePath);
TestUtil.closeDocument(this.serviceEndpoint, sourcePath);
JsonObject json = parser.parse(response).getAsJsonObject();
JsonArray responseJson = new JsonArray();
JsonArray changes = json.getAsJsonObject("result").getAsJsonArray("documentChanges");
for (JsonElement change : changes) {
responseJson.addAll(change.getAsJsonObject().getAsJsonArray("edits"));
}
Assert.assertTrue(isSubArray(expectedJson, responseJson));
}
@DataProvider(name = "rename-data-provider")
public Object[][] dataProvider() {
return new Object[][] {
{"renameMultiPackagesObjFunc.json", "renameMultiPackagesObj/main.bal"},
{"renameMultiPackagesObjType.json", "renameMultiPackagesObj/main.bal"},
{"renameMultiPackagesVar.json", "renameMultiPackagesVar/main.bal"},
{"renameService2.json", "renameService2/renameService2.bal"},
{"renameResource.json", "renameService/renameService.bal"},
{"renameResource2.json", "renameService/renameService.bal"},
{"renameVar.json", "renameVar/renameVar.bal"},
{"renameXml.json", "renameXml/renameXml.bal"},
{"renameJson.json", "renameService/renameService.bal"},
{"renameClient.json", "renameClient/renameClient.bal"},
{"renameHttpResponse.json", "renameService/renameService.bal"},
{"renameEndpoint.json", "renameService/renameService.bal"},
{"renameService.json", "renameService/renameService.bal"},
{"renameObjectAttribute.json", "renameObjectAttribute/renameObjectAttribute.bal"},
{"renameFunction.json", "renameFunction/renameFunction.bal"},
{"renameObjectFunction.json", "renameObject/renameObject.bal"},
{"renameObjectInstance.json", "renameObject/renameObject.bal"},
{"renameObjectType.json", "renameObject/renameObject.bal"},
{"renameArray.json", "renameArray/renameArray.bal"},
{"renameSimpleVariable.json", "renameSimpleVariable/renameSimpleVariable.bal"},
{"renameMap.json", "renameMap/renameMap.bal"},
{"renameWithinTransaction.json", "renameWithinTransaction/renameWithinTransaction.bal"},
{"renameWithinTryCatch.json", "renameWithinTryCatch/renameWithinTryCatch.bal"},
{"renameWithinWhile.json", "renameWithinWhile/renameWithinWhile.bal"},
};
}
/**
 * Issues a rename request at the position described in the test config and returns the raw
 * response from the language server.
 */
private String getRenameResponse(JsonObject config, Path sourcePath) throws IOException {
JsonObject positionObj = config.get("position").getAsJsonObject();
String newName = config.get("newName").getAsString();
Position position = new Position();
position.setLine(positionObj.get("line").getAsInt());
position.setCharacter(positionObj.get("character").getAsInt());
return TestUtil.getRenameResponse(sourcePath.toString(), position, newName, this.serviceEndpoint);
}
@AfterClass
public void cleanupLanguageServer() {
TestUtil.shutdownLanguageServer(this.serviceEndpoint);
}
} |
Would it be worth wrapping the collection in an ImmutableList? | private CompositeProvider(List<TranslatorProvider> orderedTranslatorsChain) {
requireNonNull(orderedTranslatorsChain);
this.orderedTranslatorsChain = orderedTranslatorsChain;
} | this.orderedTranslatorsChain = orderedTranslatorsChain; | private CompositeProvider(List<TranslatorProvider> orderedTranslatorsChain) {
requireNonNull(orderedTranslatorsChain);
this.orderedTranslatorsChain = Collections.unmodifiableList(orderedTranslatorsChain);
} | class CompositeProvider implements TranslatorProvider {
// Providers queried in order; the first one able to translate an operator wins.
private final List<TranslatorProvider> orderedTranslatorsChain;
/** Creates a provider that consults the given providers in list order. */
public static CompositeProvider of(List<TranslatorProvider> orderedTranslatorsChain) {
return new CompositeProvider(orderedTranslatorsChain);
}
/** Creates a provider that consults the given providers in argument order. */
public static CompositeProvider of(TranslatorProvider... orderedTranslatorsChain) {
requireNonNull(orderedTranslatorsChain);
return new CompositeProvider(Arrays.asList(orderedTranslatorsChain));
}
/**
 * Returns the first translator in the chain that exists for and can translate the given
 * operator.
 *
 * <p>Translators are acquired by calling {@code findTranslator} on the
 * {@link TranslatorProvider TranslatorProviders} from the list given at construction time, in
 * that order.
 *
 * @param operator operator to translate
 * @param <InputT> the type of input elements
 * @param <OutputT> the type of output elements
 * @param <OperatorT> the type of the euphoria operator
 * @return the first {@code Optional<OperatorTranslator<InputT, OutputT, OperatorT>>} whose
 *     translator can translate the operator, or {@code Optional.empty()} if none can
 */
@Override
public <InputT, OutputT, OperatorT extends Operator<OutputT>>
    Optional<OperatorTranslator<InputT, OutputT, OperatorT>> findTranslator(OperatorT operator) {
    // Enhanced for-loop instead of index-based access: no index is needed, and the list
    // implementation may not support fast random access.
    for (TranslatorProvider provider : orderedTranslatorsChain) {
        Optional<OperatorTranslator<InputT, OutputT, OperatorT>> maybeTranslator =
            provider.findTranslator(operator);
        if (maybeTranslator.isPresent() && maybeTranslator.get().canTranslate(operator)) {
            return maybeTranslator;
        }
    }
    return Optional.empty();
}
} | class CompositeProvider implements TranslatorProvider {
private final List<TranslatorProvider> orderedTranslatorsChain;
public static CompositeProvider of(List<TranslatorProvider> orderedTranslatorsChain) {
return new CompositeProvider(orderedTranslatorsChain);
}
public static CompositeProvider of(TranslatorProvider... orderedTranslatorsChain) {
requireNonNull(orderedTranslatorsChain);
return new CompositeProvider(Arrays.asList(orderedTranslatorsChain));
}
/**
 * Returns the first translator in the chain that exists for and can translate the given
 * operator.
 *
 * <p>Translators are acquired by calling {@code findTranslator} on the
 * {@link TranslatorProvider TranslatorProviders} from the list given at construction time, in
 * that order.
 *
 * @param operator operator to translate
 * @param <InputT> the type of input elements
 * @param <OutputT> the type of output elements
 * @param <OperatorT> the type of the euphoria operator
 * @return the first {@code Optional<OperatorTranslator<InputT, OutputT, OperatorT>>} whose
 *     translator can translate the operator, or {@code Optional.empty()} if none can
 */
@Override
public <InputT, OutputT, OperatorT extends Operator<OutputT>>
Optional<OperatorTranslator<InputT, OutputT, OperatorT>> findTranslator(OperatorT operator) {
for (TranslatorProvider provider : orderedTranslatorsChain) {
Optional<OperatorTranslator<InputT, OutputT, OperatorT>> maybeTranslator =
provider.findTranslator(operator);
if (maybeTranslator.isPresent() && maybeTranslator.get().canTranslate(operator)) {
return maybeTranslator;
}
}
return Optional.empty();
}
} |
I see, thanks for your clarifications. I misunderstood you here initially. :+1: | public void testCheckpointRescalingNonPartitionedStateCausesException() throws Exception {
final int parallelism = totalSlots / 2;
final int parallelism2 = totalSlots;
final int maxParallelism = 13;
ClusterClient<?> client = cluster.getClusterClient();
try {
JobGraph jobGraph =
createJobGraphWithOperatorState(
parallelism, maxParallelism, OperatorCheckpointMethod.NON_PARTITIONED);
StateSourceBase.canFinishLatch = new CountDownLatch(1);
final JobID jobID = jobGraph.getJobID();
client.submitJob(jobGraph).get();
waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);
StateSourceBase.workStartedLatch.await();
waitForNewCheckpoint(jobID, cluster.getMiniCluster());
JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();
for (JobVertex vertex : jobGraph.getVertices()) {
builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);
}
restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();
waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);
waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);
StateSourceBase.canFinishLatch.countDown();
client.requestJobResult(jobID).get();
} catch (JobExecutionException exception) {
if (!(exception.getCause() instanceof IllegalStateException)) {
throw exception;
}
}
} | waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2); | public void testCheckpointRescalingNonPartitionedStateCausesException() throws Exception {
final int parallelism = totalSlots / 2;
final int parallelism2 = totalSlots;
final int maxParallelism = 13;
ClusterClient<?> client = cluster.getClusterClient();
try {
JobGraph jobGraph =
createJobGraphWithOperatorState(
parallelism, maxParallelism, OperatorCheckpointMethod.NON_PARTITIONED);
StateSourceBase.canFinishLatch = new CountDownLatch(1);
final JobID jobID = jobGraph.getJobID();
client.submitJob(jobGraph).get();
waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);
StateSourceBase.workStartedLatch.await();
waitForNewCheckpoint(jobID, cluster.getMiniCluster());
JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();
for (JobVertex vertex : jobGraph.getVertices()) {
builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);
}
restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();
waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);
waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);
StateSourceBase.canFinishLatch.countDown();
client.requestJobResult(jobID).get();
} catch (JobExecutionException exception) {
if (!(exception.getCause() instanceof IllegalStateException)) {
throw exception;
}
}
} | class AutoRescalingITCase extends TestLogger {
@ClassRule
public static final TestExecutorResource<ScheduledExecutorService> EXECUTOR_RESOURCE =
TestingUtils.defaultExecutorResource();
private static final int numTaskManagers = 2;
private static final int slotsPerTaskManager = 2;
private static final int totalSlots = numTaskManagers * slotsPerTaskManager;
@Parameterized.Parameters(name = "backend = {0}, buffersPerChannel = {1}")
public static Collection<Object[]> data() {
return Arrays.asList(
new Object[][] {
{"rocksdb", 0}, {"rocksdb", 2}, {"filesystem", 0}, {"filesystem", 2}
});
}
public AutoRescalingITCase(String backend, int buffersPerChannel) {
this.backend = backend;
this.buffersPerChannel = buffersPerChannel;
}
private final String backend;
private final int buffersPerChannel;
private String currentBackend = null;
enum OperatorCheckpointMethod {
NON_PARTITIONED,
CHECKPOINTED_FUNCTION,
CHECKPOINTED_FUNCTION_BROADCAST,
LIST_CHECKPOINTED
}
private static MiniClusterWithClientResource cluster;
private static RestClusterClient<?> restClusterClient;
@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();
@Before
public void setup() throws Exception {
if (!Objects.equals(currentBackend, backend)) {
shutDownExistingCluster();
currentBackend = backend;
Configuration config = new Configuration();
final File checkpointDir = temporaryFolder.newFolder();
final File savepointDir = temporaryFolder.newFolder();
config.set(StateBackendOptions.STATE_BACKEND, currentBackend);
config.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true);
config.set(CheckpointingOptions.LOCAL_RECOVERY, true);
config.set(
CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir.toURI().toString());
config.set(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString());
config.set(
NettyShuffleEnvironmentOptions.NETWORK_BUFFERS_PER_CHANNEL, buffersPerChannel);
config.set(JobManagerOptions.SCHEDULER, JobManagerOptions.SchedulerType.Adaptive);
config.set(JobManagerOptions.SCHEDULER_SCALING_INTERVAL_MIN, Duration.ofMillis(0));
config.set(WebOptions.REFRESH_INTERVAL, 50L);
config.set(JobManagerOptions.SLOT_IDLE_TIMEOUT, 50L);
cluster =
new MiniClusterWithClientResource(
new MiniClusterResourceConfiguration.Builder()
.setConfiguration(config)
.setNumberTaskManagers(numTaskManagers)
.setNumberSlotsPerTaskManager(slotsPerTaskManager)
.build());
cluster.before();
restClusterClient = cluster.getRestClusterClient();
}
}
/** Tears down the mini cluster (if one is running) so a new one can be created with fresh config. */
@AfterClass
public static void shutDownExistingCluster() {
if (cluster != null) {
cluster.after();
cluster = null;
}
}
@Test
public void testCheckpointRescalingInKeyedState() throws Exception {
testCheckpointRescalingKeyedState(false);
}
@Test
public void testCheckpointRescalingOutKeyedState() throws Exception {
testCheckpointRescalingKeyedState(true);
}
/**
* Tests that a job with purely keyed state can be restarted from a checkpoint with a different
* parallelism.
*/
public void testCheckpointRescalingKeyedState(boolean scaleOut) throws Exception {
final int numberKeys = 42;
final int numberElements = 1000;
final int parallelism = scaleOut ? totalSlots / 2 : totalSlots;
final int parallelism2 = scaleOut ? totalSlots : totalSlots / 2;
final int maxParallelism = 13;
Duration timeout = Duration.ofMinutes(3);
Deadline deadline = Deadline.now().plus(timeout);
ClusterClient<?> client = cluster.getClusterClient();
try {
JobGraph jobGraph =
createJobGraphWithKeyedState(
new Configuration(),
parallelism,
maxParallelism,
numberKeys,
numberElements);
final JobID jobID = jobGraph.getJobID();
client.submitJob(jobGraph).get();
SubtaskIndexSource.SOURCE_LATCH.trigger();
assertTrue(
SubtaskIndexFlatMapper.workCompletedLatch.await(
deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));
Set<Tuple2<Integer, Integer>> actualResult = CollectionSink.getElementsSet();
Set<Tuple2<Integer, Integer>> expectedResult = new HashSet<>();
for (int key = 0; key < numberKeys; key++) {
int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
expectedResult.add(
Tuple2.of(
KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
maxParallelism, parallelism, keyGroupIndex),
numberElements * key));
}
assertEquals(expectedResult, actualResult);
CollectionSink.clearElementsSet();
waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);
waitForNewCheckpoint(jobID, cluster.getMiniCluster());
SubtaskIndexSource.SOURCE_LATCH.reset();
JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();
for (JobVertex vertex : jobGraph.getVertices()) {
builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);
}
restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();
waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);
waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);
SubtaskIndexSource.SOURCE_LATCH.trigger();
client.requestJobResult(jobID).get();
Set<Tuple2<Integer, Integer>> actualResult2 = CollectionSink.getElementsSet();
Set<Tuple2<Integer, Integer>> expectedResult2 = new HashSet<>();
for (int key = 0; key < numberKeys; key++) {
int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
expectedResult2.add(
Tuple2.of(
KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
maxParallelism, parallelism2, keyGroupIndex),
key * 2 * numberElements));
}
assertEquals(expectedResult2, actualResult2);
} finally {
CollectionSink.clearElementsSet();
}
}
/**
* Tests that a job cannot be restarted from a checkpoint with a different parallelism if the
* rescaled operator has non-partitioned state.
*/
@Test
/**
* Tests that a job with non partitioned state can be restarted from a checkpoint with a
* different parallelism if the operator with non-partitioned state are not rescaled.
*/
@Test
public void testCheckpointRescalingWithKeyedAndNonPartitionedState() throws Exception {
int numberKeys = 42;
int numberElements = 1000;
int parallelism = totalSlots / 2;
int parallelism2 = totalSlots;
int maxParallelism = 13;
Duration timeout = Duration.ofMinutes(3);
Deadline deadline = Deadline.now().plus(timeout);
ClusterClient<?> client = cluster.getClusterClient();
try {
JobGraph jobGraph =
createJobGraphWithKeyedAndNonPartitionedOperatorState(
parallelism,
maxParallelism,
parallelism,
numberKeys,
numberElements,
numberElements);
final JobID jobID = jobGraph.getJobID();
client.submitJob(jobGraph).get();
SubtaskIndexSource.SOURCE_LATCH.trigger();
assertTrue(
SubtaskIndexFlatMapper.workCompletedLatch.await(
deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));
Set<Tuple2<Integer, Integer>> actualResult = CollectionSink.getElementsSet();
Set<Tuple2<Integer, Integer>> expectedResult = new HashSet<>();
for (int key = 0; key < numberKeys; key++) {
int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
expectedResult.add(
Tuple2.of(
KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
maxParallelism, parallelism, keyGroupIndex),
numberElements * key));
}
assertEquals(expectedResult, actualResult);
CollectionSink.clearElementsSet();
waitForNewCheckpoint(jobID, cluster.getMiniCluster());
SubtaskIndexSource.SOURCE_LATCH.reset();
JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();
for (JobVertex vertex : jobGraph.getVertices()) {
if (vertex.getMaxParallelism() >= parallelism2) {
builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);
} else {
builder.setParallelismForJobVertex(
vertex.getID(), vertex.getMaxParallelism(), vertex.getMaxParallelism());
}
}
restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();
waitForRunningTasks(restClusterClient, jobID, parallelism + parallelism2);
waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);
SubtaskIndexSource.SOURCE_LATCH.trigger();
client.requestJobResult(jobID).get();
Set<Tuple2<Integer, Integer>> actualResult2 = CollectionSink.getElementsSet();
Set<Tuple2<Integer, Integer>> expectedResult2 = new HashSet<>();
for (int key = 0; key < numberKeys; key++) {
int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
expectedResult2.add(
Tuple2.of(
KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
maxParallelism, parallelism2, keyGroupIndex),
key * 2 * numberElements));
}
assertEquals(expectedResult2, actualResult2);
} finally {
CollectionSink.clearElementsSet();
}
}
@Test
public void testCheckpointRescalingInPartitionedOperatorState() throws Exception {
testCheckpointRescalingPartitionedOperatorState(
false, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION);
}
@Test
public void testCheckpointRescalingOutPartitionedOperatorState() throws Exception {
testCheckpointRescalingPartitionedOperatorState(
true, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION);
}
@Test
public void testCheckpointRescalingInBroadcastOperatorState() throws Exception {
testCheckpointRescalingPartitionedOperatorState(
false, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST);
}
@Test
public void testCheckpointRescalingOutBroadcastOperatorState() throws Exception {
testCheckpointRescalingPartitionedOperatorState(
true, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST);
}
/**
 * Tests rescaling of partitioned operator state: runs the job at one parallelism, waits for a
 * checkpoint, rescales, and verifies that the restored per-subtask counters are consistent with
 * the snapshotted ones.
 */
public void testCheckpointRescalingPartitionedOperatorState(
        boolean scaleOut, OperatorCheckpointMethod checkpointMethod) throws Exception {
    // NOTE(review): the parallelism choice looks inverted compared to
    // testCheckpointRescalingKeyedState (here scaleOut picks the larger *initial*
    // parallelism) -- confirm this is intended.
    final int parallelism = scaleOut ? totalSlots : totalSlots / 2;
    final int parallelism2 = scaleOut ? totalSlots / 2 : totalSlots;
    final int maxParallelism = 13;

    ClusterClient<?> client = cluster.getClusterClient();

    int counterSize = Math.max(parallelism, parallelism2);

    if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION
            || checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST) {
        PartitionedStateSource.checkCorrectSnapshot = new int[counterSize];
        PartitionedStateSource.checkCorrectRestore = new int[counterSize];
        PartitionedStateSource.checkCorrectSnapshots.clear();
    } else {
        throw new UnsupportedOperationException("Unsupported method:" + checkpointMethod);
    }

    JobGraph jobGraph =
            createJobGraphWithOperatorState(parallelism, maxParallelism, checkpointMethod);
    // Keep the sources alive until after the rescale has been triggered.
    StateSourceBase.canFinishLatch = new CountDownLatch(1);

    final JobID jobID = jobGraph.getJobID();

    client.submitJob(jobGraph).get();

    waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);
    StateSourceBase.workStartedLatch.await();

    // Only rescale once some state has actually been checkpointed.
    waitForNewCheckpoint(jobID, cluster.getMiniCluster());

    JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();
    for (JobVertex vertex : jobGraph.getVertices()) {
        builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);
    }
    restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();
    waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);
    waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);

    StateSourceBase.canFinishLatch.countDown();

    client.requestJobResult(jobID).get();

    // Sum snapshotted and restored counters once (the previous if/else duplicated these
    // identical loops in both branches) and only apply the broadcast factor afterwards.
    int sumExp = 0;
    for (int c : PartitionedStateSource.checkCorrectSnapshot) {
        sumExp += c;
    }
    int sumAct = 0;
    for (int c : PartitionedStateSource.checkCorrectRestore) {
        sumAct += c;
    }
    if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST) {
        // Broadcast state is restored in full by every subtask, so the snapshot sum is
        // expected parallelism2 times in the restore sum. (Other enum values already threw
        // above, so this branch is exactly the old "else".)
        sumExp *= parallelism2;
    }

    assertEquals(sumExp, sumAct);
}
/** Enables unaligned, exactly-once checkpointing with a short (100 ms) interval. */
private static void configureCheckpointing(CheckpointConfig config) {
    config.setCheckpointInterval(100);
    config.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
    config.enableUnalignedCheckpoints(true);
}
/**
 * Builds a job consisting of a single stateful source (checkpointing its state according to
 * {@code checkpointMethod}) feeding a discarding sink.
 */
private static JobGraph createJobGraphWithOperatorState(
        int parallelism, int maxParallelism, OperatorCheckpointMethod checkpointMethod) {
    final StreamExecutionEnvironment environment =
            StreamExecutionEnvironment.getExecutionEnvironment();
    configureCheckpointing(environment.getCheckpointConfig());
    environment.setParallelism(parallelism);
    environment.getConfig().setMaxParallelism(maxParallelism);
    environment.setRestartStrategy(RestartStrategies.noRestart());

    // One count-down per source subtask; see StateSourceBase.
    StateSourceBase.workStartedLatch = new CountDownLatch(parallelism);

    final SourceFunction<Integer> source;
    if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION) {
        source = new PartitionedStateSource(false);
    } else if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST) {
        source = new PartitionedStateSource(true);
    } else if (checkpointMethod == OperatorCheckpointMethod.NON_PARTITIONED) {
        source = new NonPartitionedStateSource();
    } else {
        throw new IllegalArgumentException(checkpointMethod.name());
    }

    environment.addSource(source).sinkTo(new DiscardingSink<>());
    return environment.getStreamGraph().getJobGraph();
}
/**
 * Builds a job: SubtaskIndexSource -> keyBy(identity) -> SubtaskIndexFlatMapper ->
 * CollectionSink. The flat mapper emits a (subtask index, per-key sum) pair every time a key
 * has received {@code numberElements} elements.
 *
 * @param maxParallelism maximum parallelism to configure; ignored when not positive
 */
public static JobGraph createJobGraphWithKeyedState(
        Configuration configuration,
        int parallelism,
        int maxParallelism,
        int numberKeys,
        int numberElements) {
    StreamExecutionEnvironment env =
            StreamExecutionEnvironment.getExecutionEnvironment(configuration);
    env.setParallelism(parallelism);
    if (0 < maxParallelism) {
        env.getConfig().setMaxParallelism(maxParallelism);
    }
    configureCheckpointing(env.getCheckpointConfig());
    env.setRestartStrategy(RestartStrategies.noRestart());
    env.getConfig().setUseSnapshotCompression(true);
    DataStream<Integer> input =
            env.addSource(new SubtaskIndexSource(numberKeys, numberElements, parallelism))
                    .keyBy(
                            // Identity key selector: the emitted value is the key.
                            new KeySelector<Integer, Integer>() {
                                private static final long serialVersionUID =
                                        -7952298871120320940L;

                                @Override
                                public Integer getKey(Integer value) {
                                    return value;
                                }
                            });
    // One count-down per key; the mapper counts down when a key completes its elements.
    SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);
    DataStream<Tuple2<Integer, Integer>> result =
            input.flatMap(new SubtaskIndexFlatMapper(numberElements));
    result.addSink(new CollectionSink<>());
    return env.getStreamGraph().getJobGraph();
}
/**
 * Like {@link #createJobGraphWithKeyedState} but the source keeps non-partitioned operator
 * state and is pinned to {@code fixedParallelism} (parallelism == max parallelism), so only
 * the downstream keyed operator may be rescaled.
 */
private static JobGraph createJobGraphWithKeyedAndNonPartitionedOperatorState(
        int parallelism,
        int maxParallelism,
        int fixedParallelism,
        int numberKeys,
        int numberElements,
        int numberElementsAfterRestart) {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);
    env.getConfig().setMaxParallelism(maxParallelism);
    configureCheckpointing(env.getCheckpointConfig());
    env.setRestartStrategy(RestartStrategies.noRestart());
    DataStream<Integer> input =
            env.addSource(
                            new SubtaskIndexNonPartitionedStateSource(
                                    numberKeys,
                                    numberElements,
                                    numberElementsAfterRestart,
                                    parallelism))
                    // Pin the stateful source so it cannot be rescaled.
                    .setParallelism(fixedParallelism)
                    .setMaxParallelism(fixedParallelism)
                    .keyBy(
                            // Identity key selector: the emitted value is the key.
                            new KeySelector<Integer, Integer>() {
                                private static final long serialVersionUID =
                                        -7952298871120320940L;

                                @Override
                                public Integer getKey(Integer value) {
                                    return value;
                                }
                            });
    // One count-down per key; the mapper counts down when a key completes its elements.
    SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);
    DataStream<Tuple2<Integer, Integer>> result =
            input.flatMap(new SubtaskIndexFlatMapper(numberElements));
    result.addSink(new CollectionSink<>());
    return env.getStreamGraph().getJobGraph();
}
/**
 * Parallel source that, per round, emits every key in {@code [0, numberKeys)} owned by this
 * subtask (keys are distributed round-robin by subtask index). It performs
 * {@code numberElements} rounds, then either terminates (when running as a restarted or
 * rescaled execution) or idles so a checkpoint can be taken first.
 */
private static class SubtaskIndexSource extends RichParallelSourceFunction<Integer> {

    private static final long serialVersionUID = -400066323594122516L;

    private final int numberKeys;
    private final int originalParallelism;

    protected int numberElements;
    protected int counter = 0;

    // FIX: written by cancel() from a different thread than run(); declared volatile so the
    // cancellation becomes visible to the emit loop (same pattern as StateSourceBase.running).
    private volatile boolean running = true;

    // Gate used by the tests to control exactly when the source (re-)starts emitting.
    private static final OneShotLatch SOURCE_LATCH = new OneShotLatch();

    SubtaskIndexSource(int numberKeys, int numberElements, int originalParallelism) {
        this.numberKeys = numberKeys;
        this.numberElements = numberElements;
        this.originalParallelism = originalParallelism;
    }

    @Override
    public void run(SourceContext<Integer> ctx) throws Exception {
        RuntimeContext runtimeContext = getRuntimeContext();
        final int subtaskIndex = runtimeContext.getTaskInfo().getIndexOfThisSubtask();
        // Detect the post-rescale execution: parallelism changed or attempt number > 0.
        boolean isRestartedOrRescaled =
                runtimeContext.getTaskInfo().getNumberOfParallelSubtasks()
                                != originalParallelism
                        || runtimeContext.getTaskInfo().getAttemptNumber() > 0;
        while (running) {
            SOURCE_LATCH.await();
            if (counter < numberElements) {
                // Emit under the checkpoint lock so counter and the emitted records stay
                // consistent with checkpoints.
                synchronized (ctx.getCheckpointLock()) {
                    for (int value = subtaskIndex;
                            value < numberKeys;
                            value +=
                                    runtimeContext
                                            .getTaskInfo()
                                            .getNumberOfParallelSubtasks()) {
                        ctx.collect(value);
                    }
                    counter++;
                }
            } else {
                if (isRestartedOrRescaled) {
                    running = false;
                } else {
                    // First execution: stay alive (idle) so a checkpoint can be triggered
                    // before the job is rescaled.
                    Thread.sleep(100);
                }
            }
        }
    }

    @Override
    public void cancel() {
        running = false;
    }
}
/**
 * SubtaskIndexSource variant that additionally checkpoints its round counter as
 * non-partitioned (subtask-local) list state; after a restore it extends the run by
 * {@code numElementsAfterRestart} additional rounds.
 */
private static class SubtaskIndexNonPartitionedStateSource extends SubtaskIndexSource
        implements ListCheckpointed<Integer> {

    private static final long serialVersionUID = 8388073059042040203L;

    private final int numElementsAfterRestart;

    SubtaskIndexNonPartitionedStateSource(
            int numberKeys,
            int numberElements,
            int numElementsAfterRestart,
            int originalParallelism) {
        super(numberKeys, numberElements, originalParallelism);
        this.numElementsAfterRestart = numElementsAfterRestart;
    }

    @Override
    public List<Integer> snapshotState(long checkpointId, long timestamp) {
        return Collections.singletonList(this.counter);
    }

    @Override
    public void restoreState(List<Integer> state) {
        // Non-partitioned state must come back exactly as written: one entry per subtask.
        if (state.size() != 1) {
            throw new RuntimeException(
                    "Test failed due to unexpected recovered state size " + state.size());
        }
        this.counter = state.get(0);
        this.numberElements += numElementsAfterRestart;
    }
}
/**
 * Per-key flat mapper keeping two keyed {@link ValueState}s: how many elements the current key
 * has seen and their running sum. Every time a key completes {@code numberElements} elements
 * it emits (subtask index, current sum) and counts down the completion latch.
 */
private static class SubtaskIndexFlatMapper
        extends RichFlatMapFunction<Integer, Tuple2<Integer, Integer>>
        implements CheckpointedFunction {

    private static final long serialVersionUID = 5273172591283191348L;

    // Re-initialized by the job builders with one count per key.
    private static CountDownLatch workCompletedLatch = new CountDownLatch(1);

    // Keyed state: elements seen / running sum for the current key.
    private transient ValueState<Integer> counter;
    private transient ValueState<Integer> sum;

    private final int numberElements;

    SubtaskIndexFlatMapper(int numberElements) {
        this.numberElements = numberElements;
    }

    @Override
    public void flatMap(Integer value, Collector<Tuple2<Integer, Integer>> out)
            throws Exception {
        int count = counter.value() + 1;
        counter.update(count);
        int s = sum.value() + value;
        sum.update(s);
        if (count % numberElements == 0) {
            out.collect(
                    Tuple2.of(getRuntimeContext().getTaskInfo().getIndexOfThisSubtask(), s));
            workCompletedLatch.countDown();
        }
    }

    @Override
    public void snapshotState(FunctionSnapshotContext context) {
        // All state lives in keyed ValueState; nothing to snapshot manually.
    }

    @Override
    public void initializeState(FunctionInitializationContext context) {
        counter =
                context.getKeyedStateStore()
                        .getState(new ValueStateDescriptor<>("counter", Integer.class, 0));
        sum =
                context.getKeyedStateStore()
                        .getState(new ValueStateDescriptor<>("sum", Integer.class, 0));
    }
}
/**
 * Sink collecting all emitted elements into a single static, concurrent set shared by all
 * parallel instances. Tests must call {@link #clearElementsSet()} between runs.
 */
private static class CollectionSink<IN> implements SinkFunction<IN> {

    private static final Set<Object> elements =
            Collections.newSetFromMap(new ConcurrentHashMap<>());

    private static final long serialVersionUID = -1652452958040267745L;

    // Unchecked by design: callers know the element type of the job under test.
    public static <IN> Set<IN> getElementsSet() {
        return (Set<IN>) elements;
    }

    public static void clearElementsSet() {
        elements.clear();
    }

    @Override
    public void invoke(IN value) {
        elements.add(value);
    }
}
/**
 * Source that repeatedly emits {@code 1} while incrementing a local counter (the operator
 * state under test). It signals {@code workStartedLatch} once 10 records were produced,
 * stops emitting at 500, and then blocks until the test allows it to finish via
 * {@code canFinishLatch}.
 */
private static class StateSourceBase extends RichParallelSourceFunction<Integer> {

    private static final long serialVersionUID = 7512206069681177940L;

    // Re-initialized by the tests before each run.
    private static CountDownLatch workStartedLatch = new CountDownLatch(1);
    private static CountDownLatch canFinishLatch = new CountDownLatch(0);

    protected volatile int counter = 0;
    protected volatile boolean running = true;

    @Override
    public void run(SourceContext<Integer> ctx) throws Exception {
        while (running) {
            // Emit under the checkpoint lock so counter stays consistent with checkpoints.
            synchronized (ctx.getCheckpointLock()) {
                ++counter;
                ctx.collect(1);
            }
            Thread.sleep(2);
            if (counter == 10) {
                workStartedLatch.countDown();
            }
            if (counter >= 500) {
                break;
            }
        }
        // Keep the task alive until the test has rescaled the job.
        canFinishLatch.await();
    }

    @Override
    public void cancel() {
        running = false;
    }
}
/** StateSourceBase variant that checkpoints its counter as non-partitioned list state. */
private static class NonPartitionedStateSource extends StateSourceBase
        implements ListCheckpointed<Integer> {

    private static final long serialVersionUID = -8108185918123186841L;

    @Override
    public List<Integer> snapshotState(long checkpointId, long timestamp) {
        return Collections.singletonList(this.counter);
    }

    @Override
    public void restoreState(List<Integer> state) {
        // A subtask may receive no state; only restore when something was assigned.
        if (!state.isEmpty()) {
            this.counter = state.get(0);
        }
    }
}
/**
 * StateSourceBase variant that checkpoints its counter split across {@code NUM_PARTITIONS}
 * list-state entries — either plain list state or, with {@code broadcast}, union list state.
 * It records per subtask the counter at snapshot time (keyed by checkpoint id) and the counter
 * rebuilt after restore, so the owning test can compare redistributed totals.
 */
private static class PartitionedStateSource extends StateSourceBase
        implements CheckpointedFunction {

    private static final long serialVersionUID = -359715965103593462L;
    private static final int NUM_PARTITIONS = 7;

    private transient ListState<Integer> counterPartitions;
    // true -> union list state: every restored subtask receives the full union.
    private final boolean broadcast;

    // Per-checkpoint snapshot bookkeeping; only recorded on the first execution attempt.
    private static final ConcurrentHashMap<Long, int[]> checkCorrectSnapshots =
            new ConcurrentHashMap<>();
    // Per-subtask counters of the checkpoint the job was restored from.
    private static int[] checkCorrectSnapshot;
    // Per-subtask counters observed after restore.
    private static int[] checkCorrectRestore;

    public PartitionedStateSource(boolean broadcast) {
        this.broadcast = broadcast;
    }

    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        if (getRuntimeContext().getTaskInfo().getAttemptNumber() == 0) {
            int[] snapshot =
                    checkCorrectSnapshots.computeIfAbsent(
                            context.getCheckpointId(),
                            (x) -> new int[checkCorrectRestore.length]);
            snapshot[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] = counter;
        }

        // Split counter into NUM_PARTITIONS entries that differ by at most one.
        counterPartitions.clear();
        int div = counter / NUM_PARTITIONS;
        int mod = counter % NUM_PARTITIONS;
        for (int i = 0; i < NUM_PARTITIONS; ++i) {
            int partitionValue = div;
            if (mod > 0) {
                --mod;
                ++partitionValue;
            }
            counterPartitions.add(partitionValue);
        }
    }

    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        if (broadcast) {
            this.counterPartitions =
                    context.getOperatorStateStore()
                            .getUnionListState(
                                    new ListStateDescriptor<>(
                                            "counter_partitions", IntSerializer.INSTANCE));
        } else {
            this.counterPartitions =
                    context.getOperatorStateStore()
                            .getListState(
                                    new ListStateDescriptor<>(
                                            "counter_partitions", IntSerializer.INSTANCE));
        }
        if (context.isRestored()) {
            // Rebuild the counter from the redistributed partitions.
            for (int v : counterPartitions.get()) {
                counter += v;
            }
            checkCorrectRestore[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] =
                    counter;
            // Resolve which recorded snapshot corresponds to the restored checkpoint.
            context.getRestoredCheckpointId()
                    .ifPresent((id) -> checkCorrectSnapshot = checkCorrectSnapshots.get(id));
        }
    }
}
}
class AutoRescalingITCase extends TestLogger {
// Shared executor for asynchronous test utilities.
@ClassRule
public static final TestExecutorResource<ScheduledExecutorService> EXECUTOR_RESOURCE =
        TestingUtils.defaultExecutorResource();

// Cluster topology: 2 TMs x 2 slots = 4 slots in total.
private static final int numTaskManagers = 2;
private static final int slotsPerTaskManager = 2;
private static final int totalSlots = numTaskManagers * slotsPerTaskManager;

/** Runs every test against both state backends, each with 0 and 2 buffers per channel. */
@Parameterized.Parameters(name = "backend = {0}, buffersPerChannel = {1}")
public static Collection<Object[]> data() {
    return Arrays.asList(
            new Object[][] {
                {"rocksdb", 0}, {"rocksdb", 2}, {"filesystem", 0}, {"filesystem", 2}
            });
}

public AutoRescalingITCase(String backend, int buffersPerChannel) {
    this.backend = backend;
    this.buffersPerChannel = buffersPerChannel;
}

private final String backend;
private final int buffersPerChannel;

// Backend of the currently running cluster; lets setup() rebuild only on backend change.
private String currentBackend = null;

/** How the test source checkpoints its operator state. */
enum OperatorCheckpointMethod {
    NON_PARTITIONED,
    CHECKPOINTED_FUNCTION,
    CHECKPOINTED_FUNCTION_BROADCAST,
    LIST_CHECKPOINTED
}

private static MiniClusterWithClientResource cluster;
private static RestClusterClient<?> restClusterClient;

@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();
/**
 * (Re-)creates the mini cluster lazily: the cluster is expensive, so it is rebuilt only when
 * the parameterized state backend changes.
 */
@Before
public void setup() throws Exception {
    if (!Objects.equals(currentBackend, backend)) {
        shutDownExistingCluster();

        currentBackend = backend;

        Configuration config = new Configuration();

        final File checkpointDir = temporaryFolder.newFolder();
        final File savepointDir = temporaryFolder.newFolder();

        config.set(StateBackendOptions.STATE_BACKEND, currentBackend);
        config.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true);
        config.set(CheckpointingOptions.LOCAL_RECOVERY, true);
        config.set(
                CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir.toURI().toString());
        config.set(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString());
        config.set(
                NettyShuffleEnvironmentOptions.NETWORK_BUFFERS_PER_CHANNEL, buffersPerChannel);
        // Auto-rescaling needs the adaptive scheduler; rescale immediately (interval 0) and
        // keep refresh/slot timeouts short so the tests observe changes quickly.
        config.set(JobManagerOptions.SCHEDULER, JobManagerOptions.SchedulerType.Adaptive);
        config.set(JobManagerOptions.SCHEDULER_SCALING_INTERVAL_MIN, Duration.ofMillis(0));
        config.set(WebOptions.REFRESH_INTERVAL, 50L);
        config.set(JobManagerOptions.SLOT_IDLE_TIMEOUT, 50L);

        cluster =
                new MiniClusterWithClientResource(
                        new MiniClusterResourceConfiguration.Builder()
                                .setConfiguration(config)
                                .setNumberTaskManagers(numTaskManagers)
                                .setNumberSlotsPerTaskManager(slotsPerTaskManager)
                                .build());
        cluster.before();
        restClusterClient = cluster.getRestClusterClient();
    }
}
/** Tears down the shared mini cluster, if one is currently running. */
@AfterClass
public static void shutDownExistingCluster() {
    final MiniClusterWithClientResource current = cluster;
    if (current == null) {
        return;
    }
    current.after();
    cluster = null;
}
/** Scale-down variant (totalSlots -> totalSlots / 2) of keyed-state rescaling. */
@Test
public void testCheckpointRescalingInKeyedState() throws Exception {
    testCheckpointRescalingKeyedState(false);
}

/** Scale-up variant (totalSlots / 2 -> totalSlots) of keyed-state rescaling. */
@Test
public void testCheckpointRescalingOutKeyedState() throws Exception {
    testCheckpointRescalingKeyedState(true);
}
/**
 * Tests that a job with purely keyed state can be restarted from a checkpoint with a different
 * parallelism.
 *
 * <p>Flow: run until every key produced its first (subtask, sum) result, wait for a
 * checkpoint, rescale via updated job resource requirements, then let the sources run a second
 * round. Because the keyed counter/sum state survives the rescale, the second result per key
 * is {@code 2 * numberElements * key}, attributed to the new key-group owner.
 */
public void testCheckpointRescalingKeyedState(boolean scaleOut) throws Exception {
    final int numberKeys = 42;
    final int numberElements = 1000;
    final int parallelism = scaleOut ? totalSlots / 2 : totalSlots;
    final int parallelism2 = scaleOut ? totalSlots : totalSlots / 2;
    final int maxParallelism = 13;
    Duration timeout = Duration.ofMinutes(3);
    Deadline deadline = Deadline.now().plus(timeout);
    ClusterClient<?> client = cluster.getClusterClient();
    try {
        JobGraph jobGraph =
                createJobGraphWithKeyedState(
                        new Configuration(),
                        parallelism,
                        maxParallelism,
                        numberKeys,
                        numberElements);
        final JobID jobID = jobGraph.getJobID();
        client.submitJob(jobGraph).get();
        // Release the sources and wait until every key finished its first batch.
        SubtaskIndexSource.SOURCE_LATCH.trigger();
        assertTrue(
                SubtaskIndexFlatMapper.workCompletedLatch.await(
                        deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));
        Set<Tuple2<Integer, Integer>> actualResult = CollectionSink.getElementsSet();
        // Expected: each key attributed to the subtask owning its key group at the original
        // parallelism, with sum = numberElements * key.
        Set<Tuple2<Integer, Integer>> expectedResult = new HashSet<>();
        for (int key = 0; key < numberKeys; key++) {
            int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
            expectedResult.add(
                    Tuple2.of(
                            KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
                                    maxParallelism, parallelism, keyGroupIndex),
                            numberElements * key));
        }
        assertEquals(expectedResult, actualResult);
        // Clear results, pause the sources, and wait for a checkpoint to rescale from.
        CollectionSink.clearElementsSet();
        waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);
        waitForNewCheckpoint(jobID, cluster.getMiniCluster());
        SubtaskIndexSource.SOURCE_LATCH.reset();
        // Rescale every vertex to the new parallelism.
        JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();
        for (JobVertex vertex : jobGraph.getVertices()) {
            builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);
        }
        restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();
        waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);
        waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);
        // Second round: the restored keyed state doubles the per-key sums.
        SubtaskIndexSource.SOURCE_LATCH.trigger();
        client.requestJobResult(jobID).get();
        Set<Tuple2<Integer, Integer>> actualResult2 = CollectionSink.getElementsSet();
        Set<Tuple2<Integer, Integer>> expectedResult2 = new HashSet<>();
        for (int key = 0; key < numberKeys; key++) {
            int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
            expectedResult2.add(
                    Tuple2.of(
                            KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
                                    maxParallelism, parallelism2, keyGroupIndex),
                            key * 2 * numberElements));
        }
        assertEquals(expectedResult2, actualResult2);
    } finally {
        CollectionSink.clearElementsSet();
    }
}
/**
 * Tests that a job with non-partitioned state can be restarted from a checkpoint with a
 * different parallelism if the operators with non-partitioned state are not rescaled.
 *
 * <p>The source keeps non-partitioned state and is pinned (parallelism == max parallelism), so
 * only the keyed operator is rescaled.
 */
// FIX: this method originally carried two stacked @Test annotations (the method between them
// had been removed); @Test is not repeatable, so the duplicate annotation and its orphaned
// javadoc were dropped.
@Test
public void testCheckpointRescalingWithKeyedAndNonPartitionedState() throws Exception {
    int numberKeys = 42;
    int numberElements = 1000;
    int parallelism = totalSlots / 2;
    int parallelism2 = totalSlots;
    int maxParallelism = 13;
    Duration timeout = Duration.ofMinutes(3);
    Deadline deadline = Deadline.now().plus(timeout);
    ClusterClient<?> client = cluster.getClusterClient();
    try {
        // The stateful source is fixed at `parallelism`; only vertices whose max parallelism
        // permits it will later be rescaled to `parallelism2`.
        JobGraph jobGraph =
                createJobGraphWithKeyedAndNonPartitionedOperatorState(
                        parallelism,
                        maxParallelism,
                        parallelism,
                        numberKeys,
                        numberElements,
                        numberElements);
        final JobID jobID = jobGraph.getJobID();
        client.submitJob(jobGraph).get();
        // First round: wait until every key completed its first batch of elements.
        SubtaskIndexSource.SOURCE_LATCH.trigger();
        assertTrue(
                SubtaskIndexFlatMapper.workCompletedLatch.await(
                        deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));
        Set<Tuple2<Integer, Integer>> actualResult = CollectionSink.getElementsSet();
        Set<Tuple2<Integer, Integer>> expectedResult = new HashSet<>();
        for (int key = 0; key < numberKeys; key++) {
            int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
            expectedResult.add(
                    Tuple2.of(
                            KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
                                    maxParallelism, parallelism, keyGroupIndex),
                            numberElements * key));
        }
        assertEquals(expectedResult, actualResult);
        CollectionSink.clearElementsSet();
        // Take a checkpoint, then rescale every vertex whose max parallelism permits it.
        waitForNewCheckpoint(jobID, cluster.getMiniCluster());
        SubtaskIndexSource.SOURCE_LATCH.reset();
        JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();
        for (JobVertex vertex : jobGraph.getVertices()) {
            if (vertex.getMaxParallelism() >= parallelism2) {
                builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);
            } else {
                // The pinned source keeps its (max) parallelism.
                builder.setParallelismForJobVertex(
                        vertex.getID(), vertex.getMaxParallelism(), vertex.getMaxParallelism());
            }
        }
        restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();
        // Source stays at `parallelism` while the keyed operator runs at `parallelism2`.
        waitForRunningTasks(restClusterClient, jobID, parallelism + parallelism2);
        waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);
        // Second round: the restored keyed state doubles the per-key sums.
        SubtaskIndexSource.SOURCE_LATCH.trigger();
        client.requestJobResult(jobID).get();
        Set<Tuple2<Integer, Integer>> actualResult2 = CollectionSink.getElementsSet();
        Set<Tuple2<Integer, Integer>> expectedResult2 = new HashSet<>();
        for (int key = 0; key < numberKeys; key++) {
            int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
            expectedResult2.add(
                    Tuple2.of(
                            KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
                                    maxParallelism, parallelism2, keyGroupIndex),
                            key * 2 * numberElements));
        }
        assertEquals(expectedResult2, actualResult2);
    } finally {
        CollectionSink.clearElementsSet();
    }
}
/** Rescaling with plain (round-robin redistributed) list state, {@code scaleOut = false}. */
@Test
public void testCheckpointRescalingInPartitionedOperatorState() throws Exception {
    testCheckpointRescalingPartitionedOperatorState(
            false, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION);
}

/** Rescaling with plain (round-robin redistributed) list state, {@code scaleOut = true}. */
@Test
public void testCheckpointRescalingOutPartitionedOperatorState() throws Exception {
    testCheckpointRescalingPartitionedOperatorState(
            true, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION);
}

/** Rescaling with union ("broadcast") list state, {@code scaleOut = false}. */
@Test
public void testCheckpointRescalingInBroadcastOperatorState() throws Exception {
    testCheckpointRescalingPartitionedOperatorState(
            false, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST);
}

/** Rescaling with union ("broadcast") list state, {@code scaleOut = true}. */
@Test
public void testCheckpointRescalingOutBroadcastOperatorState() throws Exception {
    testCheckpointRescalingPartitionedOperatorState(
            true, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST);
}
/**
 * Tests rescaling of partitioned operator state. Each source subtask records the counter value
 * it snapshotted before the rescale and the value it restored afterwards; the totals must
 * match. For union ("broadcast") state every restored subtask receives the full union, so the
 * expected total is additionally scaled by the new parallelism.
 */
public void testCheckpointRescalingPartitionedOperatorState(
        boolean scaleOut, OperatorCheckpointMethod checkpointMethod) throws Exception {
    // NOTE(review): here scaleOut == true maps to (totalSlots -> totalSlots / 2), which is
    // the opposite mapping of testCheckpointRescalingKeyedState — confirm this is intended.
    final int parallelism = scaleOut ? totalSlots : totalSlots / 2;
    final int parallelism2 = scaleOut ? totalSlots / 2 : totalSlots;
    final int maxParallelism = 13;
    ClusterClient<?> client = cluster.getClusterClient();
    // The bookkeeping arrays need one slot per subtask for whichever parallelism is larger.
    int counterSize = Math.max(parallelism, parallelism2);
    if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION
            || checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST) {
        PartitionedStateSource.checkCorrectSnapshot = new int[counterSize];
        PartitionedStateSource.checkCorrectRestore = new int[counterSize];
        PartitionedStateSource.checkCorrectSnapshots.clear();
    } else {
        throw new UnsupportedOperationException("Unsupported method:" + checkpointMethod);
    }
    JobGraph jobGraph =
            createJobGraphWithOperatorState(parallelism, maxParallelism, checkpointMethod);
    StateSourceBase.canFinishLatch = new CountDownLatch(1);
    final JobID jobID = jobGraph.getJobID();
    client.submitJob(jobGraph).get();
    // Wait until the sources produced some state and a checkpoint of it completed.
    waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);
    StateSourceBase.workStartedLatch.await();
    waitForNewCheckpoint(jobID, cluster.getMiniCluster());
    // Trigger the rescale by updating the per-vertex resource requirements.
    JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();
    for (JobVertex vertex : jobGraph.getVertices()) {
        builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);
    }
    restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();
    // Source and sink vertices both run at the new parallelism.
    waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);
    waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);
    // Allow the sources to finish and wait for the job result.
    StateSourceBase.canFinishLatch.countDown();
    client.requestJobResult(jobID).get();
    int sumExp = 0;
    int sumAct = 0;
    if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION) {
        for (int c : PartitionedStateSource.checkCorrectSnapshot) {
            sumExp += c;
        }
        for (int c : PartitionedStateSource.checkCorrectRestore) {
            sumAct += c;
        }
    } else {
        for (int c : PartitionedStateSource.checkCorrectSnapshot) {
            sumExp += c;
        }
        for (int c : PartitionedStateSource.checkCorrectRestore) {
            sumAct += c;
        }
        // Union state: every new subtask restores the complete union of all partitions.
        sumExp *= parallelism2;
    }
    assertEquals(sumExp, sumAct);
}
/** Enables unaligned, exactly-once checkpointing with a short (100 ms) interval. */
private static void configureCheckpointing(CheckpointConfig config) {
    config.setCheckpointInterval(100);
    config.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
    config.enableUnalignedCheckpoints(true);
}
/**
 * Builds a job consisting of a single stateful source (checkpointing its state according to
 * {@code checkpointMethod}) feeding a discarding sink.
 */
private static JobGraph createJobGraphWithOperatorState(
        int parallelism, int maxParallelism, OperatorCheckpointMethod checkpointMethod) {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    configureCheckpointing(env.getCheckpointConfig());
    env.setParallelism(parallelism);
    env.getConfig().setMaxParallelism(maxParallelism);
    env.setRestartStrategy(RestartStrategies.noRestart());

    // One count-down per source subtask; counted down after a subtask emitted 10 records.
    StateSourceBase.workStartedLatch = new CountDownLatch(parallelism);

    SourceFunction<Integer> src;
    switch (checkpointMethod) {
        case CHECKPOINTED_FUNCTION:
            src = new PartitionedStateSource(false);
            break;
        case CHECKPOINTED_FUNCTION_BROADCAST:
            src = new PartitionedStateSource(true);
            break;
        case NON_PARTITIONED:
            src = new NonPartitionedStateSource();
            break;
        default:
            throw new IllegalArgumentException(checkpointMethod.name());
    }

    DataStream<Integer> input = env.addSource(src);
    input.sinkTo(new DiscardingSink<>());

    return env.getStreamGraph().getJobGraph();
}
/**
 * Builds a job: SubtaskIndexSource -> keyBy(identity) -> SubtaskIndexFlatMapper ->
 * CollectionSink. The flat mapper emits a (subtask index, per-key sum) pair every time a key
 * has received {@code numberElements} elements.
 *
 * @param maxParallelism maximum parallelism to configure; ignored when not positive
 */
public static JobGraph createJobGraphWithKeyedState(
        Configuration configuration,
        int parallelism,
        int maxParallelism,
        int numberKeys,
        int numberElements) {
    StreamExecutionEnvironment env =
            StreamExecutionEnvironment.getExecutionEnvironment(configuration);
    env.setParallelism(parallelism);
    if (0 < maxParallelism) {
        env.getConfig().setMaxParallelism(maxParallelism);
    }
    configureCheckpointing(env.getCheckpointConfig());
    env.setRestartStrategy(RestartStrategies.noRestart());
    env.getConfig().setUseSnapshotCompression(true);
    DataStream<Integer> input =
            env.addSource(new SubtaskIndexSource(numberKeys, numberElements, parallelism))
                    .keyBy(
                            // Identity key selector: the emitted value is the key.
                            new KeySelector<Integer, Integer>() {
                                private static final long serialVersionUID =
                                        -7952298871120320940L;

                                @Override
                                public Integer getKey(Integer value) {
                                    return value;
                                }
                            });
    // One count-down per key; the mapper counts down when a key completes its elements.
    SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);
    DataStream<Tuple2<Integer, Integer>> result =
            input.flatMap(new SubtaskIndexFlatMapper(numberElements));
    result.addSink(new CollectionSink<>());
    return env.getStreamGraph().getJobGraph();
}
/**
 * Like {@link #createJobGraphWithKeyedState} but the source keeps non-partitioned operator
 * state and is pinned to {@code fixedParallelism} (parallelism == max parallelism), so only
 * the downstream keyed operator may be rescaled.
 */
private static JobGraph createJobGraphWithKeyedAndNonPartitionedOperatorState(
        int parallelism,
        int maxParallelism,
        int fixedParallelism,
        int numberKeys,
        int numberElements,
        int numberElementsAfterRestart) {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);
    env.getConfig().setMaxParallelism(maxParallelism);
    configureCheckpointing(env.getCheckpointConfig());
    env.setRestartStrategy(RestartStrategies.noRestart());
    DataStream<Integer> input =
            env.addSource(
                            new SubtaskIndexNonPartitionedStateSource(
                                    numberKeys,
                                    numberElements,
                                    numberElementsAfterRestart,
                                    parallelism))
                    // Pin the stateful source so it cannot be rescaled.
                    .setParallelism(fixedParallelism)
                    .setMaxParallelism(fixedParallelism)
                    .keyBy(
                            // Identity key selector: the emitted value is the key.
                            new KeySelector<Integer, Integer>() {
                                private static final long serialVersionUID =
                                        -7952298871120320940L;

                                @Override
                                public Integer getKey(Integer value) {
                                    return value;
                                }
                            });
    // One count-down per key; the mapper counts down when a key completes its elements.
    SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);
    DataStream<Tuple2<Integer, Integer>> result =
            input.flatMap(new SubtaskIndexFlatMapper(numberElements));
    result.addSink(new CollectionSink<>());
    return env.getStreamGraph().getJobGraph();
}
/**
 * Parallel source that, per round, emits every key in {@code [0, numberKeys)} owned by this
 * subtask (keys are distributed round-robin by subtask index). It performs
 * {@code numberElements} rounds, then either terminates (when running as a restarted or
 * rescaled execution) or idles so a checkpoint can be taken first.
 */
private static class SubtaskIndexSource extends RichParallelSourceFunction<Integer> {

    private static final long serialVersionUID = -400066323594122516L;

    private final int numberKeys;
    private final int originalParallelism;

    protected int numberElements;
    protected int counter = 0;

    // FIX: written by cancel() from a different thread than run(); declared volatile so the
    // cancellation becomes visible to the emit loop (same pattern as StateSourceBase.running).
    private volatile boolean running = true;

    // Gate used by the tests to control exactly when the source (re-)starts emitting.
    private static final OneShotLatch SOURCE_LATCH = new OneShotLatch();

    SubtaskIndexSource(int numberKeys, int numberElements, int originalParallelism) {
        this.numberKeys = numberKeys;
        this.numberElements = numberElements;
        this.originalParallelism = originalParallelism;
    }

    @Override
    public void run(SourceContext<Integer> ctx) throws Exception {
        RuntimeContext runtimeContext = getRuntimeContext();
        final int subtaskIndex = runtimeContext.getTaskInfo().getIndexOfThisSubtask();
        // Detect the post-rescale execution: parallelism changed or attempt number > 0.
        boolean isRestartedOrRescaled =
                runtimeContext.getTaskInfo().getNumberOfParallelSubtasks()
                                != originalParallelism
                        || runtimeContext.getTaskInfo().getAttemptNumber() > 0;
        while (running) {
            SOURCE_LATCH.await();
            if (counter < numberElements) {
                // Emit under the checkpoint lock so counter and the emitted records stay
                // consistent with checkpoints.
                synchronized (ctx.getCheckpointLock()) {
                    for (int value = subtaskIndex;
                            value < numberKeys;
                            value +=
                                    runtimeContext
                                            .getTaskInfo()
                                            .getNumberOfParallelSubtasks()) {
                        ctx.collect(value);
                    }
                    counter++;
                }
            } else {
                if (isRestartedOrRescaled) {
                    running = false;
                } else {
                    // First execution: stay alive (idle) so a checkpoint can be triggered
                    // before the job is rescaled.
                    Thread.sleep(100);
                }
            }
        }
    }

    @Override
    public void cancel() {
        running = false;
    }
}
/**
 * SubtaskIndexSource variant that additionally checkpoints its round counter as
 * non-partitioned (subtask-local) list state; after a restore it extends the run by
 * {@code numElementsAfterRestart} additional rounds.
 */
private static class SubtaskIndexNonPartitionedStateSource extends SubtaskIndexSource
        implements ListCheckpointed<Integer> {

    private static final long serialVersionUID = 8388073059042040203L;

    private final int numElementsAfterRestart;

    SubtaskIndexNonPartitionedStateSource(
            int numberKeys,
            int numberElements,
            int numElementsAfterRestart,
            int originalParallelism) {
        super(numberKeys, numberElements, originalParallelism);
        this.numElementsAfterRestart = numElementsAfterRestart;
    }

    @Override
    public List<Integer> snapshotState(long checkpointId, long timestamp) {
        return Collections.singletonList(this.counter);
    }

    @Override
    public void restoreState(List<Integer> state) {
        // Non-partitioned state must come back exactly as written: one entry per subtask.
        if (state.size() != 1) {
            throw new RuntimeException(
                    "Test failed due to unexpected recovered state size " + state.size());
        }
        this.counter = state.get(0);
        this.numberElements += numElementsAfterRestart;
    }
}
/**
 * Per-key flat mapper keeping two keyed {@link ValueState}s: how many elements the current key
 * has seen and their running sum. Every time a key completes {@code numberElements} elements
 * it emits (subtask index, current sum) and counts down the completion latch.
 */
private static class SubtaskIndexFlatMapper
        extends RichFlatMapFunction<Integer, Tuple2<Integer, Integer>>
        implements CheckpointedFunction {

    private static final long serialVersionUID = 5273172591283191348L;

    // Re-initialized by the job builders with one count per key.
    private static CountDownLatch workCompletedLatch = new CountDownLatch(1);

    // Keyed state: elements seen / running sum for the current key.
    private transient ValueState<Integer> counter;
    private transient ValueState<Integer> sum;

    private final int numberElements;

    SubtaskIndexFlatMapper(int numberElements) {
        this.numberElements = numberElements;
    }

    @Override
    public void flatMap(Integer value, Collector<Tuple2<Integer, Integer>> out)
            throws Exception {
        int count = counter.value() + 1;
        counter.update(count);
        int s = sum.value() + value;
        sum.update(s);
        if (count % numberElements == 0) {
            out.collect(
                    Tuple2.of(getRuntimeContext().getTaskInfo().getIndexOfThisSubtask(), s));
            workCompletedLatch.countDown();
        }
    }

    @Override
    public void snapshotState(FunctionSnapshotContext context) {
        // All state lives in keyed ValueState; nothing to snapshot manually.
    }

    @Override
    public void initializeState(FunctionInitializationContext context) {
        counter =
                context.getKeyedStateStore()
                        .getState(new ValueStateDescriptor<>("counter", Integer.class, 0));
        sum =
                context.getKeyedStateStore()
                        .getState(new ValueStateDescriptor<>("sum", Integer.class, 0));
    }
}
/**
 * Sink collecting all emitted elements into a single static, concurrent set shared by all
 * parallel instances. Tests must call {@link #clearElementsSet()} between runs.
 */
private static class CollectionSink<IN> implements SinkFunction<IN> {

    private static final Set<Object> elements =
            Collections.newSetFromMap(new ConcurrentHashMap<>());

    private static final long serialVersionUID = -1652452958040267745L;

    // Unchecked by design: callers know the element type of the job under test.
    public static <IN> Set<IN> getElementsSet() {
        return (Set<IN>) elements;
    }

    public static void clearElementsSet() {
        elements.clear();
    }

    @Override
    public void invoke(IN value) {
        elements.add(value);
    }
}
/**
 * Source that repeatedly emits {@code 1} while incrementing a local counter (the operator
 * state under test). It signals {@code workStartedLatch} once 10 records were produced,
 * stops emitting at 500, and then blocks until the test allows it to finish via
 * {@code canFinishLatch}.
 */
private static class StateSourceBase extends RichParallelSourceFunction<Integer> {

    private static final long serialVersionUID = 7512206069681177940L;

    // Re-initialized by the tests before each run.
    private static CountDownLatch workStartedLatch = new CountDownLatch(1);
    private static CountDownLatch canFinishLatch = new CountDownLatch(0);

    protected volatile int counter = 0;
    protected volatile boolean running = true;

    @Override
    public void run(SourceContext<Integer> ctx) throws Exception {
        while (running) {
            // Emit under the checkpoint lock so counter stays consistent with checkpoints.
            synchronized (ctx.getCheckpointLock()) {
                ++counter;
                ctx.collect(1);
            }
            Thread.sleep(2);
            if (counter == 10) {
                workStartedLatch.countDown();
            }
            if (counter >= 500) {
                break;
            }
        }
        // Keep the task alive until the test has rescaled the job.
        canFinishLatch.await();
    }

    @Override
    public void cancel() {
        running = false;
    }
}
/** StateSourceBase variant that checkpoints its counter as non-partitioned list state. */
private static class NonPartitionedStateSource extends StateSourceBase
        implements ListCheckpointed<Integer> {

    private static final long serialVersionUID = -8108185918123186841L;

    @Override
    public List<Integer> snapshotState(long checkpointId, long timestamp) {
        return Collections.singletonList(this.counter);
    }

    @Override
    public void restoreState(List<Integer> state) {
        // A subtask may receive no state; only restore when something was assigned.
        if (!state.isEmpty()) {
            this.counter = state.get(0);
        }
    }
}
/**
 * Source whose counter is checkpointed as operator list state split across
 * {@link #NUM_PARTITIONS} entries, optionally as union (broadcast) state.
 * Static arrays/maps record what was snapshotted and restored per subtask so
 * the test can verify redistribution after a rescale.
 */
private static class PartitionedStateSource extends StateSourceBase
        implements CheckpointedFunction {
    private static final long serialVersionUID = -359715965103593462L;
    private static final int NUM_PARTITIONS = 7;
    private transient ListState<Integer> counterPartitions;
    private final boolean broadcast;
    // checkpoint id -> counter value per subtask, as seen at snapshot time.
    private static final ConcurrentHashMap<Long, int[]> checkCorrectSnapshots =
            new ConcurrentHashMap<>();
    private static int[] checkCorrectSnapshot;
    private static int[] checkCorrectRestore;
    public PartitionedStateSource(boolean broadcast) {
        this.broadcast = broadcast;
    }
    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        // Only record verification data on the first execution attempt so
        // retries don't overwrite the values the test will compare against.
        if (getRuntimeContext().getTaskInfo().getAttemptNumber() == 0) {
            int[] snapshot =
                    checkCorrectSnapshots.computeIfAbsent(
                            context.getCheckpointId(),
                            (x) -> new int[checkCorrectRestore.length]);
            snapshot[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] = counter;
        }
        counterPartitions.clear();
        // Split counter into NUM_PARTITIONS parts that differ by at most one:
        // the first `mod` partitions get div+1, the rest get div.
        int div = counter / NUM_PARTITIONS;
        int mod = counter % NUM_PARTITIONS;
        for (int i = 0; i < NUM_PARTITIONS; ++i) {
            int partitionValue = div;
            if (mod > 0) {
                --mod;
                ++partitionValue;
            }
            counterPartitions.add(partitionValue);
        }
    }
    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        // Union state replicates all entries to every subtask; plain list
        // state round-robins them — both registered under the same name.
        if (broadcast) {
            this.counterPartitions =
                    context.getOperatorStateStore()
                            .getUnionListState(
                                    new ListStateDescriptor<>(
                                            "counter_partitions", IntSerializer.INSTANCE));
        } else {
            this.counterPartitions =
                    context.getOperatorStateStore()
                            .getListState(
                                    new ListStateDescriptor<>(
                                            "counter_partitions", IntSerializer.INSTANCE));
        }
        if (context.isRestored()) {
            // Rebuild the counter from whatever partitions landed here, then
            // publish the restored value for the test to inspect.
            for (int v : counterPartitions.get()) {
                counter += v;
            }
            checkCorrectRestore[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] =
                    counter;
            context.getRestoredCheckpointId()
                    .ifPresent((id) -> checkCorrectSnapshot = checkCorrectSnapshots.get(id));
        }
    }
}
} |
Does it work to simply write Collections.sort(fields, Comparator.comparing(FieldDescriptor::getFieldId))? We needed the verbose version before because it was captured inside the Set class, so it had to be serializable. I suspect we can drop back to the simple version now. | private List<FieldDescriptor> resolveDirectFieldsAccessed(Schema schema) {
List<FieldDescriptor> fields = new ArrayList<>();
for (FieldDescriptor field : getFieldsAccessed()) {
validateFieldDescriptor(schema, field);
if (field.getFieldId() == null) {
field = field.toBuilder().setFieldId(schema.indexOf(field.getFieldName())).build();
}
if (field.getFieldName() == null) {
field = field.toBuilder().setFieldName(schema.nameOf(field.getFieldId())).build();
}
field = fillInMissingQualifiers(field, schema);
fields.add(field);
}
if (!getFieldInsertionOrder()) {
Function<FieldDescriptor, Integer> extract =
(Function<FieldDescriptor, Integer> & Serializable) FieldDescriptor::getFieldId;
Collections.sort(fields, Comparator.comparing(extract));
}
return fields;
} | Collections.sort(fields, Comparator.comparing(extract)); | private List<FieldDescriptor> resolveDirectFieldsAccessed(Schema schema) {
List<FieldDescriptor> fields = new ArrayList<>();
for (FieldDescriptor field : getFieldsAccessed()) {
validateFieldDescriptor(schema, field);
if (field.getFieldId() == null) {
field = field.toBuilder().setFieldId(schema.indexOf(field.getFieldName())).build();
}
if (field.getFieldName() == null) {
field = field.toBuilder().setFieldName(schema.nameOf(field.getFieldId())).build();
}
field = fillInMissingQualifiers(field, schema);
fields.add(field);
}
if (!getFieldInsertionOrder()) {
Collections.sort(fields, Comparator.comparing(FieldDescriptor::getFieldId));
}
return fields;
} | class Builder {
abstract Builder setAllFields(boolean allFields);
abstract Builder setFieldsAccessed(List<FieldDescriptor> fieldsAccessed);
abstract Builder setNestedFieldsAccessed(
Map<FieldDescriptor, FieldAccessDescriptor> nestedFieldsAccessedById);
abstract Builder setFieldInsertionOrder(boolean insertionOrder);
abstract FieldAccessDescriptor build();
} | class Builder {
abstract Builder setAllFields(boolean allFields);
abstract Builder setFieldsAccessed(List<FieldDescriptor> fieldsAccessed);
abstract Builder setNestedFieldsAccessed(
Map<FieldDescriptor, FieldAccessDescriptor> nestedFieldsAccessedById);
abstract Builder setFieldInsertionOrder(boolean insertionOrder);
abstract FieldAccessDescriptor build();
} |
Still, I am a bit unclear about this approach. As I understood what we have to do is, if first arg and expr are the same, we need to desugar either first arg or expr. If the first arg desugared via QueryDesugar it becomes a new node. So after the L:6411 can't we just assign `invocation.expr = invocation.requiredArgs.get(0)` if isExprAndRequiredArgSame is true and remove rest of the changes? Please check whether this works. | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
    BVarSymbol iteratorSymbol = varDef.var.symbol;
    // Fresh symbol holding the `iterator.next()` result, typed `record|()`.
    BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
            foreach.nillableResultType, this.env.scope.owner, foreach.pos,
            VIRTUAL);
    BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
            foreach.nillableResultType, iteratorSymbol, resultSymbol);
    BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    // Statement expression: `$result$ = it.next(); $result$` — evaluated on
    // every loop-condition check.
    BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(
            resultVariableDefinition, resultReferenceInWhile);
    statementExpression.setBType(foreach.nillableResultType);
    BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
    // Loop condition: `(<stmt expr>) is <result record type>` — false once
    // the iterator returns nil.
    BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
            .createTypeTestExpr(foreach.pos, statementExpression, userDefineType);
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.pos = foreach.pos;
    whileNode.expr = typeTestExpr;
    whileNode.body = foreach.body;
    VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
    // Bind the foreach variable from `$result$.value` at the top of the body.
    BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
    BLangExpression expr = valueAccessExpr.expr;
    valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
    variableDefinitionNode.getVariable()
            .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
    whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
    // Resulting block: { <iterator def>; while (...) { ... } }
    BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
    blockNode.addStatement(varDef);
    blockNode.addStatement(whileNode);
    return blockNode;
}
// Wraps a resolved semantic type in a user-defined type AST node so it can be
// used where the tree expects a BLangType. Package alias and type name are
// intentionally empty: only the attached BType matters downstream.
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType userDefinedType = new BLangUserDefinedType(
            ASTBuilderUtil.createIdentifier(null, ""),
            ASTBuilderUtil.createIdentifier(null, ""));
    userDefinedType.setBType(type);
    return userDefinedType;
}
@Override
public void visit(BLangWhile whileNode) {
    // Plain while loop: rewrite the condition and body in place.
    if (whileNode.onFailClause == null) {
        whileNode.expr = rewriteExpr(whileNode.expr);
        whileNode.body = rewrite(whileNode.body, env);
        result = whileNode;
        return;
    }
    // `while ... on fail`: detach the clause and desugar the loop into an
    // equivalent do-on-fail statement wrapping it.
    BLangOnFailClause detachedOnFail = whileNode.onFailClause;
    whileNode.onFailClause = null;
    whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
    BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, detachedOnFail);
    result = rewrite(doStmt, env);
}
/**
 * Builds a {@code do { statement } on fail ...} node around the given statement.
 */
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
                                      BLangOnFailClause onFailClause) {
    // Assemble the do-block body first, then attach it to the do node.
    BLangBlockStmt body = ASTBuilderUtil.createBlockStmt(location);
    body.scope = new Scope(env.scope.owner);
    body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
    body.stmts.add(statement);
    BLangDo doNode = (BLangDo) TreeBuilder.createDoNode();
    doNode.pos = location;
    doNode.body = body;
    doNode.onFailClause = onFailClause;
    return doNode;
}
// Desugars `lock { body }` into:
//   { lock; var $errorResult = trap { body; () }; unlock;
//     if $errorResult is error { panic $errorResult; } }
// so the lock is always released even when the body panics.
@Override
public void visit(BLangLock lockNode) {
    BLangOnFailClause currentOnFailClause = this.onFailClause;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    if (lockNode.onFailClause != null) {
        blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
        rewrite(lockNode.onFailClause, env);
    }
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    // Track the enclosing lock so global var refs inside the body can register
    // themselves as lock variables (see visit(BLangSimpleVarRef)).
    enclLocks.push(lockStmt);
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    // `{ body; () }` as an expression, trapped so panics become error values.
    BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.setBType(symTable.nilType);
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.setBType(nillableError);
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
            this.env.scope.owner.pkgID, nillableError,
            this.env.scope.owner, lockNode.pos, VIRTUAL);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
            nillableError, trapExpr,
            nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    // Unlock happens before the error check, i.e. on both success and panic.
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    unLockStmt.relatedLock = lockStmt;
    blockStmt.addStatement(unLockStmt);
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    // Re-raise the trapped panic after the lock has been released.
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    enclLocks.pop();
    this.onFailClause = currentOnFailClause;
}
@Override
public void visit(BLangLockStmt lockStmt) {
    // Already in its final desugared form; nothing further to rewrite.
    result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
    // Already in its final desugared form; nothing further to rewrite.
    result = unLockStmt;
}
// Builds the compiler-internal on-fail clause attached to a transaction body:
//   on fail var $trxError$ { <rollback-if-failed>;
//       if (!$shouldPanic$) { fail $trxError$; } else { panic $trxError$; } }
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
                                                  BLangSimpleVarRef shouldRetryRef) {
    BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    trxOnFailClause.pos = pos;
    trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
    trxOnFailClause.body.scope = new Scope(env.scope.owner);
    trxOnFailClause.isInternal = true;
    // `$trxError$` captures the failure delivered to the clause.
    BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
            env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
            "$trxError$", symTable.errorType, null, trxOnFailErrorSym);
    trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
            trxOnFailError);
    trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
    // Roll the transaction back (unless a retry is pending) before deciding
    // how to propagate the error.
    transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
            trxBlockId, shouldRetryRef);
    BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
    shouldNotPanic.setBType(symTable.booleanType);
    shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
    BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
    BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = pos;
    panicNode.expr = caughtError;
    // if (!$shouldPanic$) { fail ... } else { panic ... }
    BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
    trxOnFailClause.body.stmts.add(exitIf);
    BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
    failStmt.pos = pos;
    failStmt.expr = caughtError;
    failBlock.stmts.add(failStmt);
    trxOnFailClause.bodyContainsFail = true;
    return trxOnFailClause;
}
// Desugars a transaction statement. A user-written on-fail clause is first
// peeled off by wrapping the transaction in a do-on-fail; otherwise the body
// is rewritten via TransactionDesugar with a fresh block id and an internal
// on-fail clause that rolls back and routes the error.
@Override
public void visit(BLangTransaction transactionNode) {
    if (transactionNode.onFailClause != null) {
        BLangOnFailClause onFailClause = transactionNode.onFailClause;
        transactionNode.onFailClause = null;
        transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
        BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
        result = rewrite(doStmt, env);
    } else {
        // Save per-transaction desugar state; restored at the end so nested
        // transactions don't leak ids/flags into each other.
        BLangLiteral currentTrxBlockId = this.trxBlockId;
        String uniqueId = String.valueOf(++transactionBlockCount);
        this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
        boolean currShouldReturnErrors = this.shouldReturnErrors;
        this.shouldReturnErrors = true;
        BLangOnFailClause currOnFailClause = this.onFailClause;
        // `$shouldPanic$` starts false; it is a closure because the internal
        // on-fail clause reads it from a nested scope.
        BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
        BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
                env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
        shouldPanicVarSymbol.closure = true;
        BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
                "$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
        BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
                shouldPanicVariable);
        BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
                shouldPanicVarSymbol);
        BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
                this.shouldRetryRef);
        enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
        boolean userDefinedOnFailAvbl = this.onFailClause != null;
        analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
        BLangBlockStmt transactionStmtBlock =
                transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
        // `$shouldPanic$` must be defined before any transaction statements run.
        transactionStmtBlock.stmts.add(0, shouldPanicDef);
        transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
        transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
                BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
                BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
        result = rewrite(transactionStmtBlock, this.env);
        this.shouldReturnErrors = currShouldReturnErrors;
        this.trxBlockId = currentTrxBlockId;
        swapAndResetEnclosingOnFail(currOnFailClause);
    }
}
@Override
public void visit(BLangRollback rollbackNode) {
    // Delegate to the transaction desugar, then rewrite the generated block.
    BLangBlockStmt desugaredRollback =
            transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
    result = rewrite(desugaredRollback, env);
}
// Builds the compiler-internal on-fail clause used when desugaring `retry`.
// On failure it: stores the error, marks the retry loop to continue, asks the
// retry manager whether another attempt is allowed, and if not either fails to
// the user's on-fail clause or flags the error for returning.
private BLangOnFailClause createRetryInternalOnFail(Location pos,
                                                    BLangSimpleVarRef retryResultRef,
                                                    BLangSimpleVarRef retryManagerRef,
                                                    BLangSimpleVarRef shouldRetryRef,
                                                    BLangSimpleVarRef continueLoopRef,
                                                    BLangSimpleVarRef returnResult) {
    BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    internalOnFail.pos = pos;
    internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
    internalOnFail.body.scope = new Scope(env.scope.owner);
    BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
            env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
            "$caughtError$", symTable.errorType, null, caughtErrorSym);
    internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
            caughtError);
    env.scope.define(caughtErrorSym.name, caughtErrorSym);
    BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
    // $retryResult$ = $caughtError$;
    BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
    internalOnFail.body.stmts.add(errorAssignment);
    // $continueLoop$ = true;
    BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
            ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
    internalOnFail.body.stmts.add(continueLoopTrue);
    // $shouldRetry$ = retryManager.shouldRetry($caughtError$);
    BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
            retryManagerRef, caughtErrorRef);
    BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
            shouldRetryInvocation);
    internalOnFail.body.stmts.add(shouldRetryAssignment);
    // ((!$shouldRetry$)) — double-grouped boolean guard for the exit branch.
    BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
    shouldNotRetryCheck.setBType(symTable.booleanType);
    shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
    BLangGroupExpr exitCheck = new BLangGroupExpr();
    exitCheck.setBType(symTable.booleanType);
    exitCheck.expression = shouldNotRetryCheck;
    BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
    if (this.onFailClause != null) {
        // A user on-fail exists: when retries are exhausted, `fail` into it;
        // otherwise `continue` the retry loop.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = pos;
        failStmt.expr = retryResultRef;
        exitLogicBlock.stmts.add(failStmt);
        internalOnFail.bodyContainsFail = true;
        internalOnFail.body.stmts.add(exitIf);
        BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
        loopContinueStmt.pos = pos;
        internalOnFail.body.stmts.add(loopContinueStmt);
    } else {
        // No user on-fail: flag the error so the retry wrapper returns it.
        BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
                ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
        exitLogicBlock.stmts.add(returnErrorTrue);
        internalOnFail.body.stmts.add(exitIf);
    }
    return internalOnFail;
}
/** Creates a boolean negation (`!expression`) unary expression node. */
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
    // Operator signature of `!`: (boolean) -> boolean.
    List<BType> paramTypes = new ArrayList<>();
    paramTypes.add(symTable.booleanType);
    BInvokableType notOpType = new BInvokableType(paramTypes, symTable.booleanType, null);
    BOperatorSymbol notOpSymbol = new BOperatorSymbol(
            names.fromString(OperatorKind.NOT.value()),
            symTable.rootPkgSymbol.pkgID,
            notOpType,
            symTable.rootPkgSymbol,
            symTable.builtinPos,
            VIRTUAL);
    return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,
            OperatorKind.NOT, notOpSymbol);
}
// Creates a lambda node backed by a freshly named, package-level function with
// the given parameters, return type and body, and registers that function in
// the enclosing package.
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                         List<BLangSimpleVariable> lambdaFunctionVariable,
                                         TypeNode returnType, BLangFunctionBody lambdaBody) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    // Unique name: <prefix>_<counter> — counter is bumped per generated lambda.
    BLangFunction func =
            ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.requiredParams.addAll(lambdaFunctionVariable);
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    defineFunction(func, env.enclPkg);
    // Re-read the params from the function node: defining the function above
    // may have replaced/augmented the parameter list.
    lambdaFunctionVariable = func.requiredParams;
    func.body = lambdaBody;
    func.desugared = false;
    lambdaFunction.pos = pos;
    // The lambda's type mirrors the generated function's signature.
    List<BType> paramTypes = new ArrayList<>();
    lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
    lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
            null));
    return lambdaFunction;
}
/**
 * Convenience overload: builds a block body from the given statements
 * (rewritten in a fresh function-body environment) and delegates to the
 * body-based {@code createLambdaFunction}.
 */
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                                   List<BLangSimpleVariable> lambdaFunctionVariable,
                                                   TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                   SymbolEnv env, Scope bodyScope) {
    BLangBlockFunctionBody blockBody =
            (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    blockBody.scope = bodyScope;
    // The body env is created after the scope is attached, then the statements
    // are rewritten inside it.
    SymbolEnv funcBodyEnv = SymbolEnv.createFuncBodyEnv(blockBody, env);
    blockBody.stmts = rewriteStmt(fnBodyStmts, funcBodyEnv);
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, blockBody);
}
// Registers a generated function in the target package: defines its symbol in
// the package environment and records the node among the package's functions
// and top-level nodes.
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    final SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, pkgEnv);
    pkgEnv.enclPkg.functions.add(funcNode);
    pkgEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    // fork/join requires no desugaring at this stage; pass it through as-is.
    result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
    BType literalType = literalExpr.getBType();
    // byte[] literals (base16/base64 blob text) are expanded into explicit
    // array literals; every other literal passes through untouched.
    boolean isByteArrayLiteral = literalType.tag == TypeTags.ARRAY
            && ((BArrayType) literalType).eType.tag == TypeTags.BYTE;
    result = isByteArrayLiteral ? rewriteBlobLiteral(literalExpr) : literalExpr;
}
// Expands a blob literal (e.g. base64 `...` / base16 `...`) into an array
// literal of byte literals so the backend sees plain array construction.
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    // result[0] = scheme, result[1] = whitespace-stripped payload.
    String[] result = getBlobTextValue((String) literalExpr.value);
    byte[] values;
    if (BASE_64.equals(result[0])) {
        values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
    } else {
        // Anything other than base64 is decoded as hexadecimal text.
        values = hexStringToByteArray(result[1]);
    }
    BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteralNode.setBType(literalExpr.getBType());
    arrayLiteralNode.pos = literalExpr.pos;
    arrayLiteralNode.exprs = new ArrayList<>();
    for (byte b : values) {
        arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
    }
    return arrayLiteralNode;
}
/**
 * Splits blob literal text such as {@code base64 `SGVsbG8=`} into its scheme
 * (element 0) and its backtick-delimited payload (element 1), with every tab,
 * newline, carriage return and space removed first.
 */
private String[] getBlobTextValue(String blobLiteralNodeText) {
    String compact = blobLiteralNodeText
            .replace("\t", "")
            .replace("\n", "")
            .replace("\r", "")
            .replace(" ", "");
    int openTick = compact.indexOf('`');
    int closeTick = compact.lastIndexOf('`');
    String[] parts = new String[2];
    parts[0] = compact.substring(0, openTick);
    parts[1] = compact.substring(openTick + 1, closeTick);
    return parts;
}
/**
 * Decodes a hexadecimal string (e.g. {@code "0A1B"}) into its raw bytes,
 * two hex characters per byte.
 *
 * @param str hex text; must have even length and contain only hex digits
 * @return the decoded bytes ({@code str.length() / 2} of them)
 * @throws IllegalArgumentException if the length is odd or a character is not
 *     a valid hexadecimal digit
 */
private static byte[] hexStringToByteArray(String str) {
    int len = str.length();
    if ((len & 1) != 0) {
        throw new IllegalArgumentException("Invalid hex string of odd length: " + len);
    }
    byte[] data = new byte[len / 2];
    for (int i = 0; i < len; i += 2) {
        int hi = Character.digit(str.charAt(i), 16);
        int lo = Character.digit(str.charAt(i + 1), 16);
        // Character.digit returns -1 for non-hex characters; previously that
        // silently produced garbage bytes — fail loudly instead.
        if (hi < 0 || lo < 0) {
            throw new IllegalArgumentException("Invalid hex digit at index " + i + " in: " + str);
        }
        data[i / 2] = (byte) ((hi << 4) + lo);
    }
    return data;
}
@Override
public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {
    // Only the spread operand needs rewriting; the spread node itself stays.
    listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);
    result = listConstructorSpreadOpExpr;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BType listConstructorType = Types.getReferredType(listConstructor.getBType());
    // Replace the generic list constructor with the concrete literal node that
    // matches the contextually expected type, then rewrite that node once.
    BLangExpression replacement;
    if (listConstructorType.tag == TypeTags.TUPLE) {
        replacement = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs,
                listConstructor.getBType());
    } else if (listConstructorType.tag == TypeTags.JSON) {
        replacement = new BLangJSONArrayLiteral(listConstructor.exprs,
                new BArrayType(listConstructor.getBType()));
    } else if (getElementType(listConstructorType).tag == TypeTags.JSON) {
        replacement = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
    } else if (listConstructorType.tag == TypeTags.TYPEDESC) {
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        replacement = typedescExpr;
    } else {
        replacement = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs,
                listConstructor.getBType());
    }
    result = rewriteExpr(replacement);
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    // Rewrite each member record literal in place; the constructor node stays.
    rewriteExprs(tableConstructorExpr.recordLiteralList);
    result = tableConstructorExpr;
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    BType arrayLiteralType = Types.getReferredType(arrayLiteral.getBType());
    // json-typed arrays get a dedicated JSON array literal node; everything
    // else passes through unchanged.
    if (arrayLiteralType.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
    } else if (getElementType(arrayLiteralType).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
    } else {
        result = arrayLiteral;
    }
}
// Desugars a tuple literal: typedesc-valued literals become a typedesc
// expression; otherwise each member expression gets an implicit cast to its
// positional member type (or the rest type once a variable-length spread makes
// further positions unknowable), then the members are rewritten.
// (Also removes the previously unused local `tupleExprSize`.)
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    if (tupleLiteral.isTypedescExpr) {
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
        return;
    }
    List<BLangExpression> exprs = tupleLiteral.exprs;
    BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
    List<BType> tupleMemberTypes = tupleType.tupleTypes;
    int tupleMemberTypeSize = tupleMemberTypes.size();
    // Tracks the member-type index; once a spread of unknown length is seen,
    // all following members are typed against the rest type.
    boolean isInRestType = false;
    int i = 0;
    for (BLangExpression expr : exprs) {
        if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();
            spreadOpType = Types.getReferredType(spreadOpType);
            if (spreadOpType.tag == TypeTags.ARRAY) {
                BArrayType spreadOpBArray = (BArrayType) spreadOpType;
                // A fixed-size array spread advances the index by a known amount.
                if (spreadOpBArray.size >= 0) {
                    i += spreadOpBArray.size;
                    continue;
                }
            } else {
                BTupleType spreadOpTuple = (BTupleType) spreadOpType;
                if (types.isFixedLengthTuple(spreadOpTuple)) {
                    i += spreadOpTuple.tupleTypes.size();
                    continue;
                }
            }
            // Unknown-length spread: later members fall into the rest type.
            isInRestType = true;
            continue;
        }
        BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
        BType targetType = tupleType.restType;
        if (!isInRestType && i < tupleMemberTypeSize) {
            targetType = tupleMemberTypes.get(i);
        }
        types.setImplicitCastExpr(expr, expType, targetType);
        i++;
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // Parentheses carry no runtime meaning; desugar to the inner expression.
    result = rewriteExpr(groupExpr.expression);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    // Stable-sort so computed-key fields go last (false sorts before true),
    // preserving relative order within each group, then desugar the mapping
    // constructor and rewrite the result.
    recordLiteral.fields.sort(
            (left, right) -> Boolean.compare(isComputedKey(left), isComputedKey(right)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
// Desugars a simple variable reference into the concrete ref node the backend
// expects (function/type/local/field/package ref), based on the symbol's tag
// and its owner, and inserts an implicit conversion where the expression type
// differs from the symbol type.
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // XML namespace prefix used as a value: becomes a string-typed QName.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.setBType(symTable.stringType);
        result = qnameExpr;
        return;
    }
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Prefer the original symbol when this one is a derived/shadow copy.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    BType type = varRefExpr.getBType();
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
            Types.getReferredType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {
        // Function referenced as a value (function pointer).
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
            !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        // Type referenced as a value: load its typedesc.
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
        if (varRefExpr.symbol.tag == SymTag.TYPE_DEF) {
            type = ((BTypeDefinitionSymbol) varRefExpr.symbol).referenceType;
        }
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        // Local variable (owned by a function or a let expression).
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        // Object/record field reference.
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Simple-literal constants are inlined as literals instead of loads.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            BType referredType = Types.getReferredType(constSymbol.literalType);
            if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
        // Inside a lock, register this global (and its dependencies) as a
        // variable the lock must guard.
        if (!enclLocks.isEmpty()) {
            BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
            BLangLockStmt lockStmt = enclLocks.peek();
            lockStmt.addLockVariable(symbol);
            lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
        }
    }
    genVarRefExpr.setBType(type);
    genVarRefExpr.pos = varRefExpr.pos;
    // LValues (and `_`) are returned directly, typed by the symbol.
    if ((varRefExpr.isLValue)
            || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.isLValue = varRefExpr.isLValue;
        genVarRefExpr.setBType(varRefExpr.symbol.type);
        result = genVarRefExpr;
        return;
    }
    // RValues: type the ref by the symbol and convert to the expression type.
    genVarRefExpr.isLValue = varRefExpr.isLValue;
    BType targetType = genVarRefExpr.getBType();
    genVarRefExpr.setBType(genVarRefExpr.symbol.type);
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    // Namespace-prefixed access shares the generic field-access desugaring.
    rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}
// Desugars `expr.field` into the access node specific to the receiver's type
// (object/record field, lax/json access, map access, XML access, ...). Safe
// navigation (`?.`/error-lifting) is handled up front via a separate rewrite.
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    // Field name as a string literal key (escapes resolved).
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    BType refType = Types.getReferredType(varRefType);
    int varRefTypeTag = refType.tag;
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        // Attached method accessed as a value: rewrite to a closure delegate.
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;
            // Field writes inside the object's own (generated) initializer
            // count as store-on-creation, which relaxes later checks.
            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;
                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
                        (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false,
                    isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(refType)) {
        // Lax types: maps of XML get a dedicated rewrite, XML values go
        // through attribute/element-name access, everything else is json.
        if (!(refType.tag == TypeTags.XML || refType.tag == TypeTags.XML_ELEMENT)) {
            if (refType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) refType).constraint.tag)) {
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    // Carry over lvalue-ness, type and optional-access flag to the new node.
    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
// Delegate to the common rewrite routine shared by all field-based access forms.
rewriteFieldBasedAccess(fieldAccessExpr);
}
/**
 * Desugars an object method accessed as a field value (e.g. {@code var f = obj.method;})
 * into a synthetic anonymous function (a method-delegate lambda) that captures the
 * receiver as a closure and forwards its parameters to the original method.
 * <p>
 * Returns the rewritten lambda, or a statement expression that first evaluates the
 * receiver into a temp variable when the receiver is not a simple variable reference.
 */
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
Location pos = fieldAccessExpr.pos;
BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;
// Build the delegate function shell: same invokable type as the original method.
BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
String funcName = "$anon$method$delegate$" + originalMemberFuncSymbol.name.value + "$" + lambdaFunctionCount++;
BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
Names.fromString(funcName), env.enclPkg.packageID,
originalMemberFuncSymbol.type, env.scope.owner, pos,
VIRTUAL);
funcSymbol.retType = originalMemberFuncSymbol.retType;
funcSymbol.bodyExist = true;
funcSymbol.params = new ArrayList<>();
funcSymbol.scope = new Scope(funcSymbol);
func.pos = pos;
func.name = createIdentifier(pos, funcName);
func.flagSet.add(Flag.LAMBDA);
func.flagSet.add(Flag.ANONYMOUS);
func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
func.symbol = funcSymbol;
func.setBType(funcSymbol.type);
func.closureVarSymbols = new LinkedHashSet<>();
// Capture the receiver as a closure variable. A non-variable receiver is first
// evaluated into a temp var so it is evaluated exactly once.
BLangExpression receiver = fieldAccessExpr.expr;
BLangSimpleVariableDef intermediateObjDef = null;
if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
receiverSymbol.closure = true;
func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
} else {
BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
receiver, pos);
intermediateObjDef = varDef;
varDef.var.symbol.closure = true;
env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
receiver = variableRef;
}
// Mirror each required parameter of the original method on the delegate and
// forward it as an argument.
ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
for (BVarSymbol param : originalMemberFuncSymbol.params) {
BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
VIRTUAL);
fParam.pos = pos;
fParam.name = createIdentifier(pos, param.name.value);
fParam.setBType(param.type);
func.requiredParams.add(fParam);
funcSymbol.params.add(fParam.symbol);
funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);
BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
requiredArgs.add(paramRef);
}
// Mirror the rest parameter (if any) and forward it as a spread (`...rest`) argument.
ArrayList<BLangExpression> restArgs = new ArrayList<>();
if (originalMemberFuncSymbol.restParam != null) {
BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
func.restParam = restParam;
BVarSymbol restSym = originalMemberFuncSymbol.restParam;
restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
VIRTUAL);
restParam.pos = pos;
restParam.setBType(restSym.type);
funcSymbol.restParam = restParam.symbol;
funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);
BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
restArgExpr.expr = restArg;
restArgExpr.pos = pos;
restArgExpr.setBType(restSym.type);
restArgExpr.expectedType = restArgExpr.getBType();
restArgs.add(restArgExpr);
}
// Delegate body: `return receiver.method(args..., ...rest);`
BLangIdentifier field = fieldAccessExpr.field;
BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
retStmt.expr = createObjectMethodInvocation(
receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
((BLangBlockFunctionBody) func.body).addStatement(retStmt);
// Lift the delegate to the package level and wrap it in a lambda expression node.
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaFunction.function = func;
lambdaFunction.capturedClosureEnv = env.createClone();
env.enclPkg.functions.add(func);
env.enclPkg.topLevelNodes.add(func);
lambdaFunction.parent = env.enclInvokable;
lambdaFunction.setBType(func.getBType());
if (intermediateObjDef == null) {
return rewrite(lambdaFunction, env);
} else {
// Evaluate the receiver temp first, then yield the lambda.
BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
expr.setBType(lambdaFunction.getBType());
return rewrite(expr, env);
}
}
/**
 * Builds an attached-method invocation node {@code receiver.field(requiredArgs, ...restArgs)}
 * bound to the given invokable symbol. The invocation's static type is the method's
 * declared return type.
 */
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.symbol = invocableSymbol;
    invocation.expr = receiver;
    invocation.name = field;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = restArgs;
    // The invocation evaluates to the method's declared return type.
    invocation.setBType(((BInvokableType) invocableSymbol.type).retType);
    return invocation;
}
/**
 * Desugars a lax field access on a map with an XML constraint (e.g. {@code m.x} for
 * {@code map<xml> m}) into a statement expression:
 * <pre>
 *   (member|error) $mapAccessResult$;
 *   (member|())    $mapAccess$ = m["x"];
 *   if $mapAccess$ is () { $mapAccessResult$ = error("{map}InvalidKey", key = "x"); }
 *   else                 { $mapAccessResult$ = $mapAccess$; }
 *   => $mapAccessResult$
 * </pre>
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
BLangStatementExpression statementExpression = new BLangStatementExpression();
BLangBlockStmt block = new BLangBlockStmt();
statementExpression.stmt = block;
// Overall result: the accessed member type or error.
BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
Location pos = fieldAccessExpr.pos;
BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
block.addStatement(result);
BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
resultRef.setBType(fieldAccessType);
statementExpression.setBType(fieldAccessType);
// Raw map access produces the member or nil when the key is absent.
BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
mapAccessExpr.setBType(xmlOrNil);
BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
block.addStatement(mapResult);
// if (<nil>) { assign error } else { assign member }
BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
ifStmt.expr = isLikeNilExpr;
BLangBlockStmt resultNilBody = new BLangBlockStmt();
ifStmt.body = resultNilBody;
BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
ifStmt.elseStmt = resultHasValueBody;
// error("{map}InvalidKey", key = <field name>)
BLangErrorConstructorExpr errorConstructorExpr =
(BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
names.fromString(""), names.fromString("error"));
errorConstructorExpr.setBType(symbol.type);
List<BLangExpression> positionalArgs = new ArrayList<>();
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
BLangNamedArgsExpression message = new BLangNamedArgsExpression();
message.name = ASTBuilderUtil.createIdentifier(pos, "key");
message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
namedArgs.add(message);
errorConstructorExpr.positionalArgs = positionalArgs;
errorConstructorExpr.namedArgs = namedArgs;
BLangSimpleVariableDef errorDef =
createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
resultNilBody.addStatement(errorDef);
BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
errorVarAssignment.varRef = resultRef;
errorVarAssignment.expr = errorRef;
BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
pos, resultHasValueBody);
mapResultAssignment.varRef = resultRef;
mapResultAssignment.expr = mapResultRef;
statementExpression.expr = resultRef;
return statementExpression;
}
/**
 * Rewrites a field access on an XML value into the corresponding lang-lib call:
 * {@code x._} becomes an element-name read, anything else an attribute read
 * (namespace-qualified names are expanded to the {@code {uri}local} form first).
 */
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    String name = fieldAccessExpr.field.value;
    // A namespace-qualified access (ns:foo) is expanded to "{namespaceURI}foo".
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        name = createExpandedQName(nsAccess.nsSymbol.namespaceURI, name);
    }
    // `x._` reads the element name rather than an attribute.
    if ("_".equals(name)) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    ArrayList<BLangExpression> attrArgs = new ArrayList<>();
    attrArgs.add(createStringLiteral(fieldAccessExpr.field.pos, name));
    attrArgs.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr,
            attrArgs, new ArrayList<>());
}
/**
 * Materializes whether the given access is optional ({@code x?.y}) as a rewritten
 * boolean literal, for use as a lang-lib call argument.
 */
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression isOptionalLiteral =
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess());
    return rewrite(isOptionalLiteral, env);
}
/**
 * Builds the expanded XML qualified-name form {@code {namespaceURI}localName}.
 */
private String createExpandedQName(String nsURI, String localName) {
return "{" + nsURI + "}" + localName;
}
/**
 * Rewrites an index-based access {@code e[i]} into the container-specific access
 * node (map/record/list/xml/string/table) chosen from the static type of {@code e}.
 */
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
// Optional/error-lifting navigation is desugared separately.
if (safeNavigate(indexAccessExpr)) {
result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
return;
}
BLangIndexBasedAccess targetVarRef = indexAccessExpr;
indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
// Use the effective type of intersections to pick the access node kind.
BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
BType varRefType = Types.getReferredType(effectiveType);
indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
}
if (varRefType.tag == TypeTags.MAP) {
targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
} else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
// Record-like mapping: field access through the resolved field symbol.
targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr,
(BVarSymbol) indexAccessExpr.symbol, false);
} else if (types.isSubTypeOfList(varRefType)) {
targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (types.isAssignable(varRefType, symTable.stringType)) {
indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (varRefType.tag == TypeTags.TABLE) {
targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
}
// Carry over lvalue-ness and the originally inferred type.
targetVarRef.isLValue = indexAccessExpr.isLValue;
targetVarRef.setBType(indexAccessExpr.getBType());
result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
// Plain function/method invocations are never async (`start` calls use a different node).
rewriteInvocation(iExpr, false);
}
/**
 * Desugars an error constructor {@code error(msg, cause, k = v, ...)}: supplies an
 * explicit nil cause when omitted, and materializes the named args as the error's
 * detail record (made immutable via clone-readonly).
 */
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
// Only the message was given: add an explicit nil cause at position 1.
if (errorConstructorExpr.positionalArgs.size() == 1) {
errorConstructorExpr.positionalArgs.add(createNilLiteral());
}
errorConstructorExpr.positionalArgs.set(1,
addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
rewriteExprs(errorConstructorExpr.positionalArgs);
BLangExpression errorDetail;
BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
if (errorConstructorExpr.namedArgs.isEmpty()) {
errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
} else {
// Each named arg becomes a key-value field of the detail record literal.
for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
symTable.stringType, namedArg.name.value));
if (Types.getReferredType(recordLiteral.getBType()).tag == TypeTags.RECORD) {
member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
} else {
member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
}
recordLiteral.fields.add(member);
}
errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
}
errorConstructorExpr.errorDetail = errorDetail;
result = errorConstructorExpr;
}
/**
 * Rewrites a remote/client action invocation. Synchronous calls inside a transaction
 * ensure the transaction coordinator is started; isolated async calls get the strand
 * annotation so they may run on any thread.
 */
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    boolean async = actionInvocation.async;
    if (actionInvocation.invokedInsideTransaction && !async) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }
    if (async && Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {
        addStrandAnnotationWithThreadAny(actionInvocation);
    }
    rewriteInvocation(actionInvocation, async);
}
/**
 * Attaches the shared {@code @strand {thread: "any"}} annotation to the given action
 * invocation and its symbol, lazily creating the attachment once per desugar run.
 */
private void addStrandAnnotationWithThreadAny(BLangInvocation.BLangActionInvocation actionInvocation) {
if (this.strandAnnotAttachement == null) {
BLangPackage pkgNode = env.enclPkg;
// Snapshot the type defs so only the ones added while creating the annotation
// get init functions generated below.
List<BLangTypeDefinition> prevTypeDefinitions = new ArrayList<>(pkgNode.typeDefinitions);
this.strandAnnotAttachement =
annotationDesugar.createStrandAnnotationWithThreadAny(actionInvocation.pos, env);
addInitFunctionForRecordTypeNodeInTypeDef(pkgNode, prevTypeDefinitions);
}
actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);
((BInvokableSymbol) actionInvocation.symbol)
.addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);
}
/**
 * Common invocation rewrite: registers lock dependencies, reorders and rewrites
 * arguments, dispatches function-pointer calls, resolves parameterized return types,
 * converts object/record attached calls into attached-function invocations, and
 * fixes casts for type-param returns.
 *
 * @param invocation the invocation to rewrite (also the default result)
 * @param async      whether this is an async (`start`) invocation
 */
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
BLangInvocation invRef = invocation;
// Calls made while holding locks contribute their global-var dependencies to the lock set.
if (!enclLocks.isEmpty()) {
BLangLockStmt lock = enclLocks.peek();
lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
}
reorderArguments(invocation);
// For langlib-style calls the receiver may also be the first required arg;
// remember that so the (already rewritten) arg is reused as the receiver below.
boolean isExprAndRequiredArgSame = !invocation.requiredArgs.isEmpty() &&
invocation.requiredArgs.get(0) == invocation.expr;
rewriteExprs(invocation.requiredArgs);
BLangExpression firstRequiredArg = isExprAndRequiredArgSame ? invocation.requiredArgs.get(0) : null;
fixStreamTypeCastsInInvocationParams(invocation);
fixNonRestArgTypeCastInTypeParamInvocation(invocation);
rewriteExprs(invocation.restArgs);
annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
invocation.symbol.pkgID, invocation.symbol.owner, env);
if (invocation.functionPointerInvocation) {
visitFunctionPointerInvocation(invocation);
return;
}
invocation.expr = isExprAndRequiredArgSame ? firstRequiredArg : invocation.expr;
invocation.expr = rewriteExpr(invocation.expr);
result = invRef;
// Parameterized (dependent) return types are resolved against the actual arguments.
BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
BType retType = unifier.build(invSym.retType);
invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
}
if (invocation.expr == null) {
fixTypeCastInTypeParamInvocation(invocation, invRef);
if (invocation.exprSymbol == null) {
return;
}
invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
invocation.expr = rewriteExpr(invocation.expr);
}
switch (Types.getReferredType(invocation.expr.getBType()).tag) {
case TypeTags.OBJECT:
case TypeTags.RECORD:
// Attached (method) calls pass the receiver as the implicit first argument.
if (!invocation.langLibInvocation) {
List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
argExprs.add(0, invocation.expr);
BLangAttachedFunctionInvocation attachedFunctionInvocation =
new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
invocation.symbol, invocation.getBType(),
invocation.expr, async);
attachedFunctionInvocation.name = invocation.name;
attachedFunctionInvocation.annAttachments = invocation.annAttachments;
result = invRef = attachedFunctionInvocation;
}
break;
}
// Object-constructor init calls with closures remember their invocation for later wiring.
if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {
BObjectType initializingObject = (BObjectType) invocation.expr.getBType();
BLangClassDefinition classDef = initializingObject.classDef;
if (classDef.hasClosureVars) {
OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;
if (oceEnvData.attachedFunctionInvocation == null) {
oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) result;
}
}
}
fixTypeCastInTypeParamInvocation(invocation, invRef);
}
/**
 * For lang-lib invocations, casts each non-rest argument to the corresponding
 * declared parameter type where a conversion is required.
 */
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    // Only lang-lib invocations need this adjustment.
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BVarSymbol> declaredParams = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int argIndex = 0; argIndex < args.size(); argIndex++) {
        BType paramType = declaredParams.get(argIndex).type;
        args.set(argIndex, addConversionExprIfRequired(args.get(argIndex), paramType));
    }
}
/* This function is a workaround and needs improvement.
* Notes for improvement:
* 1. Both arguments are the same.
* 2. Due to the current type-param logic, the type-param flag is put on the original type.
* 3. For an error type whose Cloneable type carries the type-param flag, this code changes the expression type.
* 4. Using the error type is a problem because Cloneable is itself a type param (e.g. ExprBodiedFunctionTest);
* `never` was added to the Cloneable type param:
* @typeParam type
* CloneableType Cloneable|never;
*
*/
/**
 * For lang-lib calls or invokables whose return type mentions a type param, resets the
 * invocation's type to the declared return type and casts the result back to the
 * originally inferred type.
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    BType invokableRetType = ((BInvokableSymbol) iExpr.symbol).retType;
    boolean needsFix = iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(invokableRetType);
    if (!needsFix) {
        return;
    }
    BType expectedType = genIExpr.getBType();
    if (!genIExpr.async) {
        // Give the invocation its declared return type; the cast below restores the
        // originally inferred type for downstream phases.
        genIExpr.setBType(invokableRetType);
    }
    this.result = addConversionExprIfRequired(genIExpr, expectedType);
}
/**
 * Casts each required argument whose declared parameter type is a stream to that
 * parameter type, where a conversion is required.
 */
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    if (params.isEmpty()) {
        return;
    }
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BVarSymbol param = params.get(idx);
        // Only stream-typed parameters need the explicit conversion.
        if (Types.getReferredType(param.type).tag == TypeTags.STREAM) {
            args.set(idx, addConversionExprIfRequired(args.get(idx), param.type));
        }
    }
}
/**
 * Creates a `()` literal: the nil value with the nil static type.
 */
private BLangLiteral createNilLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
/**
 * Desugars `new T(...)`: stream constructions get their dedicated lowering, every
 * other type-init is an object construction.
 */
public void visit(BLangTypeInit typeInitExpr) {
    int typeTag = Types.getReferredType(typeInitExpr.getBType()).tag;
    if (typeTag != TypeTags.STREAM) {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    } else {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    }
}
/**
 * Desugars {@code new T(args)} for object types into a block that allocates the
 * object, invokes the generated init method, and — when init can return an error —
 * selects either the error or the constructed object as the overall value.
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
typeInitExpr.desugared = true;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
BType objType = getObjectType(typeInitExpr.getBType());
// $obj$ = new T  (raw allocation; init is invoked separately below)
BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
objVarDef.var.name.pos = symTable.builtinPos;
BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
blockStmt.addStatement(objVarDef);
// Re-point the init invocation at the generated initializer with $obj$ as receiver.
BLangInvocation typeInitInvocation = typeInitExpr.initInvocation;
typeInitInvocation.exprSymbol = objVarDef.var.symbol;
typeInitInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
typeInitInvocation.objectInitMethod = true;
// init returns nil: just call it and yield $obj$.
if (Types.getReferredType(typeInitInvocation.getBType()).tag == TypeTags.NIL) {
BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
initInvExpr.expr = typeInitInvocation;
typeInitInvocation.name.value = GENERATED_INIT_SUFFIX.value;
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
stmtExpr.setBType(objVarRef.symbol.type);
return stmtExpr;
}
// init may return an error:
//   $temp$ = init(); if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitInvocation.getBType(),
typeInitInvocation, typeInitExpr.pos);
blockStmt.addStatement(initInvRetValVarDef);
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
blockStmt.addStatement(resultVarDef);
BLangSimpleVarRef initRetValVarRefInCondition =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
isErrorTest.setBType(symTable.booleanType);
BLangSimpleVarRef thenInitRetValVarRef =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
BLangSimpleVarRef thenResultVarRef =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
BLangAssignment errAssignment =
ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
thenStmt.addStatement(errAssignment);
BLangSimpleVarRef elseResultVarRef =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
BLangAssignment objAssignment =
ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
elseStmt.addStatement(objAssignment);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
blockStmt.addStatement(ifelse);
BLangSimpleVarRef resultVarRef =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(resultVarRef.symbol.type);
return stmtExpr;
}
/**
 * Desugars `new stream<C, E>(...)` into a call to the internal `constructStream`
 * function, passing typedescs for the constraint and completion types plus the
 * optional iterator argument.
 */
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BStreamType streamType = (BStreamType) typeInitExpr.getBType();
    // typedesc arguments for the stream's constraint and completion types.
    BLangTypedescExpr constraintTd = new BLangTypedescExpr();
    constraintTd.resolvedType = streamType.constraint;
    constraintTd.setBType(new BTypedescType(streamType.constraint, symTable.typeDesc.tsymbol));
    BLangTypedescExpr completionTd = new BLangTypedescExpr();
    completionTd.resolvedType = streamType.completionType;
    completionTd.setBType(new BTypedescType(streamType.completionType, symTable.typeDesc.tsymbol));
    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTd, completionTd));
    // The optional iterator argument supplied to `new stream<...>(...)`, if any.
    if (!typeInitExpr.argsExpr.isEmpty()) {
        args.add(typeInitExpr.argsExpr.get(0));
    }
    BInvokableSymbol constructStreamSym = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, constructStreamSym, args, symResolver);
    streamConstructInvocation.setBType(
            new BStreamType(TypeTags.STREAM, streamType.constraint, streamType.completionType, null));
    return streamConstructInvocation;
}
/**
 * Creates a simple variable definition `type name = expr;` at the given location,
 * reusing an already-defined symbol with this name when one exists in the current
 * scope, otherwise minting a fresh one.
 */
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    BSymbol varSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    if (varSym == null || varSym == symTable.notFoundSymbol) {
        varSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSym);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(location);
    varDef.var = variable;
    varDef.setBType(variable.getBType());
    return varDef;
}
/**
 * Resolves the object type out of a type-init context: the type itself when it is an
 * object, or the first object member of a union (noType when the union has none).
 *
 * @throws IllegalStateException when the type is neither an object nor a union
 */
private BType getObjectType(BType bType) {
    BType referredType = Types.getReferredType(bType);
    if (referredType.tag == TypeTags.OBJECT) {
        return referredType;
    }
    if (referredType.tag == TypeTags.UNION) {
        // Pick the first object member of the union, defaulting to noType when absent.
        for (BType memberType : ((BUnionType) referredType).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException(
            "None object type '" + referredType.toString() + "' found in object init context");
}
/**
 * Creates a synthetic `error` type node anchored at the builtin position.
 */
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorType.pos = symTable.builtinPos;
    errorType.setBType(symTable.errorType);
    return errorType;
}
/**
 * Creates a synthetic `error?` type node anchored at the builtin position.
 */
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    // Set the builtin position for consistency with getErrorTypeNode(); synthesized
    // nodes with a null position can break position-dependent later phases.
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
/**
 * Desugars `cond ? thenExpr : elseExpr` into
 * <pre>
 *   T $ternary_result$;
 *   if (cond) { $ternary_result$ = thenExpr; } else { $ternary_result$ = elseExpr; }
 * </pre>
 * wrapped in a statement expression that evaluates to {@code $ternary_result$}.
 */
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    Location pos = ternaryExpr.pos;
    BLangSimpleVariableDef resultDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, pos);
    // then branch: $ternary_result$ = thenExpr
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    BLangSimpleVarRef ifResultRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    ifBody.addStatement(ASTBuilderUtil.createAssignmentStmt(pos, ifResultRef, ternaryExpr.thenExpr));
    // else branch: $ternary_result$ = elseExpr
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
    BLangSimpleVarRef elseResultRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    elseBody.addStatement(ASTBuilderUtil.createAssignmentStmt(pos, elseResultRef, ternaryExpr.elseExpr));
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(pos, ternaryExpr.expr, ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, Lists.of(resultDef, ifElse));
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a wait expression: `wait a | b | c` arrives as nested binary expressions
 * and is flattened into a list of alternatives; a single future is rewritten as-is.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() != NodeKind.BINARY_EXPR) {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    } else {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    }
    result = waitExpr;
}
/**
 * In-order traversal of a nested wait-alternative tree: visits the left operand,
 * then the right, accumulating the rewritten leaf expressions into {@code exprs}.
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
return exprs;
}
/**
 * Recurses into nested wait alternatives; rewrites and collects the leaf expressions.
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        exprs.add(rewriteExpr(expr));
    } else {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
    }
}
/**
 * Desugars `wait {k1: f1, k2, ...}`: rewrites each key-value pair (a pair without a
 * value expression waits on the key itself) and wraps the pairs in a wait literal.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    for (var keyValue : waitExpr.keyValuePairs) {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    }
    BLangExpression waitLiteral =
            new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    waitLiteral.pos = waitExpr.pos;
    result = rewriteExpr(waitLiteral);
}
/**
 * Desugars a trap expression: the trapped expression is rewritten, and unless its
 * type is nil it is converted to the trap's own (error-including) type.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    BType trappedType = Types.getReferredType(trapExpr.expr.getBType());
    if (trappedType.tag != TypeTags.NIL) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());
    }
    result = trapExpr;
}
/**
 * Desugars binary expressions: range operators become IntRange constructions,
 * logical AND/OR are handled with short-circuiting, nullable operands are lifted
 * into an if/else statement expression, and mixed numeric operands are cast to a
 * common type (decimal first, then float) before the operation.
 */
@Override
public void visit(BLangBinaryExpr binaryExpr) {
// A nullable operand lifts the whole expression into an if/else statement expression.
if (isNullableBinaryExpr(binaryExpr)) {
BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
result = rewrite(stmtExpr, env);
return;
}
// a ... b / a ..< b become IntRange values (end adjusted for half-open ranges).
if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
BLangExpression lhsExpr = binaryExpr.lhsExpr;
BLangExpression rhsExpr = binaryExpr.rhsExpr;
lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);
rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);
if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);
}
result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));
return;
}
if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
visitBinaryLogicalExpr(binaryExpr);
return;
}
OperatorKind binaryOpKind = binaryExpr.opKind;
// Arithmetic/bitwise ops validate byte-compatibility of operands up front.
if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
checkByteTypeIncompatibleOperations(binaryExpr);
}
binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
result = binaryExpr;
int rhsExprTypeTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
int lhsExprTypeTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
// Equality between byte and int compares their int values.
if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
binaryExpr.opKind == OperatorKind.REF_EQUAL ||
binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
return;
}
if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
return;
}
}
boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
// Same-typed operands need no conversion, except shift/arithmetic on non-value types.
if (lhsExprTypeTag == rhsExprTypeTag) {
if (!isBinaryShiftOperator && !isArithmeticOperator) {
return;
}
if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
return;
}
}
// string + xml concatenation: wrap the string side as an XML text literal.
if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
(rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
binaryExpr.lhsExpr.pos, symTable.xmlType);
return;
}
if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
(lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
binaryExpr.rhsExpr.pos, symTable.xmlType);
return;
}
// Mixed numeric operands: cast toward decimal first, then float.
if (lhsExprTypeTag == TypeTags.DECIMAL) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
return;
}
if (rhsExprTypeTag == TypeTags.DECIMAL) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
return;
}
if (lhsExprTypeTag == TypeTags.FLOAT) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
return;
}
if (rhsExprTypeTag == TypeTags.FLOAT) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
return;
}
// Remaining mixed-type cases get operator-family-specific cast handling.
if (isArithmeticOperator) {
createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
return;
}
if (isBinaryShiftOperator) {
createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
return;
}
if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
}
}
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
/*
* int? x = 3;
* int? y = 5;
* int? z = x + y;
* Above is desugared to
* int? $result$;
*
* int? $lhsExprVar$ = x;
* int? $rhsExprVar$ = y;
* if (lhsVar is () or rhsVar is ()) {
* $result$ = ();
* } else {
* $result$ = $lhsExprVar$ + $rhsExprVar$;
* }
* int z = $result$;
*/
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BUnionType exprBType = (BUnionType) binaryExpr.getBType();
// First member of the (T|()) union: the non-nil type the raw operation produces.
BType nonNilType = exprBType.getMemberTypes().iterator().next();
boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
// For bitwise operators each operand is cast to its own nil-lifted (safe) type;
// for arithmetic/shift operators both operands use the expression's non-nil type.
BType rhsType = nonNilType;
if (isBitWiseOperator) {
if (binaryExpr.rhsExpr.getBType().isNullable()) {
rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
} else {
rhsType = binaryExpr.rhsExpr.getBType();
}
}
BType lhsType = nonNilType;
if (isBitWiseOperator) {
if (binaryExpr.lhsExpr.getBType().isNullable()) {
lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
} else {
lhsType = binaryExpr.lhsExpr.getBType();
}
}
if (binaryExpr.lhsExpr.getBType().isNullable()) {
binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
}
// $result$ holds either nil or the computed value.
BLangSimpleVariableDef tempVarDef = createVarDef("result",
binaryExpr.getBType(), null, binaryExpr.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
blockStmt.addStatement(tempVarDef);
// Evaluate each operand exactly once into a temporary variable.
BLangSimpleVariableDef lhsVarDef = createVarDef("$lhsExprVar$", binaryExpr.lhsExpr.getBType(),
binaryExpr.lhsExpr, binaryExpr.pos);
BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);
blockStmt.addStatement(lhsVarDef);
BLangSimpleVariableDef rhsVarDef = createVarDef("$rhsExprVar$", binaryExpr.rhsExpr.getBType(),
binaryExpr.rhsExpr, binaryExpr.pos);
BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);
blockStmt.addStatement(rhsVarDef);
// Condition: lhsVar is () || rhsVar is ()
BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, lhsVarRef, getNillTypeNode());
typeTestExprOne.setBType(symTable.booleanType);
BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos, rhsVarRef, getNillTypeNode());
typeTestExprTwo.setBType(symTable.booleanType);
BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
// then-branch: $result$ = ();
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
bLangAssignmentIf.varRef = tempVarRef;
bLangAssignmentIf.expr = createNilLiteral();
// else-branch: $result$ = <lhsType>lhsVar op <rhsType>rhsVar;
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
bLangAssignmentElse.varRef = tempVarRef;
BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,
nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);
newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);
bLangAssignmentElse.expr = newBinaryExpr;
BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
ifStatement.expr = ifBlockCondition;
ifStatement.body = ifBody;
ifStatement.elseStmt = elseBody;
// The whole block evaluates to $result$.
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
stmtExpr.setBType(binaryExpr.getBType());
return stmtExpr;
}
/**
 * Decides whether a binary expression needs nil-lifting desugaring: both operand
 * types must be known, at least one must be nullable, and the operator must be
 * one of the arithmetic, shift or bitwise operators handled by
 * {@code createStmtExprForNullableBinaryExpr}.
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts conversions so the operands of an arithmetic binary expression end up
 * with compatible types: same-family operands (int/int, string/string, xml/xml)
 * are left untouched; an xml operand mixed with a string-typed operand wraps the
 * string side in an xml text literal; an xml operand mixed with anything else
 * casts the non-xml side to xml; all remaining mixed cases cast both operands to
 * the binary expression's own type.
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Bug fix: use the LHS expression's own position (previously rhsExpr.pos,
            // a copy-paste slip from the mirrored branch above) so diagnostics and
            // source mapping point at the operand actually being wrapped.
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    // Remaining mixed-type cases: cast both operands to the expression's result type.
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Ensures both operands of a shift expression are int-compatible: any operand
 * that is neither an integer type nor byte is cast to int. Integer/byte
 * operands are left untouched.
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIntCompatible =
            TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIntCompatible =
            TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIntCompatible) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIntCompatible) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
// Casts the operands of a binary comparison expression to a common type.
// The branch order matters: integer-family checks run first, then the
// byte-widening fallback, then string-family checks.
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
int rhsExprTypeTag) {
boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
// Both integer types, or both byte: already comparable, nothing to do.
if ((isLhsIntegerType && isRhsIntegerType) || (lhsExprTypeTag == TypeTags.BYTE &&
rhsExprTypeTag == TypeTags.BYTE)) {
return;
}
// Exactly one side is an integer type: cast the other side to int.
if (isLhsIntegerType && !isRhsIntegerType) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
return;
}
if (!isLhsIntegerType && isRhsIntegerType) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
return;
}
// Neither side is an integer type but at least one is byte: widen both to int.
if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
return;
}
// String-family handling mirrors the integer logic above.
boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);
boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);
if (isLhsStringType && isRhsStringType) {
return;
}
if (isLhsStringType && !isRhsStringType) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
return;
}
if (!isLhsStringType && isRhsStringType) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
}
}
/**
 * Builds an invocation of the lang.internal createIntRange(start, end) helper
 * covering the given bounds, typed as the internal int-range type.
 */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol createIntRangeSymbol = (BInvokableSymbol)
            symTable.langInternalModuleSymbol.scope.lookup(Names.CREATE_INT_RANGE).symbol;
    List<BLangExpression> rangeArgs = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation rangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            location, createIntRangeSymbol, rangeArgs, symResolver);
    rangeInvocation.setBType(symTable.intRangeType);
    return rangeInvocation;
}
/**
 * When an int-typed binary expression has a byte operand, widens that operand
 * (or both, if both are byte) to int via a conversion expression. A null
 * expected type or a non-int expected type leaves the expression untouched.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    int lhsTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    int rhsTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    boolean hasByteOperand = lhsTag == TypeTags.BYTE || rhsTag == TypeTags.BYTE;
    if (!hasByteOperand || binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Reports whether the given binary expression uses one of the three shift
 * operators ({@code <<}, {@code >>}, {@code >>>}). Callers use this to decide
 * when both operands must be converted to 'int'; e.g. with byte a, byte b and
 * int i, all of {@code a >> b}, {@code a << i} and {@code i >> j} qualify.
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars an elvis expression {@code lhs ?: rhs} into a statement expression:
 * the lhs is evaluated once into a temp var; if it is nil the result is the
 * rhs, otherwise the result is the lhs cast to the expression's type.
 */
public void visit(BLangElvisExpr elvisExpr) {
Location pos = elvisExpr.pos;
String resultVarName = "_$result$_";
BType resultType = elvisExpr.getBType();
// _$result$_ receives whichever branch is taken.
BLangSimpleVariable resultVar =
ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
new BVarSymbol(0, names.fromString(resultVarName),
this.env.scope.owner.pkgID, resultType,
this.env.scope.owner, pos, VIRTUAL));
BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
resultVarDef.desugared = true;
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
// Evaluate the lhs exactly once into a temp variable.
String lhsResultVarName = GEN_VAR_PREFIX.value;
BLangSimpleVariable lhsResultVar =
ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,
new BVarSymbol(0, names.fromString(lhsResultVarName),
this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),
this.env.scope.owner, elvisExpr.pos, VIRTUAL));
BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);
BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);
// if (lhs is ()) { result = rhs; } else { result = <resultType>lhs; }
BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
ifBody.addStatement(nilAssignment);
BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
createTypeCastExpr(lhsResultVarRef, resultType));
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
elseBody.addStatement(notNilAssignment);
BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
createTypeCheckExpr(pos, lhsResultVarRef, getNillTypeNode()), ifBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
add(resultVarDef);
add(lhsResultVarDef);
add(ifStmt);
}});
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(resultType);
result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // Nullable operands require nil-lifting: expand into an if/else statement expression.
    if (isNullableUnaryExpr(unaryExpr)) {
        result = rewrite(createStmtExprForNilableUnaryExpr(unaryExpr), env);
        return;
    }
    switch (unaryExpr.operator) {
        case BITWISE_COMPLEMENT:
            // ~x is rewritten into an xor expression; no further rewriting here.
            rewriteBitwiseComplementOperator(unaryExpr);
            return;
        case ADD:
        case SUB:
            // Unary +/- may need the operand cast to the expression's type first.
            createTypeCastExprForUnaryPlusAndMinus(unaryExpr);
            break;
        default:
            break;
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * For unary +/- over a non-integer operand, casts the operand to the unary
 * expression's own type; integer operands are used as-is.
 */
private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {
    if (!TypeTags.isIntegerTypeTag(unaryExpr.expr.getBType().tag)) {
        unaryExpr.expr = createTypeCastExpr(unaryExpr.expr, unaryExpr.getBType());
    }
}
/**
 * Desugars a bitwise complement (~) unary expression into an equivalent
 * bitwise-xor binary expression: {@code ~a} becomes {@code a ^ -1} for int
 * operands and {@code a ^ 0xff} for byte operands.
 * Example: ~11110011 -> 00001100, since 11110011 ^ 11111111 == 00001100.
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    // Pick the operand type and xor mask once; the rest of the construction is shared.
    boolean isByte = TypeTags.BYTE == Types.getReferredType(unaryExpr.getBType()).tag;
    BType operandType = isByte ? symTable.byteType : symTable.intType;
    long mask = isByte ? 0xffL : -1L;
    final BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    xorExpr.setBType(operandType);
    xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, operandType, mask);
    xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
            operandType, operandType);
    result = rewriteExpr(xorExpr);
}
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
/*
* int? x = 3;
* int? y = +x;
*
*
* Above is desugared to
* int? $result$;
* if (x is ()) {
* $result$ = ();
* } else {
* $result$ = +x;
* }
* int y = $result$
*/
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
BUnionType exprBType = (BUnionType) unaryExpr.getBType();
// First member of the (T|()) union: the non-nil type the operator applies to.
BType nilLiftType = exprBType.getMemberTypes().iterator().next();
unaryExpr.expr = rewriteExpr(unaryExpr.expr);
BLangSimpleVariableDef tempVarDef = createVarDef("$result",
unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
blockStmt.addStatement(tempVarDef);
// Condition: operand is ()
BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
getNillTypeNode());
typeTestExpr.setBType(symTable.booleanType);
// then-branch: $result$ = ();
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
bLangAssignmentIf.varRef = tempVarRef;
bLangAssignmentIf.expr = createNilLiteral();
// else-branch: $result$ = op <nilLiftType>operand;
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
bLangAssignmentElse.varRef = tempVarRef;
BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
ifStatement.expr = typeTestExpr;
ifStatement.body = ifBody;
ifStatement.elseStmt = elseBody;
// The whole block evaluates to $result$.
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
stmtExpr.setBType(unaryExpr.getBType());
return stmtExpr;
}
/**
 * Decides whether a unary expression needs nil-lifting desugaring: the
 * expression type must be nullable and the operator one of +, - or ~.
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    switch (unaryExpr.operator) {
        case ADD:
        case SUB:
        case BITWISE_COMPLEMENT:
            return true;
        default:
            return false;
    }
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // A conversion carrying only annotation attachments (no target type node) is a
    // runtime no-op: desugar straight to the rewritten inner expression.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // Removed the unused local `targetType` (was read from conversionExpr.targetType
    // and never used).
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda with the enclosing package exactly once.
    boolean alreadyRegistered = env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction);
    if (!alreadyRegistered) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
@Override
// Desugars an arrow function into a full lambda: builds a BLangFunction with a
// fresh invokable symbol, moves the arrow function's parameters onto it, and
// registers the resulting function with the enclosing package.
public void visit(BLangArrowFunction bLangArrowFunction) {
BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
bLangFunction.setName(bLangArrowFunction.functionName);
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaFunction.pos = bLangArrowFunction.pos;
bLangFunction.addFlag(Flag.LAMBDA);
lambdaFunction.function = bLangFunction;
// The return type is taken from the arrow body expression's type.
BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
returnType.setBType(bLangArrowFunction.body.expr.getBType());
bLangFunction.setReturnTypeNode(returnType);
bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
bLangArrowFunction.params.forEach(bLangFunction::addParameter);
lambdaFunction.parent = bLangArrowFunction.parent;
lambdaFunction.setBType(bLangArrowFunction.funcType);
// Create and define a function symbol for the synthesized function.
BLangFunction funcNode = lambdaFunction.function;
BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
new Name(funcNode.name.value),
new Name(funcNode.name.originalValue),
env.enclPkg.symbol.pkgID,
bLangArrowFunction.funcType,
env.enclEnv.enclVarSym, true,
bLangArrowFunction.pos, VIRTUAL);
funcSymbol.originalName = new Name(funcNode.name.originalValue);
SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
// Re-home each parameter symbol in the new function's scope.
List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
Scope enclScope = invokableEnv.scope;
varNode.symbol.kind = SymbolKind.FUNCTION;
varNode.symbol.owner = invokableEnv.scope.owner;
enclScope.define(varNode.symbol.name, varNode.symbol);
}).map(varNode -> varNode.symbol).collect(Collectors.toList());
funcSymbol.params = paramSymbols;
funcSymbol.restParam = getRestSymbol(funcNode);
funcSymbol.retType = funcNode.returnTypeNode.getBType();
List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
funcNode.setBType(
new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
lambdaFunction.function.pos = bLangArrowFunction.pos;
lambdaFunction.function.body.pos = bLangArrowFunction.pos;
// Capture the current env so closures resolve correctly when rewritten.
lambdaFunction.capturedClosureEnv = env;
rewrite(lambdaFunction.function, env);
env.enclPkg.addFunction(lambdaFunction.function);
result = rewriteExpr(lambdaFunction);
}
/**
 * Attaches the given symbol to the invokable node and gives the symbol and the
 * invokable environment a single fresh shared scope.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    funcSymbol.scope = new Scope(funcSymbol);
    invokableNode.symbol = funcSymbol;
    invokableEnv.scope = funcSymbol.scope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
// XML qualified names need no desugaring; pass through unchanged.
result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
// Rewrite the attribute's name and value expressions in place.
xmlAttribute.name = rewriteExpr(xmlAttribute.name);
xmlAttribute.value = rewriteExpr(xmlAttribute.value);
result = xmlAttribute;
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
// Turn each xmlns-declaring attribute into an inline namespace record on the literal.
Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
while (attributesItr.hasNext()) {
BLangXMLAttribute attribute = attributesItr.next();
if (!attribute.isNamespaceDeclr) {
continue;
}
BLangXMLNS xmlns;
// Package-level owners get a package namespace node, everything else a local one.
if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
xmlns = new BLangPackageXMLNS();
} else {
xmlns = new BLangLocalXMLNS();
}
xmlns.namespaceURI = attribute.value.concatExpr;
xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
xmlns.symbol = attribute.symbol;
xmlElementLiteral.inlineNamespaces.add(xmlns);
}
// Inside a query, inherit the enclosing literal's inline namespaces as well;
// restore the previous set once this literal's children are rewritten.
List<BLangXMLNS> prevInlineNamespaces = this.inlineXMLNamespaces;
if (isVisitingQuery && this.inlineXMLNamespaces != null) {
xmlElementLiteral.inlineNamespaces.addAll(this.inlineXMLNamespaces);
}
this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;
xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
this.inlineXMLNamespaces = prevInlineNamespaces;
result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
// NOTE(review): rewriteExpr's return value is discarded here, so this relies on
// each xml item either being rewritten in place or needing no replacement — confirm.
for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
rewriteExpr(xmlItem);
}
result = xmlSequenceLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into a single string-concatenation expression.
    BLangExpression concatenated = constructStringTemplateConcatExpression(xmlTextLiteral.textFragments);
    xmlTextLiteral.concatExpr = rewriteExpr(concatenated);
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Fold the comment's text fragments into a single concatenation expression.
    BLangExpression concatenated =
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments);
    xmlCommentLiteral.concatExpr = rewriteExpr(concatenated);
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Rewrite the PI target, then fold the data fragments into one concat expression.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    BLangExpression dataConcat =
            constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments);
    xmlProcInsLiteral.dataConcatExpr = rewriteExpr(dataConcat);
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Fold the quoted string's fragments into a single concatenation expression.
    BLangExpression concatenated =
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments);
    xmlQuotedString.concatExpr = rewriteExpr(concatenated);
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template desugars to the concatenation of its parts.
    BLangExpression concatExpr = constructStringTemplateConcatExpression(stringTemplateLiteral.exprs);
    result = rewriteExpr(concatExpr);
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 * string name = "Pubudu";
 * 'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 * type RawTemplate$Impl$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 *
 *
 * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
Location pos = rawTemplateLiteral.pos;
BObjectType objType = (BObjectType) Types.getReferredType(rawTemplateLiteral.getBType());
// Generate a concrete object class carrying this literal's string parts.
BLangClassDefinition objClassDef =
desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
BObjectType classObjType = (BObjectType) objClassDef.getBType();
BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
// Pack the interpolated expressions into the list passed to the generated init().
BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
insertionsList.expectedType = insertionsSym.type;
// new RawTemplate$Impl$N([insertions...])
BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
typeNewExpr.argsExpr.add(insertionsList);
typeNewExpr.initInvocation.argExprs.add(insertionsList);
typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 * type $anonType$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 * @param strings The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
Location pos) {
BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
// Synthesize a unique class name and symbol mirroring the abstract object type.
Name objectClassName = names.fromString(
anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
env.enclPkg.packageID, null, env.enclPkg.symbol,
pos, VIRTUAL, false);
classTSymbol.flags |= Flags.CLASS;
BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
objectClassType.fields = objectType.fields;
classTSymbol.type = objectClassType;
objectClassType.typeIdSet.add(objectType.typeIdSet);
BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
// Default the `strings` field to a list literal holding the template's string parts.
BType stringsType = objectClassType.fields.get("strings").symbol.type;
BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
stringsList.exprs.addAll(strings);
stringsList.expectedType = stringsType;
classDef.fields.get(0).expr = stringsList;
// Add the user-defined init() that accepts the insertions.
BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
classDef.initFunction = userDefinedInitFunction;
env.enclPkg.functions.add(userDefinedInitFunction);
env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
// Create and analyze the generated initializer before registering it.
BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
tempGeneratedInitFunction.symbol.scope, env);
SemanticAnalyzer.AnalyzerData data = new SemanticAnalyzer.AnalyzerData(env);
this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, data);
classDef.generatedInitFunction = tempGeneratedInitFunction;
env.enclPkg.functions.add(classDef.generatedInitFunction);
env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
BLangFunction initFunction =
TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
names, Names.USER_DEFINED_INIT_SUFFIX,
symTable, classDefn.getBType());
// Register the new function as the type's initializer.
BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
(BInvokableType) initFunction.getBType(), classDefn.pos);
classDefn.initFunction = initFunction;
initFunction.returnTypeNode.setBType(symTable.nilType);
BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
BInvokableType initFnType = (BInvokableType) initFunction.getBType();
// Only fields WITHOUT a default value become init() parameters.
for (BLangSimpleVariable field : classDefn.fields) {
if (field.expr != null) {
continue;
}
BVarSymbol fieldSym = field.symbol;
BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
initFunction.symbol, classDefn.pos, VIRTUAL);
BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
fieldSym.type, null, paramSym);
param.flagSet.add(Flag.FINAL);
initFunction.symbol.scope.define(paramSym.name, paramSym);
initFunction.symbol.params.add(paramSym);
initFnType.paramTypes.add(param.getBType());
initFunction.requiredParams.add(param);
// Body statement: self.<field> = <param>;
BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
initFunction.receiver.symbol, field.name);
initFuncBody.addStatement(fieldInit);
}
return initFunction;
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Values sent to another worker are cloned (isolation between workers).
    BLangExpression rewrittenExpr = rewriteExpr(workerSendNode.expr);
    workerSendNode.expr = visitCloneInvocation(rewrittenExpr, workerSendNode.expr.getBType());
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Synchronous sends clone the value as well, mirroring async worker sends.
    BLangExpression rewrittenExpr = rewriteExpr(syncSendExpr.expr);
    syncSendExpr.expr = visitCloneInvocation(rewrittenExpr, syncSendExpr.expr.getBType());
    result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
// Worker receive needs no desugaring here; pass through unchanged.
result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
// Derive the distinct list of worker identifiers targeted by the cached send statements.
workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
.stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
result = workerFlushExpr;
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
// Desugar the `transactional` expression into a call to the internal
// transaction module's isTransactional() function (no arguments).
BInvokableSymbol isTransactionalSymbol =
(BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
result = ASTBuilderUtil
.createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
Collections.emptyList(), symResolver);
}
@Override
public void visit(BLangCommitExpr commitExpr) {
    // Commit desugaring is delegated entirely to the transaction desugar.
    result = rewriteExpr(transactionDesugar.desugar(commitExpr, env));
}
@Override
public void visit(BLangFail failNode) {
    if (this.onFailClause == null || this.isVisitingQuery) {
        // No reachable on-fail handler: a fail behaves like returning the error value.
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        returnStmt.desugared = true;
        result = returnStmt;
        return;
    }
    // Route the failure into the enclosing on-fail clause; nested fails inside the
    // on-fail body require the dedicated nested rewrite.
    result = this.onFailClause.bodyContainsFail
            ? rewriteNestedOnFail(this.onFailClause, failNode)
            : createOnFailInvocation(onFailClause, failNode);
}
@Override
public void visit(BLangLocalVarRef localVarRef) {
// Already in desugared form; pass through unchanged.
result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
// Already in desugared form; pass through unchanged.
result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
// Already in desugared form; pass through unchanged.
result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
// Already in desugared form; pass through unchanged.
result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
// Already in desugared form; pass through unchanged.
result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
// Already in desugared form; pass through unchanged.
result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
// Already in desugared form; pass through unchanged.
result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
// Already in desugared form; pass through unchanged.
result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
// Already in desugared form; pass through unchanged.
result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
// Already in desugared form; pass through unchanged.
result = tableKeyAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
// Already in desugared form; pass through unchanged.
result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
// Already in desugared form; pass through unchanged.
result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
// Already in desugared form; pass through unchanged.
result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
// x.<a|ns:b> desugars to a lang.xml getElements(...) call over the expanded
// (namespace-qualified) element-name filters.
xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
xmlElementAccess.expr, new ArrayList<>(), filters);
result = rewriteExpr(invocationNode);
}
/**
 * Converts each XML element filter into a string-literal argument, expanding the
 * element name with the namespace URI resolved from its prefix (or the default
 * namespace when no prefix is found and the name is not the {@code *} wildcard).
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNsSymbol = visibleNamespaces.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNsUri = defaultNsSymbol == null ? null : defaultNsSymbol.namespaceURI;
    ArrayList<BLangExpression> filterArgs = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol prefixSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (prefixSymbol != symTable.notFoundSymbol) {
            // Explicit prefix found: qualify the name with the prefix's namespace URI.
            String qName = createExpandedQName(((BXMLNSSymbol) prefixSymbol).namespaceURI, filter.name);
            filterArgs.add(createStringLiteral(filter.elemNamePos, qName));
        } else if (defaultNsUri != null && !filter.name.equals("*")) {
            // No prefix: apply the default namespace, except to the wildcard filter.
            filterArgs.add(createStringLiteral(filter.elemNamePos,
                    createExpandedQName(defaultNsUri, filter.name)));
        } else {
            filterArgs.add(createStringLiteral(filter.elemNamePos, filter.name));
        }
    }
    return filterArgs;
}
/**
 * Builds an invocation node for an xml langlib function. The (already rewritten)
 * target expression becomes both the receiver and the first required argument.
 */
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = pos;
    BLangIdentifier funcNameIdentifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcNameIdentifier.setLiteral(false);
    funcNameIdentifier.setValue(functionName);
    funcNameIdentifier.pos = pos;
    invocation.name = funcNameIdentifier;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.expr = invokeOnExpr;
    invocation.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName), env);
    // The receiver is also passed explicitly as the first required argument.
    List<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = rewriteExprs(restArgs);
    invocation.setBType(((BInvokableType) invocation.symbol.type).getReturnType());
    invocation.langLibInvocation = true;
    return invocation;
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    // Desugar xml navigation (`x/**/<e>`, `x/*`, `x/<e>[i]`) into langlib calls.
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
        // Descendant navigation: selectDescendants with the filter names.
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
        // Unfiltered children navigation; note: filters are intentionally not passed.
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
        result = rewriteExpr(invocationNode);
    } else {
        // Filtered children access; a missing index is encoded as -1.
        BLangExpression childIndexExpr;
        if (xmlNavigation.childIndex == null) {
            childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
        } else {
            childIndexExpr = xmlNavigation.childIndex;
        }
        ArrayList<BLangExpression> args = new ArrayList<>();
        args.add(rewriteExpr(childIndexExpr));
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
        result = rewriteExpr(invocationNode);
    }
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the LHS operand requires desugaring; the type stays as resolved.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Function-pointer invocations are produced already desugared; pass through.
    result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    // Only the embedded type node requires desugaring.
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // Unwrap the rest-args (`...x`) node to its underlying expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // Unwrap the named-arg (`name = expr`) node to its underlying expression.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // `check expr`: on error, return the error from the enclosing function.
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // `checkpanic expr`: on error, panic instead of returning.
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Desugars {@code check}/{@code checkpanic} into a statement expression of the form:
 * declare a result var, capture the checked expression once into a temp var, then
 * `if ($tmp is <resultType>) { result = <cast>$tmp; } else { <error handling>; }`,
 * yielding the result var. Error handling (return vs panic) is produced by
 * getSafeErrorAssignment, selected by {@code isCheckPanic}.
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    if (checkedExpr.isRedundantChecking) {
        // Checking was determined to be redundant; drop the check wrapper entirely.
        result = rewriteExpr(checkedExpr.expr);
        return;
    }
    Location pos = checkedExpr.pos;
    // Variable that holds the final non-error value of the whole expression.
    String resultVarName = "_$result$_";
    BType resultType = checkedExpr.getBType();
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Temp variable that evaluates the checked expression exactly once.
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BType checkedExprType = checkedExpr.expr.getBType();
    BLangSimpleVariable checkedExprVar =
            ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,
                    checkedExpr.expr, new BVarSymbol(0, names.fromString(checkedExprVarName),
                            this.env.scope.owner.pkgID, checkedExprType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(pos, checkedExprVar);
    BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);
    // Success branch: cast the temp value to the result type and assign it.
    BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(checkedExprVarRef, resultType));
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(successAssignment);
    // Error branch: return or panic, per isCheckPanic.
    BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,
            checkedExpr.equivalentErrorTypeList, isCheckPanic);
    BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    checkedExprTypeNode.setBType(resultType);
    checkedExprTypeNode.typeKind = resultType.getKind();
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
        add(resultVarDef);
        add(checkedExprVarDef);
        add(ifStmt);
    }});
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // Replace the service constructor with an instantiation of its generated class.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {
    // Desugar the anonymous class first, then replace the expression with its
    // `new` (type-init) expression.
    visit(bLangObjectConstructorExpression.classNode);
    bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(bLangObjectConstructorExpression.typeInit);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Desugar annotation access into an ANNOT_ACCESS binary expression whose LHS is
    // the annotated value and whose RHS is the annotation's bvm-alias string literal.
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.setBType(annotAccessExpr.getBType());
    // Synthesize an operator symbol for the ANNOT_ACCESS operation.
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
            new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
                    binaryExpr.rhsExpr.getBType()),
                    annotAccessExpr.getBType(), null), null,
            symTable.builtinPos, VIRTUAL);
    result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.getBType())) {
        // Convert value-typed operands to `any` before the runtime type test.
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    if (typeTestExpr.isNegation) {
        // Desugar `x !is T` into `!(x is T)`.
        BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
                typeTestExpr.expr, typeTestExpr.typeNode);
        BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        bLangGroupExpr.expression = bLangTypeTestExpr;
        bLangGroupExpr.setBType(typeTestExpr.getBType());
        BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
                typeTestExpr.getBType(),
                OperatorKind.NOT, null);
        result = rewriteExpr(unaryExpr);
        return;
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    // Only the operand requires desugaring.
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    // Desugar both the value expression and the preceding statement.
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Desugar a query expression into an equivalent statement expression.
    // isVisitingQuery suppresses the non-query fail-statement rewriting while
    // inside a query; it is saved/restored to support nested queries.
    boolean prevIsVisitingQuery = this.isVisitingQuery;
    this.isVisitingQuery = true;
    try {
        BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));
        result = rewrite(stmtExpr, env);
    } finally {
        // Restore the flag even if desugaring throws, so a failure in one query
        // cannot leave the whole pass stuck in "visiting query" state.
        this.isVisitingQuery = prevIsVisitingQuery;
    }
}
/**
 * Collects the propagated XMLNS declaration statements for every namespace
 * prefix visible in the given environment (missing entries are skipped).
 */
List<BLangStatement> getVisibleXMLNSStmts(SymbolEnv env) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = symResolver.resolveAllNamespaces(env);
    List<BLangStatement> nsStmts = new ArrayList<>();
    for (Name prefix : visibleNamespaces.keySet()) {
        BLangStatement stmt = this.stmtsToBePropagatedToQuery.get(prefix);
        if (stmt != null) {
            nsStmts.add(stmt);
        }
    }
    return nsStmts;
}
@Override
public void visit(BLangQueryAction queryAction) {
    // Desugar a query action (`from ... do ...`) the same way as a query expression.
    boolean prevIsVisitingQuery = this.isVisitingQuery;
    this.isVisitingQuery = true;
    try {
        BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));
        result = rewrite(stmtExpr, env);
    } finally {
        // Restore the flag even if desugaring throws (see visit(BLangQueryExpr)).
        this.isVisitingQuery = prevIsVisitingQuery;
    }
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    // Only the member expressions require desugaring.
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    BType refType = Types.getReferredType(constSymbol.literalType);
    if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {
        // Simple-typed constants (tags up to BOOLEAN, or nil): rebuild the
        // expression as a literal from the already-resolved constant value.
        if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||
                constSymbol.value.value == null)) {
            // A non-nil simple constant must have a resolved value by this phase.
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    // Placeholder expression; nothing to desugar.
    result = ignoreExpr;
}
@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
    // Desugar both the conditionally supplied argument and its condition.
    dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
    dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
    result = dynamicParamExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
    // Replace the constant reference with a literal of its resolved value.
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}
/**
 * Builds `$iterator$ = <collection>.iterator()` as a variable definition, where the
 * iterator function is the given invokable symbol invoked on the collection symbol.
 */
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
    // The collection is also passed as the first (receiver) argument.
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Builds `$result$ = $iterator$.next()` as a variable definition; the result type
 * is nillable because next() yields nil when the iteration is exhausted.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}
/**
 * Builds the `$iterator$.next()` invocation, resolving the `next` method from the
 * iterator object's attached functions.
 */
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol =
            getNextFunc((BObjectType) Types.getReferredType(iteratorSymbol.type)).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    // The iterator is also passed as the first (receiver) argument.
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.setBType(nextFuncSymbol.retType);
    return nextInvocation;
}
/**
 * Scans the iterator object's attached functions for its `next` method.
 *
 * @return the attached `next` function, or {@code null} if none exists
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if ("next".equals(attachedFunc.funcName.value)) {
            return attachedFunc;
        }
    }
    return null;
}
/**
 * Builds `<resultSymbol>.value` — the value field access of an iterator result.
 */
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
                                               BVarSymbol resultSymbol) {
    return getFieldAccessExpression(location, "value", varType, resultSymbol);
}
/**
 * Builds `<resultSymbol>.<fieldName>` as a field access with the given static type.
 */
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    BLangSimpleVarRef targetVarRef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier fieldIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldAccess =
            ASTBuilderUtil.createFieldAccessExpr(targetVarRef, fieldIdentifier);
    fieldAccess.pos = pos;
    fieldAccess.setBType(varType);
    fieldAccess.originalType = varType;
    return fieldAccess;
}
/**
 * Converts an arrow function's expression body into a block body containing a
 * single `return <expr>;` statement.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}
/**
 * Builds an invocation of a function resolved by name from the root (built-in)
 * scope, with the given required arguments and return type.
 */
protected BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Resolve the function symbol from the root scope by name.
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.setBType(retType);
    invocationNode.requiredArgs = args;
    return invocationNode;
}
/**
 * Builds a langlib method invocation on {@code onExpr}; the method symbol is
 * resolved from the langlib of the receiver's type, and the receiver is also
 * passed as the first required argument. When {@code retType} is null, the
 * resolved symbol's return type is used.
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName), env);
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Builds an invocation of a function from the lang.__internal module (no receiver).
 * When {@code retType} is null, the resolved symbol's return type is used.
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
            names.fromString(functionName), env);
    // Copy the args into a fresh list (was: create-then-addAll) so the caller's
    // list is never aliased by the invocation node.
    invocationNode.requiredArgs = new ArrayList<>(args);
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Builds an empty array literal of type any[], to be filled by the caller.
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.setBType(new BArrayType(symTable.anyType));
    return expr;
}
/**
 * Desugars an invocation through a function pointer: the callee is materialized as
 * a value expression (a bare var ref, or a field access when invoked on an
 * expression) and wrapped in a BFunctionPointerInvocation.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangValueExpression expr;
    if (iExpr.expr == null) {
        expr = new BLangSimpleVarRef();
    } else {
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.setBType(iExpr.symbol.type);
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Wraps {@code expr} in a langlib clone() call, converted to {@code lhsType} if
 * required. Value-typed and error-typed expressions are returned unchanged.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    // Value types and errors are returned as-is — no clone() call is emitted.
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}
/**
 * Wraps {@code expr} in a langlib cloneReadOnly() call, converted to
 * {@code lhsType} if required. Value-typed and error-typed expressions are
 * returned unchanged.
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    // Value types and errors are returned as-is — no cloneReadOnly() call is emitted.
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            exprType,
            expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    // Core desugar dispatch: visits `node` under `env` and returns the replacement
    // node produced by the matching visit method (stored in this.result).
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        // Already processed; never desugar the same node twice.
        return node;
    }
    // Swap in the given env for the duration of the visit, then restore it.
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    // Expression variant of rewrite(): uses the current env, and if an implicit
    // conversion wrapper is attached, visits that wrapper instead of the node.
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        // Detach the implicit conversion before visiting to avoid re-wrapping.
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    // Statement variant of rewrite(); delegates to the node-level overload.
    if (statement == null) {
        return null;
    }
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    return (E) stmt;
}
/**
 * Desugars every statement in the list in place and returns the same list.
 */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
/**
 * Desugars every node in the list in place and returns the same list.
 */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
/**
 * Desugars every expression in the list in place and returns the same list.
 */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
/**
 * Builds a string literal node with the given value and position.
 */
private BLangLiteral createStringLiteral(Location pos, String value) {
    BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
    stringLit.pos = pos;
    return stringLit;
}
/**
 * Builds an int literal node with the given value (no position is set).
 */
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.setBType(symTable.intType);
    return literal;
}
/**
 * Builds a byte literal node; the signed Java byte is widened to its unsigned
 * int value (Ballerina bytes are 0-255).
 */
private BLangLiteral createByteLiteral(Location pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}
/**
 * Wraps {@code expr} in an internal type-conversion node targeting
 * {@code targetType}; a no-op when the types are already the same.
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    if (types.isSameType(expr.getBType(), targetType)) {
        return expr;
    }
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.setBType(targetType);
    conversionExpr.targetType = targetType;
    // Marked internal: compiler-generated, not a user-written cast.
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Returns the innermost element type of a (possibly nested, possibly
 * type-referenced) array type; returns the input unchanged for non-arrays.
 */
private BType getElementType(BType bType) {
    BType current = bType;
    BType referred = Types.getReferredType(current);
    // Unwrap array dimensions iteratively until a non-array type remains.
    while (referred.tag == TypeTags.ARRAY) {
        current = ((BArrayType) referred).getElementType();
        referred = Types.getReferredType(current);
    }
    return current;
}
/**
 * Appends an implicit `return ()` to a nil-returning function whose block body
 * does not already end with a return statement. Native functions and non-block
 * bodies are skipped.
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        Location invPos = invokableNode.pos;
        Location returnStmtPos;
        if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
            // Generated init functions get no source position for the synthetic return.
            returnStmtPos = null;
        } else {
            // Position the synthetic return at the function's closing line.
            returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().startLine().offset(),
                    invPos.lineRange().startLine().offset(), 0, 0);
        }
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature.
 * <p>
 * Positional and named args are rearranged into declaration order, and rest
 * args / a trailing vararg (`...x`) expression are packed into the rest-parameter
 * array form expected by the callee.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || Types.getReferredType(symbol.type).tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        // Trailing spread (`...x`) with fewer given required args than declared
        // params: part of the vararg fills required params. Evaluate the vararg
        // once into a temp var so it can be indexed repeatedly without
        // re-evaluating its expression.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Rearrange named args (and fill defaults/included records) into
        // parameter declaration order.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no rest args, or the last rest arg is not a spread expression —
    // pack the individual rest args into a single array literal.
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: the sole rest arg is a spread expression.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // No part of the vararg was consumed for required params; leave as-is.
            return;
        }
        // Parts of the vararg filled required params: prepend the vararg temp-var
        // definition (blockStmt) to the first derived required arg so the vararg
        // is evaluated before any of its members are read.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);
        // No rest param: the vararg existed only to supply required params.
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }
        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        if (Types.getReferredType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {
            // A record-typed vararg leaves nothing for the rest param; pass an
            // empty array.
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        // Copy the remainder of the vararg (past the portion consumed by required
        // params) into a fresh array via a desugared foreach over an int range.
        Location pos = restArgsExpression.pos;
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        // Start index = number of params the vararg had to supply.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.getBType(),
                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
        BType refType = Types.getReferredType(varargVarType);
        if (refType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) refType;
            if (arrayType.state == BArrayState.CLOSED &&
                    arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                // Closed array whose size equals the number of params it filled:
                // type remaining member access by the rest param's element type.
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            valueExpr.setBType(symTable.anyOrErrorType);
        }
        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                List.of(pushExpr),
                restParamType, pos);
        // push() takes its values as rest args; move the arg out of requiredArgs.
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);
        BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);
        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }
    // Case 3: individual rest args followed by a final spread expression — seed
    // an array with the individual args, then push the spread's members onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);
    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
            this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, restParamType, pos);
    // push() takes its values as rest args; move the spread out of requiredArgs.
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
    /**
     * Reorders the required arguments of an invocation into parameter declaration order,
     * resolving named arguments, included-record parameters and values drawn from a
     * rest/vararg reference.
     *
     * @param iExpr            the invocation whose {@code requiredArgs} list is rewritten in place
     * @param invokableSymbol  symbol of the callee, providing the ordered parameter list
     * @param varargRef        reference to the desugared vararg value, or {@code null} if none
     */
    private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
        List<BLangExpression> args = new ArrayList<>();
        // Index the named arguments by name; insertion order is preserved for later
        // included-record field population.
        Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
        iExpr.requiredArgs.stream()
                .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
                .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));

        List<BVarSymbol> params = invokableSymbol.params;
        List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
        BLangRecordLiteral incRecordParamAllowAdditionalFields = null;

        int varargIndex = 0;

        BType varargType = null;
        boolean tupleTypedVararg = false;

        if (varargRef != null) {
            varargType = Types.getReferredType(varargRef.getBType());
            tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
        }

        // Iterate over the declared parameters and pick the matching argument for each.
        for (int i = 0; i < params.size(); i++) {
            BVarSymbol param = params.get(i);
            if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
                // A positional arg is present at this parameter's position.
                args.add(iExpr.requiredArgs.get(i));
            } else if (namedArgs.containsKey(param.name.value)) {
                // A named arg matches this parameter; remove so leftovers can be
                // routed into included-record params below.
                args.add(namedArgs.remove(param.name.value));
            } else if (param.getFlags().contains(Flag.INCLUDED)) {
                // Included-record parameter: create an (initially empty) record literal;
                // its fields are filled from leftover named args after this loop.
                BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
                BType paramType = param.type;
                recordLiteral.setBType(paramType);
                args.add(recordLiteral);
                incRecordLiterals.add(recordLiteral);
                if (((BRecordType) Types.getReferredType(paramType)).restFieldType != symTable.noType) {
                    // Open record: remember it as the sink for args that match no declared field.
                    incRecordParamAllowAdditionalFields = recordLiteral;
                }
            } else if (varargRef == null) {
                // No argument supplied and no vararg to draw from: mark as ignored so
                // the parameter's default value is used.
                BLangExpression expr = new BLangIgnoreExpr();
                expr.setBType(param.type);
                args.add(expr);
            } else {
                // Draw the parameter's value out of the vararg reference.
                if (Types.getReferredType(varargRef.getBType()).tag == TypeTags.RECORD) {
                    if (param.isDefaultable) {
                        // `rec.hasKey(name) ? rec[name] : <ignore>` — only override the
                        // default when the record actually carries the field.
                        BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
                                List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
                        BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
                        BLangIndexBasedAccess memberAccessExpr =
                                ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
                        BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
                        BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
                                hasKeyInvocation, memberAccessExpr, ignoreExpr);
                        args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
                    } else {
                        // Required parameter: the record must carry the field; access it directly.
                        BLangFieldBasedAccess fieldBasedAccessExpression =
                                ASTBuilderUtil.createFieldAccessExpr(varargRef,
                                        ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
                        fieldBasedAccessExpression.setBType(param.type);
                        args.add(fieldBasedAccessExpression);
                    }
                } else {
                    // List-typed vararg: index sequentially, using the tuple member type
                    // when the vararg is a tuple, else the array element type.
                    BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
                    BType memberAccessExprType = tupleTypedVararg ?
                            ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
                    args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
                            varargRef, indexExpr), param.type));
                    varargIndex++;
                }
            }
        }
        if (namedArgs.size() > 0) {
            // Remaining named args belong to included-record parameters (or their rest fields).
            setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
        }
        iExpr.requiredArgs = args;
    }
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
List<BLangRecordLiteral> incRecordLiterals,
BLangRecordLiteral incRecordParamAllowAdditionalFields) {
for (String name : namedArgs.keySet()) {
boolean isAdditionalField = true;
BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name);
for (BLangRecordLiteral recordLiteral : incRecordLiterals) {
LinkedHashMap<String, BField> fields =
((BRecordType) Types.getReferredType(recordLiteral.getBType())).fields;
if (fields.containsKey(name) &&
Types.getReferredType(fields.get(name).type).tag != TypeTags.NEVER) {
isAdditionalField = false;
createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);
break;
}
}
if (isAdditionalField) {
createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);
}
}
}
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
BLangNamedArgsExpression expr) {
BLangSimpleVarRef varRef = new BLangSimpleVarRef();
varRef.variableName = expr.name;
BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil.
createBLangRecordKeyValue(varRef, expr.expr);
recordLiteral.fields.add(recordKeyValueField);
}
    /**
     * Builds the failure-path block for a `check`/`checkpanic` style expression: binds the
     * error value to a temp variable, then either fails (possibly returning the error from
     * the enclosing function) or panics.
     *
     * @param location             position to attach to generated nodes
     * @param ref                  reference holding the value known to be an error
     * @param invokableSymbol      symbol of the enclosing invokable (for its return type)
     * @param equivalentErrorTypes error types the checked expression can produce
     * @param isCheckPanicExpr     true for `checkpanic` — always panic, never fail/return
     * @return the generated block statement containing the error handling
     */
    private BLangBlockStmt getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,
                                                  BSymbol invokableSymbol,
                                                  List<BType> equivalentErrorTypes,
                                                  boolean isCheckPanicExpr) {
        // From the enclosing function's return type, collect the member types so we can
        // decide whether every possible error is returnable.
        BType enclosingFuncReturnType = Types.getReferredType(((BInvokableType) invokableSymbol.type).retType);
        Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
                ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
                new LinkedHashSet<>() {{
                    add(enclosingFuncReturnType);
                }};

        // Returning on error is only valid when every producible error type is assignable
        // to some member of the return type.
        boolean returnOnError = equivalentErrorTypes.stream()
                .allMatch(errorType -> returnTypeSet.stream()
                        .anyMatch(retType -> types.isAssignable(errorType, retType)));

        // Bind the error value: error $t_failure = <error> ref;
        String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
        BLangSimpleVariable errorVar =
                ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,
                        createTypeCastExpr(ref, symTable.errorType),
                        new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
                                this.env.scope.owner.pkgID, symTable.errorType,
                                this.env.scope.owner, location, VIRTUAL));

        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);
        BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVar);
        blockStmt.addStatement(errorVarDef);

        BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);
        if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
            // `check` with a reachable handler: emit a fail statement. When the error can
            // also be returned, attach the return so codegen can short-circuit.
            BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
            failStmt.pos = location;
            failStmt.expr = errorVarRef;
            blockStmt.addStatement(failStmt);
            if (returnOnError && this.shouldReturnErrors) {
                BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));
                errorReturn.desugared = true;
                failStmt.exprStmt = errorReturn;
            }
        } else {
            // `checkpanic`, or `check` where the error is neither returnable nor handled:
            // panic with the error value.
            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.pos = location;
            panicNode.expr = errorVarRef;
            blockStmt.addStatement(panicNode);
        }
        return blockStmt;
    }
    /**
     * Wraps {@code expr} in a type conversion to {@code lhsType} when one is needed,
     * otherwise returns the expression unchanged. The order of the early-return checks
     * below is significant.
     *
     * @param expr    the expression to (possibly) convert
     * @param lhsType the target type of the assignment/parameter position
     * @return the original expression, an implicit-cast expression, or a synthesized
     *         unchecked conversion node
     */
    BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
        if (lhsType.tag == TypeTags.NONE) {
            return expr;
        }

        BType rhsType = expr.getBType();
        if (types.isSameType(rhsType, lhsType)) {
            return expr;
        }

        // Prefer an implicit cast computed by the type checker when one applies.
        types.setImplicitCastExpr(expr, rhsType, lhsType);
        if (expr.impConversionExpr != null) {
            BLangExpression impConversionExpr = expr.impConversionExpr;
            expr.impConversionExpr = null;
            return impConversionExpr;
        }

        // Cases where no conversion node is required even though the types differ.
        if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
            return expr;
        }

        if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
            return expr;
        }

        if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
            return expr;
        }

        // Fall back to an internal, unchecked conversion node (no runtime type check).
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
                TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = expr;
        conversionExpr.targetType = lhsType;
        conversionExpr.setBType(lhsType);
        conversionExpr.pos = expr.pos;
        conversionExpr.checkTypes = false;
        conversionExpr.internal = true;
        return conversionExpr;
    }
    /**
     * Derives the structural type described by a binding-pattern variable, recursively.
     * Tuple patterns yield a tuple type, record patterns an anonymous record type (with a
     * registered type definition), and error patterns an anonymous error type; any other
     * pattern falls through to the variable's own type.
     *
     * @param bindingPatternVariable the binding-pattern variable to type
     * @return the derived {@code BType}
     */
    private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
        if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
            List<BType> memberTypes = new ArrayList<>();
            for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
                memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
            }
            BTupleType tupleType = new BTupleType(memberTypes);
            if (tupleVariable.restVariable != null) {
                // The rest binding's type is an array; the tuple's rest type is its element type.
                BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
                tupleType.restType = restArrayType.eType;
            }
            return tupleType;
        }

        if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
            BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;

            // Synthesize an anonymous record type symbol with an init function in scope.
            BRecordTypeSymbol recordSymbol =
                    Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
                                               env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
                                               VIRTUAL);
            recordSymbol.initializerFunc = createRecordInitFunc();
            recordSymbol.scope = new Scope(recordSymbol);
            recordSymbol.scope.define(
                    names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                    recordSymbol.initializerFunc.symbol);

            // One field per record-pattern entry, typed by recursing into its value pattern.
            LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
            List<BLangSimpleVariable> typeDefFields = new ArrayList<>();

            for (int i = 0; i < recordVariable.variableList.size(); i++) {
                String fieldNameStr = recordVariable.variableList.get(i).key.value;
                Name fieldName = names.fromString(fieldNameStr);
                BType fieldType = getStructuredBindingPatternType(
                        recordVariable.variableList.get(i).valueBindingPattern);
                BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
                                                        recordSymbol, bindingPatternVariable.pos, VIRTUAL);

                fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
                typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
                recordSymbol.scope.define(fieldName, fieldSymbol);
            }

            BRecordType recordVarType = new BRecordType(recordSymbol);
            recordVarType.fields = fields;

            // Inherit the rest field type from the pattern's rest param if present,
            // otherwise default to anydata (open record).
            recordVarType.restFieldType = recordVariable.restParam != null ?
                        ((BRecordType) recordVariable.restParam.getBType()).restFieldType :
                    symTable.anydataType;
            recordSymbol.type = recordVarType;
            recordVarType.tsymbol = recordSymbol;

            // Register a type definition so codegen can materialize the anonymous record.
            BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                                                                                          recordVarType,
                                                                                          bindingPatternVariable.pos);
            recordTypeNode.initFunction =
                    rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                            env);
            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);

            return recordVarType;
        }

        if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
            BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
            BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                    SymTag.ERROR,
                    Flags.PUBLIC,
                    names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
                    env.enclPkg.symbol.pkgID,
                    null, null, errorVariable.pos, VIRTUAL);
            BType detailType;
            if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
                // Only a rest pattern: the detail is the generic detail type.
                detailType = symTable.detailType;
            } else {
                // Build an anonymous detail record from the detail entries and register it.
                detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
                        errorVariable.pos);

                BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
                recordTypeNode.initFunction = TypeDefBuilderHelper
                        .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
                TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,
                                                                   recordTypeNode, env);
            }
            BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
            errorTypeSymbol.type = errorType;

            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,
                                                                createErrorTypeNode(errorType), env);
            return errorType;
        }

        return bindingPatternVariable.getBType();
    }
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
List<BLangSimpleVariable> fieldList = new ArrayList<>();
for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
BVarSymbol symbol = field.valueBindingPattern.symbol;
if (symbol == null) {
symbol = new BVarSymbol(Flags.PUBLIC, names.fromString(field.key.value + "$"),
this.env.enclPkg.packageID, symTable.pureType, null,
field.valueBindingPattern.pos, VIRTUAL);
}
BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
field.valueBindingPattern.pos,
symbol.name.value,
field.valueBindingPattern.getBType(),
field.valueBindingPattern.expr,
symbol);
fieldList.add(fieldVar);
}
return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
BLangSimpleVariable restDetail, int errorNo, Location pos) {
BRecordType detailRecordType = createAnonRecordType(pos);
if (restDetail == null) {
detailRecordType.sealed = true;
}
for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
Name fieldName = names.fromIdNode(detailEntry.key);
BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType,
detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL);
detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));
detailRecordType.tsymbol.scope.define(fieldName, fieldSym);
}
return detailRecordType;
}
private BRecordType createAnonRecordType(Location pos) {
BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
SymTag.RECORD,
Flags.PUBLIC,
names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
detailRecordTypeSymbol.scope.define(
names.fromString(detailRecordTypeSymbol.name.value + "." +
detailRecordTypeSymbol.initializerFunc.funcName.value),
detailRecordTypeSymbol.initializerFunc.symbol);
BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
detailRecordType.restFieldType = symTable.anydataType;
return detailRecordType;
}
private BAttachedFunction createRecordInitFunc() {
BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
false, symTable.builtinPos, VIRTUAL);
initFuncSymbol.retType = symTable.nilType;
return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, symTable.builtinPos);
}
BLangErrorType createErrorTypeNode(BErrorType errorType) {
BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
errorTypeNode.setBType(errorType);
return errorTypeNode;
}
    /**
     * Recursively builds the boolean condition used when desugaring a match pattern into
     * comparisons against {@code varRef}: OR-patterns become `lhs || rhs`, the wildcard
     * `_` becomes `varRef is any`, and leaf patterns become `varRef == pattern`.
     *
     * @param pos        position for generated nodes
     * @param varRef     the matched value
     * @param expression the pattern expression being desugared
     * @return the generated boolean expression
     */
    private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
                                                   BLangExpression expression) {

        BLangBinaryExpr binaryExpr;

        // Unwrap grouping parentheses.
        if (NodeKind.GROUP_EXPR == expression.getKind()) {
            return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
        }

        if (NodeKind.BINARY_EXPR == expression.getKind()) {
            // OR-pattern: desugar both sides and combine with `||`.
            binaryExpr = (BLangBinaryExpr) expression;
            BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
            BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);

            binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                                                         (BOperatorSymbol) symResolver
                                                                 .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
        } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
            // Wildcard `_` matches anything: `varRef is any`.
            BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
            anyType.setBType(symTable.anyType);
            anyType.typeKind = TypeKind.ANY;
            return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
        } else {
            // Leaf constant pattern: `varRef == expression`. If the exact operator is not
            // found, fall back to equality over the anydata type set.
            binaryExpr = ASTBuilderUtil
                    .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
            BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
                                                                expression.getBType());
            if (opSymbol == symTable.notFoundSymbol) {
                opSymbol = symResolver
                        .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
                                                      binaryExpr, env);
            }
            binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
        }
        return binaryExpr;
    }
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
varRef.pos = variable.pos;
varRef.variableName = variable.name;
varRef.symbol = variable.symbol;
varRef.setBType(variable.getBType());
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = variable.expr;
assignmentStmt.pos = variable.pos;
assignmentStmt.setVariable(varRef);
return assignmentStmt;
}
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
BVarSymbol selfSymbol) {
return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,
variable.name);
}
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
BLangIdentifier fieldName) {
BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);
fieldAccess.symbol = fieldSymbol;
fieldAccess.setBType(fieldType);
fieldAccess.isStoreOnCreation = true;
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = expr;
assignmentStmt.pos = function.pos;
assignmentStmt.setVariable(fieldAccess);
SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
return rewrite(assignmentStmt, initFuncEnv);
}
private boolean safeNavigate(BLangAccessExpression accessExpr) {
if (accessExpr.isLValue || accessExpr.expr == null) {
return false;
}
if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
return true;
}
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
return safeNavigate((BLangAccessExpression) accessExpr.expr);
}
return false;
}
    /**
     * Desugars a safe-navigation access chain into a statement expression:
     * `{ T $temp_result; <match-statement chain>; } $temp_result`. The per-expression
     * stacks used while building the chain are reset afterwards.
     *
     * @param accessExpr the safe-navigation access expression
     * @return the replacement statement expression with the original expression's type
     */
    private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
        BType originalExprType = accessExpr.getBType();
        // Temp variable that each generated match clause assigns its result into.
        String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
        BLangSimpleVariable tempResultVar =
                ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
                                              new BVarSymbol(0, names.fromString(matchTempResultVarName),
                                                             this.env.scope.owner.pkgID, accessExpr.getBType(),
                                                             this.env.scope.owner, accessExpr.pos, VIRTUAL));
        BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

        // Populates this.matchStmtStack with the nested match statements for the chain.
        handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);

        // The first element is the outermost match statement; nest it after the var def.
        BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();

        BLangBlockStmt blockStmt =
                ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matchStmt));
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
        stmtExpression.setBType(originalExprType);

        // Reset the safe-navigation builder state for the next expression.
        this.matchStmtStack = new Stack<>();
        this.accessExprStack = new Stack<>();
        this.successClause = null;
        this.safeNavigationAssignment = null;
        return stmtExpression;
    }
    /**
     * Recursively desugars one link of a safe-navigation access chain into a match
     * statement with error/nil/success clauses, chaining the generated statements via
     * {@link #pushToMatchStatementStack}. Innermost accesses are processed first.
     *
     * @param accessExpr    the access expression for this link of the chain
     * @param type          the result type assigned to the generated match statements
     * @param tempResultVar the temp variable each clause assigns its result into
     */
    private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
        if (accessExpr.expr == null) {
            return;
        }

        // Recurse first so inner (left-most) accesses in the chain are handled before this one.
        NodeKind kind = accessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
        }

        if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
            // Plain access link: just fix up its type and, if an assignment is pending
            // from an outer link, feed this access into it (with conversion if needed).
            BType originalType = Types.getReferredType(accessExpr.originalType);
            if (TypeTags.isXMLTypeTag(originalType.tag) || isMapJson(originalType)) {
                accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
            } else {
                accessExpr.setBType(originalType);
            }
            if (this.safeNavigationAssignment != null) {
                this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
            }
            return;
        }

        /*
         * If the field access is a safe navigation, create a match expression.
         * Then chain the current expression as the success-pattern of the parent
         * match expr, if available.
         * eg:
         * x but {                         <--- parent match expr
         *   error e => e,
         *   T t => t.y but {              <--- current expr
         *           error e => e,
         *           R r => r.z
         *         }
         * }
         */

        BLangExpression matchExpr = accessExpr.expr;
        BType matchExprType = accessExpr.expr.getBType();

        Location pos = accessExpr.pos;
        BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(matchExpr, pos);

        // If the matched value is a union, collect its members so we can strip the
        // nil/error members handled by dedicated clauses below.
        boolean isAllTypesRecords = false;
        LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
        BType referredType = Types.getReferredType(matchExpr.getBType());
        if (referredType.tag == TypeTags.UNION) {
            memTypes = new LinkedHashSet<>(((BUnionType) referredType).getMemberTypes());
            isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
        }

        if (accessExpr.nilSafeNavigation) {
            // `?.`: nil short-circuits to nil.
            matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));
            matchStmt.setBType(type);
            memTypes.remove(symTable.nilType);
        }

        if (accessExpr.errorSafeNavigation) {
            // Error lifting: an error value short-circuits to the error.
            matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));
            matchStmt.setBType(type);
            matchStmt.pos = pos;
            memTypes.remove(symTable.errorType);
        }

        BLangMatchClause successClause = null;
        Name field = getFieldName(accessExpr);
        if (field == Names.EMPTY) {
            // Non-literal index access: a single success clause over the whole matched type.
            successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
                    accessExpr.errorSafeNavigation);
            matchStmt.addMatchClause(successClause);
            pushToMatchStatementStack(matchStmt, successClause, pos);
            return;
        }

        if (isAllTypesRecords) {
            // One success clause per record member that can carry the field (declared
            // field or open record), plus a wildcard clause yielding nil.
            for (BType memberType : memTypes) {
                BRecordType recordType = (BRecordType) Types.getReferredType(memberType);
                if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
                    successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,
                            accessExpr.errorSafeNavigation);
                    matchStmt.addMatchClause(successClause);
                }
            }
            matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));
            pushToMatchStatementStack(matchStmt, successClause, pos);
            return;
        }

        // Not all-records: a single success clause over the matched type.
        successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
                accessExpr.errorSafeNavigation);
        matchStmt.addMatchClause(successClause);
        pushToMatchStatementStack(matchStmt, successClause, pos);
    }
private boolean isMapJson(BType originalType) {
return originalType.tag == TypeTags.MAP && ((BMapType) originalType).getConstraint().tag == TypeTags.JSON;
}
private void pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,
Location pos) {
this.matchStmtStack.push(matchStmt);
if (this.successClause != null) {
this.successClause.blockStmt = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));
}
this.successClause = successClause;
}
private Name getFieldName(BLangAccessExpression accessExpr) {
Name field = Names.EMPTY;
if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
field = new Name(((BLangFieldBasedAccess) accessExpr).field.value);
} else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr;
if (indexBasedExpression.getKind() == NodeKind.LITERAL) {
field = new Name(((BLangLiteral) indexBasedExpression).value.toString());
}
}
return field;
}
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
for (BType memType : memTypes) {
int typeTag = Types.getReferredType(memType).tag;
if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) {
return false;
}
}
return true;
}
private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
Location pos = matchExpr.pos;
BVarSymbol errorPatternVarSymbol = new BVarSymbol(0, Names.fromString(errorPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(errorPatternVarSymbol, errorPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, errorPatternVarSymbol);
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getErrorTypeNode());
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
Location pos = matchExpr.pos;
BVarSymbol nullPatternVarSymbol = new BVarSymbol(0, Names.fromString(nullPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(nullPatternVarSymbol, nullPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, nullPatternVarSymbol);
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getNillTypeNode());
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,
BLangSimpleVariable tempResultVar) {
Location pos = matchExpr.pos;
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, createLiteral(pos, symTable.nilType,
Names.NIL_VALUE));
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangWildCardMatchPattern wildCardMatchPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);
wildCardMatchPattern.setBType(symTable.anyType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardMatchPattern);
}
    /**
     * Builds the success clause of a safe-navigation match: captures the non-nil/non-error
     * value, re-applies the field/index access to the captured variable, and assigns the
     * result into the temp variable.
     *
     * @param type          the matched value's type (nil/error are lifted out below)
     * @param matchExpr     the expression being matched
     * @param accessExpr    the original access expression; cloned for re-use in the clause body
     * @param tempResultVar the temp variable the clause assigns into
     * @param liftError     whether the error type should also be lifted from {@code type}
     * @return the generated success match clause
     */
    private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,
                                                     BLangAccessExpression accessExpr,
                                                     BLangSimpleVariable tempResultVar, boolean liftError) {
        // Drop nil (and error, when lifting) from the matched type: the dedicated
        // clauses already handled those values.
        type = types.getSafeType(type, true, liftError);
        String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";

        Location pos = accessExpr.pos;
        BVarSymbol  successPatternSymbol;
        if (Types.getReferredType(type).tag == TypeTags.INVOKABLE) {
            successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),
                    this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
        } else {
            successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),
                    this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
        }

        BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
                type, null, successPatternSymbol);
        BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,
                successPatternVar.symbol);

        BLangCaptureBindingPattern captureBindingPattern =
                ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);
        BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
                ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);

        // Clone the access and retarget it at the captured (narrowed) variable.
        BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
        if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            // Share the index expression — cloning it would duplicate any side effects.
            ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
        }
        if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
            ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
                    ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
        }
        tempAccessExpr.expr = addConversionExprIfRequired(successPatternVarRef, type);
        // The clone accesses the narrowed value directly; safe navigation is already handled.
        tempAccessExpr.errorSafeNavigation = false;
        tempAccessExpr.nilSafeNavigation = false;
        accessExpr.cloneRef = null;

        // XML member access can yield error/nil at runtime; widen the clone's type accordingly.
        if (TypeTags.isXMLTypeTag(Types.getReferredType(tempAccessExpr.expr.getBType()).tag)) {
            tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
                    symTable.nilType));
        } else {
            tempAccessExpr.setBType(accessExpr.originalType);
        }
        tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;

        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

        BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);
        BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,
                Lists.of(assignmentStmt));

        // Guard: only take this clause when the captured value is of the narrowed type.
        BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));
        matchGuard.setBType(symTable.booleanType);

        return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
    }
BLangValueType getNillTypeNode() {
BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
nillTypeNode.typeKind = TypeKind.NIL;
nillTypeNode.setBType(symTable.nilType);
return nillTypeNode;
}
BLangValueType createTypeNode(BType type) {
BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
typeNode.typeKind = type.getKind();
typeNode.setBType(type);
return typeNode;
}
private BLangValueExpression cloneExpression(BLangExpression expr) {
switch (expr.getKind()) {
case SIMPLE_VARIABLE_REF:
return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
case FIELD_BASED_ACCESS_EXPR:
case INDEX_BASED_ACCESS_EXPR:
return cloneAccessExpr((BLangAccessExpression) expr);
default:
throw new IllegalStateException();
}
}
    /**
     * Clones a field/index access expression chain (used while desugaring safe
     * navigation). The receiver chain is cloned recursively; the cloned receiver
     * is typed with the nil/error-lifted ("safe") type of the original receiver,
     * while the access expression itself takes the original (non-lifted) type.
     * Safe-navigation flags are cleared on the clone, since the navigation checks
     * themselves are desugared elsewhere.
     */
    private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
        if (originalAccessExpr.expr == null) {
            return originalAccessExpr;
        }
        BLangExpression varRef;
        NodeKind kind = originalAccessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
        } else {
            varRef = cloneExpression(originalAccessExpr.expr);
        }
        // Receiver is typed with the lifted (error/nil removed) type of the original receiver.
        varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));
        BLangAccessExpression accessExpr;
        switch (originalAccessExpr.getKind()) {
            case FIELD_BASED_ACCESS_EXPR:
                accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                        ((BLangFieldBasedAccess) originalAccessExpr).field);
                break;
            case INDEX_BASED_ACCESS_EXPR:
                accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                        ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
                break;
            default:
                throw new IllegalStateException();
        }
        accessExpr.originalType = originalAccessExpr.originalType;
        accessExpr.pos = originalAccessExpr.pos;
        accessExpr.isLValue = originalAccessExpr.isLValue;
        accessExpr.symbol = originalAccessExpr.symbol;
        // The clone performs a plain access; safe navigation was handled already.
        accessExpr.errorSafeNavigation = false;
        accessExpr.nilSafeNavigation = false;
        accessExpr.setBType(originalAccessExpr.originalType);
        return accessExpr;
    }
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
symTable.intType,
symTable.intType));
}
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.SUB,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
symTable.intType,
symTable.intType));
}
private BLangLiteral getBooleanLiteral(boolean value) {
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
literal.value = value;
literal.setBType(symTable.booleanType);
literal.pos = symTable.builtinPos;
return literal;
}
private boolean isDefaultableMappingType(BType type) {
switch (types.getSafeType(type, true, false).tag) {
case TypeTags.JSON:
case TypeTags.MAP:
case TypeTags.RECORD:
return true;
case TypeTags.TYPEREFDESC:
return isDefaultableMappingType(Types.getReferredType(type));
default:
return false;
}
}
    /**
     * Creates the generated init function for a class definition, registers it on
     * the object type symbol as the generated initializer and on the class node,
     * then rewrites (desugars) it in the given environment.
     *
     * The return type mirrors the user-defined init function when one exists;
     * otherwise it is nil.
     */
    private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
        BType returnType = symTable.nilType;
        if (classDefinition.initFunction != null) {
            returnType = classDefinition.initFunction.getBType().getReturnType();
        }
        BLangFunction initFunction =
                TypeDefBuilderHelper.createInitFunctionForStructureType(null, classDefinition.symbol,
                                                                        env, names, GENERATED_INIT_SUFFIX,
                                                                        classDefinition.getBType(), returnType);
        BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
        // Attach the generated initializer to the type symbol so callers resolve it.
        typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
                                                                    (BInvokableType) initFunction.getBType(), null);
        classDefinition.generatedInitFunction = initFunction;
        initFunction.returnTypeNode.setBType(returnType);
        return rewrite(initFunction, env);
    }
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
/*
* Desugar (lhsExpr && rhsExpr) to following if-else:
*
* logical AND:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = rhsExpr;
* } else {
* $result$ = false;
* }
*
* logical OR:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = true;
* } else {
* $result$ = rhsExpr;
* }
*
*/
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
symTable.builtinPos);
BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
resultVarDef.var.symbol);
BLangExpression thenResult;
if (binaryExpr.opKind == OperatorKind.AND) {
thenResult = binaryExpr.rhsExpr;
} else {
thenResult = getBooleanLiteral(true);
}
BLangAssignment thenAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
thenBody.addStatement(thenAssignment);
BLangExpression elseResult;
BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
resultVarDef.var.symbol);
if (binaryExpr.opKind == OperatorKind.AND) {
elseResult = getBooleanLiteral(false);
} else {
elseResult = binaryExpr.rhsExpr;
}
BLangAssignment elseAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
elseBody.addStatement(elseAssignment);
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(binaryExpr.getBType());
result = rewriteExpr(stmtExpr);
}
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
switch (expression.getKind()) {
case TYPE_INIT_EXPR:
case RECORD_LITERAL_EXPR:
case OBJECT_CTOR_EXPRESSION:
return true;
case CHECK_EXPR:
return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
case TYPE_CONVERSION_EXPR:
return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
default:
return false;
}
}
private BType getRestType(BInvokableSymbol invokableSymbol) {
if (invokableSymbol != null && invokableSymbol.restParam != null) {
return invokableSymbol.restParam.type;
}
return null;
}
private BType getRestType(BLangFunction function) {
if (function != null && function.restParam != null) {
return function.restParam.getBType();
}
return null;
}
private BVarSymbol getRestSymbol(BLangFunction function) {
if (function != null && function.restParam != null) {
return function.restParam.symbol;
}
return null;
}
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
if (!field.isKeyValueField()) {
return false;
}
return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
    /**
     * Rewrites a mapping constructor into a struct literal (record targets) or a
     * map literal (other mapping targets), normalizing every field:
     * - key-value fields: non-computed identifier keys become string literals;
     * - shorthand var-ref fields ({x}) become explicit "x": x pairs;
     * - spread-operator fields have their operand rewritten in place.
     * The original field list is cleared after the rewritten list is built.
     */
    private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
        List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
        BType type = mappingConstructorExpr.getBType();
        Location pos = mappingConstructorExpr.pos;
        List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
        for (RecordLiteralNode.RecordField field : fields) {
            if (field.isKeyValueField()) {
                BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                        (BLangRecordLiteral.BLangRecordKeyValueField) field;
                BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
                BLangExpression origKey = key.expr;
                BLangExpression keyExpr;
                if (key.computedKey) {
                    keyExpr = origKey;
                } else {
                    // Identifier keys are lowered to string literals; unescape to get the raw name.
                    keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
                            StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
                            ((BLangLiteral) origKey);
                }
                BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
                        ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
                                                                 rewriteExpr(keyValueField.valueExpr));
                rewrittenField.pos = keyValueField.pos;
                rewrittenField.key.pos = key.pos;
                rewrittenFields.add(rewrittenField);
            } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                // Shorthand field {x} -> {"x": x}
                BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
                rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
                        rewriteExpr(createStringLiteral(pos,
                                StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
                        rewriteExpr(varRefField)));
            } else {
                BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                        (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
                spreadOpField.expr = rewriteExpr(spreadOpField.expr);
                rewrittenFields.add(spreadOpField);
            }
        }
        fields.clear();
        BType refType = Types.getReferredType(type);
        return refType.tag == TypeTags.RECORD ?
                new BLangStructLiteral(pos, type, refType.tsymbol, rewrittenFields) :
                new BLangMapLiteral(pos, type, rewrittenFields);
    }
    /**
     * Adds an implicit import of the internal transaction module (aliased "trx")
     * to the enclosing package, so desugared transaction code can reference its
     * symbols. Skipped when compiling the transaction-internal module itself.
     */
    protected void addTransactionInternalModuleImport() {
        if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
            BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
            List<BLangIdentifier> pkgNameComps = new ArrayList<>();
            pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
            importDcl.pkgNameComps = pkgNameComps;
            importDcl.pos = env.enclPkg.symbol.pos;
            importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
            importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
            importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
            // Symbol is pre-resolved; register on both the AST and the package symbol.
            importDcl.symbol = symTable.internalTransactionModuleSymbol;
            env.enclPkg.imports.add(importDcl);
            env.enclPkg.symbol.imports.add(importDcl.symbol);
        }
    }
} | BLangExpression firstRequiredArg = isExprAndRequiredArgSame ? invocation.requiredArgs.get(0) : null; | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner, foreach.pos,
VIRTUAL);
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(
resultVariableDefinition, resultReferenceInWhile);
statementExpression.setBType(foreach.nillableResultType);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, statementExpression, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
BLangExpression expr = valueAccessExpr.expr;
valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(whileNode);
return blockNode;
}
private BLangType getUserDefineTypeNode(BType type) {
BLangUserDefinedType recordType =
new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
ASTBuilderUtil.createIdentifier(null, ""));
recordType.setBType(type);
return recordType;
}
@Override
public void visit(BLangWhile whileNode) {
if (whileNode.onFailClause != null) {
BLangOnFailClause onFailClause = whileNode.onFailClause;
whileNode.onFailClause = null;
whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);
result = rewrite(doStmt, env);
} else {
whileNode.expr = rewriteExpr(whileNode.expr);
whileNode.body = rewrite(whileNode.body, env);
result = whileNode;
}
}
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
BLangOnFailClause onFailClause) {
BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();
BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location);
doBlock.scope = new Scope(env.scope.owner);
bLDo.body = doBlock;
bLDo.pos = location;
bLDo.onFailClause = onFailClause;
bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
doBlock.stmts.add(statement);
return bLDo;
}
    /**
     * Desugars a lock statement into an explicit lock/unlock pair with the body
     * run under a trap, so the lock is always released even when the body fails:
     *
     *   lock;
     *   $errorResult = trap { body; () };
     *   unlock;
     *   if $errorResult is error { panic $errorResult; }
     */
    @Override
    public void visit(BLangLock lockNode) {
        BLangOnFailClause currentOnFailClause = this.onFailClause;
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
        if (lockNode.onFailClause != null) {
            blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
            rewrite(lockNode.onFailClause, env);
        }
        BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
        blockStmt.addStatement(lockStmt);
        enclLocks.push(lockStmt);
        // Wrap the body in a trap that yields nil on success or the error on failure.
        BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
        BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
        BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
        statementExpression.setBType(symTable.nilType);
        BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
        trapExpr.setBType(nillableError);
        trapExpr.expr = statementExpression;
        BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
                                                           this.env.scope.owner.pkgID, nillableError,
                                                           this.env.scope.owner, lockNode.pos, VIRTUAL);
        BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
                                                                           nillableError, trapExpr,
                                                                           nillableErrorVarSymbol);
        BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
        blockStmt.addStatement(simpleVariableDef);
        // Release the lock before inspecting the trapped result.
        BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
        unLockStmt.relatedLock = lockStmt;
        blockStmt.addStatement(unLockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
        BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = lockNode.pos;
        panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
        ifBody.addStatement(panicNode);
        // Re-throw the body's error (if any) as a panic after unlocking.
        BLangTypeTestExpr isErrorTest =
                ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
        isErrorTest.setBType(symTable.booleanType);
        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
        blockStmt.addStatement(ifelse);
        result = rewrite(blockStmt, env);
        enclLocks.pop();
        this.onFailClause = currentOnFailClause;
    }
    @Override
    public void visit(BLangLockStmt lockStmt) {
        // Already in final desugared form; pass through unchanged.
        result = lockStmt;
    }
    @Override
    public void visit(BLangUnLockStmt unLockStmt) {
        // Already in final desugared form; pass through unchanged.
        result = unLockStmt;
    }
    /**
     * Builds the internal on-fail clause for a transaction body: on error it
     * first emits a rollback-if-failed sequence, then either panics with the
     * caught error (when $shouldPanic$ is set) or re-fails it so an enclosing
     * failure handler can take over.
     */
    private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
                                                      BLangSimpleVarRef shouldRetryRef) {
        BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
        trxOnFailClause.pos = pos;
        trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
        trxOnFailClause.body.scope = new Scope(env.scope.owner);
        trxOnFailClause.isInternal = true;
        // $trxError$ captures the error that failed the transaction body.
        BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
                env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
        BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
                "$trxError$", symTable.errorType, null, trxOnFailErrorSym);
        trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
                trxOnFailError);
        trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
        transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
                trxBlockId, shouldRetryRef);
        BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
        shouldNotPanic.setBType(symTable.booleanType);
        shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
        BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
        BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = pos;
        panicNode.expr = caughtError;
        // if (!$shouldPanic$) { fail $trxError$; } else { panic $trxError$; }
        BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
        trxOnFailClause.body.stmts.add(exitIf);
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = pos;
        failStmt.expr = caughtError;
        failBlock.stmts.add(failStmt);
        trxOnFailClause.bodyContainsFail = true;
        return trxOnFailClause;
    }
    /**
     * Desugars a transaction statement. With a user-written on-fail clause, the
     * statement is first wrapped in a do/on-fail and re-rewritten. Otherwise a
     * fresh transaction block id is allocated, a $shouldPanic$ flag is introduced,
     * an internal on-fail (rollback + conditional panic/fail) is attached, and
     * the transaction desugar produces the final statement block. Visitor state
     * (trx block id, shouldReturnErrors, on-fail clause) is saved and restored.
     */
    @Override
    public void visit(BLangTransaction transactionNode) {
        if (transactionNode.onFailClause != null) {
            // Detach the user's on-fail and desugar via a wrapping do-statement.
            BLangOnFailClause onFailClause = transactionNode.onFailClause;
            transactionNode.onFailClause = null;
            transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
            BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
            result = rewrite(doStmt, env);
        } else {
            BLangLiteral currentTrxBlockId = this.trxBlockId;
            String uniqueId = String.valueOf(++transactionBlockCount);
            this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
            boolean currShouldReturnErrors = this.shouldReturnErrors;
            this.shouldReturnErrors = true;
            BLangOnFailClause currOnFailClause = this.onFailClause;
            // $shouldPanic$ decides whether a failure panics or is failed onward; it
            // is captured as a closure variable by the generated transaction code.
            BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
            BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
                    env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
            shouldPanicVarSymbol.closure = true;
            BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
                    "$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
            BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
                    shouldPanicVariable);
            BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
                    shouldPanicVarSymbol);
            BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
                    this.shouldRetryRef);
            enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
            boolean userDefinedOnFailAvbl = this.onFailClause != null;
            analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
            BLangBlockStmt transactionStmtBlock =
                    transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
            transactionStmtBlock.stmts.add(0, shouldPanicDef);
            transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
            transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
                    BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
                    BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
            result = rewrite(transactionStmtBlock, this.env);
            // Restore the saved per-transaction visitor state.
            this.shouldReturnErrors = currShouldReturnErrors;
            this.trxBlockId = currentTrxBlockId;
            swapAndResetEnclosingOnFail(currOnFailClause);
        }
    }
    @Override
    public void visit(BLangRollback rollbackNode) {
        // Delegate to the transaction desugar, which emits the rollback call sequence.
        BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
        result = rewrite(rollbackStmtExpr, env);
    }
    /**
     * Builds the internal on-fail clause for a retry loop. On error it:
     * stores the caught error in $retryResult$, sets $continueLoop$, asks the
     * retry manager whether to retry, and when retrying is declined either fails
     * the error onward (when a user on-fail clause exists) or flags the result
     * to be returned from the enclosing function.
     */
    private BLangOnFailClause createRetryInternalOnFail(Location pos,
                                                        BLangSimpleVarRef retryResultRef,
                                                        BLangSimpleVarRef retryManagerRef,
                                                        BLangSimpleVarRef shouldRetryRef,
                                                        BLangSimpleVarRef continueLoopRef,
                                                        BLangSimpleVarRef returnResult) {
        BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
        internalOnFail.pos = pos;
        internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
        internalOnFail.body.scope = new Scope(env.scope.owner);
        BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
                env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
        BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
                "$caughtError$", symTable.errorType, null, caughtErrorSym);
        internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
                caughtError);
        env.scope.define(caughtErrorSym.name, caughtErrorSym);
        BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
        // $retryResult$ = $caughtError$;
        BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
        internalOnFail.body.stmts.add(errorAssignment);
        // $continueLoop$ = true;
        BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
                ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
        internalOnFail.body.stmts.add(continueLoopTrue);
        // $shouldRetry$ = retryManager.shouldRetry($caughtError$);
        BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
                retryManagerRef, caughtErrorRef);
        BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
                shouldRetryInvocation);
        internalOnFail.body.stmts.add(shouldRetryAssignment);
        BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
        shouldNotRetryCheck.setBType(symTable.booleanType);
        shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
        BLangGroupExpr exitCheck = new BLangGroupExpr();
        exitCheck.setBType(symTable.booleanType);
        exitCheck.expression = shouldNotRetryCheck;
        BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
        BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
        if (this.onFailClause != null) {
            // User on-fail exists: on "no retry" fail the error to it, else continue looping.
            BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
            failStmt.pos = pos;
            failStmt.expr = retryResultRef;
            exitLogicBlock.stmts.add(failStmt);
            internalOnFail.bodyContainsFail = true;
            internalOnFail.body.stmts.add(exitIf);
            BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
            loopContinueStmt.pos = pos;
            internalOnFail.body.stmts.add(loopContinueStmt);
        } else {
            // No user on-fail: on "no retry" flag the error result for return.
            BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
                    ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
            exitLogicBlock.stmts.add(returnErrorTrue);
            internalOnFail.body.stmts.add(exitIf);
        }
        return internalOnFail;
    }
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
List<BType> paramTypes = new ArrayList<>();
paramTypes.add(symTable.booleanType);
BInvokableType type = new BInvokableType(paramTypes, symTable.booleanType,
null);
BOperatorSymbol notOperatorSymbol = new BOperatorSymbol(
names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol,
symTable.builtinPos, VIRTUAL);
return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,
OperatorKind.NOT, notOperatorSymbol);
}
    /**
     * Creates a lambda-function node wrapping a freshly defined function with the
     * given parameters, return type and body, and derives its invokable type from
     * the defined parameter symbols. The function name is the prefix plus a
     * per-compilation lambda counter.
     */
    BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                             List<BLangSimpleVariable> lambdaFunctionVariable,
                                             TypeNode returnType, BLangFunctionBody lambdaBody) {
        BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
        BLangFunction func =
                ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
        lambdaFunction.function = func;
        func.requiredParams.addAll(lambdaFunctionVariable);
        func.setReturnTypeNode(returnType);
        func.desugaredReturnType = true;
        defineFunction(func, env.enclPkg);
        // Re-read the params after symbol definition — NOTE(review): presumably
        // defineFunction can update the param symbols; confirm before relying on it.
        lambdaFunctionVariable = func.requiredParams;
        func.body = lambdaBody;
        func.desugared = false;
        lambdaFunction.pos = pos;
        List<BType> paramTypes = new ArrayList<>();
        lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
        lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
                                                   null));
        return lambdaFunction;
    }
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, List<BLangStatement> fnBodyStmts,
SymbolEnv env, Scope bodyScope) {
BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
body.scope = bodyScope;
SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
}
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
final BPackageSymbol packageSymbol = targetPkg.symbol;
final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);
symbolEnter.defineNode(funcNode, packageEnv);
packageEnv.enclPkg.functions.add(funcNode);
packageEnv.enclPkg.topLevelNodes.add(funcNode);
}
    @Override
    public void visit(BLangForkJoin forkJoin) {
        // Fork-join needs no further lowering at this stage; pass through unchanged.
        result = forkJoin;
    }
@Override
public void visit(BLangLiteral literalExpr) {
if (literalExpr.getBType().tag == TypeTags.ARRAY
&& ((BArrayType) literalExpr.getBType()).eType.tag == TypeTags.BYTE) {
result = rewriteBlobLiteral(literalExpr);
return;
}
result = literalExpr;
}
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
String[] result = getBlobTextValue((String) literalExpr.value);
byte[] values;
if (BASE_64.equals(result[0])) {
values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
} else {
values = hexStringToByteArray(result[1]);
}
BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteralNode.setBType(literalExpr.getBType());
arrayLiteralNode.pos = literalExpr.pos;
arrayLiteralNode.exprs = new ArrayList<>();
for (byte b : values) {
arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
}
return arrayLiteralNode;
}
private String[] getBlobTextValue(String blobLiteralNodeText) {
String nodeText = blobLiteralNodeText.replace("\t", "").replace("\n", "").replace("\r", "")
.replace(" ", "");
String[] result = new String[2];
result[0] = nodeText.substring(0, nodeText.indexOf('`'));
result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`'));
return result;
}
private static byte[] hexStringToByteArray(String str) {
int len = str.length();
byte[] data = new byte[len / 2];
for (int i = 0; i < len; i += 2) {
data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16));
}
return data;
}
    @Override
    public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {
        // Only the spread operand needs rewriting; the wrapper node is kept as-is.
        listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);
        result = listConstructorSpreadOpExpr;
    }
    /**
     * Desugars a list constructor into the concrete literal node matching its
     * contextually-determined type: tuple literal, JSON array literal (direct
     * json target or json element type), typedesc expression (typedesc context),
     * or a plain array literal otherwise.
     */
    @Override
    public void visit(BLangListConstructorExpr listConstructor) {
        listConstructor.exprs = rewriteExprs(listConstructor.exprs);
        BLangExpression expr;
        BType listConstructorType = Types.getReferredType(listConstructor.getBType());
        if (listConstructorType.tag == TypeTags.TUPLE) {
            expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
            result = rewriteExpr(expr);
        } else if (listConstructorType.tag == TypeTags.JSON) {
            expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));
            result = rewriteExpr(expr);
        } else if (getElementType(listConstructorType).tag == TypeTags.JSON) {
            expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
            result = rewriteExpr(expr);
        } else if (listConstructorType.tag == TypeTags.TYPEDESC) {
            // Used in a typedesc context: lower to a typedesc expression instead.
            final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
            typedescExpr.resolvedType = listConstructor.typedescType;
            typedescExpr.setBType(symTable.typeDesc);
            result = rewriteExpr(typedescExpr);
        } else {
            expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
            result = rewriteExpr(expr);
        }
    }
    @Override
    public void visit(BLangTableConstructorExpr tableConstructorExpr) {
        // Rewrite each member record literal in place; the constructor node is kept.
        rewriteExprs(tableConstructorExpr.recordLiteralList);
        result = tableConstructorExpr;
    }
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
BType arrayLiteralType = Types.getReferredType(arrayLiteral.getBType());
if (arrayLiteralType.tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
return;
} else if (getElementType(arrayLiteralType).tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
return;
}
result = arrayLiteral;
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
if (tupleLiteral.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = tupleLiteral.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
return;
}
List<BLangExpression> exprs = tupleLiteral.exprs;
BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
List<BType> tupleMemberTypes = tupleType.tupleTypes;
int tupleMemberTypeSize = tupleMemberTypes.size();
int tupleExprSize = exprs.size();
boolean isInRestType = false;
int i = 0;
for (BLangExpression expr: exprs) {
if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();
spreadOpType = Types.getReferredType(spreadOpType);
if (spreadOpType.tag == TypeTags.ARRAY) {
BArrayType spreadOpBArray = (BArrayType) spreadOpType;
if (spreadOpBArray.size >= 0) {
i += spreadOpBArray.size;
continue;
}
} else {
BTupleType spreadOpTuple = (BTupleType) spreadOpType;
if (types.isFixedLengthTuple(spreadOpTuple)) {
i += spreadOpTuple.tupleTypes.size();
continue;
}
}
isInRestType = true;
continue;
}
BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
BType targetType = tupleType.restType;
if (!isInRestType && i < tupleMemberTypeSize) {
targetType = tupleMemberTypes.get(i);
}
types.setImplicitCastExpr(expr, expType, targetType);
i++;
}
tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
result = tupleLiteral;
}
    @Override
    public void visit(BLangGroupExpr groupExpr) {
        // Grouping parentheses carry no runtime semantics; desugar to the inner expression.
        result = rewriteExpr(groupExpr.expression);
    }
@Override
public void visit(BLangRecordLiteral recordLiteral) {
List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
    /**
     * Desugars a simple variable reference into the concrete var-ref node for its
     * symbol kind: XMLNS-prefixed names become string-typed QNames; function
     * symbols become function refs; type symbols become type loads; locals,
     * object fields and package-level variables get their dedicated ref nodes.
     * Simple-valued constants are inlined as literals, and an implicit conversion
     * is added when the symbol's declared type differs from the use-site type.
     */
    @Override
    public void visit(BLangSimpleVarRef varRefExpr) {
        BLangSimpleVarRef genVarRefExpr = varRefExpr;
        // XML namespace prefix used as a value: lower to a string-typed QName.
        if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
            BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
            qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
            qnameExpr.localname = varRefExpr.variableName;
            qnameExpr.prefix = varRefExpr.pkgAlias;
            qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
            qnameExpr.isUsedInXML = false;
            qnameExpr.pos = varRefExpr.pos;
            qnameExpr.setBType(symTable.stringType);
            result = qnameExpr;
            return;
        }
        if (varRefExpr.symbol == null) {
            result = varRefExpr;
            return;
        }
        // Prefer the original symbol when available — NOTE(review): this symbol
        // appears to be a copy created earlier in compilation; confirm.
        if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
            BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
            if (varSymbol.originalSymbol != null) {
                varRefExpr.symbol = varSymbol.originalSymbol;
            }
        }
        BType type = varRefExpr.getBType();
        BSymbol ownerSymbol = varRefExpr.symbol.owner;
        if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
                Types.getReferredType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {
            genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
        } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
                !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
            genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
            if (varRefExpr.symbol.tag == SymTag.TYPE_DEF) {
                type = ((BTypeDefinitionSymbol) varRefExpr.symbol).referenceType;
            }
        } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
                (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
            genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
        } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
            genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
        } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
                (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
            // Constants of simple value types (and nil) are inlined as literals.
            if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
                BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
                BType referredType = Types.getReferredType(constSymbol.literalType);
                if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {
                    BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                            constSymbol.value.value);
                    result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
                    return;
                }
            }
            genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
            // Inside a lock: record this global and everything it depends on.
            if (!enclLocks.isEmpty()) {
                BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
                BLangLockStmt lockStmt = enclLocks.peek();
                lockStmt.addLockVariable(symbol);
                lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
            }
        }
        genVarRefExpr.setBType(type);
        genVarRefExpr.pos = varRefExpr.pos;
        // L-values (and the ignore variable "_") keep the symbol's declared type.
        if ((varRefExpr.isLValue)
                || genVarRefExpr.symbol.name.equals(IGNORE)) {
            genVarRefExpr.isLValue = varRefExpr.isLValue;
            genVarRefExpr.setBType(varRefExpr.symbol.type);
            result = genVarRefExpr;
            return;
        }
        genVarRefExpr.isLValue = varRefExpr.isLValue;
        // R-values: convert from the symbol's declared type to the use-site type.
        BType targetType = genVarRefExpr.getBType();
        genVarRefExpr.setBType(genVarRefExpr.symbol.type);
        BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
        result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
    }
// ns:field access on XML shares the general field-based-access lowering.
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}
// Lowers `expr.field` into the concrete access node for the receiver's type:
// object field/bound method, record field, lax (json/xml) access, map access,
// or XML access. Safe navigation (`?.` chains) is handled first by a
// dedicated rewrite.
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }

    BLangAccessExpression targetVarRef = fieldAccessExpr;

    // Normalize the receiver to the effective (intersection-resolved) type,
    // inserting a cast when the rewritten receiver's type differs.
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }

    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    BType refType = Types.getReferredType(varRefType);
    int varRefTypeTag = refType.tag;
    // NOTE(review): for unions only the FIRST member's tag is inspected here —
    // presumably the union is known to be homogeneous at this point; confirm.
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Accessing an attached method as a value: turn it into a
            // delegating closure.
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;

            // Field writes inside the object's own (generated) initializer
            // count as store-on-creation even if not flagged by the checker.
            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;

                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
                        (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }

            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false,
                    isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Attached-function value read on a record.
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(refType)) {
        // Lax typing: json-like access, maps of XML, or XML attribute access.
        if (!(refType.tag == TypeTags.XML || refType.tag == TypeTags.XML_ELEMENT)) {
            if (refType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) refType).constraint.tag)) {
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            // Upcast to json and use JSON access semantics.
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            // XML attribute / element-name access via a lang-lib call.
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        // TODO: Handle `map[int]` type access expr (from earlier versions; kept as-is)
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }

    // Propagate lvalue-ness, type and optional-access flag to the new node.
    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
// Plain `expr.field` access shares the general field-based-access lowering.
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    rewriteFieldBasedAccess(fieldAccessExpr);
}
// Lowers `obj.method` (method accessed as a value, not invoked) into an
// anonymous delegating lambda: a fresh function with the same parameter list
// that closes over the receiver and forwards all args to the real method.
// If the receiver is not a simple var-ref it is first evaluated into a temp
// variable so it is evaluated exactly once; in that case the result is a
// statement-expression (temp def + lambda).
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;
    // Generate a synthetic delegate function: $anon$method$delegate$<name>$N
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$anon$method$delegate$" + originalMemberFuncSymbol.name.value + "$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
                                                       Names.fromString(funcName), env.enclPkg.packageID,
                                                       originalMemberFuncSymbol.type, env.scope.owner, pos,
                                                       VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();

    // The receiver becomes a closure variable of the delegate. A simple
    // var-ref is captured directly; any other expression is hoisted into a
    // temp object variable first.
    BLangExpression receiver = fieldAccessExpr.expr;
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                                                     receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }

    // Mirror each required parameter of the target method on the delegate,
    // and forward it by reference in the generated call.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                                       VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);

        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }

    // Mirror the rest parameter (if any), forwarding it as `...restArg`.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
                                          VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);

        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
        restArgExpr.expr = restArg;
        restArgExpr.pos = pos;
        restArgExpr.setBType(restSym.type);
        restArgExpr.expectedType = restArgExpr.getBType();
        restArgs.add(restArgExpr);
    }

    // Delegate body: `return receiver.method(args..., ...rest);`
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(
            receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);

    // Wrap the function in a lambda node and register it at package level.
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());

    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Evaluate the receiver once via the temp def, then yield the lambda.
        BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}
// Builds an attached-method invocation node `receiver.field(args..., ...rest)`
// whose static type is the callee's declared return type.
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.name = field;
    invocation.expr = receiver;
    invocation.symbol = invocableSymbol;
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = restArgs;
    invocation.setBType(((BInvokableType) invocableSymbol.type).retType);
    return invocation;
}
// Lowers a lax field access on a `map<xml...>` into a statement expression
// equivalent to:
//     T|error $mapAccessResult$;
//     var $mapAccess = m["field"];            // T|()
//     if $mapAccess is () { $mapAccessResult$ = error("{map}InvalidKey", key = "field"); }
//     else                { $mapAccessResult$ = $mapAccess; }
//     => $mapAccessResult$
// i.e. a missing key becomes an error value instead of nil.
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    // Overall result type is the field type widened with error.
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);

    // The raw map access yields the field type or nil (missing key).
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);

    // if ($mapAccess is ()) { ... error path ... } else { ... value path ... }
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);

    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);

    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;

    // error("{ballerina/lang.map}InvalidKey", key = "<field>")
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);

    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;

    BLangSimpleVariableDef errorDef =
            createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);

    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;

    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;

    statementExpression.expr = resultRef;
    return statementExpression;
}
// Lowers lax field access on XML to a lang-lib call: `x._` reads the element
// name (with nil lifting); any other field reads an attribute by (possibly
// namespace-expanded) name.
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    String fieldName = fieldAccessExpr.field.value;
    // A ns-prefixed access uses the expanded `{uri}local` attribute name.
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsAccess.nsSymbol.namespaceURI, fieldName);
    }

    // Accessing `_` is a get-element-name operation, not an attribute read.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }

    ArrayList<BLangExpression> args = new ArrayList<>();
    args.add(createStringLiteral(fieldAccessExpr.field.pos, fieldName));
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
// Materializes whether this is an optional (`?.`) access as a rewritten
// boolean literal, for passing into generated lang-lib calls.
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    boolean optional = fieldAccessExpr.isOptionalFieldAccess();
    return rewrite(createLiteral(fieldAccessExpr.pos, symTable.booleanType, optional), env);
}
// Expanded-name form ("{namespaceURI}localName") used for namespace-qualified
// XML attribute lookups.
private String createExpandedQName(String nsURI, String localName) {
    StringBuilder expanded = new StringBuilder();
    expanded.append('{').append(nsURI).append('}').append(localName);
    return expanded.toString();
}
// Lowers `expr[index]` into the concrete access node for the receiver's type:
// map, mapping (record), list (array/tuple), XML, string, or table access.
// Safe navigation is handled first by a dedicated rewrite.
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }

    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);

    // Normalize the receiver to its effective (intersection-resolved,
    // reference-unwrapped) type, casting if the rewrite changed the type.
    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }

    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                                              indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Record-like receivers use struct field access.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                                                      indexAccessExpr.indexExpr,
                                                      (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                                                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                                              indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String member access: upcast the receiver to string first.
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                                                 indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                                                indexAccessExpr.indexExpr);
    }

    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}
// Plain (non-async) function/method invocation.
@Override
public void visit(BLangInvocation iExpr) {
    rewriteInvocation(iExpr, false);
}
// Lowers `error(message, cause?, key = value, ...)`: normalizes the cause to a
// second positional argument, and folds the named args into an immutable
// (clone-readonly) detail record matching the error type's detail type.
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Ensure there is always a cause argument; default it to nil.
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);

    // Build the detail record from the named args (empty record when none).
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));

            // Record detail types take `any` values; other detail shapes keep
            // the arg's own type.
            if (Types.getReferredType(recordLiteral.getBType()).tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    // A non-async action invoked inside a transaction needs the transaction
    // coordinator started (once) before the call runs.
    if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }

    // An async call to an isolated callee gets a strand annotation with
    // thread "any" (see addStrandAnnotationWithThreadAny).
    if (actionInvocation.async && Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {
        addStrandAnnotationWithThreadAny(actionInvocation);
    }

    rewriteInvocation(actionInvocation, actionInvocation.async);
}
// Attaches the (lazily created, shared) strand-with-thread-any annotation to
// the given async action invocation and records it on the callee symbol.
private void addStrandAnnotationWithThreadAny(BLangInvocation.BLangActionInvocation actionInvocation) {
    // Create the annotation attachment only once per Desugar run.
    if (this.strandAnnotAttachement == null) {
        BLangPackage pkgNode = env.enclPkg;
        // Snapshot existing type defs so init functions are generated only
        // for record type nodes introduced by the annotation creation below.
        List<BLangTypeDefinition> prevTypeDefinitions = new ArrayList<>(pkgNode.typeDefinitions);
        this.strandAnnotAttachement =
                annotationDesugar.createStrandAnnotationWithThreadAny(actionInvocation.pos, env);
        addInitFunctionForRecordTypeNodeInTypeDef(pkgNode, prevTypeDefinitions);
    }
    actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);
    ((BInvokableSymbol) actionInvocation.symbol)
            .addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);
}
// Common lowering for all invocation forms: reorders/rewrites arguments,
// resolves the receiver, handles function-pointer calls, parameterized
// return types, attached (object/record) methods, and object-constructor
// closures, then fixes type-param-related casts on the result.
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    BLangInvocation invRef = invocation;

    // Inside a lock, the callee's dependent global variables must also be
    // locked.
    if (!enclLocks.isEmpty()) {
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }

    // Reorder named/defaultable args into positional order, then rewrite.
    reorderArguments(invocation);

    rewriteExprs(invocation.requiredArgs);

    // For lang-lib calls the receiver was folded into the first required arg.
    if (invocation.langLibInvocation && !invocation.requiredArgs.isEmpty()) {
        invocation.expr = invocation.requiredArgs.get(0);
    } else {
        invocation.expr = rewriteExpr(invocation.expr);
    }

    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);

    rewriteExprs(invocation.restArgs);

    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
                                                 invocation.symbol.pkgID, invocation.symbol.owner, env);

    if (invocation.functionPointerInvocation) {
        visitFunctionPointerInvocation(invocation);
        return;
    }
    result = invRef;

    // A parameterized return type is resolved from the actual arguments;
    // async calls wrap it in a future.
    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }

    if (invocation.expr == null) {
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        // Recover the receiver from its symbol (e.g. generated init calls).
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }

    switch (Types.getReferredType(invocation.expr.getBType()).tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            // Attached method call: the receiver becomes the implicit first
            // argument of an attached-function invocation (lang-lib calls
            // already did this above).
            if (!invocation.langLibInvocation) {
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                                            invocation.symbol, invocation.getBType(),
                                                            invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }

    // Object-constructor expressions with closures remember their init
    // invocation for later closure desugaring.
    if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {
        BObjectType initializingObject = (BObjectType) invocation.expr.getBType();
        BLangClassDefinition classDef = initializingObject.classDef;
        if (classDef.hasClosureVars) {
            OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;
            if (oceEnvData.attachedFunctionInvocation == null) {
                oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) result;
            }
        }
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}
// For lang-lib calls, casts every non-rest argument back to the declared
// (type-param-substituted) parameter type where required.
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }

    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BLangExpression converted = addConversionExprIfRequired(args.get(idx), params.get(idx).type);
        args.set(idx, converted);
    }
}
/* This function is a workaround and needs improvement.
 * Notes for improvement:
 * 1. Both arguments are the same invocation node.
 * 2. Due to the current type-param logic, the type-param flag is put on the original type.
 * 3. When an error type carries a Cloneable type with the type-param flag, this code changes the expression type.
 * 4. Using the error type is a problem because the Cloneable type is itself a type param (e.g. ExprBodiedFunctionTest);
 *    `never` was added to the CloneableType type param:
 * @typeParam type
 * CloneableType Cloneable|never;
 *
 */
// Restores the callee's declared return type on the generated invocation and
// casts back to the originally inferred type, for lang-lib calls and callees
// whose return type contains a type param.
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    BType declaredRetType = ((BInvokableSymbol) iExpr.symbol).retType;
    boolean needsFix = iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(declaredRetType);
    if (!needsFix) {
        return;
    }

    BType originalInvType = genIExpr.getBType();
    // Async calls keep their future type; sync calls take the declared type.
    if (!genIExpr.async) {
        genIExpr.setBType(declaredRetType);
    }
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}
// Casts arguments bound to stream-typed parameters to the declared parameter
// type where required.
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    if (params.isEmpty()) {
        return;
    }

    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BVarSymbol param = params.get(idx);
        if (Types.getReferredType(param.type).tag == TypeTags.STREAM) {
            args.set(idx, addConversionExprIfRequired(args.get(idx), param.type));
        }
    }
}
// Builds a fresh `()` (nil) literal node.
private BLangLiteral createNilLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
public void visit(BLangTypeInit typeInitExpr) {
    // `new stream<...>(...)` has a dedicated construction path; every other
    // type-init is an object construction.
    BType referredType = Types.getReferredType(typeInitExpr.getBType());
    if (referredType.tag == TypeTags.STREAM) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    } else {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}
// Lowers `new T(...)` into a statement expression:
//     T $obj$ = <type init>;
//     var $temp$ = $obj$.$init$(...);
//     T|error $result$ = $temp$ is error ? $temp$ : $obj$;
//     => $result$
// When the (generated) init returns nil, the init call is emitted as a plain
// expression statement and the object itself is the result.
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);

    // Define the object variable holding the allocated instance.
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);

    // Redirect the init invocation to the generated initializer, with the
    // new object as its receiver.
    BLangInvocation typeInitInvocation = typeInitExpr.initInvocation;
    typeInitInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    typeInitInvocation.objectInitMethod = true;

    // Nil-returning init: no error to check for, so just call it and yield
    // the object.
    if (Types.getReferredType(typeInitInvocation.getBType()).tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitInvocation;
        typeInitInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }

    // Capture the init call's return value for the error check.
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitInvocation.getBType(),
                                                              typeInitInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);

    // Result variable selected by the if/else below.
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);

    // Condition: `$temp$ is error`.
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);

    // then: `$result$ = $temp$;` (propagate the init error)
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);

    // else: `$result$ = $obj$;`
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);

    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);

    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}
// Lowers `new stream<C, E>(iterator?)` into a call to the internal
// constructStream function, passing typedescs for the constraint and
// completion types plus the optional iterator argument.
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;

    // typedesc<C> for the stream's constraint type.
    BType constraintType = ((BStreamType) typeInitExpr.getBType()).constraint;
    BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();
    constraintTdExpr.resolvedType = constraintType;
    constraintTdExpr.setBType(constraintTdType);

    // typedesc<E> for the stream's completion type.
    BType completionType = ((BStreamType) typeInitExpr.getBType()).completionType;
    BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();
    completionTdExpr.resolvedType = completionType;
    completionTdExpr.setBType(completionTdType);

    // Forward the optional iterator argument (only the first arg is used).
    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));
    if (!typeInitExpr.argsExpr.isEmpty()) {
        args.add(typeInitExpr.argsExpr.get(0));
    }

    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, symbol, args, symResolver);
    streamConstructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return streamConstructInvocation;
}
// Creates a simple variable definition `type name = expr;`, reusing an
// already-visible symbol with the same name when one exists, otherwise
// minting a fresh variable symbol owned by the enclosing scope.
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    BSymbol varSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    boolean notVisible = varSym == null || varSym == symTable.notFoundSymbol;
    if (notVisible) {
        varSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSym);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(location);
    varDef.var = variable;
    varDef.setBType(variable.getBType());
    return varDef;
}
// Resolves the object type backing a `new` expression: the type itself, the
// first object member of a union (e.g. `Obj|error`), or noType when the union
// has no object member. Any other shape is a compiler invariant violation.
private BType getObjectType(BType bType) {
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }

    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
// Builds a synthesized `error` type node positioned at the builtin location.
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorType.pos = symTable.builtinPos;
    errorType.setBType(symTable.errorType);
    return errorType;
}
// Builds a synthesized `error?` type node.
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    // Give the synthesized node a position, consistent with getErrorTypeNode();
    // a node without a pos can trip position-dependent processing later.
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
// Lowers the conditional (ternary) expression into a statement expression
// built around an if/else, then rewrites the result.
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * First desugar to if-else:
     *
     * T $result$;
     * if () {
     *    $result$ = thenExpr;
     * } else {
     *    $result$ = elseExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);

    // then: `$ternary_result$ = thenExpr;`
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);

    // else: `$ternary_result$ = elseExpr;`
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);

    // Assemble `{ def; if (cond) {...} else {...} } => $ternary_result$`.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);

    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());

    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
    // `wait f1 | f2 | f3` arrives as a binary-expression tree; flatten it into a list of
    // alternatives. A plain `wait f` becomes a single-element list.
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() != NodeKind.BINARY_EXPR) {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    } else {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    }
    result = waitExpr;
}
/**
 * Flattens a binary tree of wait alternatives (e.g. {@code wait f1 | f2 | f3}) into the
 * given list, left-to-right, rewriting each leaf expression on the way.
 *
 * @param binaryExpr the binary expression to flatten
 * @param exprs      accumulator for the collected leaf expressions
 * @return the same {@code exprs} list, for caller convenience
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
    return exprs;
}
/**
 * Helper for flattening wait alternatives: recurses into nested binary expressions and
 * rewrites/collects the leaf expressions.
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        // Leaf alternative: rewrite it and record it.
        exprs.add(rewriteExpr(expr));
        return;
    }
    collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
}
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    // Rewrite every entry: shorthand entries (no explicit value) rewrite the key expression,
    // all others rewrite the value expression.
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    });
    // Replace the wait-for-all expression with its literal form for code generation.
    BLangWaitForAllExpr.BLangWaitLiteral waitLiteral =
            new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    waitLiteral.pos = waitExpr.pos;
    result = rewriteExpr(waitLiteral);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    // A nil-typed trapped expression needs no conversion; anything else is converted
    // to the trap expression's own (error-containing) type.
    BType trappedType = Types.getReferredType(trapExpr.expr.getBType());
    if (trappedType.tag != TypeTags.NIL) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());
    }
    result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Nil-lifted operators (e.g. `int? + int?`) desugar to an if/else statement expression.
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    // Range operators (`...`, `..<`) become a lang.internal createIntRange() invocation.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);
        rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            // `a ..< b` uses an adjusted end value so it behaves like a closed range.
            rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));
        return;
    }
    // Logical && / || get dedicated short-circuit desugaring.
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    // For arithmetic/bitwise operators, widen byte operands to int when the expected
    // result type is int.
    OperatorKind binaryOpKind = binaryExpr.opKind;
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    int lhsExprTypeTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    // For (deep/reference) equality with mixed byte/int operands, normalize both sides to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    // Same operand type tags: no casts needed, except shift/arithmetic on non-value types.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // string + xml: wrap the string side in an xml text literal so both sides are xml.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }
    // Numeric promotion: if one side is decimal (or float), cast the other side to match.
    // Decimal takes precedence over float; these checks are order sensitive.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    // Remaining mixed-type cases are handled per operator family.
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
    }
}
/**
 * Desugars a nil-lifted binary expression into a statement expression that checks both
 * operands for nil before applying the operator.
 */
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    /*
     * int? x = 3;
     * int? y = 5;
     * int? z = x + y;
     * Above is desugared to
     * int? $result$;
     *
     * int? $lhsExprVar$ = x;
     * int? $rhsExprVar$ = y;
     * if (lhsVar is () or rhsVar is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = $lhsExprVar$ + $rhsExprVar$;
     * }
     * int z = $result$;
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    // First member of the union is the non-nil result type.
    BType nonNilType = exprBType.getMemberTypes().iterator().next();
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
    boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
    boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
    // For bitwise operators, the operands keep their own (nil-stripped) types instead of
    // being cast to the result type.
    BType rhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.rhsExpr.getBType().isNullable()) {
            rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
        } else {
            rhsType = binaryExpr.rhsExpr.getBType();
        }
    }
    BType lhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.lhsExpr.getBType().isNullable()) {
            lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
        } else {
            lhsType = binaryExpr.lhsExpr.getBType();
        }
    }
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    }
    // $result$ holds either nil or the computed value.
    BLangSimpleVariableDef tempVarDef = createVarDef("result",
            binaryExpr.getBType(), null, binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Capture both operands in temporaries so each is evaluated exactly once.
    BLangSimpleVariableDef lhsVarDef = createVarDef("$lhsExprVar$", binaryExpr.lhsExpr.getBType(),
            binaryExpr.lhsExpr, binaryExpr.pos);
    BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);
    blockStmt.addStatement(lhsVarDef);
    BLangSimpleVariableDef rhsVarDef = createVarDef("$rhsExprVar$", binaryExpr.rhsExpr.getBType(),
            binaryExpr.rhsExpr, binaryExpr.pos);
    BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);
    blockStmt.addStatement(rhsVarDef);
    // Condition: ($lhsExprVar$ is ()) || ($rhsExprVar$ is ())
    BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, lhsVarRef, getNillTypeNode());
    typeTestExprOne.setBType(symTable.booleanType);
    BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos, rhsVarRef, getNillTypeNode());
    typeTestExprTwo.setBType(symTable.booleanType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    // if-branch: $result$ = ();
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else-branch: $result$ = <lhsType>$lhsExprVar$ op <rhsType>$rhsExprVar$;
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,
            nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
    newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);
    newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);
    bLangAssignmentElse.expr = newBinaryExpr;
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}
/**
 * Checks whether the given binary expression needs nil-lifting, i.e. at least one operand
 * is nullable and the operator is arithmetic, shift or bitwise.
 *
 * @param binaryExpr the binary expression to inspect
 * @return true when the expression must be desugared with nil checks
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts the implicit casts needed for an arithmetic binary expression whose operand
 * type tags differ (decimal/float promotion has already been handled by the caller).
 * For xml concatenation with a string operand, the string side is wrapped in an xml
 * text literal instead of being cast.
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    // int op int, string op string and xml op xml need no casts.
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Bug fix: the literal wraps the LHS expression, so it must carry the LHS
            // position (previously binaryExpr.rhsExpr.pos was used by mistake).
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    // Fallback: cast both operands to the result type of the whole expression.
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Inserts casts so that both operands of a shift expression are int-compatible:
 * any operand that is neither an integer type nor byte is cast to int.
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIsIntLike = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsIntLike = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIsIntLike) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIsIntLike) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
/**
 * Inserts casts for relational (comparison) expressions with mixed operand types:
 * integer/byte mixes are widened to int, string subtype mixes are widened to string.
 */
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean lhsIsInt = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean rhsIsInt = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    // Both integer, or both byte: already comparable as-is.
    if ((lhsIsInt && rhsIsInt) || (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.BYTE)) {
        return;
    }
    // Exactly one integer side: widen the other side to int.
    if (lhsIsInt != rhsIsInt) {
        if (lhsIsInt) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        } else {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        }
        return;
    }
    // Neither side is an integer type but one is byte: widen both to int.
    if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    boolean lhsIsString = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean rhsIsString = TypeTags.isStringTypeTag(rhsExprTypeTag);
    if (lhsIsString && rhsIsString) {
        return;
    }
    // Exactly one string side: widen the other side to string.
    if (lhsIsString != rhsIsString) {
        if (lhsIsString) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
        } else {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
        }
    }
}
/**
 * Builds an invocation of the lang.internal {@code createIntRange} function for the
 * desugared form of the range operators ({@code ...} / {@code ..<}).
 *
 * @param location source position for the generated invocation
 * @param lhsExpr  range start expression
 * @param rhsExpr  range end expression
 * @return the createIntRange(...) invocation node, typed as the internal int-range type
 */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol rangeFnSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    List<BLangExpression> args = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation rangeInvocation =
            ASTBuilderUtil.createInvocationExprForMethod(location, rangeFnSymbol, args, symResolver);
    rangeInvocation.setBType(symTable.intRangeType);
    return rangeInvocation;
}
/**
 * For a binary expression whose expected result type is {@code int}, widens any
 * {@code byte}-typed operand to {@code int} by inserting a conversion expression.
 * Does nothing when no expected type is set or no operand is a byte.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    int lhsTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    int rhsTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    boolean hasByteOperand = lhsTag == TypeTags.BYTE || rhsTag == TypeTags.BYTE;
    if (!hasByteOperand || binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression uses a bitwise shift operator
 * ({@code <<}, {@code >>} or {@code >>>}).
 * <p>
 * When this holds and the expression's expected type is {@code int}, both operands
 * are converted to {@code int}, e.g.:
 * <p>
 * byte a = 12; byte b = 34; int i = 234;
 * int i1 = a &gt;&gt; b;
 * int i2 = a &lt;&lt; i;
 *
 * @param binaryExpr the binary expression to inspect
 * @return true if the operator is one of the three shift operators
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
// Desugars `lhs ?: rhs` into a statement expression:
//   T _$result$_;
//   var $lhs = lhs;
//   if ($lhs is ()) { _$result$_ = rhs; } else { _$result$_ = <T>$lhs; }
// yielding _$result$_; the LHS is captured in a temp so it is evaluated exactly once.
public void visit(BLangElvisExpr elvisExpr) {
    Location pos = elvisExpr.pos;
    String resultVarName = "_$result$_";
    BType resultType = elvisExpr.getBType();
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Temporary holding the evaluated LHS, checked for nil below.
    String lhsResultVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable lhsResultVar =
            ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,
                    new BVarSymbol(0, names.fromString(lhsResultVarName),
                            this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),
                            this.env.scope.owner, elvisExpr.pos, VIRTUAL));
    BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);
    BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);
    // if-branch (LHS is nil): result takes the RHS value.
    BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(nilAssignment);
    // else-branch: result takes the non-nil LHS, cast to the elvis expression's type.
    BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(lhsResultVarRef, resultType));
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
    elseBody.addStatement(notNilAssignment);
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, lhsResultVarRef, getNillTypeNode()), ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
        add(resultVarDef);
        add(lhsResultVarDef);
        add(ifStmt);
    }});
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // Nil-lifted unary expressions desugar to an if/else statement expression.
    if (isNullableUnaryExpr(unaryExpr)) {
        result = rewrite(createStmtExprForNilableUnaryExpr(unaryExpr), env);
        return;
    }
    // `~x` is rewritten to an XOR with an all-ones mask; that path sets its own result.
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    // Unary +/- may need the operand cast to the expression's own type.
    if (unaryExpr.operator == OperatorKind.ADD || unaryExpr.operator == OperatorKind.SUB) {
        createTypeCastExprForUnaryPlusAndMinus(unaryExpr);
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * For unary {@code +}/{@code -}, casts a non-integer operand to the unary expression's
 * own type; integer operands are left untouched.
 */
private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {
    if (TypeTags.isIntegerTypeTag(unaryExpr.expr.getBType().tag)) {
        return;
    }
    unaryExpr.expr = createTypeCastExpr(unaryExpr.expr, unaryExpr.getBType());
}
/**
 * Desugars a bitwise complement (~) unary expression into a bitwise XOR binary expression:
 * Example : ~a -&gt; a ^ -1;
 * ~ 11110011 -&gt; 00001100
 * 11110011 ^ 11111111 -&gt; 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    // Pick the all-ones mask matching the operand width: 0xFF for byte, -1 for int.
    boolean isByte = TypeTags.BYTE == Types.getReferredType(unaryExpr.getBType()).tag;
    if (isByte) {
        xorExpr.setBType(symTable.byteType);
        xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        xorExpr.setBType(symTable.intType);
        xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(xorExpr);
}
/**
 * Desugars a nil-lifted unary expression into a statement expression that checks the
 * operand for nil before applying the operator.
 */
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = +x;
     * }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    // First member of the union is the non-nil result type.
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Condition: operand is ()
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
            getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    // if-branch: $result$ = ();
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else-branch: $result$ = <nonNilType>operand with the original unary operator applied.
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
            nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}
/**
 * Checks whether the given unary expression needs nil-lifting, i.e. its type is nullable
 * and the operator is {@code +}, {@code -} or {@code ~}.
 *
 * @param unaryExpr the unary expression to inspect
 * @return true when the expression must be desugared with a nil check
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    switch (unaryExpr.operator) {
        case ADD:
        case SUB:
        case BITWISE_COMPLEMENT:
            return true;
        default:
            return false;
    }
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // An annotation-only conversion (no type node, only annotation attachments) carries
    // no actual cast; just rewrite the wrapped expression.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // Fix: removed the unused local `BType targetType = conversionExpr.targetType;`.
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda at package level exactly once; rewriting may visit it again.
    boolean alreadyRegistered = env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction);
    if (!alreadyRegistered) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Desugar the arrow function `(params) => expr` into a full lambda function node.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // Return type is the type of the arrow's body expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    // Body becomes `return <expr>;` wrapped in a block.
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);
    // Create and define the function symbol for the generated lambda.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                                                               new Name(funcNode.name.value),
                                                               new Name(funcNode.name.originalValue),
                                                               env.enclPkg.symbol.pkgID,
                                                               bLangArrowFunction.funcType,
                                                               env.enclEnv.enclVarSym, true,
                                                               bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Define each parameter symbol in the function's scope and collect them in order.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();
    // Construct the invokable type from the resolved parameter and return types.
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // Capture the enclosing environment so closures over outer variables keep working.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    result = rewriteExpr(lambdaFunction);
}
/**
 * Attaches the given function symbol to the invokable node and gives both the symbol
 * and the invokable environment a fresh scope owned by that symbol.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    Scope funcScope = new Scope(funcSymbol);
    funcSymbol.scope = funcScope;
    invokableEnv.scope = funcScope;
    invokableNode.symbol = funcSymbol;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // XML qualified names need no desugaring; pass the node through unchanged.
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Rewrite both the attribute name (a qualified name) and its value expression.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Turn every namespace-declaring attribute (xmlns:...) into an inline XMLNS node,
    // package-level or local depending on the owning scope.
    for (BLangXMLAttribute attribute : xmlElementLiteral.attributes) {
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        boolean isPackageLevel = (xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE;
        BLangXMLNS xmlns = isPackageLevel ? new BLangPackageXMLNS() : new BLangLocalXMLNS();
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    // Inside a query, inherit namespaces already in effect for the enclosing literal.
    List<BLangXMLNS> enclosingNamespaces = this.inlineXMLNamespaces;
    if (isVisitingQuery && enclosingNamespaces != null) {
        xmlElementLiteral.inlineNamespaces.addAll(enclosingNamespaces);
    }
    this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    // Restore the namespaces of the enclosing element once the children are rewritten.
    this.inlineXMLNamespaces = enclosingNamespaces;
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        // NOTE(review): the value returned by rewriteExpr is discarded here, so this
        // relies on rewriteExpr mutating the item in place rather than replacing the
        // node — confirm this is intended for all xml item kinds.
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Desugar the text fragments into a single string-concatenation expression.
    BLangExpression concatenated = constructStringTemplateConcatExpression(xmlTextLiteral.textFragments);
    xmlTextLiteral.concatExpr = rewriteExpr(concatenated);
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Desugar the comment's text fragments into one string-concatenation expression.
    BLangExpression concatenated = constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments);
    xmlCommentLiteral.concatExpr = rewriteExpr(concatenated);
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    // Desugar the processing-instruction data fragments into one concatenation expression.
    BLangExpression concatenated = constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments);
    xmlProcInsLiteral.dataConcatExpr = rewriteExpr(concatenated);
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Desugar the quoted string's fragments into one string-concatenation expression.
    BLangExpression concatenated = constructStringTemplateConcatExpression(xmlQuotedString.textFragments);
    xmlQuotedString.concatExpr = rewriteExpr(concatenated);
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is simply the concatenation of its literal and interpolated parts.
    BLangExpression concatExpr = constructStringTemplateConcatExpression(stringTemplateLiteral.exprs);
    result = rewriteExpr(concatExpr);
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 *      string name = "Pubudu";
 *      'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 *      type RawTemplate$Impl$0 object {
 *          public string[] strings = ["Hello ", "!"];
 *          public (any|error)[] insertions;
 *
 *          function init((any|error)[] insertions) {
 *              self.insertions = insertions;
 *          }
 *      };
 *
 *
 *      'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) Types.getReferredType(rawTemplateLiteral.getBType());
    // Generate a concrete object class for this literal (strings baked in as a default).
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();
    // Build the list constructor holding the interpolated insertion expressions.
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;
    // Replace the literal with `new GeneratedClass(insertions)`.
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 *      type $anonType$0 object {
 *          public string[] strings = ["Hello ", "!"];
 *          public (any|error)[] insertions;
 *
 *          function init((any|error)[] insertions) {
 *              self.insertions = insertions;
 *          }
 *      };
 * @param strings    The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos        The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                 Location pos) {
    // Create a class symbol and type mirroring the abstract raw-template object type.
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
                                                               env.enclPkg.packageID, null, env.enclPkg.symbol,
                                                               pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
    // Bake the literal's string fragments in as the default value of the `strings` field.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;
    // Add the user-defined init() that accepts and assigns the insertions.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
    // Generate and semantically analyze the compiler-generated initializer as well.
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                                                                      tempGeneratedInitFunction.symbol.scope, env);
    SemanticAnalyzer.AnalyzerData data = new SemanticAnalyzer.AnalyzerData(env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, data);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
    return rewrite(classDef, env);
}
/**
* Creates a user-defined init() method for the provided object type node. If there are fields without default
* values specified in the type node, this will add parameters for those fields in the init() method and assign the
* param values to the respective fields in the method body.
*
* @param classDefn The object type node for which the init() method is generated
* @param env The symbol env for the object type node
* @return The generated init() method
*/
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
// Build the skeleton user-defined init() function for the class.
BLangFunction initFunction =
TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
names, Names.USER_DEFINED_INIT_SUFFIX,
symTable, classDefn.getBType());
// Register the function as the type's initializer and make it return nil.
BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
(BInvokableType) initFunction.getBType(), classDefn.pos);
classDefn.initFunction = initFunction;
initFunction.returnTypeNode.setBType(symTable.nilType);
BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
BInvokableType initFnType = (BInvokableType) initFunction.getBType();
// For every field without a default value, add an init() parameter and an
// assignment of that parameter to the field inside the init() body.
for (BLangSimpleVariable field : classDefn.fields) {
if (field.expr != null) {
// Field already has a default value; no parameter needed.
continue;
}
BVarSymbol fieldSym = field.symbol;
BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
initFunction.symbol, classDefn.pos, VIRTUAL);
BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
fieldSym.type, null, paramSym);
param.flagSet.add(Flag.FINAL);
// Keep symbol scope, symbol params, the invokable type and the AST in sync.
initFunction.symbol.scope.define(paramSym.name, paramSym);
initFunction.symbol.params.add(paramSym);
initFnType.paramTypes.add(param.getBType());
initFunction.requiredParams.add(param);
// self.<field> = <param>;
BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
initFunction.receiver.symbol, field.name);
initFuncBody.addStatement(fieldInit);
}
return initFunction;
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Desugar the sent expression first, then wrap it in a clone invocation so the
    // receiving worker observes an independent copy of the value.
    BLangExpression desugaredExpr = rewriteExpr(workerSendNode.expr);
    workerSendNode.expr = visitCloneInvocation(desugaredExpr, workerSendNode.expr.getBType());
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Same treatment as an async send: desugar the expression, then clone it so the
    // peer worker receives its own copy.
    BLangExpression desugaredExpr = rewriteExpr(syncSendExpr.expr);
    syncSendExpr.expr = visitCloneInvocation(desugaredExpr, syncSendExpr.expr.getBType());
    result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
// Nothing to desugar for a worker receive; pass the node through unchanged.
result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
// Derive the distinct list of worker identifiers targeted by the cached send
// statements; this is what the flush expression operates on at runtime.
workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
.stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
result = workerFlushExpr;
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
// Replace the `transactional` expression with a call to the internal
// transaction module's isTransactional() function.
BInvokableSymbol isTransactionalSymbol =
(BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
result = ASTBuilderUtil
.createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
Collections.emptyList(), symResolver);
}
@Override
public void visit(BLangCommitExpr commitExpr) {
// Delegate commit desugaring to the transaction desugar, then rewrite the result.
BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangFail failNode) {
    // With no enclosing on-fail clause (or while desugaring a query), a fail
    // statement simply returns its error value from the enclosing invokable.
    if (this.onFailClause == null || this.isVisitingQuery) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        returnStmt.desugared = true;
        result = returnStmt;
        return;
    }
    // Otherwise route the error into the on-fail clause; nested fails inside the
    // on-fail body need the specialized rewrite.
    result = this.onFailClause.bodyContainsFail
            ? rewriteNestedOnFail(this.onFailClause, failNode)
            : createOnFailInvocation(this.onFailClause, failNode);
}
// The visit methods below handle nodes that are only produced by earlier desugaring
// passes and are already in their final lowered form, so they are passed through
// unchanged as the rewrite result.
@Override
public void visit(BLangLocalVarRef localVarRef) {
result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
result = tableKeyAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
// Lower `x.<elem>` element access into a call to the internal
// lang.xml getElements() function with the expanded name filters.
xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
xmlElementAccess.expr, new ArrayList<>(), filters);
result = rewriteExpr(invocationNode);
}
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    // Resolve the default (unprefixed) namespace in scope once, up front.
    Map<Name, BXMLNSSymbol> namespacesInScope = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNsSymbol = namespacesInScope.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNsUri = defaultNsSymbol != null ? defaultNsSymbol.namespaceURI : null;

    // Expand each element filter into a string literal holding its qualified name.
    ArrayList<BLangExpression> expandedNames = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol prefixSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        String literalValue;
        if (prefixSymbol != symTable.notFoundSymbol) {
            // Explicit prefix: qualify with that prefix's namespace URI.
            literalValue = createExpandedQName(((BXMLNSSymbol) prefixSymbol).namespaceURI, filter.name);
        } else if (defaultNsUri != null && !filter.name.equals("*")) {
            // No prefix: qualify with the default namespace, except for the wildcard.
            literalValue = createExpandedQName(defaultNsUri, filter.name);
        } else {
            literalValue = filter.name;
        }
        expandedNames.add(createStringLiteral(filter.elemNamePos, literalValue));
    }
    return expandedNames;
}
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);

    // Identifier node carrying the lang.xml function's name.
    BLangIdentifier funcName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcName.setLiteral(false);
    funcName.setValue(functionName);
    funcName.pos = pos;

    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = pos;
    invocation.name = funcName;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.expr = invokeOnExpr;
    // The callee is resolved as a lang-lib method on the xml type.
    invocation.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName), env);

    // The receiver becomes the first required argument, followed by the explicit ones.
    ArrayList<BLangExpression> allRequiredArgs = new ArrayList<>();
    allRequiredArgs.add(invokeOnExpr);
    allRequiredArgs.addAll(args);
    invocation.requiredArgs = allRequiredArgs;
    invocation.restArgs = rewriteExprs(restArgs);

    invocation.setBType(((BInvokableType) invocation.symbol.type).getReturnType());
    invocation.langLibInvocation = true;
    return invocation;
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    switch (xmlNavigation.navAccessType) {
        case DESCENDANTS:
            // x/**/<name> — select matching descendants.
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_SELECT_DESCENDANTS,
                    xmlNavigation.expr, new ArrayList<>(), filters));
            break;
        case CHILDREN:
            // x/* — all children, no filters apply.
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                    xmlNavigation.expr, new ArrayList<>(), new ArrayList<>()));
            break;
        default: {
            // Filtered child access; an absent index is encoded as -1.
            BLangExpression childIndexExpr = xmlNavigation.childIndex != null
                    ? xmlNavigation.childIndex
                    : new BLangLiteral(Long.valueOf(-1), symTable.intType);
            ArrayList<BLangExpression> args = new ArrayList<>();
            args.add(rewriteExpr(childIndexExpr));
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters));
        }
    }
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
// Only the LHS expression needs desugaring; the check itself is lowered later.
assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
// Already a lowered function-pointer call; nothing further to do.
result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
// Rewrite the referenced type node; the typedesc wrapper stays as-is.
typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
result = typedescExpr;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
// A rest-args (...x) wrapper reduces to its desugared inner expression.
result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
// A named-arg (name = x) wrapper reduces to its desugared inner expression.
bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
// `check` — errors are returned to the caller.
visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
// `checkpanic` — errors cause a panic instead of a return.
visitCheckAndCheckPanicExpr(checkedExpr, true);
}
// Lowers `check`/`checkpanic <expr>` into a statement expression of the form:
//   { T _$result$_; var $tmp = <expr>; if ($tmp is T) { _$result$_ = <T>$tmp; }
//     else { <return or panic the error>; } } -> _$result$_
// where T is the non-error result type of the checked expression.
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
if (checkedExpr.isRedundantChecking) {
// The expression can never produce an error; drop the check entirely.
result = rewriteExpr(checkedExpr.expr);
return;
}
Location pos = checkedExpr.pos;
// Variable that will hold the successful (non-error) result.
String resultVarName = "_$result$_";
BType resultType = checkedExpr.getBType();
BLangSimpleVariable resultVar =
ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
new BVarSymbol(0, names.fromString(resultVarName),
this.env.scope.owner.pkgID, resultType,
this.env.scope.owner, pos, VIRTUAL));
BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
resultVarDef.desugared = true;
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
// Temporary that captures the checked expression's value exactly once.
String checkedExprVarName = GEN_VAR_PREFIX.value;
BType checkedExprType = checkedExpr.expr.getBType();
BLangSimpleVariable checkedExprVar =
ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,
checkedExpr.expr, new BVarSymbol(0, names.fromString(checkedExprVarName),
this.env.scope.owner.pkgID, checkedExprType,
this.env.scope.owner, pos, VIRTUAL));
BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(pos, checkedExprVar);
BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);
// Success branch: narrow-cast the temporary into the result variable.
BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
createTypeCastExpr(checkedExprVarRef, resultType));
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
ifBody.addStatement(successAssignment);
// Failure branch: return the error to the caller, or panic for `checkpanic`.
BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,
checkedExpr.equivalentErrorTypeList, isCheckPanic);
BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
checkedExprTypeNode.setBType(resultType);
checkedExprTypeNode.typeKind = resultType.getKind();
BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
add(resultVarDef);
add(checkedExprVarDef);
add(ifStmt);
}});
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(resultType);
result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
// A service constructor desugars to an object instantiation of the service class.
final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {
// Desugar the anonymous class definition first, then the instantiation itself.
visit(bLangObjectConstructorExpression.classNode);
bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = rewriteExpr(bLangObjectConstructorExpression.typeInit);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
// Lower annotation access (`expr.@annot`) into a synthetic binary expression with
// the ANNOT_ACCESS operator; the RHS is the annotation's runtime alias string.
BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
binaryExpr.pos = annotAccessExpr.pos;
binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
binaryExpr.lhsExpr = annotAccessExpr.expr;
binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
annotAccessExpr.annotationSymbol.bvmAlias());
binaryExpr.setBType(annotAccessExpr.getBType());
// Synthesize an operator symbol typed (lhsType, string) -> annotAccessType.
binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
binaryExpr.rhsExpr.getBType()),
annotAccessExpr.getBType(), null), null,
symTable.builtinPos, VIRTUAL);
result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
BLangExpression expr = typeTestExpr.expr;
if (types.isValueType(expr.getBType())) {
// Box value types so the runtime type check operates on a reference value.
expr = addConversionExprIfRequired(expr, symTable.anyType);
}
if (typeTestExpr.isNegation) {
// `x !is T` desugars to `!(x is T)`.
BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
typeTestExpr.expr, typeTestExpr.typeNode);
BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
bLangGroupExpr.expression = bLangTypeTestExpr;
bLangGroupExpr.setBType(typeTestExpr.getBType());
BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
typeTestExpr.getBType(),
OperatorKind.NOT, null);
result = rewriteExpr(unaryExpr);
return;
}
typeTestExpr.expr = rewriteExpr(expr);
typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
result = typeTestExpr;
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
// Only the tested expression needs desugaring here.
isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
// Rewrite both halves of the statement-expression in place.
bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
// Track query nesting so fail-statement desugaring knows it is inside a query.
boolean prevIsVisitingQuery = this.isVisitingQuery;
this.isVisitingQuery = true;
BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));
result = rewrite(stmtExpr, env);
this.isVisitingQuery = prevIsVisitingQuery;
}
List<BLangStatement> getVisibleXMLNSStmts(SymbolEnv env) {
    // Collect the propagated xmlns declaration statements for every namespace
    // prefix visible in the given scope; prefixes without a recorded statement
    // are skipped.
    List<BLangStatement> visibleStmts = new ArrayList<>();
    for (Name nsName : symResolver.resolveAllNamespaces(env).keySet()) {
        BLangStatement nsStmt = this.stmtsToBePropagatedToQuery.get(nsName);
        if (nsStmt != null) {
            visibleStmts.add(nsStmt);
        }
    }
    return visibleStmts;
}
@Override
public void visit(BLangQueryAction queryAction) {
// Same handling as a query expression; track nesting for fail desugaring.
boolean prevIsVisitingQuery = this.isVisitingQuery;
this.isVisitingQuery = true;
BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));
result = rewrite(stmtExpr, env);
this.isVisitingQuery = prevIsVisitingQuery;
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
// Desugar each member expression in place.
jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
BConstantSymbol constSymbol = constant.symbol;
BType refType = Types.getReferredType(constSymbol.literalType);
// Simple literal constants (int/byte/float/decimal/string/boolean/nil, i.e.
// tags up to BOOLEAN, plus NIL) are replaced by a literal built from the
// resolved constant value; everything else is desugared as an expression.
if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {
if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||
constSymbol.value.value == null)) {
// A non-nil simple constant must have a resolved value by this phase.
throw new IllegalStateException();
}
BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
constSymbol.value.value);
constant.expr = rewriteExpr(literal);
} else {
constant.expr = rewriteExpr(constant.expr);
}
constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
// Nothing to desugar.
result = ignoreExpr;
}
@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
// Desugar both the conditionally-passed argument and its guard condition.
dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
result = dynamicParamExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
// A constant reference collapses to a literal holding its value.
result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    // Build the call `<collection>.iterator()`.
    BLangSimpleVarRef collectionRef = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorCall = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorCall.pos = pos;
    iteratorCall.expr = collectionRef;
    iteratorCall.symbol = iteratorInvokableSymbol;
    iteratorCall.setBType(iteratorInvokableSymbol.retType);
    iteratorCall.argExprs = Lists.of(collectionRef);
    iteratorCall.requiredArgs = iteratorCall.argExprs;
    iteratorCall.langLibInvocation = isIteratorFuncFromLangLib;

    // Bind the call result to a synthetic `$iterator$` variable definition.
    BVarSymbol iteratorVarSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVar = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorCall, iteratorVarSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVar);
}
// Builds `T? $result$ = <iterator>.next();` for iterator-driven desugaring.
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
BVarSymbol iteratorSymbol,
BVarSymbol resultSymbol) {
BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
nillableResultType, nextInvocation, resultSymbol);
return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    // Resolve the `next` method attached to the iterator object type.
    BInvokableSymbol nextFuncSymbol =
            getNextFunc((BObjectType) Types.getReferredType(iteratorSymbol.type)).symbol;

    // Build the call `<iterator>.next()`; the receiver also goes in as the
    // first (and only) required argument.
    BLangInvocation nextCall = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextCall.pos = pos;
    nextCall.name = ASTBuilderUtil.createIdentifier(pos, "next");
    nextCall.expr = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    nextCall.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextCall.argExprs = nextCall.requiredArgs;
    nextCall.symbol = nextFuncSymbol;
    nextCall.setBType(nextFuncSymbol.retType);
    return nextCall;
}
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    // Scan the iterator object's attached functions for the one named "next";
    // returns null when no such function exists.
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if ("next".equals(attachedFunc.funcName.value)) {
            return attachedFunc;
        }
    }
    return null;
}
// Convenience wrapper: builds `<result>.value` typed as varType.
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
BVarSymbol resultSymbol) {
return getFieldAccessExpression(location, "value", varType, resultSymbol);
}
// Builds a field access `<resultSymbol>.<fieldName>` with the given static type.
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
BVarSymbol resultSymbol) {
BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
BLangFieldBasedAccess fieldBasedAccessExpression =
ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
fieldBasedAccessExpression.pos = pos;
fieldBasedAccessExpression.setBType(varType);
fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
return fieldBasedAccessExpression;
}
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    // Wrap the arrow function's expression body into a block body: { return <expr>; }
    BLangExpression bodyExpr = bLangArrowFunction.body.expr;
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bodyExpr.pos;
    returnStmt.setExpression(bodyExpr);
    BlockFunctionBodyNode blockBody = TreeBuilder.createBlockFunctionBodyNode();
    blockBody.addStatement(returnStmt);
    return blockBody;
}
protected BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    // Identifier node carrying the callee's name.
    BLangIdentifier funcName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcName.setLiteral(false);
    funcName.setValue(functionName);

    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.name = funcName;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Resolve the callee by name from the root scope.
    invocation.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocation.setBType(retType);
    invocation.requiredArgs = args;
    return invocation;
}
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    // Identifier node for the lang-lib function being invoked.
    BLangIdentifier funcName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcName.setLiteral(false);
    funcName.setValue(functionName);
    funcName.pos = pos;

    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = pos;
    invocation.name = funcName;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.expr = onExpr;
    // Resolve the lang-lib method against the static type of the receiver.
    invocation.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName), env);

    // The receiver is the first required argument, followed by the explicit args.
    ArrayList<BLangExpression> allArgs = new ArrayList<>();
    allArgs.add(onExpr);
    allArgs.addAll(args);
    invocation.requiredArgs = allArgs;

    // Fall back to the resolved symbol's return type when none was supplied.
    invocation.setBType(retType != null ? retType : ((BInvokableSymbol) invocation.symbol).retType);
    invocation.langLibInvocation = true;
    return invocation;
}
/**
 * Creates an invocation node for a function in the lang.internal module,
 * resolved by name (no receiver expression).
 *
 * @param functionName name of the lang.internal function to invoke
 * @param args         required arguments to pass
 * @param retType      static type of the invocation; when null, the resolved
 *                     symbol's return type is used
 * @param pos          source position to attach to the generated nodes
 * @return the constructed invocation node
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
            names.fromString(functionName), env);
    // Copy-construct rather than create-then-addAll: the invocation must own its
    // own argument list, detached from the caller's list.
    invocationNode.requiredArgs = new ArrayList<>(args);
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
// Creates an empty array literal node typed as any[], ready for members to be added.
private BLangArrayLiteral createArrayLiteralExprNode() {
BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
expr.exprs = new ArrayList<>();
expr.setBType(new BArrayType(symTable.anyType));
return expr;
}
// Lowers a function-pointer call: the callee is re-expressed as a value reference
// (a simple var ref, or a field access for `obj.fp(...)`), then wrapped in a
// BFunctionPointerInvocation.
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
BLangValueExpression expr;
if (iExpr.expr == null) {
// Plain `fp(...)` — the pointer is a simple variable reference.
expr = new BLangSimpleVarRef();
} else {
// `obj.fp(...)` — the pointer is read off the receiver via field access.
BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
fieldBasedAccess.expr = iExpr.expr;
fieldBasedAccess.field = iExpr.name;
expr = fieldBasedAccess;
}
expr.symbol = iExpr.symbol;
expr.setBType(iExpr.symbol.type);
BLangExpression rewritten = rewriteExpr(expr);
result = new BFunctionPointerInvocation(iExpr, rewritten);
}
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    // Value types are copied by the runtime already, and errors are never cloned,
    // so both pass through untouched.
    BType exprType = expr.getBType();
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    // Otherwise wrap the expression in a lang-lib clone() call and convert the
    // result to the expected LHS type if needed.
    BLangInvocation cloneCall = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    // Value types and errors need no defensive immutable copy; return them as-is.
    BType exprType = expr.getBType();
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    // Otherwise wrap in a lang-lib cloneReadOnly() call, keeping the expression's
    // own static type, and convert to the LHS type if needed.
    BLangInvocation cloneCall = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            expr.getBType(), expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
// Core rewrite driver: dispatches the node to its visit method under the given
// env and returns the desugared replacement. Nodes already desugared are
// returned unchanged, making the rewrite idempotent.
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
if (node == null) {
return null;
}
if (node.desugared) {
// Already processed in a previous pass; avoid double-desugaring.
return node;
}
// Swap in the node's env for the duration of the visit, then restore.
SymbolEnv previousEnv = this.env;
this.env = env;
node.accept(this);
BLangNode resultNode = this.result;
this.result = null;
resultNode.desugared = true;
this.env = previousEnv;
return (E) resultNode;
}
// Expression-specific rewrite driver. Unlike rewrite(node, env), this honors a
// pending implicit conversion: if the expression carries an impConversionExpr,
// that wrapper is desugared in its place (and detached to prevent re-application).
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
if (node == null) {
return null;
}
if (node.desugared) {
// Already processed; keep the rewrite idempotent.
return node;
}
BLangExpression expr = node;
if (node.impConversionExpr != null) {
expr = node.impConversionExpr;
node.impConversionExpr = null;
}
expr.accept(this);
BLangNode resultNode = this.result;
this.result = null;
resultNode.desugared = true;
return (E) resultNode;
}
// Statement rewrite driver; delegates to the generic node rewrite and casts back.
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
if (statement == null) {
return null;
}
BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
return (E) stmt;
}
// Rewrites each statement of the list in place and returns the same list.
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewrite(nodeList.get(i), env));
}
return nodeList;
}
// Rewrites each node of the list in place and returns the same list.
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewrite(nodeList.get(i), env));
}
return nodeList;
}
// Rewrites each expression of the list in place and returns the same list.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewriteExpr(nodeList.get(i)));
}
return nodeList;
}
// Builds a string literal node at the given position.
private BLangLiteral createStringLiteral(Location pos, String value) {
BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
stringLit.pos = pos;
return stringLit;
}
// Builds an int literal node (no position attached).
private BLangLiteral createIntLiteral(long value) {
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
literal.value = value;
literal.setBType(symTable.intType);
return literal;
}
// Builds a byte literal node; the signed Byte is widened to its unsigned int value.
private BLangLiteral createByteLiteral(Location pos, Byte value) {
BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
byteLiteral.pos = pos;
return byteLiteral;
}
// Wraps an expression in a type-conversion node targeting targetType; returns the
// expression unchanged when its type already equals the target.
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
if (types.isSameType(expr.getBType(), targetType)) {
// No-op cast; avoid cluttering the tree.
return expr;
}
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
conversionExpr.pos = expr.pos;
conversionExpr.expr = expr;
conversionExpr.setBType(targetType);
conversionExpr.targetType = targetType;
conversionExpr.internal = true;
return conversionExpr;
}
private BType getElementType(BType bType) {
    // Iteratively unwrap nested array types; returns the first type whose
    // referred form is not an array (the original recursion, flattened).
    BType current = bType;
    BType referred = Types.getReferredType(current);
    while (referred.tag == TypeTags.ARRAY) {
        current = ((BArrayType) referred).getElementType();
        referred = Types.getReferredType(current);
    }
    return current;
}
// Appends an implicit `return ();` to a block-bodied invokable whose return type
// allows nil and whose body does not already end with a return statement.
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
if (Symbols.isNative(invokableNode.symbol) ||
(invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
// Native functions and non-block bodies (e.g. expression bodies) are skipped.
return;
}
BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.size() < 1
|| funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
Location invPos = invokableNode.pos;
Location returnStmtPos;
if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
// Generated initializers get no position so tooling ignores the synthetic return.
returnStmtPos = null;
} else {
// Anchor the synthetic return at the invokable's closing position.
returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
invPos.lineRange().endLine().line(),
invPos.lineRange().endLine().line(),
invPos.lineRange().startLine().offset(),
invPos.lineRange().startLine().offset(), 0, 0);
}
BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
funcBody.addStatement(returnStmt);
}
}
/**
* Reorder the invocation arguments to match the original function signature.
*
* @param iExpr Function invocation expression whose arguments are to be reordered
*/
    private void reorderArguments(BLangInvocation iExpr) {
        // Nothing to do for symbols that are not invokable (e.g. already
        // erroneous invocations).
        BSymbol symbol = iExpr.symbol;
        if (symbol == null || Types.getReferredType(symbol.type).tag != TypeTags.INVOKABLE) {
            return;
        }
        BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
        List<BLangExpression> restArgs = iExpr.restArgs;
        int originalRequiredArgCount = iExpr.requiredArgs.size();
        // When the call ends with a spread vararg (`...x`) that also has to
        // fill required params, evaluate the vararg exactly once into a
        // temporary variable; varargRef refers to that temporary.
        BLangSimpleVarRef varargRef = null;
        BLangBlockStmt blockStmt = null;
        BType varargVarType = null;
        int restArgCount = restArgs.size();
        if (restArgCount > 0 &&
                restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
                originalRequiredArgCount < invokableSymbol.params.size()) {
            // The last argument is a spread operator expression and the requiredArgs
            // hasn't covered all the required args, so the remaining required args
            // have to be filled from the vararg.
            BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
            Location varargExpPos = expr.pos;
            varargVarType = expr.getBType();
            String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
            BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                    varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
            varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
            BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
            BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
            varDef.var = var;
            varDef.setBType(varargVarType);
            // Block statement holding the vararg temp definition; later wrapped
            // into a statement expression chained onto the first non-rest arg.
            blockStmt = createBlockStmt(varargExpPos);
            blockStmt.stmts.add(varDef);
        }
        // Put positional/named/included-record args into declaration order.
        if (!invokableSymbol.params.isEmpty()) {
            // Re-order the arguments
            reorderNamedArgs(iExpr, invokableSymbol, varargRef);
        }
        // Case 1: no spread vararg at the end. Pack any individual rest args
        // into a single array literal for the rest parameter.
        if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
            if (invokableSymbol.restParam == null) {
                return;
            }
            BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
            List<BLangExpression> exprs = new ArrayList<>();
            BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
            BType elemType = arrayType.eType;
            for (BLangExpression restArg : restArgs) {
                exprs.add(addConversionExprIfRequired(restArg, elemType));
            }
            arrayLiteral.exprs = exprs;
            arrayLiteral.setBType(arrayType);
            if (restArgCount != 0) {
                iExpr.restArgs = new ArrayList<>();
            }
            iExpr.restArgs.add(arrayLiteral);
            return;
        }
        // Case 2: the only rest arg is a spread vararg (`f(...x)` or
        // `f(a, ...x)` where `a` covers required params).
        if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
            if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
                // All the required args are specified so vararg will be passed as the rest param
                return;
            }
            // The vararg temp definition must be evaluated before any member
            // access on it, so chain it onto the first required argument as a
            // statement expression.
            BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
            BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
            stmtExpression.setBType(firstNonRestArg.getBType());
            iExpr.requiredArgs.add(0, stmtExpression);
            // If there's no rest param, the vararg only provided for required/defaultable params.
            if (invokableSymbol.restParam == null) {
                restArgs.remove(0);
                return;
            }
            BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
            BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
            // A record-typed vararg can only feed named params; the rest param
            // receives an empty array.
            if (Types.getReferredType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {
                BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
                restArgs.add(expr);
                return;
            }
            Location pos = restArgsExpression.pos;
            // Build: { T[] $tmp = []; foreach i in start..<x.length() { $tmp.push(x[i]); } } $tmp
            // i.e. copy the tail of the vararg (members past those consumed by
            // required/defaultable params) into a fresh array for the rest param.
            BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
            newArrayLiteral.setBType(restParamType);
            String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
            BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                    restParamType, this.env.scope.owner, pos, VIRTUAL);
            BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
            BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
            BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
            varDef.var = var;
            varDef.setBType(restParamType);
            // Copying starts after the vararg members already used to fill
            // required/defaultable parameters.
            BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
            BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
            BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                    getModifiedIntRangeEndExpr(lengthInvocation));
            BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
            foreach.pos = pos;
            foreach.collection = intRangeInvocation;
            types.setForeachTypedBindingPatternType(foreach);
            final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                    foreach.varType);
            foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                    this.env.scope.owner.pkgID, foreachVariable.getBType(),
                    this.env.scope.owner, pos, VIRTUAL);
            BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
            foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
            foreach.isDeclaredWithVar = true;
            BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
            BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
            BType refType = Types.getReferredType(varargVarType);
            if (refType.tag == TypeTags.ARRAY) {
                BArrayType arrayType = (BArrayType) refType;
                // For a closed array exactly consumed by the required args,
                // the member access is typed by the rest param's element type.
                if (arrayType.state == BArrayState.CLOSED &&
                        arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                    // If the array was a closed array that provided only for the required args, then
                    // the foreach is not needed, but was added to prevent dead code elimination issues.
                    valueExpr.setBType(restParamType.eType);
                } else {
                    valueExpr.setBType(arrayType.eType);
                }
            } else {
                // Non-array (e.g. tuple) vararg: member access type is unknown
                // statically, so widen and convert per element.
                valueExpr.setBType(symTable.anyOrErrorType);
            }
            BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
            BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
            BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                    List.of(pushExpr),
                    restParamType, pos);
            // lang.array:push takes the pushed values as rest args.
            pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
            expressionStmt.expr = pushInvocation;
            foreach.body = foreachBody;
            BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
            newArrayBlockStmt.addStatement(varDef);
            newArrayBlockStmt.addStatement(foreach);
            BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
            newArrayStmtExpression.setBType(restParamType);
            restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
            return;
        }
        // Case 3: individual rest args followed by a spread vararg, e.g.
        // `f(a, b, ...c)` with a rest param. Build an array from the
        // individual args, then push the spread members onto it.
        // Tuple in `restArgs`. Will be used if there is a `restParam`.
        // E.g. `foo(1, 2, 3)` -> `foo(restArgs)`.
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        arrayLiteral.setBType(restParamType);
        BType elemType = restParamType.eType;
        Location pos = restArgs.get(0).pos;
        List<BLangExpression> exprs = new ArrayList<>();
        for (int i = 0; i < restArgCount - 1; i++) {
            exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
        }
        arrayLiteral.exprs = exprs;
        BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
        pushRestArgsExpr.pos = pos;
        pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
        pushBlockStmt.stmts.add(varDef);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                new ArrayList<BLangExpression>() {{
                    add(pushRestArgsExpr);
                }}, restParamType, pos);
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
        stmtExpression.setBType(restParamType);
        iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
    }
    /**
     * Rewrites {@code iExpr.requiredArgs} so that arguments appear in the
     * declared parameter order. Each parameter slot is filled, in priority
     * order, from: the positional arg at that index, a matching named arg, a
     * fresh literal for an included-record parameter, an ignore-expression
     * placeholder (defaultable param with no vararg), or a member/field access
     * on the vararg temp. Leftover named args are pushed into
     * included-record literals at the end.
     *
     * @param iExpr           invocation being desugared (requiredArgs is replaced)
     * @param invokableSymbol resolved callee symbol supplying parameter order
     * @param varargRef       ref to the vararg temp variable, or null when the
     *                        call has no spread vararg filling required params
     */
    private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
        List<BLangExpression> args = new ArrayList<>();
        Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
        iExpr.requiredArgs.stream()
                .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
                .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
        List<BVarSymbol> params = invokableSymbol.params;
        List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
        BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
        int varargIndex = 0;
        BType varargType = null;
        boolean tupleTypedVararg = false;
        if (varargRef != null) {
            varargType = Types.getReferredType(varargRef.getBType());
            tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
        }
        // Iterate over the required args.
        for (int i = 0; i < params.size(); i++) {
            BVarSymbol param = params.get(i);
            if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
                // If a positional arg is given in the same position, it will be used.
                args.add(iExpr.requiredArgs.get(i));
            } else if (namedArgs.containsKey(param.name.value)) {
                // Else check if named arg is given. Remove it so leftovers can
                // later be routed into included-record literals.
                args.add(namedArgs.remove(param.name.value));
            } else if (param.getFlags().contains(Flag.INCLUDED)) {
                // Included-record parameter with no explicit arg: materialize
                // an (initially empty) record literal to collect stray named args.
                BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
                BType paramType = param.type;
                recordLiteral.setBType(paramType);
                args.add(recordLiteral);
                incRecordLiterals.add(recordLiteral);
                if (((BRecordType) Types.getReferredType(paramType)).restFieldType != symTable.noType) {
                    // Open record: remember it as the sink for named args that
                    // match no declared field anywhere.
                    incRecordParamAllowAdditionalFields = recordLiteral;
                }
            } else if (varargRef == null) {
                // Else create a dummy expression (value will be defaulted at runtime).
                BLangExpression expr = new BLangIgnoreExpr();
                expr.setBType(param.type);
                args.add(expr);
            } else {
                // If a vararg is provided, no parameter defaults are added and
                // named args are not allowed anyway. Thus, any missing arg can
                // be taken from the vararg.
                if (Types.getReferredType(varargRef.getBType()).tag == TypeTags.RECORD) {
                    if (param.isDefaultable) {
                        // Defaultable param fed from a record vararg: use the
                        // field only if present — `hasKey(name) ? vararg[name] : <ignore>`.
                        BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
                                List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
                        BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
                        BLangIndexBasedAccess memberAccessExpr =
                                ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
                        BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
                        BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
                                hasKeyInvocation, memberAccessExpr, ignoreExpr);
                        args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
                    } else {
                        // Required param: read the field directly off the record vararg.
                        BLangFieldBasedAccess fieldBasedAccessExpression =
                                ASTBuilderUtil.createFieldAccessExpr(varargRef,
                                        ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
                        fieldBasedAccessExpression.setBType(param.type);
                        args.add(fieldBasedAccessExpression);
                    }
                } else {
                    // List (tuple/array) vararg: consume the next member by index.
                    BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
                    BType memberAccessExprType = tupleTypedVararg ?
                            ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
                    args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
                            varargRef, indexExpr), param.type));
                    varargIndex++;
                }
            }
        }
        // Named args that matched no declared parameter belong to some
        // included-record parameter.
        if (namedArgs.size() > 0) {
            setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
        }
        iExpr.requiredArgs = args;
    }
    /**
     * Distributes named args that matched no declared parameter into the
     * included-record-parameter literals. Each arg goes into the first
     * included record declaring a non-{@code never} field with that name;
     * otherwise it becomes an additional field of the open included-record
     * parameter.
     *
     * NOTE(review): if no included record is open, {@code
     * incRecordParamAllowAdditionalFields} is null and the fallback would
     * NPE — presumably the type checker rules that case out earlier; confirm.
     *
     * @param namedArgs                          leftover named args keyed by name
     * @param incRecordLiterals                  literals created for included-record params
     * @param incRecordParamAllowAdditionalFields literal for the open included-record param, or null
     */
    private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                               List<BLangRecordLiteral> incRecordLiterals,
                                               BLangRecordLiteral incRecordParamAllowAdditionalFields) {
        for (String name : namedArgs.keySet()) {
            boolean isAdditionalField = true;
            BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name);
            for (BLangRecordLiteral recordLiteral : incRecordLiterals) {
                LinkedHashMap<String, BField> fields =
                        ((BRecordType) Types.getReferredType(recordLiteral.getBType())).fields;
                // A `never`-typed field cannot hold a value, so it does not
                // capture the named arg.
                if (fields.containsKey(name) &&
                        Types.getReferredType(fields.get(name).type).tag != TypeTags.NEVER) {
                    isAdditionalField = false;
                    createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);
                    break;
                }
            }
            if (isAdditionalField) {
                createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);
            }
        }
    }
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
BLangNamedArgsExpression expr) {
BLangSimpleVarRef varRef = new BLangSimpleVarRef();
varRef.variableName = expr.name;
BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil.
createBLangRecordKeyValue(varRef, expr.expr);
recordLiteral.fields.add(recordKeyValueField);
}
    /**
     * Builds the failure branch for a `check`/`checkpanic` style expression:
     * binds the value in {@code ref} (cast to error) to a temp variable, then
     * either fails (possibly with an attached return of the error) or panics.
     *
     * @param location             position for the generated statements
     * @param ref                  var-ref holding the error value
     * @param invokableSymbol      enclosing function's symbol (for its return type)
     * @param equivalentErrorTypes error types the checked expression can produce
     * @param isCheckPanicExpr     true for `checkpanic` (always panic on error)
     * @return block statement containing the error var def plus fail/panic
     */
    private BLangBlockStmt getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,
                                                  BSymbol invokableSymbol,
                                                  List<BType> equivalentErrorTypes,
                                                  boolean isCheckPanicExpr) {
        // From here onwards we assume that this function has only one return type
        // Owner of the variable symbol must be an invokable symbol
        BType enclosingFuncReturnType = Types.getReferredType(((BInvokableType) invokableSymbol.type).retType);
        Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
                ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
                new LinkedHashSet<>() {{
                    add(enclosingFuncReturnType);
                }};
        // For each error type, there has to be at least one equivalent return type in the enclosing
        // function for the error to be returned rather than panicked.
        boolean returnOnError = equivalentErrorTypes.stream()
                .allMatch(errorType -> returnTypeSet.stream()
                        .anyMatch(retType -> types.isAssignable(errorType, retType)));
        // Create the pattern to match the success case
        String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
        BLangSimpleVariable errorVar =
                ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,
                        createTypeCastExpr(ref, symTable.errorType),
                        new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
                                this.env.scope.owner.pkgID, symTable.errorType,
                                this.env.scope.owner, location, VIRTUAL));
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);
        BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVarDef(errorVar));
        blockStmt.addStatement(errorVarDef);
        BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);
        if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
            // Error can be either returned or handled by an enclosing on-fail
            // clause: emit a fail statement.
            BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
            failStmt.pos = location;
            failStmt.expr = errorVarRef;
            blockStmt.addStatement(failStmt);
            if (returnOnError && this.shouldReturnErrors) {
                // Attach an explicit (pre-rewritten) return of the error value.
                BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));
                errorReturn.desugared = true;
                failStmt.exprStmt = errorReturn;
            }
        } else {
            // `checkpanic`, or the error cannot be returned/handled: panic.
            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.pos = location;
            panicNode.expr = errorVarRef;
            blockStmt.addStatement(panicNode);
        }
        return blockStmt;
    }
    /**
     * Wraps {@code expr} in a type-conversion expression targeting
     * {@code lhsType} unless no conversion is needed. Check order matters:
     * same-type, implicit-cast attachment, and a few tag-pair exemptions are
     * tried before creating an explicit conversion node.
     *
     * @param expr    expression to convert
     * @param lhsType expected (assignment-target) type
     * @return the original expression, its implicit conversion, or a new
     *         unchecked conversion node typed as {@code lhsType}
     */
    BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
        if (lhsType.tag == TypeTags.NONE) {
            return expr;
        }
        BType rhsType = expr.getBType();
        if (types.isSameType(rhsType, lhsType)) {
            return expr;
        }
        // Let the type system attach an implicit cast if one applies; if it
        // did, detach and return it instead of building our own node.
        types.setImplicitCastExpr(expr, rhsType, lhsType);
        if (expr.impConversionExpr != null) {
            BLangExpression impConversionExpr = expr.impConversionExpr;
            expr.impConversionExpr = null;
            return impConversionExpr;
        }
        // Tag pairs that need no conversion node at all.
        if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
            return expr;
        }
        if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
            return expr;
        }
        if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
            return expr;
        }
        // Create a type cast expression. checkTypes=false marks it as an
        // internal, unchecked conversion.
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
                TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = expr;
        conversionExpr.targetType = lhsType;
        conversionExpr.setBType(lhsType);
        conversionExpr.pos = expr.pos;
        conversionExpr.checkTypes = false;
        conversionExpr.internal = true;
        return conversionExpr;
    }
    /**
     * Derives the semantic type implied by a structured binding pattern:
     * a tuple type for tuple patterns, a synthesized anonymous record type
     * for record patterns, a synthesized error type for error patterns, and
     * the variable's own type for simple patterns. Record/error cases also
     * register a type definition for the synthesized type symbol.
     *
     * @param bindingPatternVariable binding-pattern variable to type
     * @return the structured type the pattern describes
     */
    private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
        if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
            List<BType> memberTypes = new ArrayList<>();
            // Member types are derived recursively from each member pattern.
            for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
                memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
            }
            BTupleType tupleType = new BTupleType(memberTypes);
            if (tupleVariable.restVariable != null) {
                // The rest pattern's type is an array; the tuple's rest type is
                // that array's element type.
                BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
                tupleType.restType = restArrayType.eType;
            }
            return tupleType;
        }
        if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
            BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
            // Synthesize a fresh anonymous record type symbol with a generated
            // name and an init function registered in its scope.
            BRecordTypeSymbol recordSymbol =
                    Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
                                               env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
                                               VIRTUAL);
            recordSymbol.initializerFunc = createRecordInitFunc();
            recordSymbol.scope = new Scope(recordSymbol);
            recordSymbol.scope.define(
                    names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                    recordSymbol.initializerFunc.symbol);
            LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
            List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
            // One required field per entry in the record pattern, typed by its
            // value binding pattern.
            for (int i = 0; i < recordVariable.variableList.size(); i++) {
                String fieldNameStr = recordVariable.variableList.get(i).key.value;
                Name fieldName = names.fromString(fieldNameStr);
                BType fieldType = getStructuredBindingPatternType(
                        recordVariable.variableList.get(i).valueBindingPattern);
                BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
                                                        recordSymbol, bindingPatternVariable.pos, VIRTUAL);
                fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
                typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
                recordSymbol.scope.define(fieldName, fieldSymbol);
            }
            BRecordType recordVarType = new BRecordType(recordSymbol);
            recordVarType.fields = fields;
            // If rest param is null we treat it as an open record with anydata rest param
            recordVarType.restFieldType = recordVariable.restParam != null ?
                    ((BRecordType) recordVariable.restParam.getBType()).restFieldType :
                    symTable.anydataType;
            recordSymbol.type = recordVarType;
            recordVarType.tsymbol = recordSymbol;
            // Register a type definition so the synthesized record type is
            // emitted like a user-declared one.
            BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                                                                                          recordVarType,
                                                                                          bindingPatternVariable.pos);
            recordTypeNode.initFunction =
                    rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                            env);
            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);
            return recordVarType;
        }
        if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
            BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
            BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                    SymTag.ERROR,
                    Flags.PUBLIC,
                    names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
                    env.enclPkg.symbol.pkgID,
                    null, null, errorVariable.pos, VIRTUAL);
            BType detailType;
            if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
                // Only a rest-detail binding: fall back to the generic detail type.
                detailType = symTable.detailType;
            } else {
                // Synthesize a record type describing the bound detail entries
                // and register its type definition.
                detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
                                              errorVariable.pos);
                BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
                recordTypeNode.initFunction = TypeDefBuilderHelper
                        .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
                TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,
                                                                    recordTypeNode, env);
            }
            BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
            errorTypeSymbol.type = errorType;
            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,
                                                                createErrorTypeNode(errorType), env);
            return errorType;
        }
        // Simple binding pattern: the variable's own type.
        return bindingPatternVariable.getBType();
    }
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
List<BLangSimpleVariable> fieldList = new ArrayList<>();
for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
BVarSymbol symbol = field.valueBindingPattern.symbol;
if (symbol == null) {
symbol = new BVarSymbol(Flags.PUBLIC, names.fromString(field.key.value + "$"),
this.env.enclPkg.packageID, symTable.pureType, null,
field.valueBindingPattern.pos, VIRTUAL);
}
BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
field.valueBindingPattern.pos,
symbol.name.value,
field.valueBindingPattern.getBType(),
field.valueBindingPattern.expr,
symbol);
fieldList.add(fieldVar);
}
return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
BLangSimpleVariable restDetail, int errorNo, Location pos) {
BRecordType detailRecordType = createAnonRecordType(pos);
if (restDetail == null) {
detailRecordType.sealed = true;
}
for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
Name fieldName = names.fromIdNode(detailEntry.key);
BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType,
detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL);
detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));
detailRecordType.tsymbol.scope.define(fieldName, fieldSym);
}
return detailRecordType;
}
private BRecordType createAnonRecordType(Location pos) {
BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
SymTag.RECORD,
Flags.PUBLIC,
names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
detailRecordTypeSymbol.scope.define(
names.fromString(detailRecordTypeSymbol.name.value + "." +
detailRecordTypeSymbol.initializerFunc.funcName.value),
detailRecordTypeSymbol.initializerFunc.symbol);
BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
detailRecordType.restFieldType = symTable.anydataType;
return detailRecordType;
}
private BAttachedFunction createRecordInitFunc() {
BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
false, symTable.builtinPos, VIRTUAL);
initFuncSymbol.retType = symTable.nilType;
return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, symTable.builtinPos);
}
BLangErrorType createErrorTypeNode(BErrorType errorType) {
BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
errorTypeNode.setBType(errorType);
return errorTypeNode;
}
    /**
     * Recursively lowers a match-pattern-style expression into a boolean
     * expression over {@code varRef}: group exprs are unwrapped, binary OR
     * patterns become OR of the lowered operands, `_` becomes `varRef is any`,
     * and any other expression becomes `varRef == expression`.
     *
     * @param pos        position for the generated nodes
     * @param varRef     reference the pattern is tested against
     * @param expression pattern expression to lower
     * @return boolean-typed comparison/type-test expression
     */
    private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
                                                   BLangExpression expression) {
        BLangBinaryExpr binaryExpr;
        if (NodeKind.GROUP_EXPR == expression.getKind()) {
            // Parentheses carry no semantics here; lower the inner expression.
            return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
        }
        if (NodeKind.BINARY_EXPR == expression.getKind()) {
            // OR-combine the lowered operands.
            binaryExpr = (BLangBinaryExpr) expression;
            BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
            BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
            binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver
                            .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
        } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
            // Wildcard pattern `_`: matches any value via a type test.
            BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
            anyType.setBType(symTable.anyType);
            anyType.typeKind = TypeKind.ANY;
            return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
        } else {
            // Constant pattern: `varRef == expression`. Resolve the equality
            // operator, falling back to the type-set equality when the exact
            // operand types have no direct operator.
            binaryExpr = ASTBuilderUtil
                    .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
            BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
                                                                 expression.getBType());
            if (opSymbol == symTable.notFoundSymbol) {
                opSymbol = symResolver
                        .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
                                                      binaryExpr, env);
            }
            binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
        }
        return binaryExpr;
    }
    /**
     * Builds a boolean-typed {@code expr is-like <type>} test expression.
     */
    private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
        return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
    }
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
varRef.pos = variable.pos;
varRef.variableName = variable.name;
varRef.symbol = variable.symbol;
varRef.setBType(variable.getBType());
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = variable.expr;
assignmentStmt.pos = variable.pos;
assignmentStmt.setVariable(varRef);
return assignmentStmt;
}
    /**
     * Convenience overload: updates the object field backing {@code variable}
     * with the variable's own initializer expression.
     */
    private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                    BVarSymbol selfSymbol) {
        return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,
                variable.name);
    }
    /**
     * Builds (and rewrites) a `self.field = expr` assignment inside the given
     * init function, used to move field default values into the initializer.
     *
     * @param function    init function the assignment belongs to
     * @param expr        value assigned to the field
     * @param fieldSymbol symbol of the target field
     * @param fieldType   declared type of the field
     * @param selfSymbol  symbol of the `self` receiver
     * @param fieldName   identifier of the target field
     * @return the assignment, already rewritten in the init function's env
     */
    private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                    BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                    BLangIdentifier fieldName) {
        BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
        BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);
        fieldAccess.symbol = fieldSymbol;
        fieldAccess.setBType(fieldType);
        // Marks the store as part of object construction.
        fieldAccess.isStoreOnCreation = true;
        BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
        assignmentStmt.expr = expr;
        assignmentStmt.pos = function.pos;
        assignmentStmt.setVariable(fieldAccess);
        // Rewrite within the init function's own scope so symbol lookups
        // resolve against its parameters/locals.
        SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
        return rewrite(assignmentStmt, initFuncEnv);
    }
private boolean safeNavigate(BLangAccessExpression accessExpr) {
if (accessExpr.isLValue || accessExpr.expr == null) {
return false;
}
if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
return true;
}
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
return safeNavigate((BLangAccessExpression) accessExpr.expr);
}
return false;
}
    /**
     * Entry point for desugaring a safe-navigation access chain: allocates a
     * temp result variable, lets {@link #handleSafeNavigation} build the
     * nested match statements (collected via {@code matchStmtStack}), and
     * wraps the outermost match plus temp def into a statement expression
     * whose value is the temp. Resets all safe-navigation state afterwards.
     *
     * @param accessExpr outermost access expression of the chain
     * @return statement expression yielding the navigated value
     */
    private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
        BType originalExprType = accessExpr.getBType();
        // Create a temp variable to hold the intermediate result of the acceess expression.
        String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
        BLangSimpleVariable tempResultVar =
                ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
                        new BVarSymbol(0, names.fromString(matchTempResultVarName),
                                this.env.scope.owner.pkgID, accessExpr.getBType(),
                                this.env.scope.owner, accessExpr.pos, VIRTUAL));
        BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
        // Populates matchStmtStack; the first element is the outermost match.
        handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
        BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();
        BLangBlockStmt blockStmt =
                ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matchStmt));
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
        stmtExpression.setBType(originalExprType);
        // Reset the state the recursive desugaring accumulated so the next
        // safe-navigation chain starts clean.
        this.matchStmtStack = new Stack<>();
        this.accessExprStack = new Stack<>();
        this.successClause = null;
        this.safeNavigationAssignment = null;
        return stmtExpression;
    }
    /**
     * Recursively desugars one link of a safe-navigation chain into a match
     * statement. Recurses into the receiver first so the innermost access is
     * handled before its enclosing accesses; non-safe links only get their
     * type adjusted; safe links produce a match with nil/error clauses plus a
     * success clause, chained via {@code matchStmtStack}/{@code successClause}.
     *
     * @param accessExpr    access expression for this link
     * @param type          result type carried by the generated match statements
     * @param tempResultVar temp variable receiving the chain's final value
     */
    private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
        if (accessExpr.expr == null) {
            return;
        }
        // If the parent of current expr is the root, terminate
        NodeKind kind = accessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
        }
        if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
            // Not a safe link: restore the access's original result type
            // (widened with error for XML / map<json> receivers) and, if a
            // pending assignment exists, feed this access into it.
            BType originalType = Types.getReferredType(accessExpr.originalType);
            if (TypeTags.isXMLTypeTag(originalType.tag) || isMapJson(originalType)) {
                accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
            } else {
                accessExpr.setBType(originalType);
            }
            if (this.safeNavigationAssignment != null) {
                this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
            }
            return;
        }
        /*
         * If the field access is a safe navigation, create a match expression.
         * Then chain the current expression as the success-pattern of the parent
         * match expr, if available.
         * eg:
         * x but {                  <--- parent match expr
         *   error e => e,
         *   T t => t.y but {      <--- current expr
         *           error e => e,
         *           R r => r.z
         *         }
         * }
         */
        BLangExpression matchExpr = accessExpr.expr;
        BType matchExprType = accessExpr.expr.getBType();
        Location pos = accessExpr.pos;
        BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(matchExpr, pos);
        // A union of records gets one success clause per member that can have
        // the field; track its members so nil/error can be removed as handled.
        boolean isAllTypesRecords = false;
        LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
        BType referredType = Types.getReferredType(matchExpr.getBType());
        if (referredType.tag == TypeTags.UNION) {
            memTypes = new LinkedHashSet<>(((BUnionType) referredType).getMemberTypes());
            isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
        }
        if (accessExpr.nilSafeNavigation) {
            // Add the nil match clause: `() => result = ()`.
            matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));
            matchStmt.setBType(type);
            memTypes.remove(symTable.nilType);
        }
        if (accessExpr.errorSafeNavigation) {
            // Add the error match clause: `error e => result = e`.
            matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));
            matchStmt.setBType(type);
            matchStmt.pos = pos;
            memTypes.remove(symTable.errorType);
        }
        BLangMatchClause successClause = null;
        Name field = getFieldName(accessExpr);
        if (field == Names.EMPTY) {
            // Field name not statically known (computed index): a single
            // success clause over the whole receiver type.
            successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
                    accessExpr.errorSafeNavigation);
            matchStmt.addMatchClause(successClause);
            pushToMatchStatementStack(matchStmt, successClause, pos);
            return;
        }
        if (isAllTypesRecords) {
            // One success clause per record member that declares the field (or
            // is open), plus a catch-all clause that yields nil.
            for (BType memberType : memTypes) {
                BRecordType recordType = (BRecordType) Types.getReferredType(memberType);
                if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
                    successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,
                            accessExpr.errorSafeNavigation);
                    matchStmt.addMatchClause(successClause);
                }
            }
            matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));
            pushToMatchStatementStack(matchStmt, successClause, pos);
            return;
        }
        // Create the pattern for success scenario. i.e: not null and not error (if applicable).
        successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
                accessExpr.errorSafeNavigation);
        matchStmt.addMatchClause(successClause);
        pushToMatchStatementStack(matchStmt, successClause, pos);
    }
private boolean isMapJson(BType originalType) {
return originalType.tag == TypeTags.MAP && ((BMapType) originalType).getConstraint().tag == TypeTags.JSON;
}
    /**
     * Pushes a generated match statement onto the safe-navigation stack and
     * nests it into the PREVIOUS success clause's body (chaining the matches),
     * then records the new success clause for the next link.
     */
    private void pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,
                                           Location pos) {
        this.matchStmtStack.push(matchStmt);
        if (this.successClause != null) {
            this.successClause.blockStmt = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));
        }
        this.successClause = successClause;
    }
private Name getFieldName(BLangAccessExpression accessExpr) {
Name field = Names.EMPTY;
if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
field = new Name(((BLangFieldBasedAccess) accessExpr).field.value);
} else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr;
if (indexBasedExpression.getKind() == NodeKind.LITERAL) {
field = new Name(((BLangLiteral) indexBasedExpression).value.toString());
}
}
return field;
}
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
for (BType memType : memTypes) {
int typeTag = Types.getReferredType(memType).tag;
if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) {
return false;
}
}
return true;
}
/**
 * Creates a match clause that captures an error value during safe navigation and assigns it to
 * the temporary result variable. The clause binds the matched value to a synthesized variable and
 * is guarded by an {@code is error} type test on that variable.
 *
 * @param matchExpr     expression being matched
 * @param tempResultVar temporary variable accumulating the navigation result
 * @return the generated error-handling match clause
 */
private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
    String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
    Location pos = matchExpr.pos;
    // Synthesized variable capturing the matched value (typed any|error so anything binds).
    BVarSymbol errorPatternVarSymbol = new BVarSymbol(0, Names.fromString(errorPatternVarName),
            this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
    BLangCaptureBindingPattern captureBindingPattern =
            ASTBuilderUtil.createCaptureBindingPattern(errorPatternVarSymbol, errorPatternVarName);
    BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
            ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
    // Clause body: tempResultVar = <captured error>;
    BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, errorPatternVarSymbol);
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
    // Guard restricts the clause to values that test as error.
    BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getErrorTypeNode());
    matchGuard.setBType(symTable.booleanType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Creates a match clause that captures a nil value during safe navigation and assigns it to the
 * temporary result variable. Mirrors {@code getMatchErrorClause} but guards with an
 * {@code is ()} type test instead of {@code is error}.
 *
 * @param matchExpr     expression being matched
 * @param tempResultVar temporary variable accumulating the navigation result
 * @return the generated nil-handling match clause
 */
private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
    String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
    Location pos = matchExpr.pos;
    // Synthesized variable capturing the matched value (typed any|error so anything binds).
    BVarSymbol nullPatternVarSymbol = new BVarSymbol(0, Names.fromString(nullPatternVarName),
            this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
    BLangCaptureBindingPattern captureBindingPattern =
            ASTBuilderUtil.createCaptureBindingPattern(nullPatternVarSymbol, nullPatternVarName);
    BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
            ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
    // Clause body: tempResultVar = <captured nil value>;
    BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, nullPatternVarSymbol);
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
    // Guard restricts the clause to nil values.
    BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getNillTypeNode());
    matchGuard.setBType(symTable.booleanType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Creates the catch-all wildcard (`_`) match clause used in safe-navigation desugaring: any value
 * not handled by the preceding clauses results in nil (`()`) being assigned to the temporary
 * result variable.
 *
 * @param matchExpr     expression being matched
 * @param tempResultVar temporary variable accumulating the navigation result
 * @return the generated wildcard clause with a nil-assigning body and no guard
 */
private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,
                                                       BLangSimpleVariable tempResultVar) {
    Location pos = matchExpr.pos;
    // Clause body: tempResultVar = ();
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, createLiteral(pos, symTable.nilType,
                    Names.NIL_VALUE));
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
    BLangWildCardMatchPattern wildCardMatchPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);
    wildCardMatchPattern.setBType(symTable.anyType);
    // No guard: this clause matches anything that reaches it.
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardMatchPattern);
}
/**
 * Creates the success-case match clause for safe-navigation desugaring: the matched value is not
 * nil (and, when {@code liftError} is set, not an error), so the actual field/index access is
 * performed on the captured value and the result assigned to the temporary result variable.
 *
 * @param type          type being matched; lifted to its safe (nil/error-free) type before use
 * @param matchExpr     expression being matched
 * @param accessExpr    original safe-navigation access expression being desugared
 * @param tempResultVar temporary variable accumulating the navigation result
 * @param liftError     whether error should also be lifted out of the matched type
 * @return the generated success match clause
 */
private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,
                                                 BLangAccessExpression accessExpr,
                                                 BLangSimpleVariable tempResultVar, boolean liftError) {
    // Lift nil (and optionally error) so the captured value is typed with the success type only.
    type = types.getSafeType(type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
    Location pos = accessExpr.pos;
    BVarSymbol successPatternSymbol;
    // Invokable-typed values need an invokable symbol so later invocations resolve correctly.
    if (Types.getReferredType(type).tag == TypeTags.INVOKABLE) {
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
    } else {
        successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
    }
    BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
            type, null, successPatternSymbol);
    BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,
            successPatternVar.symbol);
    BLangCaptureBindingPattern captureBindingPattern =
            ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);
    BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
            ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
    // Clone the access expression so the real access is performed on the captured value.
    BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
    if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
    }
    if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
        ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
                ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
    }
    tempAccessExpr.expr = addConversionExprIfRequired(successPatternVarRef, type);
    // Safe navigation is handled by the surrounding match; clear the flags on the clone.
    tempAccessExpr.errorSafeNavigation = false;
    tempAccessExpr.nilSafeNavigation = false;
    accessExpr.cloneRef = null;
    // XML attribute/member access can still produce error or nil even on the success path.
    if (TypeTags.isXMLTypeTag(Types.getReferredType(tempAccessExpr.expr.getBType()).tag)) {
        tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
                symTable.nilType));
    } else {
        tempAccessExpr.setBType(accessExpr.originalType);
    }
    tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
    // Clause body: tempResultVar = <access on captured value> (converted if needed).
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,
            Lists.of(assignmentStmt));
    // Guard restricts the clause to values of the lifted success type.
    BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));
    matchGuard.setBType(symTable.booleanType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Builds a value-type node representing the nil (`()`) type.
 */
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.typeKind = TypeKind.NIL;
    nilTypeNode.setBType(symTable.nilType);
    return nilTypeNode;
}
/**
 * Builds a value-type node wrapping the given semantic type, with the type kind taken from the
 * type itself.
 */
BLangValueType createTypeNode(BType type) {
    BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    valueTypeNode.setBType(type);
    valueTypeNode.typeKind = type.getKind();
    return valueTypeNode;
}
/**
 * Shallow-clones an expression used as the receiver of a safe-navigation access.
 * Only simple variable references and field/index access expressions are supported; any other
 * node kind is a desugaring invariant violation.
 *
 * @param expr expression to clone
 * @return the cloned value expression
 * @throws IllegalStateException if the expression kind is not supported here
 */
private BLangValueExpression cloneExpression(BLangExpression expr) {
    switch (expr.getKind()) {
        case SIMPLE_VARIABLE_REF:
            return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
            return cloneAccessExpr((BLangAccessExpression) expr);
        default:
            // Include the offending kind so invariant violations are diagnosable from the trace.
            throw new IllegalStateException("Unsupported expression kind for cloning: " + expr.getKind());
    }
}
/**
 * Recursively clones a field/index access expression chain for safe-navigation desugaring.
 * The clone has its safe-navigation flags cleared, and each receiver in the chain is retyped with
 * its safe (nil/error-lifted) type so the clone can be used inside the generated match clauses.
 *
 * @param originalAccessExpr access expression to clone
 * @return the cloned access expression; the original is returned unchanged when it has no receiver
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    BLangExpression varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    // Clone the receiver: recurse for nested access expressions, shallow-clone otherwise.
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression(originalAccessExpr.expr);
    }
    // Receiver is typed with the safe (lifted) type of the original receiver.
    varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));
    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        default:
            // Callers only pass field/index access expressions; anything else is an invariant violation.
            throw new IllegalStateException();
    }
    // Copy over metadata from the original node.
    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.isLValue = originalAccessExpr.isLValue;
    accessExpr.symbol = originalAccessExpr.symbol;
    // Safe navigation has already been handled by the enclosing desugared match.
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    // The clone is typed with the original (pre-lifting) type of the access.
    accessExpr.setBType(originalAccessExpr.originalType);
    return accessExpr;
}
/**
 * Builds the expression {@code expr + 1}, used to convert an exclusive range start into an
 * inclusive one.
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.ADD, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOpSymbol);
}
/**
 * Builds the expression {@code expr - 1}, used to convert an exclusive range end into an
 * inclusive one.
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.SUB, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOpSymbol);
}
/**
 * Builds a boolean literal node with the given value, positioned at the built-in position.
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral boolLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    boolLiteral.pos = symTable.builtinPos;
    boolLiteral.setBType(symTable.booleanType);
    boolLiteral.value = value;
    return boolLiteral;
}
/**
 * Returns true when the safe (nil-lifted) form of the given type is a mapping type that has a
 * well-defined empty default value: json, map, or record. Type references are unwrapped
 * recursively.
 */
private boolean isDefaultableMappingType(BType type) {
    int tag = types.getSafeType(type, true, false).tag;
    if (tag == TypeTags.TYPEREFDESC) {
        return isDefaultableMappingType(Types.getReferredType(type));
    }
    return tag == TypeTags.JSON || tag == TypeTags.MAP || tag == TypeTags.RECORD;
}
/**
 * Creates the generated initializer function for a class definition and registers it on the
 * class's object type symbol. When a user-defined init function exists, its return type is
 * reused for the generated initializer; otherwise the return type is nil.
 *
 * @param classDefinition class definition to create the generated initializer for
 * @param env             environment of the class definition
 * @return the rewritten generated init function
 */
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
    BType returnType = symTable.nilType;
    // Mirror the user-defined init's return type so `new` expressions type-check consistently.
    if (classDefinition.initFunction != null) {
        returnType = classDefinition.initFunction.getBType().getReturnType();
    }
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(null, classDefinition.symbol,
                    env, names, GENERATED_INIT_SUFFIX,
                    classDefinition.getBType(), returnType);
    // Register the generated initializer on the object type symbol.
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
    typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
            (BInvokableType) initFunction.getBType(), null);
    classDefinition.generatedInitFunction = initFunction;
    initFunction.returnTypeNode.setBType(returnType);
    return rewrite(initFunction, env);
}
/**
 * Desugars a logical AND/OR binary expression into an if-else wrapped in a statement expression,
 * preserving short-circuit semantics:
 *
 *   lhs && rhs  =>  T $result$; if (lhs) { $result$ = rhs; } else { $result$ = false; }
 *   lhs || rhs  =>  T $result$; if (lhs) { $result$ = true; } else { $result$ = rhs; }
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
            symTable.builtinPos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    boolean isLogicalAnd = binaryExpr.opKind == OperatorKind.AND;

    // then-branch (lhs was true): evaluate rhs for AND, short-circuit to `true` for OR.
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
            resultVarDef.var.symbol);
    BLangExpression thenResult = isLogicalAnd ? binaryExpr.rhsExpr : getBooleanLiteral(true);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
    thenBody.addStatement(thenAssignment);

    // else-branch (lhs was false): short-circuit to `false` for AND, evaluate rhs for OR.
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
            resultVarDef.var.symbol);
    BLangExpression elseResult = isLogicalAnd ? getBooleanLiteral(false) : binaryExpr.rhsExpr;
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
    elseBody.addStatement(elseAssignment);

    // Assemble: { T $result$; if (lhs) {...} else {...} } -> $result$
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Returns true when the expression is (or, unwrapping check/type-conversion expressions,
 * eventually wraps) a mapping constructor, object constructor, or object init expression.
 */
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.TYPE_INIT_EXPR || kind == NodeKind.RECORD_LITERAL_EXPR
            || kind == NodeKind.OBJECT_CTOR_EXPRESSION) {
        return true;
    }
    // Look through check and conversion wrappers at the underlying expression.
    if (kind == NodeKind.CHECK_EXPR) {
        return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
    }
    if (kind == NodeKind.TYPE_CONVERSION_EXPR) {
        return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
    }
    return false;
}
/**
 * Returns the type of the invokable symbol's rest parameter, or {@code null} when the symbol is
 * null or has no rest parameter.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    if (invokableSymbol == null || invokableSymbol.restParam == null) {
        return null;
    }
    return invokableSymbol.restParam.type;
}
/**
 * Returns the type of the function's rest parameter, or {@code null} when the function is null
 * or has no rest parameter.
 */
private BType getRestType(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.getBType();
}
/**
 * Returns the symbol of the function's rest parameter, or {@code null} when the function is null
 * or has no rest parameter.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.symbol;
}
/**
 * Returns true when the record field is a key-value field whose key is a computed key
 * (i.e. written as {@code [expr]: value}).
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Rewrites a mapping constructor expression into either a struct literal (when the referred type
 * is a record) or a map literal. Each field is desugared as follows:
 * - key-value fields: non-computed identifier keys become string literals; literal keys are used
 *   as-is; both key and value are recursively rewritten;
 * - shorthand variable-name fields ({@code {x}}): expanded to {@code "x": x};
 * - spread-operator fields ({@code {...m}}): the spread expression is rewritten in place.
 *
 * @param mappingConstructorExpr mapping constructor to rewrite
 * @return the desugared struct or map literal carrying the rewritten fields
 */
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.getBType();
    Location pos = mappingConstructorExpr.pos;
    List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression origKey = key.expr;
            BLangExpression keyExpr;
            if (key.computedKey) {
                // Computed keys ([expr]) keep their expression; it is rewritten below.
                keyExpr = origKey;
            } else {
                // Identifier keys become string literals; literal keys pass through unchanged.
                keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
                        StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
                        ((BLangLiteral) origKey);
            }
            BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
                    ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
                            rewriteExpr(keyValueField.valueExpr));
            rewrittenField.pos = keyValueField.pos;
            rewrittenField.key.pos = key.pos;
            rewrittenFields.add(rewrittenField);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field {x}: desugar to "x": x.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
                    rewriteExpr(createStringLiteral(pos,
                            StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
                    rewriteExpr(varRefField)));
        } else {
            // Spread operator field {...m}: rewrite the spread expression in place.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            spreadOpField.expr = rewriteExpr(spreadOpField.expr);
            rewrittenFields.add(spreadOpField);
        }
    }
    // The original field list is cleared; the new literal node owns the rewritten fields.
    fields.clear();
    BType refType = Types.getReferredType(type);
    return refType.tag == TypeTags.RECORD ?
            new BLangStructLiteral(pos, type, refType.tsymbol, rewrittenFields) :
            new BLangMapLiteral(pos, type, rewrittenFields);
}
/**
 * Adds an implicit import of the internal transaction module (aliased "trx") to the package
 * being compiled, registering it on both the package node and the package symbol. Skipped when
 * the package being compiled is the transaction-internal module itself, to avoid a self-import.
 */
protected void addTransactionInternalModuleImport() {
    if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
        BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
        List<BLangIdentifier> pkgNameComps = new ArrayList<>();
        pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
        importDcl.pkgNameComps = pkgNameComps;
        importDcl.pos = env.enclPkg.symbol.pos;
        importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
        importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
        importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
        // The module symbol is pre-resolved; no symbol resolution pass is needed for this import.
        importDcl.symbol = symTable.internalTransactionModuleSymbol;
        env.enclPkg.imports.add(importDcl);
        env.enclPkg.symbol.imports.add(importDcl.symbol);
    }
}
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} |
Put the error message into ErrorCode.java | private static void preCheckColumnRef(CreateTableStmt statement) {
List<ColumnDef> columnDefs = statement.getColumnDefs();
if (columnDefs == null || columnDefs.isEmpty()) {
ErrorReport.reportSemanticException(ErrorCode.ERR_TABLE_MUST_HAVE_COLUMNS);
}
if (columnDefs.size() > Config.max_column_number_per_table) {
throw new SemanticException("The number of columns in a table must be less than or equal to %d,"
+ " Please decrease the number of columns or increase frontend config 'max_column_number_per_table'.",
Config.max_column_number_per_table);
}
Set<String> columnSet = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
for (ColumnDef columnDef : columnDefs) {
if (!columnSet.add(columnDef.getName())) {
ErrorReport.reportSemanticException(ErrorCode.ERR_DUP_FIELDNAME, columnDef.getName());
}
if (columnDef.getAggregateType() != null && columnDef.getAggregateType().isReplaceFamily()) {
statement.setHasReplace(true);
}
if (columnDef.isGeneratedColumn()) {
statement.setHasGeneratedColumn(true);
}
}
} | throw new SemanticException("The number of columns in a table must be less than or equal to %d," | private static void preCheckColumnRef(CreateTableStmt statement) {
List<ColumnDef> columnDefs = statement.getColumnDefs();
if (columnDefs == null || columnDefs.isEmpty()) {
ErrorReport.reportSemanticException(ErrorCode.ERR_TABLE_MUST_HAVE_COLUMNS);
}
if (columnDefs.size() > Config.max_column_number_per_table) {
ErrorReport.reportSemanticException(ErrorCode.ERR_TOO_MANY_COLUMNS, Config.max_column_number_per_table);
}
Set<String> columnSet = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
for (ColumnDef columnDef : columnDefs) {
if (!columnSet.add(columnDef.getName())) {
ErrorReport.reportSemanticException(ErrorCode.ERR_DUP_FIELDNAME, columnDef.getName());
}
if (columnDef.getAggregateType() != null && columnDef.getAggregateType().isReplaceFamily()) {
statement.setHasReplace(true);
}
if (columnDef.isGeneratedColumn()) {
statement.setHasGeneratedColumn(true);
}
}
} | class CreateTableAnalyzer {
// Class-level SLF4J logger shared by all static analysis entry points.
private static final Logger LOG = LoggerFactory.getLogger(CreateTableAnalyzer.class);
/**
 * Entry point for semantic analysis of a CREATE TABLE statement. Normalizes and validates the
 * table name, resolves catalog and database, checks for name collisions (with temporary-table
 * handling), then runs the per-clause analyzers in dependency order: engine, charset, columns
 * pre-check, keys, sort keys, partitioning, distribution, column definitions, generated
 * columns, and index definitions.
 *
 * @param statement CREATE TABLE statement to analyze (mutated in place with analysis results)
 * @param context   current connection context (session, catalog defaults)
 */
public static void analyze(CreateTableStmt statement, ConnectContext context) {
    final TableName tableNameObject = statement.getDbTbl();
    MetaUtils.normalizationTableName(context, tableNameObject);
    final String catalogName = tableNameObject.getCatalog();
    MetaUtils.checkCatalogExistAndReport(catalogName);
    final String tableName = tableNameObject.getTbl();
    FeNameFormat.checkTableName(tableName);
    Database db = MetaUtils.getDatabase(catalogName, tableNameObject.getDb());
    // Temporary tables are scoped per session and use a separate existence check.
    if (statement instanceof CreateTemporaryTableStmt) {
        analyzeTemporaryTable(statement, context, catalogName, db, tableName);
    } else {
        if (db.getTable(tableName) != null && !statement.isSetIfNotExists()) {
            ErrorReport.reportSemanticException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
        }
    }
    // Per-clause analyzers; order matters (e.g. keys must be analyzed before sort keys).
    analyzeEngineName(statement, catalogName);
    analyzeCharsetName(statement);
    preCheckColumnRef(statement);
    analyzeKeysDesc(statement);
    analyzeSortKeys(statement);
    analyzePartitionDesc(statement);
    analyzeDistributionDesc(statement);
    analyzeColumnRef(statement, catalogName);
    if (statement.isHasGeneratedColumn()) {
        analyzeGeneratedColumn(statement, context);
    }
    analyzeIndexDefs(statement);
}
/**
 * Validates a CREATE TEMPORARY TABLE statement: binds it to the current session, restricts it to
 * the internal catalog, strips unsupported colocation properties, and checks for a name clash
 * among this session's temporary tables.
 *
 * @param stmt        temporary-table create statement (must be a CreateTemporaryTableStmt)
 * @param context     connection context providing the owning session id
 * @param catalogName resolved catalog name (may be null)
 * @param db          resolved target database
 * @param tableName   validated table name
 */
private static void analyzeTemporaryTable(CreateTableStmt stmt, ConnectContext context,
                                          String catalogName, Database db, String tableName) {
    ((CreateTemporaryTableStmt) stmt).setSessionId(context.getSessionId());
    if (catalogName != null && !CatalogMgr.isInternalCatalog(catalogName)) {
        throw new SemanticException("temporary table must be created under internal catalog");
    }
    // Colocation is not supported for temporary tables; drop the property silently.
    Map<String, String> properties = stmt.getProperties();
    if (properties != null) {
        properties.remove(PropertyAnalyzer.PROPERTIES_COLOCATE_WITH);
    }
    // Temporary table names are unique per (session, database), not globally.
    UUID sessionId = context.getSessionId();
    TemporaryTableMgr temporaryTableMgr = GlobalStateMgr.getCurrentState().getTemporaryTableMgr();
    if (temporaryTableMgr.tableExists(sessionId, db.getId(), tableName) && !stmt.isSetIfNotExists()) {
        ErrorReport.reportSemanticException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
}
/**
 * Resolves and validates the ENGINE clause against the target catalog, then stores the
 * normalized (lower-case) engine name on the statement.
 * - internal catalog: defaults to the configured default engine; the name must be a known
 *   {@code EngineType};
 * - unified external catalog: an explicit ENGINE clause is mandatory;
 * - other external catalogs: the engine defaults to, and must match, the catalog type;
 * - temporary tables: only the olap engine is allowed.
 *
 * @param stmt        statement whose engine name is validated and normalized in place
 * @param catalogName target catalog name
 */
private static void analyzeEngineName(CreateTableStmt stmt, String catalogName) {
    String engineName = stmt.getEngineName();
    if (CatalogMgr.isInternalCatalog(catalogName)) {
        if (Strings.isNullOrEmpty(engineName)) {
            engineName = EngineType.defaultEngine().name();
        } else {
            try {
                engineName = EngineType.valueOf(engineName.toUpperCase()).name();
            } catch (IllegalArgumentException e) {
                throw new SemanticException("Unknown engine name: %s", engineName);
            }
        }
    } else {
        String catalogType = GlobalStateMgr.getCurrentState().getCatalogMgr().getCatalogType(catalogName);
        if (catalogType.equalsIgnoreCase(ConnectorType.UNIFIED.getName())) {
            // Unified catalogs front several engines, so the user must pick one explicitly.
            if (Strings.isNullOrEmpty(engineName)) {
                throw new SemanticException("Create table in unified catalog requires engine clause (ENGINE = ENGINE_NAME)");
            }
        } else {
            if (Strings.isNullOrEmpty(engineName)) {
                engineName = catalogType;
            }
            if (!engineName.equalsIgnoreCase(catalogType)) {
                throw new SemanticException("Can't create %s table in the %s catalog", engineName, catalogType);
            }
        }
    }
    if ((stmt instanceof CreateTemporaryTableStmt) && !engineName.equalsIgnoreCase("olap")) {
        throw new SemanticException("temporary table only support olap engine");
    }
    stmt.setEngineName(engineName.toLowerCase());
}
/**
 * Validates the CHARSET clause: only utf8 (case-insensitive) is supported. A missing charset
 * defaults to utf8. The normalized lower-case name is stored back on the statement.
 */
private static void analyzeCharsetName(CreateTableStmt stmt) {
    String name = stmt.getCharsetName();
    if (Strings.isNullOrEmpty(name)) {
        name = "utf8";
    }
    if (!"utf8".equalsIgnoreCase(name)) {
        throw new SemanticException("charset name %s is not supported yet", name);
    }
    stmt.setCharsetName(name.toLowerCase());
}
/**
 * Analyzes each column definition and materializes the resulting {@code Column} list onto the
 * statement. Enforces per-column rules: at most one AUTO_INCREMENT column (which must be BIGINT),
 * no complex types for MySQL external tables, HLL_UNION/BITMAP_UNION aggregates only in AGG_KEYS
 * tables, and implicit aggregation flags for non-key columns of unique/primary/duplicate tables.
 *
 * @param statement   statement whose columns are analyzed and set in place
 * @param catalogName target catalog name (internal vs. external changes column analysis rules)
 */
private static void analyzeColumnRef(CreateTableStmt statement, String catalogName) {
    String engineName = statement.getEngineName();
    KeysDesc keysDesc = statement.getKeysDesc();
    List<ColumnDef> columnDefs = statement.getColumnDefs();
    if (columnDefs.stream().filter(ColumnDef::isAutoIncrement).count() > 1) {
        throw new SemanticException("More than one AUTO_INCREMENT column defined in CREATE TABLE Statement",
                statement.getPos());
    }
    List<Column> columns = new ArrayList<>();
    for (ColumnDef columnDef : columnDefs) {
        try {
            columnDef.analyze(statement.isOlapEngine(), CatalogMgr.isInternalCatalog(catalogName), engineName);
        } catch (AnalysisException e) {
            LOG.error("Column definition analyze failed.", e);
            throw new SemanticException(e.getMessage());
        }
        if (columnDef.isAutoIncrement()) {
            if (columnDef.getType() != Type.BIGINT) {
                throw new SemanticException("The AUTO_INCREMENT column must be BIGINT", columnDef.getPos());
            }
        }
        if (engineName.equalsIgnoreCase(Table.TableType.MYSQL.name()) && columnDef.getType().isComplexType()) {
            throw new SemanticException(engineName + " external table don't support complex type", columnDef.getPos());
        }
        // Non-olap, non-hive engines have no key concept here; mark every column as key.
        if (!statement.isOlapEngine() && !engineName.equalsIgnoreCase(Table.TableType.HIVE.name())) {
            columnDef.setIsKey(true);
        }
        if (columnDef.getAggregateType() == AggregateType.HLL_UNION && keysDesc != null
                && keysDesc.getKeysType() != KeysType.AGG_KEYS) {
            throw new SemanticException("HLL_UNION must be used in AGG_KEYS", keysDesc.getPos());
        }
        if (columnDef.getAggregateType() == AggregateType.BITMAP_UNION && columnDef.getType().isBitmapType()
                && keysDesc != null && keysDesc.getKeysType() != KeysType.AGG_KEYS) {
            throw new SemanticException("BITMAP_UNION must be used in AGG_KEYS", keysDesc.getPos());
        }
        Column col = columnDef.toColumn(null);
        // For unique/primary/duplicate tables, non-key columns carry an implicit aggregation.
        if (keysDesc != null && (keysDesc.getKeysType() == KeysType.UNIQUE_KEYS
                || keysDesc.getKeysType() == KeysType.PRIMARY_KEYS ||
                keysDesc.getKeysType() == KeysType.DUP_KEYS)) {
            if (!col.isKey()) {
                col.setAggregationTypeImplicit(true);
            }
        }
        columns.add(col);
    }
    statement.setColumns(columns);
}
/**
 * Derives and validates the keys description of an olap table.
 *
 * When no KEYS clause is given, one is inferred:
 * - if any column declares an aggregate type, the non-aggregated columns become AGG_KEYS;
 * - otherwise a DUP_KEYS prefix is picked from the leading columns, bounded by the short-key
 *   column count and byte-size limits, stopping at non-distributable types and after the first
 *   VARCHAR column.
 *
 * Then validates: key columns must be a prefix of the schema in schema order, must not declare
 * aggregate types, primary-key columns must be non-nullable and of a supported type, and value
 * columns must (AGG_KEYS) or must not (other key types, REPLACE excepted) declare aggregates.
 * Finally marks key columns and assigns implicit aggregation (NONE for DUP_KEYS, REPLACE for
 * unique/primary) to value columns.
 *
 * @param stmt statement whose keys description is inferred/validated and stored in place
 */
private static void analyzeKeysDesc(CreateTableStmt stmt) {
    KeysDesc keysDesc = stmt.getKeysDesc();
    // Non-olap engines have no key concept; a KEYS clause is an error there.
    if (!stmt.isOlapEngine()) {
        if (keysDesc != null) {
            throw new SemanticException("Create " + stmt.getEngineName() + " table should not contain keys desc",
                    keysDesc.getPos());
        }
        return;
    }
    List<ColumnDef> columnDefs = stmt.getColumnDefs();
    if (keysDesc == null) {
        // Infer a keys description when the user did not provide one.
        List<String> keysColumnNames = Lists.newArrayList();
        if (columnDefs.stream().anyMatch(c -> c.getAggregateType() != null)) {
            // Aggregate columns present: every non-aggregated column becomes an AGG key.
            for (ColumnDef columnDef : columnDefs) {
                if (columnDef.getAggregateType() == null) {
                    keysColumnNames.add(columnDef.getName());
                }
            }
            keysDesc = new KeysDesc(KeysType.AGG_KEYS, keysColumnNames);
        } else {
            // No aggregates: pick a leading-column prefix as DUP keys, bounded by short-key limits.
            int keyLength = 0;
            for (ColumnDef columnDef : columnDefs) {
                keyLength += columnDef.getType().getIndexSize();
                if (keysColumnNames.size() >= FeConstants.SHORTKEY_MAX_COLUMN_COUNT
                        || keyLength > FeConstants.SHORTKEY_MAXSIZE_BYTES) {
                    // Over the limit: still take a lone char-family first column so keys are non-empty.
                    if (keysColumnNames.size() == 0
                            && columnDef.getType().getPrimitiveType().isCharFamily()) {
                        keysColumnNames.add(columnDef.getName());
                    }
                    break;
                }
                if (!columnDef.getType().canDistributedBy()) {
                    break;
                }
                // VARCHAR is allowed as a key but must terminate the prefix.
                if (columnDef.getType().getPrimitiveType() == PrimitiveType.VARCHAR) {
                    keysColumnNames.add(columnDef.getName());
                    break;
                }
                keysColumnNames.add(columnDef.getName());
            }
            if (columnDefs.isEmpty()) {
                throw new SemanticException("Empty schema");
            }
            if (keysColumnNames.isEmpty()) {
                throw new SemanticException("Data type of first column cannot be %s", columnDefs.get(0).getType());
            }
            keysDesc = new KeysDesc(KeysType.DUP_KEYS, keysColumnNames);
        }
    }
    KeysType keysType = keysDesc.getKeysType();
    if (keysType == null) {
        throw new SemanticException("Keys type is null.");
    }
    List<String> keysColumnNames = keysDesc.getKeysColumnNames();
    if (keysColumnNames == null || keysColumnNames.size() == 0) {
        throw new SemanticException("The number of key columns is 0.");
    }
    if (keysColumnNames.size() > columnDefs.size()) {
        throw new SemanticException("The number of key columns should be less than the number of columns.");
    }
    // Key columns must be a prefix of the schema, in schema order.
    for (int i = 0; i < keysColumnNames.size(); ++i) {
        String colName = columnDefs.get(i).getName();
        if (!keysColumnNames.get(i).equalsIgnoreCase(colName)) {
            String keyName = keysColumnNames.get(i);
            // Distinguish "no such column" from "column exists but out of order".
            if (columnDefs.stream().noneMatch(col -> col.getName().equalsIgnoreCase(keyName))) {
                throw new SemanticException("Key column(%s) doesn't exist.", keysColumnNames.get(i));
            } else {
                throw new SemanticException("Key columns must be the first few columns of the schema and the order "
                        + " of the key columns must be consistent with the order of the schema");
            }
        }
        if (columnDefs.get(i).getAggregateType() != null) {
            throw new SemanticException("Key column[" + colName + "] should not specify aggregate type.");
        }
        // Primary-key columns: non-nullable and restricted to supported key types.
        if (keysType == KeysType.PRIMARY_KEYS) {
            ColumnDef cd = columnDefs.get(i);
            cd.setPrimaryKeyNonNullable();
            if (cd.isAllowNull()) {
                throw new SemanticException("primary key column[" + colName + "] cannot be nullable");
            }
            Type t = cd.getType();
            if (!(t.isBoolean() || t.isIntegerType() || t.isLargeint() || t.isVarchar() || t.isDate() ||
                    t.isDatetime())) {
                throw new SemanticException("primary key column[" + colName + "] type not supported: " + t.toSql());
            }
        }
    }
    // Validate aggregate declarations on value (non-key) columns.
    for (int i = keysColumnNames.size(); i < columnDefs.size(); ++i) {
        if (keysType == KeysType.AGG_KEYS) {
            if (columnDefs.get(i).getAggregateType() == null) {
                throw new SemanticException(keysType.name() + " table should specify aggregate type for "
                        + "non-key column[" + columnDefs.get(i).getName() + "]");
            }
        } else {
            if (columnDefs.get(i).getAggregateType() != null
                    && columnDefs.get(i).getAggregateType() != AggregateType.REPLACE) {
                throw new SemanticException(keysType.name() + " table should not specify aggregate type for "
                        + "non-key column[" + columnDefs.get(i).getName() + "]");
            }
        }
    }
    // Mark key columns and assign implicit aggregation to value columns.
    for (int i = 0; i < keysDesc.getKeysColumnNames().size(); ++i) {
        columnDefs.get(i).setIsKey(true);
    }
    if (keysDesc.getKeysType() != KeysType.AGG_KEYS) {
        AggregateType aggregateType = keysDesc.getKeysType() == KeysType.DUP_KEYS ?
                AggregateType.NONE : AggregateType.REPLACE;
        for (int i = keysDesc.getKeysColumnNames().size(); i < columnDefs.size(); ++i) {
            columnDefs.get(i).setAggregateType(aggregateType);
        }
    }
    stmt.setKeysDesc(keysDesc);
}
/**
 * Validates the ORDER BY (sort key) clause of an olap table against its keys type:
 * - PRIMARY_KEYS: sort columns must exist and be of a sortable type;
 * - DUP_KEYS: any existing columns are accepted;
 * - AGG_KEYS / UNIQUE_KEYS: the sort column set must equal the key column set;
 * - other key types do not support a sort key clause.
 * Must run after {@code analyzeKeysDesc}, which guarantees a non-null keys description.
 *
 * @param stmt statement whose sort keys are validated (no mutation besides validation)
 */
private static void analyzeSortKeys(CreateTableStmt stmt) {
    if (!stmt.isOlapEngine()) {
        return;
    }
    KeysDesc keysDesc = stmt.getKeysDesc();
    KeysType keysType = keysDesc.getKeysType();
    List<ColumnDef> columnDefs = stmt.getColumnDefs();
    List<String> sortKeys = stmt.getSortKeys();
    List<String> columnNames = columnDefs.stream().map(ColumnDef::getName).collect(Collectors.toList());
    if (sortKeys != null) {
        if (keysType == KeysType.PRIMARY_KEYS) {
            for (String column : sortKeys) {
                int idx = columnNames.indexOf(column);
                if (idx == -1) {
                    throw new SemanticException("Unknown column '%s' does not exist", column);
                }
                ColumnDef cd = columnDefs.get(idx);
                Type t = cd.getType();
                if (!(t.isBoolean() || t.isIntegerType() || t.isLargeint() || t.isVarchar() || t.isDate() ||
                        t.isDatetime())) {
                    throw new SemanticException("sort key column[" + cd.getName() + "] type not supported: " + t.toSql());
                }
            }
        } else if (keysType == KeysType.DUP_KEYS) {
            // Intentionally empty: DUP_KEYS tables accept arbitrary sort key columns.
        } else if (keysType == KeysType.AGG_KEYS || keysType == KeysType.UNIQUE_KEYS) {
            List<Integer> sortKeyIdxes = Lists.newArrayList();
            for (String column : sortKeys) {
                int idx = columnNames.indexOf(column);
                if (idx == -1) {
                    throw new SemanticException("Unknown column '%s' does not exist", column);
                }
                sortKeyIdxes.add(idx);
            }
            List<Integer> keyColIdxes = Lists.newArrayList();
            for (String column : keysDesc.getKeysColumnNames()) {
                int idx = columnNames.indexOf(column);
                if (idx == -1) {
                    throw new SemanticException("Unknown column '%s' does not exist", column);
                }
                keyColIdxes.add(idx);
            }
            // Set comparison: sort columns must be exactly the key columns (order-insensitive).
            boolean res = new HashSet<>(keyColIdxes).equals(new HashSet<>(sortKeyIdxes));
            if (!res) {
                throw new SemanticException("The sort columns of " + keysType.toSql()
                        + " table must be same with key columns");
            }
        } else {
            throw new SemanticException("Table type:" + keysType.toSql() + " does not support sort key column");
        }
    }
}
public static void analyzePartitionDesc(CreateTableStmt stmt) {
String engineName = stmt.getEngineName();
PartitionDesc partitionDesc = stmt.getPartitionDesc();
if (stmt.isOlapEngine()) {
if (partitionDesc != null) {
if (partitionDesc.getType() == PartitionType.RANGE || partitionDesc.getType() == PartitionType.LIST) {
try {
PartitionDescAnalyzer.analyze(partitionDesc);
partitionDesc.analyze(stmt.getColumnDefs(), stmt.getProperties());
} catch (AnalysisException e) {
throw new SemanticException(e.getMessage());
}
} else if (partitionDesc instanceof ExpressionPartitionDesc) {
ExpressionPartitionDesc expressionPartitionDesc = (ExpressionPartitionDesc) partitionDesc;
try {
PartitionDescAnalyzer.analyze(partitionDesc);
expressionPartitionDesc.analyze(stmt.getColumnDefs(), stmt.getProperties());
} catch (AnalysisException e) {
throw new SemanticException(e.getMessage());
}
} else {
throw new SemanticException("Currently only support range and list partition with engine type olap",
partitionDesc.getPos());
}
}
} else {
if (engineName.equalsIgnoreCase(Table.TableType.ELASTICSEARCH.name())) {
EsUtil.analyzePartitionDesc(partitionDesc);
} else if (engineName.equalsIgnoreCase(Table.TableType.ICEBERG.name())
|| engineName.equalsIgnoreCase(Table.TableType.HIVE.name())) {
if (partitionDesc != null) {
((ListPartitionDesc) partitionDesc).analyzeExternalPartitionColumns(stmt.getColumnDefs(), engineName);
}
} else {
if (partitionDesc != null) {
throw new SemanticException("Create " + engineName + " table should not contain partition desc",
partitionDesc.getPos());
}
}
}
}
public static void analyzeDistributionDesc(CreateTableStmt stmt) {
List<ColumnDef> columnDefs = stmt.getColumnDefs();
DistributionDesc distributionDesc = stmt.getDistributionDesc();
if (stmt.isOlapEngine()) {
Map<String, String> properties = stmt.getProperties();
KeysDesc keysDesc = Preconditions.checkNotNull(stmt.getKeysDesc());
if (distributionDesc == null) {
if (properties != null && properties.containsKey("colocate_with")) {
throw new SemanticException("Colocate table must specify distribution column");
}
if (keysDesc != null && keysDesc.getKeysType() == KeysType.PRIMARY_KEYS) {
distributionDesc = new HashDistributionDesc(0, keysDesc.getKeysColumnNames());
} else if (keysDesc.getKeysType() == KeysType.DUP_KEYS) {
if (ConnectContext.get().getSessionVariable().isAllowDefaultPartition()) {
if (properties == null) {
properties = Maps.newHashMap();
properties.put(PropertyAnalyzer.PROPERTIES_REPLICATION_NUM, "1");
}
distributionDesc = new HashDistributionDesc(0, Lists.newArrayList(columnDefs.get(0).getName()));
} else {
distributionDesc = new RandomDistributionDesc();
}
} else {
throw new SemanticException("Currently not support default distribution in " + keysDesc.getKeysType());
}
}
if (distributionDesc instanceof RandomDistributionDesc && keysDesc.getKeysType() != KeysType.DUP_KEYS
&& !(keysDesc.getKeysType() == KeysType.AGG_KEYS && !stmt.isHasReplace())) {
throw new SemanticException(keysDesc.getKeysType().toSql() + (stmt.isHasReplace() ? " with replace " : "")
+ " must use hash distribution", distributionDesc.getPos());
}
Set<String> columnSet = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
columnSet.addAll(columnDefs.stream().map(ColumnDef::getName).collect(Collectors.toSet()));
distributionDesc.analyze(columnSet);
stmt.setDistributionDesc(distributionDesc);
stmt.setProperties(properties);
} else {
if (stmt.getEngineName().equalsIgnoreCase(Table.TableType.ELASTICSEARCH.name())) {
EsUtil.analyzeDistributionDesc(distributionDesc);
} else if (stmt.getEngineName().equalsIgnoreCase(Table.TableType.ICEBERG.name())
|| stmt.getEngineName().equalsIgnoreCase(Table.TableType.HIVE.name())) {
} else {
if (distributionDesc != null) {
throw new SemanticException("Create " + stmt.getEngineName() + " table should not contain distribution desc",
distributionDesc.getPos());
}
}
}
}
public static void analyzeGeneratedColumn(CreateTableStmt stmt, ConnectContext context) {
if (!stmt.isOlapEngine()) {
throw new SemanticException("Generated Column only support olap table");
}
KeysDesc keysDesc = Preconditions.checkNotNull(stmt.getKeysDesc());
if (keysDesc.getKeysType() == KeysType.AGG_KEYS) {
throw new SemanticException("Generated Column does not support AGG table");
}
if (RunMode.isSharedDataMode()) {
throw new SemanticException("Does not support generated column in shared data cluster yet");
}
final TableName tableNameObject = stmt.getDbTbl();
List<Column> columns = stmt.getColumns();
Map<String, Column> columnsMap = Maps.newHashMap();
for (Column column : columns) {
columnsMap.put(column.getName(), column);
}
boolean found = false;
for (Column column : columns) {
if (found && !column.isGeneratedColumn()) {
throw new SemanticException("All generated columns must be defined after ordinary columns");
}
if (column.isGeneratedColumn()) {
if (keysDesc.containsCol(column.getName())) {
throw new SemanticException("Generated Column can not be KEY");
}
Expr expr = column.getGeneratedColumnExpr(columns);
List<DictionaryGetExpr> dictionaryGetExprs = Lists.newArrayList();
expr.collect(DictionaryGetExpr.class, dictionaryGetExprs);
if (dictionaryGetExprs.size() != 0) {
for (DictionaryGetExpr dictionaryGetExpr : dictionaryGetExprs) {
dictionaryGetExpr.setSkipStateCheck(true);
}
}
ExpressionAnalyzer.analyzeExpression(expr, new AnalyzeState(), new Scope(RelationId.anonymous(),
new RelationFields(columns.stream().map(col -> new Field(
col.getName(), col.getType(), tableNameObject, null))
.collect(Collectors.toList()))), context);
List<FunctionCallExpr> funcs = Lists.newArrayList();
expr.collect(FunctionCallExpr.class, funcs);
for (FunctionCallExpr fn : funcs) {
if (fn.isAggregateFunction()) {
throw new SemanticException("Generated Column don't support aggregation function");
}
}
List<SlotRef> slots = Lists.newArrayList();
expr.collect(SlotRef.class, slots);
if (slots.size() != 0) {
for (SlotRef slot : slots) {
Column refColumn = columnsMap.get(slot.getColumnName());
if (refColumn == null) {
throw new SemanticException("column:" + slot.getColumnName() + " does not exist");
}
if (refColumn.isGeneratedColumn()) {
throw new SemanticException("Expression can not refers to other generated columns");
}
if (refColumn.isAutoIncrement()) {
throw new SemanticException("Expression can not refers to AUTO_INCREMENT columns");
}
}
}
if (!column.getType().matchesType(expr.getType())) {
throw new SemanticException("Illegal expression type for Generated Column " +
"Column Type: " + column.getType().toString() +
", Expression Type: " + expr.getType().toString());
}
found = true;
}
}
}
public static void analyzeIndexDefs(CreateTableStmt statement) {
List<IndexDef> indexDefs = statement.getIndexDefs();
List<Column> columns = statement.getColumns();
KeysDesc keysDesc = statement.getKeysDesc();
List<Index> indexes = new ArrayList<>();
if (CollectionUtils.isNotEmpty(indexDefs)) {
Set<String> distinct = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
Set<List<String>> distinctCol = new HashSet<>();
for (IndexDef indexDef : indexDefs) {
indexDef.analyze();
if (!statement.isOlapEngine()) {
throw new SemanticException("index only support in olap engine at current version", indexDef.getPos());
}
List<ColumnId> columnIds = new ArrayList<>(indexDef.getColumns().size());
for (String indexColName : indexDef.getColumns()) {
boolean found = false;
for (Column column : columns) {
if (column.getName().equalsIgnoreCase(indexColName)) {
indexDef.checkColumn(column, keysDesc.getKeysType());
found = true;
columnIds.add(column.getColumnId());
break;
}
}
if (!found) {
throw new SemanticException(
indexDef.getIndexName() + " column does not exist in table. invalid column: " +
indexColName,
indexDef.getPos());
}
}
indexes.add(new Index(indexDef.getIndexName(), columnIds, indexDef.getIndexType(),
indexDef.getComment(), indexDef.getProperties()));
distinct.add(indexDef.getIndexName());
distinctCol.add(indexDef.getColumns().stream().map(String::toUpperCase).collect(Collectors.toList()));
}
if (distinct.size() != indexes.size()) {
throw new SemanticException("index name must be unique", indexDefs.get(0).getPos());
}
if (distinctCol.size() != indexes.size()) {
throw new SemanticException("same index columns have multiple index name is not allowed",
indexDefs.get(0).getPos());
}
}
statement.setIndexes(indexes);
}
} | class CreateTableAnalyzer {
private static final Logger LOG = LoggerFactory.getLogger(CreateTableAnalyzer.class);
public static void analyze(CreateTableStmt statement, ConnectContext context) {
final TableName tableNameObject = statement.getDbTbl();
MetaUtils.normalizationTableName(context, tableNameObject);
final String catalogName = tableNameObject.getCatalog();
MetaUtils.checkCatalogExistAndReport(catalogName);
final String tableName = tableNameObject.getTbl();
FeNameFormat.checkTableName(tableName);
Database db = MetaUtils.getDatabase(catalogName, tableNameObject.getDb());
if (statement instanceof CreateTemporaryTableStmt) {
analyzeTemporaryTable(statement, context, catalogName, db, tableName);
} else {
if (db.getTable(tableName) != null && !statement.isSetIfNotExists()) {
ErrorReport.reportSemanticException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
}
}
analyzeEngineName(statement, catalogName);
analyzeCharsetName(statement);
preCheckColumnRef(statement);
analyzeKeysDesc(statement);
analyzeSortKeys(statement);
analyzePartitionDesc(statement);
analyzeDistributionDesc(statement);
analyzeColumnRef(statement, catalogName);
if (statement.isHasGeneratedColumn()) {
analyzeGeneratedColumn(statement, context);
}
analyzeIndexDefs(statement);
}
private static void analyzeTemporaryTable(CreateTableStmt stmt, ConnectContext context,
String catalogName, Database db, String tableName) {
((CreateTemporaryTableStmt) stmt).setSessionId(context.getSessionId());
if (catalogName != null && !CatalogMgr.isInternalCatalog(catalogName)) {
throw new SemanticException("temporary table must be created under internal catalog");
}
Map<String, String> properties = stmt.getProperties();
if (properties != null) {
properties.remove(PropertyAnalyzer.PROPERTIES_COLOCATE_WITH);
}
UUID sessionId = context.getSessionId();
TemporaryTableMgr temporaryTableMgr = GlobalStateMgr.getCurrentState().getTemporaryTableMgr();
if (temporaryTableMgr.tableExists(sessionId, db.getId(), tableName) && !stmt.isSetIfNotExists()) {
ErrorReport.reportSemanticException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
}
}
protected static void analyzeEngineName(CreateTableStmt stmt, String catalogName) {
String engineName = stmt.getEngineName();
if (CatalogMgr.isInternalCatalog(catalogName)) {
if (Strings.isNullOrEmpty(engineName)) {
engineName = EngineType.defaultEngine().name();
} else {
try {
engineName = EngineType.valueOf(engineName.toUpperCase()).name();
} catch (IllegalArgumentException e) {
throw new SemanticException("Unknown engine name: %s", engineName);
}
}
} else {
String catalogType = GlobalStateMgr.getCurrentState().getCatalogMgr().getCatalogType(catalogName);
if (catalogType.equalsIgnoreCase(ConnectorType.UNIFIED.getName())) {
if (Strings.isNullOrEmpty(engineName)) {
throw new SemanticException("Create table in unified catalog requires engine clause (ENGINE = ENGINE_NAME)");
}
} else {
if (Strings.isNullOrEmpty(engineName)) {
engineName = catalogType;
}
if (!engineName.equalsIgnoreCase(catalogType)) {
throw new SemanticException("Can't create %s table in the %s catalog", engineName, catalogType);
}
}
}
if ((stmt instanceof CreateTemporaryTableStmt) && !engineName.equalsIgnoreCase("olap")) {
throw new SemanticException("temporary table only support olap engine");
}
stmt.setEngineName(engineName.toLowerCase());
}
private static void analyzeCharsetName(CreateTableStmt stmt) {
String charsetName = stmt.getCharsetName();
if (Strings.isNullOrEmpty(charsetName)) {
charsetName = "utf8";
}
if (!charsetName.equalsIgnoreCase("utf8")) {
throw new SemanticException("charset name %s is not supported yet", charsetName);
}
stmt.setCharsetName(charsetName.toLowerCase());
}
private static void analyzeColumnRef(CreateTableStmt statement, String catalogName) {
String engineName = statement.getEngineName();
KeysDesc keysDesc = statement.getKeysDesc();
List<ColumnDef> columnDefs = statement.getColumnDefs();
if (columnDefs.stream().filter(ColumnDef::isAutoIncrement).count() > 1) {
throw new SemanticException("More than one AUTO_INCREMENT column defined in CREATE TABLE Statement",
statement.getPos());
}
List<Column> columns = new ArrayList<>();
for (ColumnDef columnDef : columnDefs) {
try {
columnDef.analyze(statement.isOlapEngine(), CatalogMgr.isInternalCatalog(catalogName), engineName);
} catch (AnalysisException e) {
LOG.error("Column definition analyze failed.", e);
throw new SemanticException(e.getMessage());
}
if (columnDef.isAutoIncrement()) {
if (columnDef.getType() != Type.BIGINT) {
throw new SemanticException("The AUTO_INCREMENT column must be BIGINT", columnDef.getPos());
}
}
if (engineName.equalsIgnoreCase(Table.TableType.MYSQL.name()) && columnDef.getType().isComplexType()) {
throw new SemanticException(engineName + " external table don't support complex type", columnDef.getPos());
}
if (!statement.isOlapEngine() && !engineName.equalsIgnoreCase(Table.TableType.HIVE.name())) {
columnDef.setIsKey(true);
}
if (columnDef.getAggregateType() == AggregateType.HLL_UNION && keysDesc != null
&& keysDesc.getKeysType() != KeysType.AGG_KEYS) {
throw new SemanticException("HLL_UNION must be used in AGG_KEYS", keysDesc.getPos());
}
if (columnDef.getAggregateType() == AggregateType.BITMAP_UNION && columnDef.getType().isBitmapType()
&& keysDesc != null && keysDesc.getKeysType() != KeysType.AGG_KEYS) {
throw new SemanticException("BITMAP_UNION must be used in AGG_KEYS", keysDesc.getPos());
}
Column col = columnDef.toColumn(null);
if (keysDesc != null && (keysDesc.getKeysType() == KeysType.UNIQUE_KEYS
|| keysDesc.getKeysType() == KeysType.PRIMARY_KEYS ||
keysDesc.getKeysType() == KeysType.DUP_KEYS)) {
if (!col.isKey()) {
col.setAggregationTypeImplicit(true);
}
}
columns.add(col);
}
statement.setColumns(columns);
}
private static void analyzeKeysDesc(CreateTableStmt stmt) {
KeysDesc keysDesc = stmt.getKeysDesc();
if (!stmt.isOlapEngine()) {
if (keysDesc != null) {
throw new SemanticException("Create " + stmt.getEngineName() + " table should not contain keys desc",
keysDesc.getPos());
}
return;
}
List<ColumnDef> columnDefs = stmt.getColumnDefs();
if (keysDesc == null) {
List<String> keysColumnNames = Lists.newArrayList();
if (columnDefs.stream().anyMatch(c -> c.getAggregateType() != null)) {
for (ColumnDef columnDef : columnDefs) {
if (columnDef.getAggregateType() == null) {
keysColumnNames.add(columnDef.getName());
}
}
keysDesc = new KeysDesc(KeysType.AGG_KEYS, keysColumnNames);
} else {
int keyLength = 0;
for (ColumnDef columnDef : columnDefs) {
keyLength += columnDef.getType().getIndexSize();
if (keysColumnNames.size() >= FeConstants.SHORTKEY_MAX_COLUMN_COUNT
|| keyLength > FeConstants.SHORTKEY_MAXSIZE_BYTES) {
if (keysColumnNames.size() == 0
&& columnDef.getType().getPrimitiveType().isCharFamily()) {
keysColumnNames.add(columnDef.getName());
}
break;
}
if (!columnDef.getType().canDistributedBy()) {
break;
}
if (columnDef.getType().getPrimitiveType() == PrimitiveType.VARCHAR) {
keysColumnNames.add(columnDef.getName());
break;
}
keysColumnNames.add(columnDef.getName());
}
if (columnDefs.isEmpty()) {
throw new SemanticException("Empty schema");
}
if (keysColumnNames.isEmpty()) {
throw new SemanticException("Data type of first column cannot be %s", columnDefs.get(0).getType());
}
keysDesc = new KeysDesc(KeysType.DUP_KEYS, keysColumnNames);
}
}
KeysType keysType = keysDesc.getKeysType();
if (keysType == null) {
throw new SemanticException("Keys type is null.");
}
List<String> keysColumnNames = keysDesc.getKeysColumnNames();
if (keysColumnNames == null || keysColumnNames.size() == 0) {
throw new SemanticException("The number of key columns is 0.");
}
if (keysColumnNames.size() > columnDefs.size()) {
throw new SemanticException("The number of key columns should be less than the number of columns.");
}
for (int i = 0; i < keysColumnNames.size(); ++i) {
String colName = columnDefs.get(i).getName();
if (!keysColumnNames.get(i).equalsIgnoreCase(colName)) {
String keyName = keysColumnNames.get(i);
if (columnDefs.stream().noneMatch(col -> col.getName().equalsIgnoreCase(keyName))) {
throw new SemanticException("Key column(%s) doesn't exist.", keysColumnNames.get(i));
} else {
throw new SemanticException("Key columns must be the first few columns of the schema and the order "
+ " of the key columns must be consistent with the order of the schema");
}
}
if (columnDefs.get(i).getAggregateType() != null) {
throw new SemanticException("Key column[" + colName + "] should not specify aggregate type.");
}
if (keysType == KeysType.PRIMARY_KEYS) {
ColumnDef cd = columnDefs.get(i);
cd.setPrimaryKeyNonNullable();
if (cd.isAllowNull()) {
throw new SemanticException("primary key column[" + colName + "] cannot be nullable");
}
Type t = cd.getType();
if (!(t.isBoolean() || t.isIntegerType() || t.isLargeint() || t.isVarchar() || t.isDate() ||
t.isDatetime())) {
throw new SemanticException("primary key column[" + colName + "] type not supported: " + t.toSql());
}
}
}
for (int i = keysColumnNames.size(); i < columnDefs.size(); ++i) {
if (keysType == KeysType.AGG_KEYS) {
if (columnDefs.get(i).getAggregateType() == null) {
throw new SemanticException(keysType.name() + " table should specify aggregate type for "
+ "non-key column[" + columnDefs.get(i).getName() + "]");
}
} else {
if (columnDefs.get(i).getAggregateType() != null
&& columnDefs.get(i).getAggregateType() != AggregateType.REPLACE) {
throw new SemanticException(keysType.name() + " table should not specify aggregate type for "
+ "non-key column[" + columnDefs.get(i).getName() + "]");
}
}
}
for (int i = 0; i < keysDesc.getKeysColumnNames().size(); ++i) {
columnDefs.get(i).setIsKey(true);
}
if (keysDesc.getKeysType() != KeysType.AGG_KEYS) {
AggregateType aggregateType = keysDesc.getKeysType() == KeysType.DUP_KEYS ?
AggregateType.NONE : AggregateType.REPLACE;
for (int i = keysDesc.getKeysColumnNames().size(); i < columnDefs.size(); ++i) {
columnDefs.get(i).setAggregateType(aggregateType);
}
}
stmt.setKeysDesc(keysDesc);
}
private static void analyzeSortKeys(CreateTableStmt stmt) {
if (!stmt.isOlapEngine()) {
return;
}
KeysDesc keysDesc = stmt.getKeysDesc();
KeysType keysType = keysDesc.getKeysType();
List<ColumnDef> columnDefs = stmt.getColumnDefs();
List<String> sortKeys = stmt.getSortKeys();
List<String> columnNames = columnDefs.stream().map(ColumnDef::getName).collect(Collectors.toList());
if (sortKeys != null) {
if (keysType == KeysType.PRIMARY_KEYS) {
for (String column : sortKeys) {
int idx = columnNames.indexOf(column);
if (idx == -1) {
throw new SemanticException("Unknown column '%s' does not exist", column);
}
ColumnDef cd = columnDefs.get(idx);
Type t = cd.getType();
if (!(t.isBoolean() || t.isIntegerType() || t.isLargeint() || t.isVarchar() || t.isDate() ||
t.isDatetime())) {
throw new SemanticException("sort key column[" + cd.getName() + "] type not supported: " + t.toSql());
}
}
} else if (keysType == KeysType.DUP_KEYS) {
} else if (keysType == KeysType.AGG_KEYS || keysType == KeysType.UNIQUE_KEYS) {
List<Integer> sortKeyIdxes = Lists.newArrayList();
for (String column : sortKeys) {
int idx = columnNames.indexOf(column);
if (idx == -1) {
throw new SemanticException("Unknown column '%s' does not exist", column);
}
sortKeyIdxes.add(idx);
}
List<Integer> keyColIdxes = Lists.newArrayList();
for (String column : keysDesc.getKeysColumnNames()) {
int idx = columnNames.indexOf(column);
if (idx == -1) {
throw new SemanticException("Unknown column '%s' does not exist", column);
}
keyColIdxes.add(idx);
}
boolean res = new HashSet<>(keyColIdxes).equals(new HashSet<>(sortKeyIdxes));
if (!res) {
throw new SemanticException("The sort columns of " + keysType.toSql()
+ " table must be same with key columns");
}
} else {
throw new SemanticException("Table type:" + keysType.toSql() + " does not support sort key column");
}
}
}
public static void analyzePartitionDesc(CreateTableStmt stmt) {
String engineName = stmt.getEngineName();
PartitionDesc partitionDesc = stmt.getPartitionDesc();
if (stmt.isOlapEngine()) {
if (partitionDesc != null) {
if (partitionDesc.getType() == PartitionType.RANGE || partitionDesc.getType() == PartitionType.LIST) {
try {
PartitionDescAnalyzer.analyze(partitionDesc);
partitionDesc.analyze(stmt.getColumnDefs(), stmt.getProperties());
} catch (AnalysisException e) {
throw new SemanticException(e.getMessage());
}
} else if (partitionDesc instanceof ExpressionPartitionDesc) {
ExpressionPartitionDesc expressionPartitionDesc = (ExpressionPartitionDesc) partitionDesc;
try {
PartitionDescAnalyzer.analyze(partitionDesc);
expressionPartitionDesc.analyze(stmt.getColumnDefs(), stmt.getProperties());
} catch (AnalysisException e) {
throw new SemanticException(e.getMessage());
}
} else {
throw new SemanticException("Currently only support range and list partition with engine type olap",
partitionDesc.getPos());
}
}
} else {
if (engineName.equalsIgnoreCase(Table.TableType.ELASTICSEARCH.name())) {
EsUtil.analyzePartitionDesc(partitionDesc);
} else if (engineName.equalsIgnoreCase(Table.TableType.ICEBERG.name())
|| engineName.equalsIgnoreCase(Table.TableType.HIVE.name())) {
if (partitionDesc != null) {
((ListPartitionDesc) partitionDesc).analyzeExternalPartitionColumns(stmt.getColumnDefs(), engineName);
}
} else {
if (partitionDesc != null) {
throw new SemanticException("Create " + engineName + " table should not contain partition desc",
partitionDesc.getPos());
}
}
}
}
public static void analyzeDistributionDesc(CreateTableStmt stmt) {
List<ColumnDef> columnDefs = stmt.getColumnDefs();
DistributionDesc distributionDesc = stmt.getDistributionDesc();
if (stmt.isOlapEngine()) {
Map<String, String> properties = stmt.getProperties();
KeysDesc keysDesc = Preconditions.checkNotNull(stmt.getKeysDesc());
if (distributionDesc == null) {
if (properties != null && properties.containsKey("colocate_with")) {
throw new SemanticException("Colocate table must specify distribution column");
}
if (keysDesc != null && keysDesc.getKeysType() == KeysType.PRIMARY_KEYS) {
distributionDesc = new HashDistributionDesc(0, keysDesc.getKeysColumnNames());
} else if (keysDesc.getKeysType() == KeysType.DUP_KEYS) {
if (ConnectContext.get().getSessionVariable().isAllowDefaultPartition()) {
if (properties == null) {
properties = Maps.newHashMap();
properties.put(PropertyAnalyzer.PROPERTIES_REPLICATION_NUM, "1");
}
distributionDesc = new HashDistributionDesc(0, Lists.newArrayList(columnDefs.get(0).getName()));
} else {
distributionDesc = new RandomDistributionDesc();
}
} else {
throw new SemanticException("Currently not support default distribution in " + keysDesc.getKeysType());
}
}
if (distributionDesc instanceof RandomDistributionDesc && keysDesc.getKeysType() != KeysType.DUP_KEYS
&& !(keysDesc.getKeysType() == KeysType.AGG_KEYS && !stmt.isHasReplace())) {
throw new SemanticException(keysDesc.getKeysType().toSql() + (stmt.isHasReplace() ? " with replace " : "")
+ " must use hash distribution", distributionDesc.getPos());
}
if (distributionDesc.getBuckets() > Config.max_bucket_number_per_partition && stmt.isOlapEngine()
&& stmt.getPartitionDesc() != null && stmt.getPartitionDesc().getType() != PartitionType.UNPARTITIONED) {
ErrorReport.reportSemanticException(ErrorCode.ERR_TOO_MANY_BUCKETS, Config.max_bucket_number_per_partition);
}
Set<String> columnSet = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
columnSet.addAll(columnDefs.stream().map(ColumnDef::getName).collect(Collectors.toSet()));
distributionDesc.analyze(columnSet);
stmt.setDistributionDesc(distributionDesc);
stmt.setProperties(properties);
} else {
if (stmt.getEngineName().equalsIgnoreCase(Table.TableType.ELASTICSEARCH.name())) {
EsUtil.analyzeDistributionDesc(distributionDesc);
} else if (stmt.getEngineName().equalsIgnoreCase(Table.TableType.ICEBERG.name())
|| stmt.getEngineName().equalsIgnoreCase(Table.TableType.HIVE.name())) {
} else {
if (distributionDesc != null) {
throw new SemanticException("Create " + stmt.getEngineName() + " table should not contain distribution desc",
distributionDesc.getPos());
}
}
}
}
public static void analyzeGeneratedColumn(CreateTableStmt stmt, ConnectContext context) {
if (!stmt.isOlapEngine()) {
throw new SemanticException("Generated Column only support olap table");
}
KeysDesc keysDesc = Preconditions.checkNotNull(stmt.getKeysDesc());
if (keysDesc.getKeysType() == KeysType.AGG_KEYS) {
throw new SemanticException("Generated Column does not support AGG table");
}
if (RunMode.isSharedDataMode()) {
throw new SemanticException("Does not support generated column in shared data cluster yet");
}
final TableName tableNameObject = stmt.getDbTbl();
List<Column> columns = stmt.getColumns();
Map<String, Column> columnsMap = Maps.newHashMap();
for (Column column : columns) {
columnsMap.put(column.getName(), column);
}
boolean found = false;
for (Column column : columns) {
if (found && !column.isGeneratedColumn()) {
throw new SemanticException("All generated columns must be defined after ordinary columns");
}
if (column.isGeneratedColumn()) {
if (keysDesc.containsCol(column.getName())) {
throw new SemanticException("Generated Column can not be KEY");
}
Expr expr = column.getGeneratedColumnExpr(columns);
List<DictionaryGetExpr> dictionaryGetExprs = Lists.newArrayList();
expr.collect(DictionaryGetExpr.class, dictionaryGetExprs);
if (dictionaryGetExprs.size() != 0) {
for (DictionaryGetExpr dictionaryGetExpr : dictionaryGetExprs) {
dictionaryGetExpr.setSkipStateCheck(true);
}
}
ExpressionAnalyzer.analyzeExpression(expr, new AnalyzeState(), new Scope(RelationId.anonymous(),
new RelationFields(columns.stream().map(col -> new Field(
col.getName(), col.getType(), tableNameObject, null))
.collect(Collectors.toList()))), context);
List<FunctionCallExpr> funcs = Lists.newArrayList();
expr.collect(FunctionCallExpr.class, funcs);
for (FunctionCallExpr fn : funcs) {
if (fn.isAggregateFunction()) {
throw new SemanticException("Generated Column don't support aggregation function");
}
}
List<SlotRef> slots = Lists.newArrayList();
expr.collect(SlotRef.class, slots);
if (slots.size() != 0) {
for (SlotRef slot : slots) {
Column refColumn = columnsMap.get(slot.getColumnName());
if (refColumn == null) {
throw new SemanticException("column:" + slot.getColumnName() + " does not exist");
}
if (refColumn.isGeneratedColumn()) {
throw new SemanticException("Expression can not refers to other generated columns");
}
if (refColumn.isAutoIncrement()) {
throw new SemanticException("Expression can not refers to AUTO_INCREMENT columns");
}
}
}
if (!column.getType().matchesType(expr.getType())) {
throw new SemanticException("Illegal expression type for Generated Column " +
"Column Type: " + column.getType().toString() +
", Expression Type: " + expr.getType().toString());
}
found = true;
}
}
}
public static void analyzeIndexDefs(CreateTableStmt statement) {
List<IndexDef> indexDefs = statement.getIndexDefs();
List<Column> columns = statement.getColumns();
KeysDesc keysDesc = statement.getKeysDesc();
List<Index> indexes = new ArrayList<>();
if (CollectionUtils.isNotEmpty(indexDefs)) {
Set<String> distinct = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
Set<List<String>> distinctCol = new HashSet<>();
for (IndexDef indexDef : indexDefs) {
indexDef.analyze();
if (!statement.isOlapEngine()) {
throw new SemanticException("index only support in olap engine at current version", indexDef.getPos());
}
List<ColumnId> columnIds = new ArrayList<>(indexDef.getColumns().size());
for (String indexColName : indexDef.getColumns()) {
boolean found = false;
for (Column column : columns) {
if (column.getName().equalsIgnoreCase(indexColName)) {
indexDef.checkColumn(column, keysDesc.getKeysType());
found = true;
columnIds.add(column.getColumnId());
break;
}
}
if (!found) {
throw new SemanticException(
indexDef.getIndexName() + " column does not exist in table. invalid column: " +
indexColName,
indexDef.getPos());
}
}
indexes.add(new Index(indexDef.getIndexName(), columnIds, indexDef.getIndexType(),
indexDef.getComment(), indexDef.getProperties()));
distinct.add(indexDef.getIndexName());
distinctCol.add(indexDef.getColumns().stream().map(String::toUpperCase).collect(Collectors.toList()));
}
if (distinct.size() != indexes.size()) {
throw new SemanticException("index name must be unique", indexDefs.get(0).getPos());
}
if (distinctCol.size() != indexes.size()) {
throw new SemanticException("same index columns have multiple index name is not allowed",
indexDefs.get(0).getPos());
}
}
statement.setIndexes(indexes);
}
} |
Can you explain why the deadlock is triggered? I'm missing some context. | public void reserveSegments(int numberOfSegmentsToReserve) throws IOException {
checkArgument(
numberOfSegmentsToReserve <= minNumberOfMemorySegments,
"Can not reserve more segments than number of minimum segments.");
CompletableFuture<?> toNotify = null;
int numSegmentsNeeded;
synchronized (availableMemorySegments) {
checkDestroyed();
numSegmentsNeeded = numberOfSegmentsToReserve - numberOfRequestedMemorySegments;
}
if (numSegmentsNeeded > 0) {
List<MemorySegment> segments =
networkBufferPool.requestPooledMemorySegmentsBlocking(numSegmentsNeeded);
synchronized (availableMemorySegments) {
availableMemorySegments.addAll(segments);
toNotify = availabilityHelper.getUnavailableToResetAvailable();
}
}
mayNotifyAvailable(toNotify);
} | if (numSegmentsNeeded > 0) { | public void reserveSegments(int numberOfSegmentsToReserve) throws IOException {
checkArgument(
numberOfSegmentsToReserve <= minNumberOfMemorySegments,
"Can not reserve more segments than number of minimum segments.");
CompletableFuture<?> toNotify = null;
int numSegmentsNeeded;
synchronized (availableMemorySegments) {
checkDestroyed();
numSegmentsNeeded = numberOfSegmentsToReserve - numberOfRequestedMemorySegments;
}
if (numSegmentsNeeded > 0) {
List<MemorySegment> segments =
networkBufferPool.requestPooledMemorySegmentsBlocking(numSegmentsNeeded);
synchronized (availableMemorySegments) {
availableMemorySegments.addAll(segments);
toNotify = availabilityHelper.getUnavailableToResetAvailable();
}
}
mayNotifyAvailable(toNotify);
} | class vs. locks being acquired during calls to external
* code inside this class, e.g. with {@code
* org.apache.flink.runtime.io.network.partition.consumer.BufferManager | class vs. locks being acquired during calls to external
* code inside this class, e.g. with {@code
* org.apache.flink.runtime.io.network.partition.consumer.BufferManager |
Extract the logic out to a method. Can get rid of the nested if/else ifs. | public BType getActualTypeForOtherUnaryExpr(BLangUnaryExpr unaryExpr, AnalyzerData data) {
BType actualType = symTable.semanticError;
BType newExpectedType = data.expType;
LinkedHashSet<BType> basicNumericTypes;
BType referredType = types.getReferredType(data.expType);
int referredTypeTag = referredType.tag;
boolean isAddOrSubOperator = OperatorKind.SUB.equals(unaryExpr.operator) ||
OperatorKind.ADD.equals(unaryExpr.operator);
if (OperatorKind.SUB.equals(unaryExpr.operator)) {
if (data.expType == symTable.noType) {
newExpectedType = symTable.noType;
} else if (TypeTags.isIntegerTypeTag(referredTypeTag) || referredTypeTag == TypeTags.FLOAT
|| referredTypeTag == TypeTags.DECIMAL) {
BType numericTypeUnion = BUnionType.create(null,
symTable.intType, symTable.floatType, symTable.decimalType);
if (TypeTags.isIntegerTypeTag(referredTypeTag)) {
newExpectedType =
types.getTypeIntersection(Types.IntersectionContext.compilerInternalIntersectionTestContext(),
numericTypeUnion, symTable.intType, data.env);
} else {
newExpectedType =
types.getTypeIntersection(Types.IntersectionContext.compilerInternalIntersectionTestContext(),
numericTypeUnion, referredType, data.env);
}
} else if (referredTypeTag == TypeTags.FINITE || referredTypeTag == TypeTags.UNION) {
basicNumericTypes = referredTypeTag == TypeTags.FINITE ?
getBasicNumericTypesInFiniteType(referredType) : getBasicNumericTypesInUnionType(referredType);
if (basicNumericTypes.size() == 1) {
newExpectedType = basicNumericTypes.iterator().next();
} else if (basicNumericTypes.size() > 1) {
newExpectedType = BUnionType.create(null, basicNumericTypes);
}
} else if (referredTypeTag == TypeTags.JSON || referredTypeTag == TypeTags.ANYDATA ||
referredTypeTag == TypeTags.ANY) {
newExpectedType = BUnionType.create(null, symTable.intType, symTable.floatType,
symTable.decimalType);
}
}
newExpectedType = silentTypeCheckExpr(unaryExpr.expr, newExpectedType, data);
BType exprType;
if (newExpectedType != symTable.semanticError) {
exprType = isAddOrSubOperator ? checkExpr(unaryExpr.expr, newExpectedType, data) :
checkExpr(unaryExpr.expr, data);
} else {
exprType = isAddOrSubOperator ? checkExpr(unaryExpr.expr, data.expType, data) :
checkExpr(unaryExpr.expr, data);
}
if (exprType != symTable.semanticError) {
BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.operator, exprType);
if (symbol == symTable.notFoundSymbol) {
symbol = symResolver.getUnaryOpsForTypeSets(unaryExpr.operator, exprType);
}
if (symbol == symTable.notFoundSymbol) {
dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES,
unaryExpr.operator, exprType);
} else {
unaryExpr.opSymbol = (BOperatorSymbol) symbol;
actualType = symbol.type.getReturnType();
}
}
if (isAddOrSubOperator && exprType != symTable.semanticError &&
unaryExpr.expr.getKind() == NodeKind.NUMERIC_LITERAL && (referredTypeTag == TypeTags.FINITE ||
referredTypeTag == TypeTags.UNION)) {
if (referredTypeTag == TypeTags.FINITE) {
actualType = createFiniteTypeForNumericUnaryExpr(unaryExpr, data);
} else {
if (silentCompatibleFiniteMembersInUnionTypeCheck(unaryExpr, (BUnionType) referredType, data)) {
return createFiniteTypeForNumericUnaryExpr(unaryExpr, data);
}
LinkedHashSet<BType> intTypesInUnion = getIntSubtypesInUnionType((BUnionType) referredType);
if (!intTypesInUnion.isEmpty()) {
BType newReferredType = BUnionType.create(null, intTypesInUnion);
BType tempActualType = checkCompatibilityWithConstructedNumericLiteral(unaryExpr, newReferredType,
data);
if (tempActualType != symTable.semanticError) {
return tempActualType;
}
}
}
} else if (isAddOrSubOperator && exprType != symTable.semanticError &&
TypeTags.isIntegerTypeTag(referredTypeTag) && referredTypeTag != TypeTags.INT) {
BType tempActualType = checkCompatibilityWithConstructedNumericLiteral(unaryExpr, referredType, data);
if (tempActualType != symTable.semanticError) {
return tempActualType;
}
}
return actualType;
} | if (data.expType == symTable.noType) { | public BType getActualTypeForOtherUnaryExpr(BLangUnaryExpr unaryExpr, AnalyzerData data) {
BType actualType = symTable.semanticError;
BType newExpectedType = data.expType;
BType referredType = types.getReferredType(newExpectedType);
int referredTypeTag = referredType.tag;
boolean isAddOrSubOperator = OperatorKind.SUB.equals(unaryExpr.operator) ||
OperatorKind.ADD.equals(unaryExpr.operator);
if (OperatorKind.SUB.equals(unaryExpr.operator)) {
newExpectedType = setExpectedTypeForSubtractionOperator(data);
}
newExpectedType = silentTypeCheckExpr(unaryExpr.expr, newExpectedType, data);
BType exprType;
if (newExpectedType != symTable.semanticError) {
exprType = isAddOrSubOperator ? checkExpr(unaryExpr.expr, newExpectedType, data) :
checkExpr(unaryExpr.expr, data);
} else {
exprType = isAddOrSubOperator ? checkExpr(unaryExpr.expr, data.expType, data) :
checkExpr(unaryExpr.expr, data);
}
if (exprType != symTable.semanticError) {
BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.operator, exprType);
if (symbol == symTable.notFoundSymbol) {
symbol = symResolver.getUnaryOpsForTypeSets(unaryExpr.operator, exprType);
}
if (symbol == symTable.notFoundSymbol) {
dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES,
unaryExpr.operator, exprType);
} else {
unaryExpr.opSymbol = (BOperatorSymbol) symbol;
actualType = symbol.type.getReturnType();
}
}
if (isAddOrSubOperator && exprType != symTable.semanticError && types.isExpressionInUnaryValid(unaryExpr.expr)
&& (referredTypeTag == TypeTags.FINITE || referredTypeTag == TypeTags.UNION)) {
if (referredTypeTag == TypeTags.FINITE) {
actualType = createFiniteTypeForNumericUnaryExpr(unaryExpr, data);
} else {
if (silentCompatibleFiniteMembersInUnionTypeCheck(unaryExpr, (BUnionType) referredType, data)) {
return createFiniteTypeForNumericUnaryExpr(unaryExpr, data);
}
LinkedHashSet<BType> intTypesInUnion = getIntSubtypesInUnionType((BUnionType) referredType);
if (!intTypesInUnion.isEmpty()) {
BType newReferredType = BUnionType.create(null, intTypesInUnion);
BType tempActualType = checkCompatibilityWithConstructedNumericLiteral(unaryExpr, newReferredType,
data);
if (tempActualType != symTable.semanticError) {
return tempActualType;
}
}
}
} else if (isAddOrSubOperator && exprType != symTable.semanticError &&
TypeTags.isIntegerTypeTag(referredTypeTag) && referredTypeTag != TypeTags.INT) {
BType tempActualType = checkCompatibilityWithConstructedNumericLiteral(unaryExpr, referredType, data);
if (tempActualType != symTable.semanticError) {
return tempActualType;
}
}
return actualType;
} | class TypeChecker extends SimpleBLangNodeAnalyzer<TypeChecker.AnalyzerData> {
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>();
private static Set<String> listLengthModifierFunctions = new HashSet<>();
private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>();
private static final String LIST_LANG_LIB = "lang.array";
private static final String MAP_LANG_LIB = "lang.map";
private static final String TABLE_LANG_LIB = "lang.table";
private static final String VALUE_LANG_LIB = "lang.value";
private static final String XML_LANG_LIB = "lang.xml";
private static final String FUNCTION_NAME_PUSH = "push";
private static final String FUNCTION_NAME_POP = "pop";
private static final String FUNCTION_NAME_SHIFT = "shift";
private static final String FUNCTION_NAME_UNSHIFT = "unshift";
private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType";
private final BLangAnonymousModelHelper anonymousModelHelper;
private final BLangDiagnosticLog dlog;
private final BLangMissingNodesHelper missingNodesHelper;
private final Names names;
private final NodeCloner nodeCloner;
private final SemanticAnalyzer semanticAnalyzer;
private final SymbolEnter symbolEnter;
private final SymbolResolver symResolver;
private final SymbolTable symTable;
private final TypeNarrower typeNarrower;
private final TypeParamAnalyzer typeParamAnalyzer;
private final Types types;
private final Unifier unifier;
static {
listLengthModifierFunctions.add(FUNCTION_NAME_PUSH);
listLengthModifierFunctions.add(FUNCTION_NAME_POP);
listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT);
listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT);
modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeAll");
add("setLength");
add("reverse");
add("sort");
add("pop");
add("push");
add("shift");
add("unshift");
}});
modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{
add("put");
add("add");
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{
add("mergeJson");
}});
modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{
add("setName");
add("setChildren");
add("strip");
}});
}
public static TypeChecker getInstance(CompilerContext context) {
TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY);
if (typeChecker == null) {
typeChecker = new TypeChecker(context);
}
return typeChecker;
}
public TypeChecker(CompilerContext context) {
context.put(TYPE_CHECKER_KEY, this);
this.names = Names.getInstance(context);
this.symTable = SymbolTable.getInstance(context);
this.symbolEnter = SymbolEnter.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.nodeCloner = NodeCloner.getInstance(context);
this.types = Types.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.typeNarrower = TypeNarrower.getInstance(context);
this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
this.unifier = new Unifier();
}
private BType checkExpr(BLangExpression expr, SymbolEnv env, AnalyzerData data) {
return checkExpr(expr, env, symTable.noType, data);
}
private BType checkExpr(BLangExpression expr, AnalyzerData data) {
return checkExpr(expr, data.env, symTable.noType, data);
}
private BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, AnalyzerData data) {
return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES, data);
}
private BType checkExpr(BLangExpression expr, BType expType, AnalyzerData data) {
return checkExpr(expr, data.env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES, data);
}
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
return checkExpr(expr, env, symTable.noType, new Stack<>());
}
public BType checkExpr(BLangExpression expr, SymbolEnv env, Stack<SymbolEnv> prevEnvs) {
return checkExpr(expr, env, symTable.noType, prevEnvs);
}
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, Stack<SymbolEnv> prevEnvs) {
final AnalyzerData data = new AnalyzerData();
data.env = env;
data.prevEnvs = prevEnvs;
data.queryFinalClauses = new Stack<>();
data.queryEnvs = new Stack<>();
return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES, data);
}
@Override
public void analyzeNode(BLangNode node, AnalyzerData data) {
}
@Override
public void visit(BLangPackage node, AnalyzerData data) {
}
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode,
AnalyzerData data) {
if (expr.typeChecked) {
return expr.getBType();
}
if (expType.tag == TypeTags.INTERSECTION) {
expType = ((BIntersectionType) expType).effectiveType;
}
SymbolEnv prevEnv = data.env;
BType preExpType = data.expType;
DiagnosticCode preDiagCode = data.diagCode;
data.env = env;
data.diagCode = diagCode;
data.expType = expType;
data.isTypeChecked = true;
BType referredExpType = Types.getReferredType(expType);
if (referredExpType.tag == TypeTags.INTERSECTION) {
expType = ((BIntersectionType) referredExpType).effectiveType;
}
expr.expectedType = expType;
expr.accept(this, data);
BType resultRefType = Types.getReferredType(data.resultType);
if (resultRefType.tag == TypeTags.INTERSECTION) {
data.resultType = ((BIntersectionType) resultRefType).effectiveType;
}
expr.setTypeCheckedType(data.resultType);
expr.typeChecked = data.isTypeChecked;
data.env = prevEnv;
data.expType = preExpType;
data.diagCode = preDiagCode;
validateAndSetExprExpectedType(expr, data);
return data.resultType;
}
private void analyzeObjectConstructor(BLangNode node, SymbolEnv env, AnalyzerData data) {
if (!data.nonErrorLoggingCheck) {
semanticAnalyzer.analyzeNode(node, env);
}
}
private void validateAndSetExprExpectedType(BLangExpression expr, AnalyzerData data) {
if (data.resultType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null &&
Types.getReferredType(expr.expectedType).tag == TypeTags.MAP
&& Types.getReferredType(expr.getBType()).tag == TypeTags.RECORD) {
return;
}
expr.expectedType = data.resultType;
}
public void visit(BLangLiteral literalExpr, AnalyzerData data) {
BType literalType = setLiteralValueAndGetType(literalExpr, data.expType, data);
if (literalType == symTable.semanticError || literalExpr.isFiniteContext) {
return;
}
data.resultType = types.checkType(literalExpr, literalType, data.expType);
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess, AnalyzerData data) {
checkXMLNamespacePrefixes(xmlElementAccess.filters, data);
checkExpr(xmlElementAccess.expr, symTable.xmlType, data);
data.resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, data.expType);
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation, AnalyzerData data) {
checkXMLNamespacePrefixes(xmlNavigation.filters, data);
if (xmlNavigation.childIndex != null) {
checkExpr(xmlNavigation.childIndex, symTable.intType, data);
}
BType exprType = checkExpr(xmlNavigation.expr, symTable.xmlType, data);
if (Types.getReferredType(exprType).tag == TypeTags.UNION) {
dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS,
xmlNavigation.expr.getBType());
}
BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN
? symTable.xmlType : symTable.xmlElementSeqType;
types.checkType(xmlNavigation, actualType, data.expType);
if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
data.resultType = symTable.xmlType;
} else {
data.resultType = symTable.xmlElementSeqType;
}
}
private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters, AnalyzerData data) {
for (BLangXMLElementFilter filter : filters) {
if (!filter.namespace.isEmpty()) {
Name nsName = names.fromString(filter.namespace);
BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(data.env, nsName);
filter.namespaceSymbol = nsSymbol;
if (nsSymbol == symTable.notFoundSymbol) {
dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName);
}
}
}
}
private int getPreferredMemberTypeTag(BFiniteType finiteType) {
for (BLangExpression valueExpr : finiteType.getValueSpace()) {
int typeTag = Types.getReferredType(valueExpr.getBType()).tag;
if (typeTag > TypeTags.DECIMAL) {
continue;
}
for (int i = TypeTags.INT; i <= TypeTags.DECIMAL; i++) {
if (typeTag == i) {
return i;
}
}
}
return TypeTags.NONE;
}
private BType getFiniteTypeMatchWithIntType(BLangLiteral literalExpr, BFiniteType finiteType, AnalyzerData data) {
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
setLiteralValueForFiniteType(literalExpr, symTable.intType, data);
return symTable.intType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
setLiteralValueForFiniteType(literalExpr, symTable.byteType, data);
return symTable.byteType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
if (!(literalExpr.value instanceof Long)) {
setLiteralValueForFiniteType(literalExpr, symTable.floatType, data);
return symTable.floatType;
}
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
if (!(literalExpr.value instanceof Long) && !(literalExpr.value instanceof Double)) {
setLiteralValueForFiniteType(literalExpr, symTable.decimalType, data);
return symTable.decimalType;
}
} else {
for (int tag = TypeTags.SIGNED32_INT; tag <= TypeTags.UNSIGNED8_INT; tag++) {
if (literalAssignableToFiniteType(literalExpr, finiteType, tag)) {
setLiteralValueForFiniteType(literalExpr, symTable.getTypeFromTag(tag), data);
return symTable.getTypeFromTag(tag);
}
}
}
return symTable.noType;
}
private BType getFiniteTypeMatchWithIntLiteral(BLangLiteral literalExpr, BFiniteType finiteType,
Object literalValue, AnalyzerData data) {
BType intLiteralType = getFiniteTypeMatchWithIntType(literalExpr, finiteType, data);
if (intLiteralType != symTable.noType) {
return intLiteralType;
}
int typeTag = getPreferredMemberTypeTag(finiteType);
if (typeTag == TypeTags.NONE) {
return symTable.intType;
}
if (literalAssignableToFiniteType(literalExpr, finiteType, typeTag)) {
BType type = symTable.getTypeFromTag(typeTag);
setLiteralValueForFiniteType(literalExpr, type, data);
literalExpr.value = String.valueOf(literalValue);
return type;
}
if (literalValue instanceof Double) {
return symTable.floatType;
}
if (literalValue instanceof String) {
return symTable.decimalType;
} else {
return symTable.intType;
}
}
private BType silentIntTypeCheck(BLangLiteral literalExpr, Object literalValue, BType expType,
AnalyzerData data) {
boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
data.nonErrorLoggingCheck = true;
int prevErrorCount = this.dlog.errorCount();
this.dlog.resetErrorCount();
this.dlog.mute();
BType exprCompatibleType = getIntegerLiteralType(nodeCloner.cloneNode(literalExpr), literalValue, expType,
data);
data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
this.dlog.setErrorCount(prevErrorCount);
if (!prevNonErrorLoggingCheck) {
this.dlog.unmute();
}
return exprCompatibleType;
}
private BType silentCompatibleLiteralTypeCheck(BFiniteType finiteType, BLangLiteral literalExpr,
Object literalValue, AnalyzerData data) {
BType resIntType = symTable.semanticError;
for (BLangExpression valueExpr : finiteType.getValueSpace()) {
resIntType = silentIntTypeCheck(literalExpr, literalValue, valueExpr.getBType(), data);
if (resIntType != symTable.semanticError) {
return resIntType;
}
}
return resIntType;
}
private BType checkIfLiteralIsOutOfRange(BFiniteType finiteType, BLangLiteral literalExpr, Object literalValue,
AnalyzerData data) {
BType compatibleType = silentCompatibleLiteralTypeCheck(finiteType, literalExpr, literalValue, data);
if (compatibleType == symTable.semanticError) {
dlog.error(literalExpr.pos, DiagnosticErrorCode.OUT_OF_RANGE, literalExpr.originalValue,
literalExpr.getBType());
}
return compatibleType;
}
private BType getIntegerLiteralType(BLangLiteral literalExpr, Object literalValue, BType expType,
AnalyzerData data) {
BType expectedType = Types.getReferredType(expType);
if (expectedType.tag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(expectedType.tag)) {
BType resultType = getIntLiteralType(expType, literalValue, data);
if (resultType == symTable.semanticError) {
dlog.error(literalExpr.pos, DiagnosticErrorCode.OUT_OF_RANGE, literalExpr.originalValue, expType);
}
return resultType;
} else if (expectedType.tag == TypeTags.FLOAT) {
if (literalValue instanceof String) {
dlog.error(literalExpr.pos, DiagnosticErrorCode.OUT_OF_RANGE, literalExpr.originalValue,
expectedType);
data.resultType = symTable.semanticError;
return symTable.semanticError;
}
if (literalValue instanceof Double) {
literalExpr.value = ((Double) literalValue).doubleValue();
} else {
literalExpr.value = ((Long) literalValue).doubleValue();
}
return symTable.floatType;
} else if (expectedType.tag == TypeTags.DECIMAL) {
literalExpr.value = String.valueOf(literalValue);
return symTable.decimalType;
} else if (expectedType.tag == TypeTags.FINITE) {
BFiniteType finiteType = (BFiniteType) expectedType;
BType compatibleType = checkIfLiteralIsOutOfRange(finiteType, literalExpr, literalValue, data);
if (compatibleType == symTable.semanticError) {
data.resultType = symTable.semanticError;
return compatibleType;
} else {
return getFiniteTypeMatchWithIntLiteral(literalExpr, finiteType, literalValue, data);
}
} else if (expectedType.tag == TypeTags.UNION) {
for (BType memType : types.getAllTypes(expectedType, true)) {
BType memberRefType = Types.getReferredType(memType);
if (TypeTags.isIntegerTypeTag(memberRefType.tag) || memberRefType.tag == TypeTags.BYTE) {
BType intLiteralType = getIntLiteralType(memType, literalValue, data);
if (intLiteralType == memberRefType) {
return intLiteralType;
}
} else if (memberRefType.tag == TypeTags.JSON || memberRefType.tag == TypeTags.ANYDATA ||
memberRefType.tag == TypeTags.ANY) {
if (literalValue instanceof Double) {
return symTable.floatType;
}
if (literalValue instanceof String) {
return symTable.decimalType;
}
return symTable.intType;
}
}
BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expectedType, symTable.intType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType, data);
if (literalExpr.isFiniteContext) {
return setType;
}
}
BType finiteTypeMatchingByte = getFiniteTypeWithValuesOfSingleType((BUnionType) expectedType,
symTable.byteType);
if (finiteTypeMatchingByte != symTable.semanticError) {
finiteType = finiteTypeMatchingByte;
BType setType = setLiteralValueAndGetType(literalExpr, finiteType, data);
if (literalExpr.isFiniteContext) {
return setType;
}
}
Set<BType> memberTypes = ((BUnionType) expectedType).getMemberTypes();
return getTypeMatchingFloatOrDecimal(finiteType, memberTypes, literalExpr, (BUnionType) expectedType, data);
}
if (!(literalValue instanceof Long)) {
dlog.error(literalExpr.pos, DiagnosticErrorCode.OUT_OF_RANGE, literalExpr.originalValue,
literalExpr.getBType());
data.resultType = symTable.semanticError;
return symTable.semanticError;
}
return symTable.intType;
}
private BType getTypeOfLiteralWithFloatDiscriminator(BLangLiteral literalExpr, Object literalValue,
BType expType, AnalyzerData data) {
String numericLiteral = NumericLiteralSupport.stripDiscriminator(String.valueOf(literalValue));
if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
data.resultType = symTable.semanticError;
return symTable.semanticError;
}
literalExpr.value = Double.parseDouble(numericLiteral);
BType referredType = Types.getReferredType(expType);
if (referredType.tag == TypeTags.FINITE) {
BFiniteType finiteType = (BFiniteType) referredType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
setLiteralValueForFiniteType(literalExpr, symTable.floatType, data);
return symTable.floatType;
}
} else if (referredType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) referredType;
BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType, data);
if (unionMember != symTable.noType) {
return unionMember;
}
}
return symTable.floatType;
}
private BType getTypeOfLiteralWithDecimalDiscriminator(BLangLiteral literalExpr, Object literalValue,
BType expType, AnalyzerData data) {
literalExpr.value = NumericLiteralSupport.stripDiscriminator(String.valueOf(literalValue));
BType referredType = Types.getReferredType(expType);
if (referredType.tag == TypeTags.FINITE) {
BFiniteType finiteType = (BFiniteType) referredType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
setLiteralValueForFiniteType(literalExpr, symTable.decimalType, data);
return symTable.decimalType;
}
} else if (referredType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) expType;
BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType, data);
if (unionMember != symTable.noType) {
return unionMember;
}
}
return symTable.decimalType;
}
private BType getTypeOfDecimalFloatingPointLiteral(BLangLiteral literalExpr, Object literalValue, BType expType,
AnalyzerData data) {
BType expectedType = Types.getReferredType(expType);
String numericLiteral = String.valueOf(literalValue);
if (expectedType.tag == TypeTags.DECIMAL) {
return symTable.decimalType;
} else if (expectedType.tag == TypeTags.FLOAT) {
if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
data.resultType = symTable.semanticError;
return symTable.semanticError;
}
return symTable.floatType;
} else if (expectedType.tag == TypeTags.FINITE) {
BFiniteType finiteType = (BFiniteType) expectedType;
for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
if (literalAssignableToFiniteType(literalExpr, finiteType, tag)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.getTypeFromTag(tag), data);
setLiteralValueForFiniteType(literalExpr, valueType, data);
return valueType;
}
}
} else if (expectedType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) expectedType;
for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
BType unionMember =
getAndSetAssignableUnionMember(literalExpr, unionType, symTable.getTypeFromTag(tag), data);
if (unionMember == symTable.floatType && !types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
data.resultType = symTable.semanticError;
return symTable.semanticError;
} else if (unionMember != symTable.noType) {
return unionMember;
}
}
}
if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
data.resultType = symTable.semanticError;
return symTable.semanticError;
}
return symTable.floatType;
}
private BType getTypeOfHexFloatingPointLiteral(BLangLiteral literalExpr, Object literalValue, BType expType,
AnalyzerData data) {
String numericLiteral = String.valueOf(literalValue);
if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
data.resultType = symTable.semanticError;
return symTable.semanticError;
}
literalExpr.value = Double.parseDouble(numericLiteral);
BType referredType = Types.getReferredType(expType);
if (referredType.tag == TypeTags.FINITE) {
BFiniteType finiteType = (BFiniteType) referredType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
setLiteralValueForFiniteType(literalExpr, symTable.floatType, data);
return symTable.floatType;
}
} else if (referredType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) referredType;
BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType, data);
if (unionMember != symTable.noType) {
return unionMember;
}
}
return symTable.floatType;
}
public BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType, AnalyzerData data) {
Object literalValue = literalExpr.value;
BType expectedType = Types.getReferredType(expType);
if (literalExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
NodeKind kind = ((BLangNumericLiteral) literalExpr).kind;
if (kind == NodeKind.INTEGER_LITERAL) {
return getIntegerLiteralType(literalExpr, literalValue, expectedType, data);
} else if (kind == NodeKind.DECIMAL_FLOATING_POINT_LITERAL) {
if (NumericLiteralSupport.isFloatDiscriminated(literalExpr.originalValue)) {
return getTypeOfLiteralWithFloatDiscriminator(literalExpr, literalValue, expectedType, data);
} else if (NumericLiteralSupport.isDecimalDiscriminated(literalExpr.originalValue)) {
return getTypeOfLiteralWithDecimalDiscriminator(literalExpr, literalValue, expectedType, data);
} else {
return getTypeOfDecimalFloatingPointLiteral(literalExpr, literalValue, expectedType, data);
}
} else {
return getTypeOfHexFloatingPointLiteral(literalExpr, literalValue, expectedType, data);
}
}
BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag);
if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) {
if (expectedType.tag == TypeTags.CHAR_STRING) {
return symTable.charStringType;
}
if (expectedType.tag == TypeTags.UNION) {
Set<BType> memberTypes = new HashSet<>(types.getAllTypes(expectedType, true));
for (BType memType : memberTypes) {
memType = Types.getReferredType(memType);
if (TypeTags.isStringTypeTag(memType.tag)) {
return setLiteralValueAndGetType(literalExpr, memType, data);
} else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA ||
memType.tag == TypeTags.ANY) {
return setLiteralValueAndGetType(literalExpr, symTable.charStringType, data);
} else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType,
literalExpr)) {
setLiteralValueForFiniteType(literalExpr, symTable.charStringType, data);
return literalType;
}
}
}
boolean foundMember = types.isAssignableToFiniteType(expectedType, literalExpr);
if (foundMember) {
setLiteralValueForFiniteType(literalExpr, literalType, data);
return literalType;
}
} else {
if (expectedType.tag == TypeTags.FINITE) {
boolean foundMember = types.isAssignableToFiniteType(expectedType, literalExpr);
if (foundMember) {
setLiteralValueForFiniteType(literalExpr, literalType, data);
return literalType;
}
} else if (expectedType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) expectedType;
boolean foundMember = types.getAllTypes(unionType, true)
.stream()
.anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
if (foundMember) {
setLiteralValueForFiniteType(literalExpr, literalType, data);
return literalType;
}
}
}
if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) {
literalType = new BArrayType(symTable.byteType);
}
return literalType;
}
private BType getTypeMatchingFloatOrDecimal(BType finiteType, Set<BType> memberTypes, BLangLiteral literalExpr,
BUnionType expType, AnalyzerData data) {
for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
if (finiteType == symTable.semanticError) {
BType type = symTable.getTypeFromTag(tag);
for (BType memType : memberTypes) {
if (memType.tag == tag) {
return setLiteralValueAndGetType(literalExpr, type, data);
}
}
finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, type);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType, data);
if (literalExpr.isFiniteContext) {
return setType;
}
}
}
}
return symTable.intType;
}
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType,
AnalyzerData data) {
List<BType> members = types.getAllTypes(expType, true);
Set<BType> memberTypes = new HashSet<>();
members.forEach(member -> memberTypes.addAll(members));
if (memberTypes.stream()
.anyMatch(memType -> memType.tag == desiredType.tag
|| memType.tag == TypeTags.JSON
|| memType.tag == TypeTags.ANYDATA
|| memType.tag == TypeTags.ANY)) {
return desiredType;
}
BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, desiredType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType, data);
if (literalExpr.isFiniteContext) {
return setType;
}
}
return symTable.noType;
}
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
int targetMemberTypeTag) {
for (BLangExpression valueExpr : finiteType.getValueSpace()) {
if (valueExpr.getBType().tag == targetMemberTypeTag &&
types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) {
return true;
}
}
return false;
}
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type, AnalyzerData data) {
types.setImplicitCastExpr(literalExpr, type, data.expType);
data.resultType = type;
literalExpr.isFiniteContext = true;
}
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
List<BFiniteType> finiteTypeMembers = types.getAllTypes(unionType, true).stream()
.filter(memType -> memType.tag == TypeTags.FINITE)
.map(memFiniteType -> (BFiniteType) memFiniteType)
.collect(Collectors.toList());
if (finiteTypeMembers.isEmpty()) {
return symTable.semanticError;
}
int tag = matchType.tag;
Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
for (BFiniteType finiteType : finiteTypeMembers) {
Set<BLangExpression> set = new HashSet<>();
for (BLangExpression expression : finiteType.getValueSpace()) {
if (expression.getBType().tag == tag) {
set.add(expression);
}
}
matchedValueSpace.addAll(set);
}
if (matchedValueSpace.isEmpty()) {
return symTable.semanticError;
}
return new BFiniteType(null, matchedValueSpace);
}
/**
 * Resolves the concrete integer type for an int literal against the expected type.
 * Falls back to {@code int} when the expected type is not an int subtype, or when the literal
 * does not fit the expected subtype's value range.
 */
private BType getIntLiteralType(BType expType, Object literalValue, AnalyzerData data) {
    if (!(literalValue instanceof Long)) {
        data.resultType = symTable.semanticError;
        return symTable.semanticError;
    }
    Long value = (Long) literalValue;
    switch (expType.tag) {
        case TypeTags.INT:
            return symTable.intType;
        case TypeTags.BYTE:
            return types.isByteLiteralValue(value) ? symTable.byteType : symTable.intType;
        case TypeTags.SIGNED32_INT:
            return types.isSigned32LiteralValue(value) ? symTable.signed32IntType : symTable.intType;
        case TypeTags.SIGNED16_INT:
            return types.isSigned16LiteralValue(value) ? symTable.signed16IntType : symTable.intType;
        case TypeTags.SIGNED8_INT:
            return types.isSigned8LiteralValue(value) ? symTable.signed8IntType : symTable.intType;
        case TypeTags.UNSIGNED32_INT:
            return types.isUnsigned32LiteralValue(value) ? symTable.unsigned32IntType : symTable.intType;
        case TypeTags.UNSIGNED16_INT:
            return types.isUnsigned16LiteralValue(value) ? symTable.unsigned16IntType : symTable.intType;
        case TypeTags.UNSIGNED8_INT:
            return types.isUnsigned8LiteralValue(value) ? symTable.unsigned8IntType : symTable.intType;
        case TypeTags.TYPEREFDESC:
            // Unwrap the type reference and retry against the referred type.
            return getIntLiteralType(Types.getReferredType(expType), literalValue, data);
        default:
            return symTable.intType;
    }
}
@Override
public void visit(BLangListConstructorExpr listConstructor, AnalyzerData data) {
    BType expType = data.expType;
    int tag = expType.tag;
    // With a concrete expected type, delegate straight to compatibility checking.
    if (tag != TypeTags.NONE && tag != TypeTags.READONLY) {
        data.resultType = checkListConstructorCompatibility(expType, listConstructor, data);
        return;
    }
    // No usable expected type: infer a tuple type from the members first.
    BType inferredType = getInferredTupleType(listConstructor, expType, data);
    if (inferredType == symTable.semanticError) {
        data.resultType = symTable.semanticError;
    } else {
        data.resultType = types.checkType(listConstructor, inferredType, expType);
    }
}
/**
 * Type-checks a table constructor expression.
 *
 * Three expected-type shapes are handled:
 * 1. none/any/anydata — the member type is inferred from the row literals;
 * 2. a table type (possibly behind a type reference or intersection) — each row is checked
 *    against the declared constraint and the key specifier is validated;
 * 3. a union — each member is tried with error logging muted; exactly one match must remain.
 * Any other expected type yields semanticError.
 */
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr, AnalyzerData data) {
    BType expType = data.expType;
    if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) {
        // Infer: check each row literal with no expected type.
        List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), data.env,
                data);
        for (BType memType : memTypes) {
            if (memType == symTable.semanticError) {
                data.resultType = symTable.semanticError;
                return;
            }
        }
        // An empty constructor gives nothing to infer the member type from.
        if (tableConstructorExpr.recordLiteralList.size() == 0) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE);
            data.resultType = symTable.semanticError;
            return;
        }
        BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr, data);
        BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null);
        // Stamp the inferred member type onto every row literal.
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            recordLiteral.setBType(inherentMemberType);
        }
        if (!validateTableConstructorExpr(tableConstructorExpr, tableType, data)) {
            data.resultType = symTable.semanticError;
            return;
        }
        // checkKeySpecifier returns true on failure (resultType already set).
        if (checkKeySpecifier(tableConstructorExpr, tableType, data)) {
            return;
        }
        data.resultType = tableType;
        return;
    }
    // Unwrap type references and intersections before dispatching on the tag.
    BType applicableExpType = Types.getReferredType(expType);
    applicableExpType = applicableExpType.tag == TypeTags.INTERSECTION ?
            ((BIntersectionType) applicableExpType).effectiveType : applicableExpType;
    if (applicableExpType.tag == TypeTags.TABLE) {
        List<BType> memTypes = new ArrayList<>();
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            // Under a muted (nonErrorLoggingCheck) pass, check a clone so the original AST
            // is not polluted by a speculative check.
            BLangRecordLiteral clonedExpr = recordLiteral;
            if (data.nonErrorLoggingCheck) {
                clonedExpr.cloneAttempt++;
                clonedExpr = nodeCloner.cloneNode(recordLiteral);
            }
            BType recordType = checkExpr(clonedExpr, ((BTableType) applicableExpType).constraint, data);
            if (recordType == symTable.semanticError) {
                data.resultType = symTable.semanticError;
                return;
            }
            memTypes.add(recordType);
        }
        BTableType expectedTableType = (BTableType) applicableExpType;
        // Inline-defined map-constrained tables may not declare keys (lang-lib excepted).
        if (expectedTableType.constraint.tag == TypeTags.MAP && expectedTableType.isTypeInlineDefined) {
            if (validateMapConstraintTable(applicableExpType)) {
                data.resultType = symTable.semanticError;
                return;
            }
            data.resultType = expType;
            return;
        }
        if (!(validateKeySpecifierInTableConstructor((BTableType) applicableExpType,
                tableConstructorExpr.recordLiteralList, data) &&
                validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType, data))) {
            data.resultType = symTable.semanticError;
            return;
        }
        BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType),
                null);
        if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) {
            tableType.flags |= Flags.READONLY;
        }
        if (checkKeySpecifier(tableConstructorExpr, tableType, data)) {
            return;
        }
        // Inherit declared key field names when the constructor did not specify any.
        if (!expectedTableType.fieldNameList.isEmpty() && tableType.fieldNameList.isEmpty()) {
            tableType.fieldNameList = expectedTableType.fieldNameList;
        }
        data.resultType = tableType;
    } else if (applicableExpType.tag == TypeTags.UNION) {
        // Try every union member with error logging muted; collect the unique successes.
        boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
        data.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();

        List<BType> matchingTypes = new ArrayList<>();
        BUnionType expectedType = (BUnionType) applicableExpType;
        for (BType memType : expectedType.getMemberTypes()) {
            dlog.resetErrorCount();

            // nonErrorLoggingCheck was just set to true, so each attempt runs on a clone.
            BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr;
            if (data.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr);
            }

            BType resultType = checkExpr(clonedTableExpr, memType, data);
            if (resultType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(matchingTypes, resultType)) {
                matchingTypes.add(resultType);
            }
        }

        // Restore the diagnostic state captured before the speculative passes.
        data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);

        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }

        if (matchingTypes.isEmpty()) {
            BLangTableConstructorExpr exprToLog = tableConstructorExpr;
            if (data.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(tableConstructorExpr);
            }

            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getInferredTableType(exprToLog, data));

        } else if (matchingTypes.size() != 1) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
        } else {
            // Exactly one member matched: re-check against it with logging enabled.
            data.resultType = checkExpr(tableConstructorExpr, matchingTypes.get(0), data);
            return;
        }
        data.resultType = symTable.semanticError;
    } else {
        data.resultType = symTable.semanticError;
    }
}
/**
 * Infers a table type (for error reporting) by type-checking each member record literal
 * with no expected type; returns semanticError if any member fails to check.
 */
private BType getInferredTableType(BLangTableConstructorExpr exprToLog, AnalyzerData data) {
    List<BType> rowTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), data.env, data);
    for (BType rowType : rowTypes) {
        if (rowType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }
    BType memberType = inferTableMemberType(rowTypes, exprToLog, data);
    return new BTableType(TypeTags.TABLE, memberType, null);
}
/**
 * Validates the table constructor's key specifier (if present) and stamps the key field
 * names onto the table type.
 *
 * @param tableConstructorExpr the constructor expression being checked
 * @param tableType            the table type receiving the key field names
 * @param data                 analyzer state; resultType is set to semanticError on failure
 * @return true if validation failed (resultType already set), false otherwise
 */
private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType,
                                  AnalyzerData data) {
    if (tableConstructorExpr.tableKeySpecifier == null) {
        return false;
    }
    // Compute the key name list once; the original computed it twice for the same specifier.
    List<String> keyFieldNames = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
    if (!validateTableKeyValue(keyFieldNames, tableConstructorExpr.recordLiteralList, data)) {
        data.resultType = symTable.semanticError;
        return true;
    }
    tableType.fieldNameList = keyFieldNames;
    return false;
}
/**
 * Derives the member (constraint) type for a table from its row types. The first row type
 * seeds the result; each subsequent type is unioned in only when it is not already
 * assignable to the accumulated union. Falls back to the expected table's declared
 * constraint when there are no rows.
 */
private BType inferTableMemberType(List<BType> memTypes, BType expType) {
    if (memTypes.isEmpty()) {
        return ((BTableType) expType).constraint;
    }

    LinkedHashSet<BType> distinctTypes = new LinkedHashSet<>();
    distinctTypes.add(memTypes.get(0));
    BUnionType accumulated = BUnionType.create(null, distinctTypes);

    for (int i = 1; i < memTypes.size(); i++) {
        BType candidate = memTypes.get(i);
        if (!types.isAssignable(candidate, accumulated)) {
            distinctTypes.add(candidate);
            // Rebuild the union so subsequent assignability checks see the new member.
            accumulated = BUnionType.create(null, distinctTypes);
        }
    }

    // A single effective member collapses to the first row type itself.
    return accumulated.getMemberTypes().size() == 1 ? memTypes.get(0) : accumulated;
}
/**
 * Infers the record type that constrains a table constructed without an expected type.
 * Each field name appearing in any row is mapped to the union of its possible types
 * (including rest-field types from open rows that lack the field). A field is optional
 * unless present in every row; key-specifier fields additionally become readonly.
 */
private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr,
                                   AnalyzerData data) {
    BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
    List<String> keySpecifierFieldNames = new ArrayList<>();
    List<BType> restFieldTypes = new ArrayList<>();

    if (keySpecifier != null) {
        for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) {
            keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value);
        }
    }

    // Group every occurrence of each field name across all row record types.
    LinkedHashMap<String, List<BField>> fieldNameToFields = new LinkedHashMap<>();
    for (BType memType : memTypes) {
        BRecordType member = (BRecordType) memType;
        for (Map.Entry<String, BField> entry : member.fields.entrySet()) {
            String key = entry.getKey();
            BField field = entry.getValue();
            if (fieldNameToFields.containsKey(key)) {
                fieldNameToFields.get(key).add(field);
            } else {
                fieldNameToFields.put(key, new ArrayList<>() {{
                    add(field);
                }});
            }
        }
        // Open rows contribute their rest-field type to the table's rest type.
        if (!member.sealed) {
            restFieldTypes.add(member.restFieldType);
        }
    }

    LinkedHashSet<BField> inferredFields = new LinkedHashSet<>();
    int memTypesSize = memTypes.size();

    for (Map.Entry<String, List<BField>> entry : fieldNameToFields.entrySet()) {
        String fieldName = entry.getKey();
        List<BField> fields = entry.getValue();

        // Collect every type this field can take across the rows.
        List<BType> types = new ArrayList<>();
        for (BField field : fields) {
            types.add(field.getType());
        }

        // Open rows that do not declare the field may still hold it via their rest type.
        for (BType memType : memTypes) {
            BRecordType bMemType = (BRecordType) memType;
            if (bMemType.sealed || bMemType.fields.containsKey(fieldName)) {
                continue;
            }
            BType restFieldType = bMemType.restFieldType;
            types.add(restFieldType);
        }

        BField resultantField = createFieldWithType(fields.get(0), types);
        // Optional when any occurrence is optional, or the field is absent from some row.
        boolean isOptional = hasOptionalFields(fields) || fields.size() != memTypesSize;

        // NOTE(review): flags are assigned (not OR-ed), so any other flags on the cloned
        // symbol are dropped here — confirm this reset is intentional.
        if (isOptional) {
            resultantField.symbol.flags = Flags.OPTIONAL;
        } else if (keySpecifierFieldNames.contains(fieldName)) {
            resultantField.symbol.flags = Flags.REQUIRED | Flags.READONLY;
        } else {
            resultantField.symbol.flags = Flags.REQUIRED;
        }
        inferredFields.add(resultantField);
    }

    return createTableConstraintRecordType(inferredFields, restFieldTypes, tableConstructorExpr.pos, data);
}
/**
 * Clones the given {@code BField}, replacing its type with the representative broad type
 * derived from {@code bTypes}. The clone keeps the original's name, position, flags,
 * package and owner, but carries a fresh {@code BVarSymbol}.
 *
 * @param field  the field to clone
 * @param bTypes candidate types for the new field (non-empty)
 * @return the cloned field with the derived type
 */
private BField createFieldWithType(BField field, List<BType> bTypes) {
    BType derivedType = getResultantType(bTypes);

    BVarSymbol original = field.symbol;
    BVarSymbol freshSymbol = new BVarSymbol(original.flags, original.name, original.pkgID,
            derivedType, original.owner, original.pos, VIRTUAL);

    return new BField(field.name, field.pos, freshSymbol);
}
/**
 * Derives a single representative type from the given list. Duplicates are removed, union
 * members are flattened (resolving type references), and the broad type is computed over
 * the flattened members.
 *
 * @param bTypes bType list (size > 0)
 * @return {@code BUnionType} when more than one effective member remains, the single
 *         member's type otherwise
 */
private BType getResultantType(List<BType> bTypes) {
    LinkedHashSet<BType> distinct = new LinkedHashSet<>(bTypes);
    List<BType> flattened = new ArrayList<>(bTypes.size());
    addFlattenMemberTypes(flattened, distinct);
    return getRepresentativeBroadType(flattened);
}
/**
 * Recursively flattens the given types into {@code flattenBTypes}: unions are expanded
 * into their members and type references are resolved to their referred type first.
 */
private void addFlattenMemberTypes(List<BType> flattenBTypes, LinkedHashSet<BType> bTypes) {
    for (BType memberType : bTypes) {
        // Resolve a type reference before inspecting the tag.
        BType effectiveType = memberType.tag == TypeTags.TYPEREFDESC
                ? Types.getReferredType(memberType)
                : memberType;
        if (effectiveType.tag == TypeTags.UNION) {
            addFlattenMemberTypes(flattenBTypes, ((BUnionType) effectiveType).getMemberTypes());
        } else {
            flattenBTypes.add(effectiveType);
        }
    }
}
/**
 * Returns true if any of the given fields carries the OPTIONAL flag.
 */
private boolean hasOptionalFields(List<BField> fields) {
    return fields.stream().anyMatch(field -> field.symbol.getFlags().contains(Flag.OPTIONAL));
}
/**
 * Builds an anonymous record type (with symbol, type node and init function) to serve as
 * the inferred constraint of a table. The record is sealed when no rest-field types were
 * collected; otherwise its rest-field type is the representative type of those collected.
 */
private BRecordType createTableConstraintRecordType(Set<BField> inferredFields, List<BType> restFieldTypes,
                                                    Location pos, AnalyzerData data) {
    PackageID pkgID = data.env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL, data);

    // Register every inferred field in the record symbol's scope.
    for (BField field : inferredFields) {
        recordSymbol.scope.define(field.name, field.symbol);
    }

    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = inferredFields.stream().collect(getFieldCollector());

    // Wire the symbol and type to each other before creating the type node.
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;

    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, data.env,
            names, symTable);
    TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, data.env);

    if (restFieldTypes.isEmpty()) {
        // No open rows contributed a rest type: the inferred record is closed.
        recordType.sealed = true;
        recordType.restFieldType = symTable.noType;
    } else {
        recordType.restFieldType = getResultantType(restFieldTypes);
    }

    return recordType;
}
/**
 * Collector mapping fields by name into a LinkedHashMap, failing fast on duplicate names.
 */
private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() {
    return Collectors.toMap(
            field -> field.name.value,
            Function.identity(),
            (first, second) -> {
                throw new IllegalStateException(String.format("Duplicate key %s", first));
            },
            LinkedHashMap::new);
}
/**
 * For an inline-defined table type, verifies that the constraint is a subtype of the
 * map-all type; logs an error and sets semanticError on violation.
 */
private boolean validateTableType(BTableType tableType, AnalyzerData data) {
    BType constraint = Types.getReferredType(tableType.constraint);
    boolean invalidInlineConstraint =
            tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType);
    if (invalidInlineConstraint) {
        dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
        data.resultType = symTable.semanticError;
        return false;
    }
    return true;
}
/**
 * When the table type declares key field names, checks that every row literal binds each
 * of them to a constant expression; trivially valid when no key fields are declared.
 */
private boolean validateKeySpecifierInTableConstructor(BTableType tableType,
                                                       List<BLangRecordLiteral> recordLiterals,
                                                       AnalyzerData data) {
    List<String> fieldNameList = tableType.fieldNameList;
    return fieldNameList.isEmpty() || validateTableKeyValue(fieldNameList, recordLiterals, data);
}
/**
 * Ensures every key-specifier field is present in every row literal and bound to a
 * constant expression; logs an error and returns false on the first violation.
 */
private boolean validateTableKeyValue(List<String> keySpecifierFieldNames,
                                      List<BLangRecordLiteral> recordLiterals, AnalyzerData data) {
    for (String fieldName : keySpecifierFieldNames) {
        for (BLangRecordLiteral recordLiteral : recordLiterals) {
            BLangExpression fieldValue = getRecordKeyValueField(recordLiteral, fieldName);
            boolean valid = fieldValue != null && isConstExpression(fieldValue);
            if (!valid) {
                dlog.error(recordLiteral.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName);
                data.resultType = symTable.semanticError;
                return false;
            }
        }
    }
    return true;
}
/**
 * Whether the expression counts as constant for table-key validation: literal and
 * constructor-like expressions are constant, a variable reference is constant only when it
 * refers to a constant symbol, and a group expression delegates to its inner expression.
 */
private boolean isConstExpression(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
    }
    if (kind == NodeKind.GROUP_EXPR) {
        return isConstExpression(((BLangGroupExpr) expression).expression);
    }
    switch (kind) {
        case LITERAL:
        case NUMERIC_LITERAL:
        case STRING_TEMPLATE_LITERAL:
        case XML_ELEMENT_LITERAL:
        case XML_TEXT_LITERAL:
        case LIST_CONSTRUCTOR_EXPR:
        case TABLE_CONSTRUCTOR_EXPR:
        case RECORD_LITERAL_EXPR:
        case TYPE_CONVERSION_EXPR:
        case UNARY_EXPR:
        case BINARY_EXPR:
        case TYPE_TEST_EXPR:
        case TERNARY_EXPR:
            return true;
        default:
            return false;
    }
}
/**
 * Finds the expression bound to {@code fieldName} in the given record literal, looking at
 * key-value fields, shorthand var-name fields, and spread operators. Returns null when the
 * field is not present.
 */
private BLangExpression getRecordKeyValueField(BLangRecordLiteral recordLiteral,
                                               String fieldName) {
    for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) {
        if (recordField.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) recordField;
            if (fieldName.equals(recordKeyValueField.key.toString())) {
                return recordKeyValueField.valueExpr;
            }
        } else if (recordField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field `{x}`: the field itself is the value expression.
            if (fieldName.equals(((BLangRecordVarNameField) recordField).variableName.value)) {
                return (BLangRecordLiteral.BLangRecordVarNameField) recordField;
            }
        } else if (recordField.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) recordField;
            BType spreadOpExprType = Types.getReferredType(spreadOperatorField.expr.getBType());
            if (spreadOpExprType.tag != TypeTags.RECORD) {
                continue;
            }
            BRecordType recordType = (BRecordType) spreadOpExprType;
            for (BField recField : recordType.fields.values()) {
                if (fieldName.equals(recField.name.value)) {
                    // NOTE(review): when the field comes in via a spread operator, the
                    // whole record literal is returned rather than the field's value
                    // expression — confirm callers (e.g. isConstExpression) rely on this.
                    return recordLiteral;
                }
            }
        }
    }
    return null;
}
/**
 * Validates every key-specifier field against the table constraint: the field must exist,
 * and be readonly, required, and anydata. The first violation is logged and reported.
 *
 * @return true when any check fails, false when all key fields are valid
 */
public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint,
                                    Location pos) {
    for (String fieldName : fieldNameList) {
        BField field = types.getTableConstraintField(constraint, fieldName);

        if (field == null) {
            dlog.error(pos,
                    DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint);
            return true;
        }

        if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName);
            return true;
        }

        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName);
            return true;
        }

        if (!types.isAssignable(field.type, symTable.anydataType)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint);
            return true;
        }
    }
    return false;
}
/**
 * Validates a table constructor against its (possibly declared) table type: the key
 * specifier's field names must agree with the type's declared key fields, and when a key
 * type constraint exists, the specifier's fields must match it in count and type.
 *
 * @return true when valid; false after logging an error and setting semanticError
 */
private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType,
                                             AnalyzerData data) {
    BType constraintType = Types.getReferredType(tableType.constraint);
    List<String> fieldNameList = new ArrayList<>();
    boolean isKeySpecifierEmpty = tableConstructorExpr.tableKeySpecifier == null;
    if (!isKeySpecifierEmpty) {
        fieldNameList.addAll(getTableKeyNameList(tableConstructorExpr.tableKeySpecifier));

        // Only validate field-level key rules when the type itself declared no key names
        // (otherwise the declaration was validated where the type was defined).
        if (tableType.fieldNameList.isEmpty() && validateKeySpecifier(fieldNameList,
                constraintType.tag != TypeTags.INTERSECTION ? constraintType :
                        ((BIntersectionType) constraintType).effectiveType,
                tableConstructorExpr.tableKeySpecifier.pos)) {
            data.resultType = symTable.semanticError;
            return false;
        }

        // Declared key names and the constructor's key specifier must match exactly.
        if (!tableType.fieldNameList.isEmpty() && !tableType.fieldNameList.equals(fieldNameList)) {
            dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH,
                    tableType.fieldNameList.toString(), fieldNameList.toString());
            data.resultType = symTable.semanticError;
            return false;
        }
    }

    BType keyTypeConstraint = tableType.keyTypeConstraint;
    if (keyTypeConstraint != null) {
        keyTypeConstraint = Types.getReferredType(keyTypeConstraint);
        List<BType> memberTypes = new ArrayList<>();

        // Expand the key constraint into the list of individual key member types.
        switch (keyTypeConstraint.tag) {
            case TypeTags.TUPLE:
                for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) {
                    memberTypes.add((BType) type);
                }
                break;
            case TypeTags.RECORD:
                // Keep only the record fields named in the key specifier.
                Map<String, BField> fieldList = ((BRecordType) keyTypeConstraint).getFields();
                memberTypes = fieldList.entrySet().stream()
                        .filter(e -> fieldNameList.contains(e.getKey())).map(entry -> entry.getValue().type)
                        .collect(Collectors.toList());
                if (memberTypes.isEmpty()) {
                    memberTypes.add(keyTypeConstraint);
                }
                break;
            default:
                memberTypes.add(keyTypeConstraint);
        }

        // key<never> with no specifier means a keyless table: nothing further to check.
        if (isKeySpecifierEmpty && keyTypeConstraint.tag == TypeTags.NEVER) {
            return true;
        }

        if (isKeySpecifierEmpty ||
                tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) {
            if (isKeySpecifierEmpty) {
                dlog.error(tableConstructorExpr.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_EMPTY_FOR_PROVIDED_KEY_CONSTRAINT, memberTypes);
            } else {
                dlog.error(tableConstructorExpr.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT,
                        memberTypes, tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList);
            }
            data.resultType = symTable.semanticError;
            return false;
        }

        // Pairwise: each specifier field's type must be assignable to the corresponding
        // key constraint member type (positional match).
        List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier.
                fieldNameIdentifierList;

        int index = 0;
        for (IdentifierNode identifier : fieldNameIdentifierList) {
            BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
            if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) {
                dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
                        fieldNameIdentifierList.toString(), memberTypes.toString());
                data.resultType = symTable.semanticError;
                return false;
            }
            index++;
        }
    }
    return true;
}
/**
 * For a map-constrained table type, key specifiers and key type constraints are not
 * supported (lang-lib declared types excepted). Logs an error and returns true when the
 * given type violates this rule.
 */
public boolean validateMapConstraintTable(BType expType) {
    if (expType == null) {
        return false;
    }
    BTableType tableType = (BTableType) expType;
    boolean declaresKeys = !tableType.fieldNameList.isEmpty() || tableType.keyTypeConstraint != null;
    if (declaresKeys && !tableType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) {
        dlog.error(tableType.keyPos,
                DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
        return true;
    }
    return false;
}
/**
 * Extracts the key field names declared in the key specifier, in declaration order.
 */
private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
    List<String> fieldNames = new ArrayList<>(tableKeySpecifier.fieldNameIdentifierList.size());
    for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
        fieldNames.add(((BLangIdentifier) identifier).value);
    }
    return fieldNames;
}
/**
 * Builds the key constraint type for a table from its key field names: one field yields
 * that field's type, several yield a tuple of their types. Returns semanticError when the
 * name list is empty or a name is missing from the constraint.
 */
private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
    if (fieldNames.isEmpty()) {
        return symTable.semanticError;
    }

    List<BType> keyMemberTypes = new ArrayList<>(fieldNames.size());
    for (String fieldName : fieldNames) {
        BField constraintField = types.getTableConstraintField(constraintType, fieldName);
        if (constraintField == null) {
            return symTable.semanticError;
        }
        keyMemberTypes.add(constraintField.type);
    }

    return keyMemberTypes.size() == 1 ? keyMemberTypes.get(0) : new BTupleType(keyMemberTypes);
}
/**
 * Checks a list constructor against an expected type. Unions are tried member-by-member
 * with error logging muted (exactly one unique match must remain); type references and
 * intersections are unwrapped and re-dispatched; concrete types are mapped to a
 * list-compatible shape (array/tuple/readonly/typedesc) and checked accordingly.
 */
private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor,
                                                AnalyzerData data) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Speculatively try each member with diagnostics muted; restore afterwards.
        boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
        int errorCount = this.dlog.errorCount();
        data.nonErrorLoggingCheck = true;
        this.dlog.mute();

        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }

            BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType, data);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }

            dlog.resetErrorCount();
            BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor,
                    data);
            // A member counts only if it checked cleanly and is not a duplicate result.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }

        data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }

        if (compatibleTypes.isEmpty()) {
            // Log against the inferred tuple type, using a clone under muted checking.
            BLangListConstructorExpr exprToLog = listConstructor;
            if (data.nonErrorLoggingCheck) {
                listConstructor.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(listConstructor);
            }

            BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType, data);

            if (!erroredExpType && inferredTupleType != symTable.semanticError) {
                dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, data.expType,
                        inferredTupleType);
            }

            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    data.expType);
            return symTable.semanticError;
        }

        // Exactly one compatible member: re-check against it with logging restored.
        return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor, data);
    }

    if (tag == TypeTags.TYPEREFDESC) {
        return checkListConstructorCompatibility(Types.getReferredType(bType), listConstructor, data);
    }

    if (tag == TypeTags.INTERSECTION) {
        return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor, data);
    }

    BType possibleType = getListConstructorCompatibleNonUnionType(bType, data);
    switch (possibleType.tag) {
        case TypeTags.ARRAY:
            return checkArrayType(listConstructor, (BArrayType) possibleType, data);
        case TypeTags.TUPLE:
            return checkTupleType(listConstructor, (BTupleType) possibleType, data);
        case TypeTags.READONLY:
            return checkReadOnlyListType(listConstructor, data);
        case TypeTags.TYPEDESC:
            // Expected type is a typedesc: collect each member's resolved type; a single
            // member stays as-is, several become a tuple typedesc.
            listConstructor.isTypedescExpr = true;
            List<BType> actualTypes = new ArrayList<>();
            for (BLangExpression expr : listConstructor.exprs) {
                if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
                    BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
                    BType spreadOpExprType = checkExpr(spreadOpExpr, symTable.noType, data);
                    actualTypes.addAll(getListConstSpreadOpMemberTypes(expr.pos, spreadOpExprType));
                    continue;
                }

                BType resultType = checkExpr(expr, symTable.noType, data);
                if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
                    actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
                } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
                } else {
                    actualTypes.add(resultType);
                }
            }

            if (actualTypes.size() == 1) {
                listConstructor.typedescType = actualTypes.get(0);
            } else {
                listConstructor.typedescType = new BTupleType(actualTypes);
            }
            return new BTypedescType(listConstructor.typedescType, null);
    }

    // Incompatible expected type: report against the inferred tuple type.
    BLangListConstructorExpr exprToLog = listConstructor;
    if (data.nonErrorLoggingCheck) {
        listConstructor.cloneAttempt++;
        exprToLog = nodeCloner.cloneNode(listConstructor);
    }

    if (bType == symTable.semanticError) {
        // Still type-check members so nested errors are surfaced, but do not double-log.
        getInferredTupleType(exprToLog, symTable.semanticError, data);
    } else {
        dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType,
                getInferredTupleType(exprToLog, symTable.noType, data));
    }

    return symTable.semanticError;
}
/**
 * Expands a spread operand's type into the member types it contributes to a list
 * constructor. Only fixed-length tuples and closed arrays can be expanded; anything else
 * is reported and contributes a single semanticError entry.
 */
private List<BType> getListConstSpreadOpMemberTypes(Location spreadMemberPos, BType spreadOpExprType) {
    BType referredType = Types.getReferredType(spreadOpExprType);
    List<BType> memberTypes = new ArrayList<>();

    if (referredType.tag == TypeTags.TUPLE && types.isFixedLengthTuple((BTupleType) referredType)) {
        memberTypes.addAll(((BTupleType) referredType).tupleTypes);
        return memberTypes;
    }

    if (referredType.tag == TypeTags.ARRAY && ((BArrayType) referredType).state == BArrayState.CLOSED) {
        BArrayType arrayType = (BArrayType) referredType;
        // A closed array of size n contributes its element type n times.
        for (int i = 0; i < arrayType.size; i++) {
            memberTypes.add(arrayType.eType);
        }
        return memberTypes;
    }

    dlog.error(spreadMemberPos, DiagnosticErrorCode.CANNOT_INFER_TYPE_FROM_SPREAD_OP);
    memberTypes.add(symTable.semanticError);
    return memberTypes;
}
/**
 * Maps an expected (non-union) type to the concrete list shape a list constructor can be
 * checked against. json/anydata/any map to the corresponding broad array type, made
 * immutable when the expected type is readonly; anything else is incompatible.
 */
private BType getListConstructorCompatibleNonUnionType(BType type, AnalyzerData data) {
    switch (type.tag) {
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.READONLY:
        case TypeTags.TYPEDESC:
            return type;
        case TypeTags.JSON:
            if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
                return ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType,
                        data.env, symTable, anonymousModelHelper, names);
            }
            return symTable.arrayJsonType;
        case TypeTags.ANYDATA:
            if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
                return ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType,
                        data.env, symTable, anonymousModelHelper, names);
            }
            return symTable.arrayAnydataType;
        case TypeTags.ANY:
            if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
                return ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAllType,
                        data.env, symTable, anonymousModelHelper, names);
            }
            return symTable.arrayAllType;
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
        case TypeTags.TYPEREFDESC:
            return Types.getReferredType(type);
        default:
            return symTable.semanticError;
    }
}
/**
 * Checks a list constructor against an array type. First pass (non-open arrays only)
 * computes the effective member count, expanding spread operands — which must be closed
 * arrays or fixed-length tuples. The size is then reconciled with the array's declared
 * size (filler values cover a shortfall). Second pass checks each member expression, and
 * each spread operand's element/member types, against the array's element type.
 */
private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType, AnalyzerData data) {
    int listExprSize = 0;
    if (arrayType.state != BArrayState.OPEN) {
        for (BLangExpression expr : listConstructor.exprs) {
            if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
                listExprSize++;
                continue;
            }

            BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
            BType spreadOpType = checkExpr(spreadOpExpr, data);
            spreadOpType = Types.getReferredType(spreadOpType);

            switch (spreadOpType.tag) {
                case TypeTags.ARRAY:
                    int arraySize = ((BArrayType) spreadOpType).size;
                    if (arraySize >= 0) {
                        listExprSize += arraySize;
                        continue;
                    }

                    // Open array spread into a fixed-size array: size is unknowable.
                    dlog.error(spreadOpExpr.pos,
                            DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
                    return symTable.semanticError;
                case TypeTags.TUPLE:
                    BTupleType tType = (BTupleType) spreadOpType;
                    if (types.isFixedLengthTuple(tType)) {
                        listExprSize += tType.tupleTypes.size();
                        continue;
                    }

                    dlog.error(spreadOpExpr.pos,
                            DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
                    return symTable.semanticError;
            }
        }
    }

    BType eType = arrayType.eType;
    if (arrayType.state == BArrayState.INFERRED) {
        // `[...]`-inferred array: close it at the observed member count.
        arrayType.size = listExprSize;
        arrayType.state = BArrayState.CLOSED;
    } else if (arrayType.state != BArrayState.OPEN && arrayType.size != listExprSize) {
        if (arrayType.size < listExprSize) {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size,
                    listExprSize);
            return symTable.semanticError;
        }

        // Fewer members than the declared size is fine only if the element type has a
        // filler value for the remaining slots.
        if (!types.hasFillerValue(eType)) {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE,
                    data.expType);
            return symTable.semanticError;
        }
    }

    boolean errored = false;
    for (BLangExpression expr : listConstructor.exprs) {
        if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            errored |= exprIncompatible(eType, expr, data);
            continue;
        }

        BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
        BType spreadOpType = checkExpr(spreadOpExpr, data);
        BType spreadOpReferredType = Types.getReferredType(spreadOpType);

        switch (spreadOpReferredType.tag) {
            case TypeTags.ARRAY:
                BType spreadOpeType = ((BArrayType) spreadOpReferredType).eType;
                if (types.typeIncompatible(spreadOpExpr.pos, spreadOpeType, eType)) {
                    return symTable.semanticError;
                }
                break;
            case TypeTags.TUPLE:
                BTupleType spreadOpTuple = (BTupleType) spreadOpReferredType;
                List<BType> tupleTypes = spreadOpTuple.tupleTypes;
                for (BType tupleMemberType : tupleTypes) {
                    if (types.typeIncompatible(spreadOpExpr.pos, tupleMemberType, eType)) {
                        return symTable.semanticError;
                    }
                }

                // Open tuples also contribute members of their rest type.
                if (!types.isFixedLengthTuple(spreadOpTuple)) {
                    if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.restType, eType)) {
                        return symTable.semanticError;
                    }
                }
                break;
            default:
                dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_LIST_SPREAD_OP, spreadOpType);
                return symTable.semanticError;
        }
    }

    return errored ? symTable.semanticError : arrayType;
}
/**
 * Type-checks a list constructor expression against an expected tuple type.
 *
 * @param listConstructor the list constructor expression being checked
 * @param tupleType       the expected tuple type (member types plus optional rest type)
 * @param data            analyzer state (env, expected type, result type)
 * @return {@code tupleType} when all members are compatible, otherwise {@code symTable.semanticError}
 */
private BTupleType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType, AnalyzerData data) {
    List<BLangExpression> exprs = listConstructor.exprs;
    List<BType> memberTypes = tupleType.tupleTypes;
    int memberTypeSize = memberTypes.size();
    BType restType = tupleType.restType;
    // Phase 1 (fixed-length tuples only): validate arity. Spread operands must have a
    // statically-known length (closed array or fixed-length tuple) to count members.
    if (types.isFixedLengthTuple(tupleType)) {
        int listExprSize = 0;
        for (BLangExpression expr : exprs) {
            if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
                listExprSize++;
                continue;
            }
            BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
            BType spreadOpType = checkExpr(spreadOpExpr, data);
            spreadOpType = Types.getReferredType(spreadOpType);
            switch (spreadOpType.tag) {
                case TypeTags.ARRAY:
                    int arraySize = ((BArrayType) spreadOpType).size;
                    if (arraySize >= 0) {
                        listExprSize += arraySize;
                        continue;
                    }
                    // Open array spread into a fixed-length tuple: size unknown at compile time.
                    dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
                    return symTable.semanticError;
                case TypeTags.TUPLE:
                    BTupleType tType = (BTupleType) spreadOpType;
                    if (types.isFixedLengthTuple(tType)) {
                        listExprSize += tType.tupleTypes.size();
                        continue;
                    }
                    dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
                    return symTable.semanticError;
            }
        }
        if (listExprSize < memberTypeSize) {
            // Too few members is allowed only when every missing member type has a filler value.
            for (int i = listExprSize; i < memberTypeSize; i++) {
                if (!types.hasFillerValue(memberTypes.get(i))) {
                    dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE,
                            memberTypes.get(i));
                    return symTable.semanticError;
                }
            }
        } else if (listExprSize > memberTypeSize) {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.TUPLE_AND_EXPRESSION_SIZE_DOES_NOT_MATCH);
            return symTable.semanticError;
        }
    }
    // Phase 2: check each constructor member against the corresponding tuple member type;
    // members beyond memberTypeSize are checked against the rest type.
    boolean errored = false;
    int nonRestTypeIndex = 0;
    for (BLangExpression expr : exprs) {
        int remainNonRestCount = memberTypeSize - nonRestTypeIndex;
        if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            if (remainNonRestCount > 0) {
                errored |= exprIncompatible(memberTypes.get(nonRestTypeIndex), expr, data);
                nonRestTypeIndex++;
            } else {
                errored |= exprIncompatible(restType, expr, data);
            }
            continue;
        }
        BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
        BType spreadOpType = checkExpr(spreadOpExpr, data);
        BType spreadOpReferredType = Types.getReferredType(spreadOpType);
        switch (spreadOpReferredType.tag) {
            case TypeTags.ARRAY:
                BArrayType spreadOpArray = (BArrayType) spreadOpReferredType;
                if (spreadOpArray.state == BArrayState.CLOSED) {
                    // Closed array: consume as many non-rest member slots as the array fills,
                    // then check any overflow against the rest type.
                    for (int i = 0; i < spreadOpArray.size && nonRestTypeIndex < memberTypeSize;
                         i++, nonRestTypeIndex++) {
                        if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType,
                                memberTypes.get(nonRestTypeIndex))) {
                            return symTable.semanticError;
                        }
                    }
                    if (remainNonRestCount < spreadOpArray.size) {
                        if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType, restType)) {
                            return symTable.semanticError;
                        }
                    }
                    continue;
                }
                // Open array: cannot satisfy a fixed member slot, only the rest type.
                if (remainNonRestCount > 0) {
                    dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_MEMBER_EXPECTED,
                            memberTypes.get(nonRestTypeIndex));
                    return symTable.semanticError;
                }
                if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType, restType)) {
                    return symTable.semanticError;
                }
                break;
            case TypeTags.TUPLE:
                BTupleType spreadOpTuple = (BTupleType) spreadOpReferredType;
                int spreadOpMemberTypeSize = spreadOpTuple.tupleTypes.size();
                if (types.isFixedLengthTuple(spreadOpTuple)) {
                    // Fixed-length tuple spread: pair members positionally with the remaining
                    // non-rest slots, then check the overflow members against the rest type.
                    for (int i = 0; i < spreadOpMemberTypeSize && nonRestTypeIndex < memberTypeSize;
                         i++, nonRestTypeIndex++) {
                        if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i),
                                memberTypes.get(nonRestTypeIndex))) {
                            return symTable.semanticError;
                        }
                    }
                    for (int i = remainNonRestCount; i < spreadOpMemberTypeSize; i++) {
                        if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i), restType)) {
                            return symTable.semanticError;
                        }
                    }
                    continue;
                }
                // Tuple-with-rest spread: its fixed members must cover all remaining non-rest slots.
                if (spreadOpMemberTypeSize < remainNonRestCount) {
                    dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_MEMBER_EXPECTED,
                            memberTypes.get(nonRestTypeIndex + spreadOpMemberTypeSize));
                    return symTable.semanticError;
                }
                for (int i = 0; nonRestTypeIndex < memberTypeSize; i++, nonRestTypeIndex++) {
                    if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i),
                            memberTypes.get(nonRestTypeIndex))) {
                        return symTable.semanticError;
                    }
                }
                // NOTE(review): this loop starts at nonRestTypeIndex (== memberTypeSize here),
                // which equals the number of spread members consumed only when this spread op
                // started at member index 0 — verify for spread ops preceded by other members.
                for (int i = nonRestTypeIndex; i < spreadOpMemberTypeSize; i++) {
                    if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i), restType)) {
                        return symTable.semanticError;
                    }
                }
                if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.restType, restType)) {
                    return symTable.semanticError;
                }
                break;
            default:
                dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_LIST_SPREAD_OP, spreadOpType);
                return symTable.semanticError;
        }
    }
    // Any tuple member slots left unfilled must have filler values.
    while (nonRestTypeIndex < memberTypeSize) {
        if (!types.hasFillerValue(memberTypes.get(nonRestTypeIndex))) {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE,
                    memberTypes.get(nonRestTypeIndex));
            return symTable.semanticError;
        }
        nonRestTypeIndex++;
    }
    return errored ? symTable.semanticError : tupleType;
}
/**
 * Checks a list constructor against the {@code readonly} type.
 * In normal (error-logging) mode the tuple type is inferred and checked against readonly;
 * in non-error-logging mode each member expression is checked against readonly directly.
 */
private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor, AnalyzerData data) {
    if (data.nonErrorLoggingCheck) {
        for (BLangExpression memberExpr : listConstructor.exprs) {
            BLangExpression exprToCheck = memberExpr;
            if (memberExpr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
                exprToCheck = ((BLangListConstructorSpreadOpExpr) memberExpr).expr;
            }
            if (exprIncompatible(symTable.readonlyType, exprToCheck, data)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }
    BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType, data);
    return inferredType == symTable.semanticError
            ? symTable.semanticError
            : types.checkType(listConstructor, inferredType, symTable.readonlyType);
}
/**
 * Returns true if {@code expr} is incompatible with the expected type {@code eType}.
 * Already-checked expressions are not re-checked; in non-error-logging mode a clone of the
 * expression is checked instead so the original node is left untouched.
 */
private boolean exprIncompatible(BType eType, BLangExpression expr, AnalyzerData data) {
    if (expr.typeChecked) {
        // Reuse the earlier result rather than re-running the checker.
        return expr.getBType() == symTable.semanticError;
    }
    BLangExpression target;
    if (data.nonErrorLoggingCheck) {
        expr.cloneAttempt++;
        target = nodeCloner.cloneNode(expr);
    } else {
        target = expr;
    }
    return checkExpr(target, eType, data) == symTable.semanticError;
}
/**
 * Checks each expression in {@code exprs} with no expected type ({@code symTable.noType}).
 * Delegates to the four-argument overload.
 *
 * @return the checked type of each expression, in order
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, AnalyzerData data) {
    return checkExprList(exprs, env, symTable.noType, data);
}
/**
 * Checks each expression in {@code exprs} against {@code expType} under {@code env},
 * returning the resulting member types. Spread operators contribute one type per spread
 * member (via {@code getListConstSpreadOpMemberTypes}). The analyzer's env/expType are
 * saved and restored around the loop.
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType, AnalyzerData data) {
    List<BType> memberTypes = new ArrayList<>();
    SymbolEnv savedEnv = data.env;
    BType savedExpType = data.expType;
    data.env = env;
    data.expType = expType;
    for (BLangExpression memberExpr : exprs) {
        if (memberExpr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            BLangExpression spreadExpr = ((BLangListConstructorSpreadOpExpr) memberExpr).expr;
            BType spreadExprType = checkExpr(spreadExpr, expType, data);
            memberTypes.addAll(getListConstSpreadOpMemberTypes(memberExpr.pos, spreadExprType));
        } else {
            checkExpr(memberExpr, expType, data);
            memberTypes.add(data.resultType);
        }
    }
    data.env = savedEnv;
    data.expType = savedExpType;
    return memberTypes;
}
/**
 * Infers a tuple type for a list constructor from its member expressions.
 * Returns semanticError if any member failed checking; marks the tuple readonly
 * when the expected type is {@code readonly}.
 */
private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType, AnalyzerData data) {
    List<BType> inferredMemberTypes = checkExprList(listConstructor.exprs, data.env, expType, data);
    boolean anyErrored = inferredMemberTypes.stream().anyMatch(t -> t == symTable.semanticError);
    if (anyErrored) {
        return symTable.semanticError;
    }
    BTupleType inferredTupleType = new BTupleType(inferredMemberTypes);
    if (expType.tag == TypeTags.READONLY) {
        inferredTupleType.flags |= Flags.READONLY;
    }
    return inferredTupleType;
}
/**
 * Type-checks a mapping (record) constructor expression.
 * For NONE/READONLY expected types the record type is inferred; OBJECT expected types
 * are rejected outright. Otherwise the constructor is checked against the expected type
 * and the effective (possibly readonly-narrowed) mapping type is produced.
 */
public void visit(BLangRecordLiteral recordLiteral, AnalyzerData data) {
    BType applicableExpType = data.expType;
    switch (Types.getReferredType(applicableExpType).tag) {
        case TypeTags.NONE:
        case TypeTags.READONLY:
            applicableExpType = defineInferredRecordType(recordLiteral, applicableExpType, data);
            break;
        case TypeTags.OBJECT:
            // A mapping constructor can never produce an object.
            dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, applicableExpType);
            data.resultType = symTable.semanticError;
            return;
    }
    BType compatibleType = checkMappingConstructorCompatibility(applicableExpType, recordLiteral, data);
    data.resultType = getEffectiveMappingType(recordLiteral, compatibleType, data);
}
/**
 * Computes the effective type of a mapping constructor when some fields are declared
 * {@code readonly} in the constructor itself: builds a new (anonymous) record type in
 * which those fields are readonly, layered over the applicable mapping type.
 * Returns the applicable type unchanged when there is nothing to narrow.
 *
 * @param recordLiteral         the mapping constructor expression
 * @param applicableMappingType the type the constructor was found compatible with
 * @param data                  analyzer state
 * @return the narrowed record type, or {@code applicableMappingType} as-is
 */
private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType,
                                      AnalyzerData data) {
    BType refType = Types.getReferredType(applicableMappingType);
    // Nothing to do on error, or if the whole record is already readonly.
    if (applicableMappingType == symTable.semanticError ||
            (refType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags,
                    Flags.READONLY))) {
        return applicableMappingType;
    }
    // Collect fields marked `readonly` in the constructor that are NOT already readonly
    // in the applicable type. Spread-op fields cannot carry a readonly marker.
    Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>();
    LinkedHashMap<String, BField> applicableTypeFields =
            refType.tag == TypeTags.RECORD ? ((BRecordType) refType).fields :
                    new LinkedHashMap<>();
    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            continue;
        }
        String name;
        if (field.isKeyValueField()) {
            BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field;
            if (!keyValueField.readonly) {
                continue;
            }
            BLangExpression keyExpr = keyValueField.key.expr;
            if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                name = ((BLangSimpleVarRef) keyExpr).variableName.value;
            } else {
                name = (String) ((BLangLiteral) keyExpr).value;
            }
        } else {
            BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
            if (!varNameField.readonly) {
                continue;
            }
            name = varNameField.variableName.value;
        }
        if (applicableTypeFields.containsKey(name) &&
                Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) {
            continue;
        }
        readOnlyFields.put(name, field);
    }
    if (readOnlyFields.isEmpty()) {
        return applicableMappingType;
    }
    // Build a fresh anonymous record type: first the newly-readonly fields...
    PackageID pkgID = data.env.enclPkg.symbol.pkgID;
    Location pos = recordLiteral.pos;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL, data);
    LinkedHashMap<String, BField> newFields = new LinkedHashMap<>();
    for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) {
        RecordLiteralNode.RecordField field = readOnlyEntry.getValue();
        String key = readOnlyEntry.getKey();
        Name fieldName = names.fromString(key);
        BType readOnlyFieldType;
        if (field.isKeyValueField()) {
            readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType();
        } else {
            // Has to be a varname field — spread-op fields were skipped above.
            readOnlyFieldType = ((BLangRecordVarNameField) field).getBType();
        }
        BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
            add(Flag.REQUIRED);
            add(Flag.READONLY);
        }}), fieldName, pkgID, readOnlyFieldType, recordSymbol,
                ((BLangNode) field).pos, VIRTUAL);
        newFields.put(key, new BField(fieldName, null, fieldSymbol));
        recordSymbol.scope.define(fieldName, fieldSymbol);
    }
    BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags);
    if (refType.tag == TypeTags.MAP) {
        recordType.sealed = false;
        recordType.restFieldType = ((BMapType) refType).constraint;
    } else {
        // ...then carry over the remaining fields of the applicable record type.
        BRecordType applicableRecordType = (BRecordType) refType;
        boolean allReadOnlyFields = true;
        for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) {
            String fieldName = origEntry.getKey();
            BField field = origEntry.getValue();
            if (readOnlyFields.containsKey(fieldName)) {
                // Already defined as a readonly field above.
                continue;
            }
            BVarSymbol origFieldSymbol = field.symbol;
            long origFieldFlags = origFieldSymbol.flags;
            if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) {
                allReadOnlyFields = false;
            }
            BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID,
                    origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL);
            newFields.put(fieldName, new BField(field.name, null, fieldSymbol));
            recordSymbol.scope.define(field.name, fieldSymbol);
        }
        recordType.sealed = applicableRecordType.sealed;
        recordType.restFieldType = applicableRecordType.restFieldType;
        // A sealed record whose fields are all readonly is itself readonly.
        if (recordType.sealed && allReadOnlyFields) {
            recordType.flags |= Flags.READONLY;
            recordType.tsymbol.flags |= Flags.READONLY;
        }
    }
    recordType.fields = newFields;
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;
    // Materialize a type-definition node so later phases (e.g. desugar) can see the type.
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, data.env,
            names, symTable);
    TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, data.env);
    if (refType.tag == TypeTags.RECORD) {
        // Record the original record type as a type reference of the new node.
        BRecordType applicableRecordType = (BRecordType) refType;
        BTypeSymbol applicableRecordTypeSymbol = applicableRecordType.tsymbol;
        BLangUserDefinedType origTypeRef = new BLangUserDefinedType(
                ASTBuilderUtil.createIdentifier(
                        pos,
                        TypeDefBuilderHelper.getPackageAlias(data.env, pos.lineRange().filePath(),
                                applicableRecordTypeSymbol.pkgID)),
                ASTBuilderUtil.createIdentifier(pos, applicableRecordTypeSymbol.name.value));
        origTypeRef.pos = pos;
        origTypeRef.setBType(applicableRecordType);
        recordTypeNode.typeRefs.add(origTypeRef);
    } else if (refType.tag == TypeTags.MAP) {
        recordLiteral.expectedType = applicableMappingType;
    }
    return recordType;
}
/**
 * Finds the mapping-constructor-compatible type within {@code bType} and validates the
 * constructor against it. For union expected types, each member is tried silently
 * (dlog muted) and exactly one compatible member must remain; zero or multiple members
 * is an error.
 *
 * @return the compatible (map/record/readonly-derived) type, or {@code symTable.semanticError}
 */
private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor,
                                                   AnalyzerData data) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Probe each member silently; errors are counted and restored afterwards so only
        // the final diagnosis is reported.
        boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
        data.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();
        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }
            BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType, data);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }
            dlog.resetErrorCount();
            BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType,
                    mappingConstructor, data);
            // A member counts as compatible only when checking produced no new errors
            // and the type is not a duplicate of one already accepted.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }
        data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (compatibleTypes.isEmpty()) {
            if (!erroredExpType) {
                reportIncompatibleMappingConstructorError(mappingConstructor, bType, data);
            }
            // Still walk the fields so nested expressions get their types set.
            validateSpecifiedFields(mappingConstructor, symTable.semanticError, data);
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType);
            validateSpecifiedFields(mappingConstructor, symTable.semanticError, data);
            return symTable.semanticError;
        }
        // Exactly one candidate: re-check it with error logging enabled.
        return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor, data);
    }
    if (tag == TypeTags.TYPEREFDESC) {
        BType refType = Types.getReferredType(bType);
        BType compatibleType = checkMappingConstructorCompatibility(refType, mappingConstructor, data);
        // Preserve the type reference when the referred type itself was compatible.
        return compatibleType == refType ? bType : compatibleType;
    }
    if (tag == TypeTags.INTERSECTION) {
        return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor,
                data);
    }
    BType possibleType = getMappingConstructorCompatibleNonUnionType(bType, data);
    switch (possibleType.tag) {
        case TypeTags.MAP:
            return validateSpecifiedFields(mappingConstructor, possibleType, data) ? possibleType :
                    symTable.semanticError;
        case TypeTags.RECORD:
            // Both validations are run so all applicable diagnostics are reported.
            boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType, data);
            boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType,
                    mappingConstructor.fields,
                    mappingConstructor.pos, data);
            return isSpecifiedFieldsValid && hasAllRequiredFields ? possibleType : symTable.semanticError;
        case TypeTags.READONLY:
            return checkReadOnlyMappingType(mappingConstructor, data);
    }
    reportIncompatibleMappingConstructorError(mappingConstructor, bType, data);
    validateSpecifiedFields(mappingConstructor, symTable.semanticError, data);
    return symTable.semanticError;
}
/**
 * Checks a mapping constructor against the {@code readonly} type.
 * In normal mode an inferred record type is defined and re-checked for compatibility;
 * in non-error-logging mode each field's value expression is checked against readonly.
 */
private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor, AnalyzerData data) {
    if (data.nonErrorLoggingCheck) {
        for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
            BLangExpression valueExpr;
            if (field.isKeyValueField()) {
                valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
            } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
            } else {
                valueExpr = (BLangRecordVarNameField) field;
            }
            if (exprIncompatible(symTable.readonlyType, valueExpr, data)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }
    BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType, data);
    return inferredType == symTable.semanticError
            ? symTable.semanticError
            : checkMappingConstructorCompatibility(inferredType, mappingConstructor, data);
}
/**
 * Maps a non-union expected type to the concrete mapping type a mapping constructor
 * should be checked against: map/record/readonly pass through; json/anydata/any become
 * the corresponding map type (immutable variant when the expected type is readonly);
 * intersections use their effective type; type references are resolved recursively.
 * Any other type yields {@code symTable.semanticError}.
 */
private BType getMappingConstructorCompatibleNonUnionType(BType type, AnalyzerData data) {
    switch (type.tag) {
        case TypeTags.MAP:
        case TypeTags.RECORD:
        case TypeTags.READONLY:
            return type;
        case TypeTags.JSON:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, data.env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.ANYDATA:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType,
                            data.env, symTable, anonymousModelHelper, names);
        case TypeTags.ANY:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAllType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAllType, data.env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
        case TypeTags.TYPEREFDESC:
            return getMappingConstructorCompatibleNonUnionType(((BTypeReferenceType) type).referredType, data);
    }
    return symTable.semanticError;
}
/**
 * Returns true if {@code type} (after resolving type references) is a type a mapping
 * constructor can produce, i.e. a record or a map.
 */
private boolean isMappingConstructorCompatibleType(BType type) {
    // Resolve the type reference once instead of twice as before.
    int referredTypeTag = Types.getReferredType(type).tag;
    return referredTypeTag == TypeTags.RECORD || referredTypeTag == TypeTags.MAP;
}
/**
 * Reports the most specific diagnostic for a mapping constructor that matched no
 * expected type. Special-cases a `T?` union (record plus nil) by validating against the
 * record member directly so field-level errors are shown; otherwise reports either an
 * incompatible-constructor error (some member was a mapping type) or a
 * no-compatible-type-found error.
 */
private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType,
                                                       AnalyzerData data) {
    if (expType == symTable.semanticError) {
        // Already errored upstream; avoid cascading diagnostics.
        return;
    }
    if (expType.tag != TypeTags.UNION) {
        dlog.error(mappingConstructorExpr.pos,
                DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
        return;
    }
    BUnionType unionType = (BUnionType) expType;
    BType[] memberTypes = types.getAllTypes(unionType, true).toArray(new BType[0]);
    // If it's a record|nil union (e.g. `Foo?`), validate against the record so the user
    // sees concrete field errors instead of a generic union mismatch.
    if (memberTypes.length == 2) {
        BRecordType recType = null;
        if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[0];
        } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[1];
        }
        if (recType != null) {
            validateSpecifiedFields(mappingConstructorExpr, recType, data);
            validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos, data);
            return;
        }
    }
    // A mapping member existed but the constructor didn't fit it.
    for (BType bType : memberTypes) {
        if (isMappingConstructorCompatibleType(bType)) {
            dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR,
                    unionType);
            return;
        }
    }
    dlog.error(mappingConstructorExpr.pos,
            DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType);
}
/**
 * Checks every field of the mapping constructor against {@code possibleType}.
 * All fields are always visited (so every diagnostic is reported); returns false
 * if any field failed checking.
 */
private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType,
                                        AnalyzerData data) {
    boolean allFieldsValid = true;
    for (RecordLiteralNode.RecordField specifiedField : mappingConstructor.fields) {
        BType fieldCheckResult = checkMappingField(specifiedField, Types.getReferredType(possibleType), data);
        if (fieldCheckResult == symTable.semanticError) {
            allFieldsValid = false;
        }
    }
    return allFieldsValid;
}
/**
 * Verifies that every required field of the record type is present among the
 * specified fields. Logs one error per missing required field; fields of a
 * never-containing type are exempt (they can never be assigned anyway).
 */
private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields,
                                       Location pos, AnalyzerData data) {
    HashSet<String> specifiedFieldNames = getFieldNames(specifiedFields, data);
    boolean hasAllRequiredFields = true;
    for (BField field : type.fields.values()) {
        if (specifiedFieldNames.contains(field.name.value)) {
            continue;
        }
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)
                || types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
            continue;
        }
        dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        hasAllRequiredFields = false;
    }
    return hasAllRequiredFields;
}
/**
 * Collects the statically-known field names specified in a mapping constructor:
 * non-computed key-value keys, var-name fields, and the required field names
 * contributed by spread operators.
 */
private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields, AnalyzerData data) {
    HashSet<String> collectedNames = new HashSet<>();
    for (RecordLiteralNode.RecordField specifiedField : specifiedFields) {
        if (specifiedField.isKeyValueField()) {
            String keyName = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField);
            // A computed key has no statically-known name.
            if (keyName != null) {
                collectedNames.add(keyName);
            }
        } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            collectedNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField));
        } else {
            collectedNames.addAll(getSpreadOpFieldRequiredFieldNames(
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField, data));
        }
    }
    return collectedNames;
}
/**
 * Returns the statically-known name of a key-value field's key, or null for a
 * computed key or any key expression other than a simple var-ref or string literal.
 */
private String getKeyValueFieldName(BLangRecordKeyValueField field) {
    BLangRecordKey fieldKey = field.key;
    if (fieldKey.computedKey) {
        return null;
    }
    BLangExpression keyExpr = fieldKey.expr;
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) keyExpr).variableName.value;
    }
    if (keyKind == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) keyExpr).value;
    }
    return null;
}
/**
 * Returns the field name of a var-name field (the variable name doubles as the key).
 */
private String getVarNameFieldName(BLangRecordVarNameField field) {
    return field.variableName.value;
}
/**
 * Returns the names of the non-optional fields contributed by a spread-operator field.
 * Non-record spread expressions (checked as a side effect here) contribute nothing.
 */
private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field,
                                                        AnalyzerData data) {
    BType referredSpreadType = Types.getReferredType(checkExpr(field.expr, data));
    if (referredSpreadType.tag != TypeTags.RECORD) {
        return Collections.emptyList();
    }
    List<String> requiredFieldNames = new ArrayList<>();
    for (BField spreadField : ((BRecordType) referredSpreadType).getFields().values()) {
        if (Symbols.isOptional(spreadField.symbol)) {
            continue;
        }
        requiredFieldNames.add(spreadField.name.value);
    }
    return requiredFieldNames;
}
/**
 * Type-checks a worker flush expression. When a worker name is given it must exist;
 * the expression's type is always {@code error?}.
 */
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr, AnalyzerData data) {
    if (workerFlushExpr.workerIdentifier != null) {
        String flushedWorkerName = workerFlushExpr.workerIdentifier.getValue();
        if (this.workerExists(data.env, flushedWorkerName)) {
            BSymbol workerSymbol =
                    symResolver.lookupSymbolInMainSpace(data.env, names.fromString(flushedWorkerName));
            if (workerSymbol != symTable.notFoundSymbol) {
                workerFlushExpr.workerSymbol = workerSymbol;
            }
        } else {
            this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, flushedWorkerName);
        }
    }
    // flush always produces error|() regardless of the worker.
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    data.resultType = types.checkType(workerFlushExpr, actualType, data.expType);
}
/**
 * Type-checks a synchronous send expression ({@code expr ->> worker}).
 * Resolves the target worker symbol, checks that the sent value is cloneable,
 * and verifies the worker exists in the current environment.
 */
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr, AnalyzerData data) {
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(syncSendExpr.workerIdentifier));
    if (symTable.notFoundSymbol.equals(symbol)) {
        syncSendExpr.workerType = symTable.semanticError;
    } else {
        syncSendExpr.workerType = symbol.type;
        syncSendExpr.workerSymbol = symbol;
    }
    // Needed during code analysis to rewrite this expression.
    syncSendExpr.env = data.env;
    checkExpr(syncSendExpr.expr, data);
    // Only cloneable values may cross a worker boundary.
    if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND,
                syncSendExpr.expr.getBType());
    }
    String workerName = syncSendExpr.workerIdentifier.getValue();
    if (!this.workerExists(data.env, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
    }
    syncSendExpr.expectedType = data.expType;
    // With no contextually expected type the result is nil; otherwise it is the expected type.
    data.resultType = data.expType == symTable.noType ? symTable.nilType : data.expType;
}
/**
 * Type-checks a worker receive expression ({@code <- worker}).
 * A receive requires a contextually expected type; its result type is that expected type.
 */
@Override
public void visit(BLangWorkerReceive workerReceiveExpr, AnalyzerData data) {
    BSymbol workerSymbol =
            symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(workerReceiveExpr.workerIdentifier));
    workerReceiveExpr.env = data.env;
    if (symTable.notFoundSymbol.equals(workerSymbol)) {
        workerReceiveExpr.workerType = symTable.semanticError;
    } else {
        workerReceiveExpr.workerType = workerSymbol.type;
        workerReceiveExpr.workerSymbol = workerSymbol;
    }
    // A receive cannot stand alone — the received value's type must come from context.
    if (symTable.noType == data.expType) {
        this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
    }
    workerReceiveExpr.setBType(data.expType);
    data.resultType = data.expType;
}
/**
 * Returns true if {@code workerName} names a worker visible in {@code env}:
 * either the default worker, or a symbol of a worker-derived future type.
 */
private boolean workerExists(SymbolEnv env, String workerName) {
    if (workerName.equals(DEFAULT_WORKER_NAME)) {
        return true;
    }
    BSymbol candidate = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName));
    if (candidate == this.symTable.notFoundSymbol) {
        return false;
    }
    // Worker references are modeled as futures flagged as worker-derivative.
    return candidate.type.tag == TypeTags.FUTURE && ((BFutureType) candidate.type).workerDerivative;
}
/**
 * Type-checks a constant reference: resolves the constant's symbol in its package and
 * inserts an implicit cast to the expected type when needed. The result type is the
 * reference's own (constant) type.
 */
@Override
public void visit(BLangConstRef constRef, AnalyzerData data) {
    constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, data.env,
            names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName));
    types.setImplicitCastExpr(constRef, constRef.getBType(), data.expType);
    data.resultType = constRef.getBType();
}
/**
 * Type-checks a simple variable reference. Handles the wildcard {@code _}, module-prefixed
 * references (including XMLNS prefixes), variables, type definitions, and constants, and
 * rejects assignment to constants and references that cannot fix an inferred array size.
 */
public void visit(BLangSimpleVarRef varRefExpr, AnalyzerData data) {
    // Set error type as the actual type.
    BType actualType = symTable.semanticError;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    if (varName == Names.IGNORE) {
        // The wildcard `_` accepts any value; synthesize a throwaway symbol for it.
        varRefExpr.setBType(this.symTable.anyType);
        // If the variable name is a wildcard('_'), the symbol should be ignorable.
        varRefExpr.symbol = new BVarSymbol(0, true, varName,
                names.originalNameFromIdNode(varRefExpr.variableName),
                data.env.enclPkg.symbol.pkgID, varRefExpr.getBType(), data.env.scope.owner,
                varRefExpr.pos, VIRTUAL);
        data.resultType = varRefExpr.getBType();
        return;
    }
    Name compUnitName = getCurrentCompUnit(varRefExpr);
    varRefExpr.pkgSymbol =
            symResolver.resolvePrefixSymbol(data.env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName);
    if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) {
        varRefExpr.symbol = symTable.notFoundSymbol;
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias);
    }
    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // An XML namespace prefix reference evaluates to the namespace URI string.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, data.env,
                names.fromIdNode(varRefExpr.pkgAlias), varName);
        if (symbol == symTable.notFoundSymbol && data.env.enclType != null) {
            // Fall back to an attached function of the enclosing object type.
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(data.env.enclType.getBType().tsymbol.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, data.env, objFuncName,
                    data.env.enclType.getBType().tsymbol);
        }
        if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSelfReferences(varRefExpr.pos, data.env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            markAndRegisterClosureVariable(symbol, varRefExpr.pos, data.env, data);
        } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) {
            // A bare type reference evaluates to a typedesc of that type.
            actualType = symbol.type.tag == TypeTags.TYPEDESC ? symbol.type : new BTypedescType(symbol.type, null);
            varRefExpr.symbol = symbol;
        } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) symbol;
            varRefExpr.symbol = constSymbol;
            BType symbolType = symbol.type;
            BType expectedType = Types.getReferredType(data.expType);
            // Prefer the constant's declared type when the context expects a finite type
            // (or a union containing one the constant is assignable to); otherwise use the
            // broader literal type.
            if (symbolType != symTable.noType && expectedType.tag == TypeTags.FINITE ||
                    (expectedType.tag == TypeTags.UNION && types.getAllTypes(expectedType, true).stream()
                            .anyMatch(memType -> memType.tag == TypeTags.FINITE &&
                                    types.isAssignable(symbolType, memType)))) {
                actualType = symbolType;
            } else {
                actualType = constSymbol.literalType;
            }
            // Constants cannot be the target of an assignment.
            if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) {
                actualType = symTable.semanticError;
                dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE);
            }
        } else {
            varRefExpr.symbol = symbol;
            logUndefinedSymbolError(varRefExpr.pos, varName.value);
        }
    }
    // A var-ref alone cannot determine the size of an inferred-size array type (`T[*]`).
    if (data.expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) data.expType)) {
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_INFER_SIZE_ARRAY_SIZE_FROM_THE_CONTEXT);
        data.resultType = symTable.semanticError;
        return;
    }
    data.resultType = types.checkType(varRefExpr, actualType, data.expType);
}
/**
 * Type-checks a record variable reference used in a binding pattern
 * (e.g. {@code {a, b, ...rest} = value}): each referenced field becomes a field of an
 * anonymous record type, and the rest param (when present) determines the rest field type.
 */
@Override
public void visit(BLangRecordVarRef varRefExpr, AnalyzerData data) {
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(data.env.enclPkg.symbol.pkgID);
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName),
            data.env.enclPkg.symbol.pkgID, null, data.env.scope.owner,
            varRefExpr.pos, SOURCE);
    symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, data.env);
    boolean unresolvedReference = false;
    for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
        BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference;
        bLangVarReference.isLValue = true;
        checkExpr(recordRefField.variableReference, data);
        if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol ||
                !isValidVariableReference(recordRefField.variableReference)) {
            unresolvedReference = true;
            continue;
        }
        BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol;
        BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
                new BVarSymbol(0, names.fromIdNode(recordRefField.variableName),
                        names.originalNameFromIdNode(recordRefField.variableName),
                        data.env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol,
                        varRefExpr.pos, SOURCE));
        fields.put(field.name.value, field);
    }
    BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
    if (restParam != null) {
        checkExpr(restParam, data);
        // BUG FIX: use |= — plain assignment discarded an unresolved-reference result
        // from the field loop above whenever the rest param itself was valid.
        unresolvedReference |= !isValidVariableReference(restParam);
    }
    if (unresolvedReference) {
        data.resultType = symTable.semanticError;
        return;
    }
    BRecordType bRecordType = new BRecordType(recordSymbol);
    bRecordType.fields = fields;
    recordSymbol.type = bRecordType;
    varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, recordSymbol.getOriginalName(),
            data.env.enclPkg.symbol.pkgID, bRecordType, data.env.scope.owner, varRefExpr.pos,
            SOURCE);
    if (restParam == null) {
        // No rest binding: the anonymous record is closed.
        bRecordType.sealed = true;
        bRecordType.restFieldType = symTable.noType;
    } else if (restParam.getBType() == symTable.semanticError) {
        bRecordType.restFieldType = symTable.mapType;
    } else {
        // The rest param is a open record or map type; its rest/constraint type
        // becomes this record's rest field type.
        BType restFieldType;
        if (restParam.getBType().tag == TypeTags.RECORD) {
            restFieldType = ((BRecordType) restParam.getBType()).restFieldType;
        } else if (restParam.getBType().tag == TypeTags.MAP) {
            restFieldType = ((BMapType) restParam.getBType()).constraint;
        } else {
            restFieldType = restParam.getBType();
        }
        bRecordType.restFieldType = restFieldType;
    }
    data.resultType = bRecordType;
}
/**
 * Type-checks an error variable reference used in a binding pattern
 * (e.g. {@code error(msg, cause, detail1 = d1, ...rest) = value}). Validates the
 * message (string), cause (error?), detail references, and the rest variable, and
 * synthesizes the resulting error type.
 */
@Override
public void visit(BLangErrorVarRef varRefExpr, AnalyzerData data) {
    if (varRefExpr.typeNode != null) {
        // Indirect error ref: `MyError(...)` — the declared type node drives checking.
        BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, data.env);
        varRefExpr.setBType(bType);
        checkIndirectErrorVarRef(varRefExpr, data);
        data.resultType = bType;
        return;
    }
    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, data);
        // The message binding must be able to hold a string.
        if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) {
            dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType,
                    varRefExpr.message.getBType());
        }
    }
    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, data);
        // The cause binding must be able to hold error|().
        if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) {
            dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType,
                    varRefExpr.cause.getBType());
        }
    }
    boolean unresolvedReference = false;
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
        refItem.isLValue = true;
        checkExpr(refItem, data);
        if (!isValidVariableReference(refItem)) {
            unresolvedReference = true;
            continue;
        }
        // Detail bindings must be simple variable references, not member/field access.
        if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN,
                    refItem);
            unresolvedReference = true;
            continue;
        }
        if (refItem.symbol == null) {
            unresolvedReference = true;
        }
    }
    if (varRefExpr.restVar != null) {
        varRefExpr.restVar.isLValue = true;
        if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            checkExpr(varRefExpr.restVar, data);
            unresolvedReference = unresolvedReference
                    || varRefExpr.restVar.symbol == null
                    || !isValidVariableReference(varRefExpr.restVar);
        }
    }
    if (unresolvedReference) {
        data.resultType = symTable.semanticError;
        return;
    }
    // Derive the detail-map rest type from the shape of the rest binding.
    BType errorRefRestFieldType;
    if (varRefExpr.restVar == null) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        errorRefRestFieldType = varRefExpr.restVar.getBType();
    } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) {
        errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint;
    } else {
        dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                varRefExpr.restVar.getBType(), symTable.detailType);
        data.resultType = symTable.semanticError;
        return;
    }
    BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly
            ? symTable.errorType.detailType
            : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC);
    data.resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType);
}
/**
 * Type-checks an error variable reference in the indirect form, where the error type is not
 * directly named. Each named detail item is checked against the type of its own value
 * expression, and the message/cause references are marked and checked as assignment targets.
 */
private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr, AnalyzerData data) {
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        // Check the value expression first, then check the named arg against that inferred type.
        checkExpr(detailItem.expr, data);
        checkExpr(detailItem, detailItem.expr.getBType(), data);
    }

    if (varRefExpr.restVar != null) {
        checkExpr(varRefExpr.restVar, data);
    }

    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, data);
    }

    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, data);
    }
}
/**
 * Type-checks a tuple variable reference used as an assignment target,
 * e.g. {@code [a, b, ...rest] = expr}. The actual type is a tuple built from the member
 * reference types, with the rest type derived from the rest binding when present.
 */
@Override
public void visit(BLangTupleVarRef varRefExpr, AnalyzerData data) {
    List<BType> results = new ArrayList<>();
    for (int i = 0; i < varRefExpr.expressions.size(); i++) {
        // Each member reference is an lvalue in this context.
        ((BLangVariableReference) varRefExpr.expressions.get(i)).isLValue = true;
        results.add(checkExpr(varRefExpr.expressions.get(i), symTable.noType, data));
    }
    BTupleType actualType = new BTupleType(results);
    if (varRefExpr.restParam != null) {
        BLangExpression restExpr = varRefExpr.restParam;
        ((BLangVariableReference) restExpr).isLValue = true;
        BType checkedType = checkExpr(restExpr, symTable.noType, data);
        // The rest binding itself must be a list type (array or tuple).
        if (!(checkedType.tag == TypeTags.ARRAY || checkedType.tag == TypeTags.TUPLE)) {
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType);
            data.resultType = symTable.semanticError;
            return;
        }
        if (checkedType.tag == TypeTags.ARRAY) {
            // For an array rest binding, the tuple's rest type is the array's element type.
            actualType.restType = ((BArrayType) checkedType).eType;
        } else {
            actualType.restType = checkedType;
        }
    }
    data.resultType = types.checkType(varRefExpr, actualType, data.expType);
}
/**
 * Checks whether a (possibly multidimensional) array type has at least one open/inferred
 * dimension, walking inward through nested array element types.
 *
 * @param arrayType array to check if open sealed
 * @return true if at least one dimension is open sealed
 */
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    BArrayType current = arrayType;
    while (true) {
        if (current.state == BArrayState.INFERRED) {
            return true;
        }
        if (current.eType.tag != TypeTags.ARRAY) {
            return false;
        }
        // Descend into the next dimension.
        current = (BArrayType) current.eType;
    }
}
/**
 * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the
 * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the
 * enclosing invokable node's environment, which are outside of the scope of a lambda function.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    if (env.enclEnv.node == null) {
        // Reached the top-most environment; nothing further to unwrap.
        return env;
    }

    NodeKind kind = env.enclEnv.node.getKind();
    if (kind == NodeKind.ARROW_EXPR || kind == NodeKind.ON_FAIL) {
        return env.enclEnv;
    }

    if (kind == NodeKind.CLASS_DEFN) {
        // Skip over the class definition's own environment.
        return env.enclEnv.enclEnv;
    }

    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        // Still inside the given invokable; keep walking outward.
        return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
    }
    return env;
}
/**
 * Record-type variant of {@code findEnclosingInvokableEnv}: walks outward past environments
 * belonging to the given record type node so closure lookups start outside the record's scope.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) {
    if (env.enclEnv.node != null) {
        NodeKind kind = env.enclEnv.node.getKind();
        if (kind == NodeKind.ARROW_EXPR || kind == NodeKind.ON_FAIL || kind == NodeKind.CLASS_DEFN) {
            return env.enclEnv;
        }
    }

    if (env.enclType != null && env.enclType == recordTypeNode) {
        // Still inside the record type node; keep walking outward.
        return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode);
    }
    return env;
}
// Returns true when the given symbol matches one of the function's parameters,
// compared by name and by type tag.
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    for (BLangSimpleVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
/**
 * Type-checks an XML namespace-prefixed field access, e.g. {@code x.ns:attr}.
 * Delegates to the common field-access checker with ns-prefixed handling enabled.
 */
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess,
                  AnalyzerData data) {
    checkFieldBasedAccess(nsPrefixedFieldBasedAccess, true, data);
}
/**
 * Type-checks a plain (non ns-prefixed) field access expression, e.g. {@code x.field}.
 */
public void visit(BLangFieldBasedAccess fieldAccessExpr, AnalyzerData data) {
    checkFieldBasedAccess(fieldAccessExpr, false, data);
}
/**
 * Common checker for field access expressions ({@code expr.field}), covering optional field
 * access ({@code expr?.field}) and XML ns-prefixed access.
 *
 * @param isNsPrefixed true when the field name carries an XML namespace prefix; such access
 *                     is only valid on xml values
 */
private void checkFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr, boolean isNsPrefixed, AnalyzerData data) {
    markLeafNode(fieldAccessExpr);

    BLangExpression containerExpression = fieldAccessExpr.expr;

    // Propagate lvalue-ness into the container so nested accesses are checked as assignment targets.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                fieldAccessExpr.isCompoundAssignmentLValue;
    }

    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, data));

    // Namespace-prefixed field access is only meaningful on xml values.
    if (isNsPrefixed && !isXmlAccess(fieldAccessExpr)) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION);
        data.resultType = symTable.semanticError;
        return;
    }

    BType actualType;
    if (fieldAccessExpr.optionalFieldAccess) {
        // `a?.b` cannot appear on the left-hand side of an assignment.
        if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
            data.resultType = symTable.semanticError;
            return;
        }
        actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
                names.fromIdNode(fieldAccessExpr.field), data);
    } else {
        actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field),
                data);

        if (actualType != symTable.semanticError &&
                (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) {
            if (isAllReadonlyTypes(varRefType)) {
                // Readonly values cannot be updated, except object fields inside the object's own init.
                if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType, data)) {
                    dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                            varRefType);
                    data.resultType = symTable.semanticError;
                    return;
                }
            } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) &&
                    isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) {
                // Writing to a readonly field of an otherwise mutable record.
                dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                        fieldAccessExpr.field.value, varRefType);
                data.resultType = symTable.semanticError;
                return;
            }
        }
    }

    data.resultType = types.checkType(fieldAccessExpr, actualType, data.expType);
}
// Returns true when the type is readonly; a union is readonly only when every member is.
private boolean isAllReadonlyTypes(BType type) {
    if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isAllReadonlyTypes);
    }
    return Symbols.isFlagOn(type.flags, Flags.READONLY);
}
// Returns true when the current enclosing invokable is the init method of the given object type,
// i.e. we are initializing the object's own fields inside its constructor.
private boolean isInitializationInInit(BType type, AnalyzerData data) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) ((BObjectType) type).tsymbol;
    BAttachedFunction initFunc = objectSymbol.initializerFunc;
    if (initFunc == null || data.env.enclInvokable == null) {
        return false;
    }
    return data.env.enclInvokable.symbol == initFunc.symbol;
}
/**
 * Returns true when updating {@code fieldName} on {@code type} would write to a readonly
 * field. For a record this holds when the record itself is readonly, when the named field is
 * readonly, or when the field is absent and the record is sealed (no rest field to write to).
 * For a union the update is invalid only if it is invalid for every member.
 */
private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) {
    if (Types.getReferredType(type).tag == TypeTags.RECORD) {
        if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
            return true;
        }

        BRecordType recordType = (BRecordType) Types.getReferredType(type);
        for (BField field : recordType.fields.values()) {
            if (!field.name.value.equals(fieldName)) {
                continue;
            }

            return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
        }
        // Field not declared: writable only via the rest field, which sealed records lack.
        return recordType.sealed;
    }

    // NOTE(review): the non-record path unconditionally casts to BUnionType — assumes callers
    // only pass record or union-of-record types here (guarded by isSubTypeOfBaseType at the
    // call sites); confirm no other type can reach this branch.
    boolean allInvalidUpdates = true;
    for (BType memberType : ((BUnionType) Types.getReferredType(type)).getMemberTypes()) {
        if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) {
            allInvalidUpdates = false;
        }
    }
    return allInvalidUpdates;
}
/**
 * Returns true when the field access operates on an xml value: either the container's static
 * type is xml/xml:Element, or the container is itself a lax field access whose union type
 * includes xml or xml:Element.
 */
private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression containerExpr = fieldAccessExpr.expr;
    BType containerType = Types.getReferredType(containerExpr.getBType());

    if (containerType.tag == TypeTags.XML || containerType.tag == TypeTags.XML_ELEMENT) {
        return true;
    }

    // A chained lax access may produce a union that still contains xml members.
    if (containerExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
            && hasLaxOriginalType((BLangFieldBasedAccess) containerExpr)
            && containerType.tag == TypeTags.UNION) {
        Set<BType> members = ((BUnionType) containerType).getMemberTypes();
        return members.contains(symTable.xmlType) || members.contains(symTable.xmlElementType);
    }

    return false;
}
/**
 * Type-checks a member access expression {@code expr[index]}, validating the container,
 * multi-key usage, and readonly-update rules for lvalue positions.
 */
public void visit(BLangIndexBasedAccess indexBasedAccessExpr, AnalyzerData data) {
    markLeafNode(indexBasedAccessExpr);

    BLangExpression containerExpression = indexBasedAccessExpr.expr;
    // Member access is not defined on typedesc expressions.
    if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                ((BLangTypedescExpr) containerExpression).typeNode);
        data.resultType = symTable.semanticError;
        return;
    }

    // Propagate lvalue-ness into the container expression.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                indexBasedAccessExpr.isCompoundAssignmentLValue;
    }

    boolean isStringValue = containerExpression.getBType() != null
            && Types.getReferredType(containerExpression.getBType()).tag == TypeTags.STRING;
    // Strings already have a checked type; re-checking is unnecessary.
    if (!isStringValue) {
        checkExpr(containerExpression, symTable.noType, data);
    }

    // Multi-key member access (e.g. t[k1, k2]) is only defined on tables.
    if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY &&
            Types.getReferredType(containerExpression.getBType()).tag != TypeTags.TABLE) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED,
                containerExpression.getBType());
        data.resultType = symTable.semanticError;
        return;
    }

    BType actualType = checkIndexAccessExpr(indexBasedAccessExpr, data);

    BType exprType = containerExpression.getBType();
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;

    if (actualType != symTable.semanticError &&
            (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) {
        if (isAllReadonlyTypes(exprType)) {
            // Cannot assign through a member access on a readonly value.
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                    exprType);
            data.resultType = symTable.semanticError;
            return;
        } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && isConstExpr(indexExpr) &&
                isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) {
            // Constant index on a record resolves to a specific (readonly) field.
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                    getConstFieldName(indexExpr), exprType);
            data.resultType = symTable.semanticError;
            return;
        }
    }

    // On the LHS the expected-type check is skipped; the access type stands as-is.
    if (indexBasedAccessExpr.isLValue) {
        indexBasedAccessExpr.originalType = actualType;
        indexBasedAccessExpr.setBType(actualType);
        data.resultType = actualType;
        return;
    }

    data.resultType = this.types.checkType(indexBasedAccessExpr, actualType, data.expType);
}
/**
 * Type-checks an invocation. A null attached expression means a plain function call;
 * otherwise the attached expression is checked first and dispatch happens on its type.
 */
public void visit(BLangInvocation iExpr, AnalyzerData data) {
    // Variable node, e.g. a.b.c expression
    if (iExpr.expr == null) {
        // Plain function invocation.
        checkFunctionInvocationExpr(iExpr, data);
        return;
    }

    // Module-alias-qualified method calls are not allowed here.
    if (invalidModuleAliasUsage(iExpr)) {
        return;
    }

    // Method call on an expression: check the receiver, then dispatch on its type.
    checkExpr(iExpr.expr, symTable.noType, data);

    BType varRefType = iExpr.expr.getBType();
    visitInvocation(iExpr, varRefType, data);
}
/**
 * Dispatches a method-call style invocation based on the receiver's type: object methods,
 * record field function pointers, or langlib functions for all other value types.
 */
private void visitInvocation(BLangInvocation iExpr, BType varRefType, AnalyzerData data) {
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
            // Invoking a function bound to an object
            checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType, data);
            break;
        case TypeTags.RECORD:
            // A record field may hold a function pointer that is being invoked.
            checkFieldFunctionPointer(iExpr, data);
            break;
        case TypeTags.NONE:
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name);
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap type references and retry.
            visitInvocation(iExpr, Types.getReferredType(varRefType), data);
            break;
        case TypeTags.INTERSECTION:
            visitInvocation(iExpr, ((BIntersectionType) varRefType).effectiveType, data);
            break;
        case TypeTags.SEMANTIC_ERROR:
            // Error already reported for the receiver; stay silent.
            break;
        default:
            checkInLangLib(iExpr, varRefType, data);
    }
}
/**
 * Type-checks an error constructor expression, e.g. {@code error("msg", detail = v)} or
 * {@code error MyError("msg")}. Candidate error types are expanded from the user-provided
 * type reference or the expected type; the detail mapping is inferred against each
 * candidate's detail type, and named args are validated against the selected detail type.
 */
public void visit(BLangErrorConstructorExpr errorConstructorExpr, AnalyzerData data) {
    BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef;
    if (userProvidedTypeRef != null) {
        symResolver.resolveTypeNode(userProvidedTypeRef, data.env,
                DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR);
    }
    validateErrorConstructorPositionalArgs(errorConstructorExpr, data);

    List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr, data);

    List<BType> errorDetailTypes = new ArrayList<>(expandedCandidates.size());
    for (BType expandedCandidate : expandedCandidates) {
        BType detailType = ((BErrorType) Types.getReferredType(expandedCandidate)).detailType;
        errorDetailTypes.add(Types.getReferredType(detailType));
    }

    BType detailCandidate;
    if (errorDetailTypes.size() == 1) {
        detailCandidate = errorDetailTypes.get(0);
    } else {
        // Multiple candidates: infer against the union of their detail types.
        detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes));
    }

    // Check the named args as a synthetic record literal to infer the detail type silently.
    BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr);
    BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, data);

    int index = errorDetailTypes.indexOf(inferredDetailType);
    BType selectedCandidate = index < 0 ? symTable.semanticError : expandedCandidates.get(index);

    if (selectedCandidate != symTable.semanticError
            && (userProvidedTypeRef == null
            || Types.getReferredType(userProvidedTypeRef.getBType()) == Types.getReferredType(selectedCandidate))) {
        // Unambiguous match consistent with any user-provided type reference.
        checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType, data);
        // TODO: When the `userProvidedTypeRef` is present diagnostic message is provided for just `error`
        // https://github.com/ballerina-platform/ballerina-lang/issues/33574
        data.resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, data.expType,
                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
        return;
    }

    if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) {
        // Ambiguous: several candidate error types and no explicit type reference.
        dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, data.expType);
    }

    // Fall back to a concrete error type for detailed per-arg diagnostics below.
    boolean validTypeRefFound = false;
    BErrorType errorType;
    if (userProvidedTypeRef != null
            && Types.getReferredType(userProvidedTypeRef.getBType()).tag == TypeTags.ERROR) {
        errorType = (BErrorType) Types.getReferredType(userProvidedTypeRef.getBType());
        validTypeRefFound = true;
    } else if (expandedCandidates.size() == 1) {
        errorType = (BErrorType) Types.getReferredType(expandedCandidates.get(0));
    } else {
        errorType = symTable.errorType;
    }
    List<BLangNamedArgsExpression> namedArgs =
            checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType, data);

    BType detailType = errorType.detailType;

    if (Types.getReferredType(detailType).tag == TypeTags.MAP) {
        // Map detail type: every named arg value must be assignable to the map constraint.
        BType errorDetailTypeConstraint = ((BMapType) Types.getReferredType(detailType)).constraint;
        for (BLangNamedArgsExpression namedArgExpr: namedArgs) {
            if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) {
                dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                        namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType());
            }
        }
    } else if (Types.getReferredType(detailType).tag == TypeTags.RECORD) {
        // Record detail type: track required fields and validate each named arg against its field.
        BRecordType targetErrorDetailRec = (BRecordType) Types.getReferredType(errorType.detailType);

        LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream()
                .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED)
                .map(f -> f.name.value)
                .collect(Collectors.toCollection(LinkedList::new));

        LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields;
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            BField field = targetFields.get(namedArg.name.value);
            Location pos = namedArg.pos;
            if (field == null) {
                if (targetErrorDetailRec.sealed) {
                    // Unknown arg on a closed detail record.
                    dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC,
                            namedArg.name, targetErrorDetailRec);
                } else if (targetFields.isEmpty()
                        && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) {
                    // Open record with no declared fields: arg must match the rest field type.
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE,
                            namedArg.name, targetErrorDetailRec);
                }
            } else {
                missingRequiredFields.remove(namedArg.name.value);
                if (!types.isAssignable(namedArg.expr.getBType(), field.type)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                            namedArg.name, field.type, namedArg.expr.getBType());
                }
            }
        }

        for (String requiredField : missingRequiredFields) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField);
        }
    }

    if (userProvidedTypeRef != null) {
        errorConstructorExpr.setBType(Types.getReferredType(userProvidedTypeRef.getBType()));
    } else {
        errorConstructorExpr.setBType(errorType);
    }

    BType resolvedType = errorConstructorExpr.getBType();
    if (resolvedType != symTable.semanticError && data.expType != symTable.noType &&
            !types.isAssignable(resolvedType, data.expType)) {
        if (validTypeRefFound) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    data.expType, userProvidedTypeRef);
        } else {
            dlog.error(errorConstructorExpr.pos,
                    DiagnosticErrorCode.ERROR_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, data.expType);
        }
        data.resultType = symTable.semanticError;
        return;
    }
    data.resultType = resolvedType;
}
// Validates the positional args of an error constructor: the first (message) must be a
// string, and the second (cause), when present, must be error or nil.
private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr,
                                                    AnalyzerData data) {
    List<BLangExpression> args = errorConstructorExpr.positionalArgs;
    if (args.isEmpty()) {
        return;
    }

    checkExpr(args.get(0), symTable.stringType, data);

    if (args.size() > 1) {
        checkExpr(args.get(1), symTable.errorOrNilType, data);
    }
}
/**
 * Type-checks an expression with diagnostic logging suppressed, restoring the previous
 * error count afterwards. Used for speculative checks whose failures must not surface.
 */
private BType checkExprSilent(BLangExpression expr, BType expType, AnalyzerData data) {
    boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
    data.nonErrorLoggingCheck = true;
    int errorCount = this.dlog.errorCount();
    this.dlog.mute();

    BType type = checkExpr(expr, expType, data);

    data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    dlog.setErrorCount(errorCount);
    // Only the outermost silent check unmutes; a nested silent check leaves the log muted
    // for the enclosing one.
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }

    return type;
}
/**
 * Builds a synthetic record literal from the error constructor's named args so the detail
 * mapping can be type-checked like an ordinary mapping constructor.
 */
private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) {
        BLangRecordKeyValueField field =
                (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
        field.valueExpr = (BLangExpression) namedArg.getExpression();
        // The arg name becomes a string-literal key of the synthetic record.
        BLangLiteral expr = new BLangLiteral();
        expr.value = namedArg.getName().value;
        expr.setBType(symTable.stringType);
        field.key = new BLangRecordKey(expr);
        recordLiteral.fields.add(field);
    }
    return recordLiteral;
}
/**
 * Collects the candidate error types for an error constructor: the user-provided type
 * reference when present and valid, otherwise error types expanded from the expected type,
 * falling back to the generic {@code error} type.
 */
private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr,
                                                         AnalyzerData data) {
    BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef;
    if (errorTypeRef == null) {
        // Infer from the expected type.
        if (Types.getReferredType(data.expType).tag == TypeTags.ERROR) {
            return List.of(data.expType);
        } else if (types.isAssignable(data.expType, symTable.errorType) || data.expType.tag == TypeTags.UNION) {
            return expandExpectedErrorTypes(data.expType);
        }
    } else {
        // Use the user-provided error type reference if it actually names an error type.
        BType errorType = Types.getReferredType(errorTypeRef.getBType());
        if (errorType.tag != TypeTags.ERROR) {
            if (errorType.tag != TypeTags.SEMANTIC_ERROR) {
                dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef);
                errorConstructorExpr.errorTypeRef.setBType(symTable.semanticError);
            }
        } else {
            return List.of(errorTypeRef.getBType());
        }
    }

    // Fallback: the generic error type.
    return List.of(symTable.errorType);
}
/**
 * Expands an expected type into the list of concrete error types it may denote: error
 * members of a union (with intersections replaced by their effective types), or the type
 * itself when it is directly assignable to error.
 */
private List<BType> expandExpectedErrorTypes(BType candidateType) {
    BType referredType = Types.getReferredType(candidateType);
    List<BType> expandedCandidates = new ArrayList<>();

    if (referredType.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) referredType).getMemberTypes()) {
            BType referredMember = Types.getReferredType(memberType);
            if (!types.isAssignable(referredMember, symTable.errorType)) {
                continue;
            }
            expandedCandidates.add(referredMember.tag == TypeTags.INTERSECTION
                    ? ((BIntersectionType) referredMember).effectiveType
                    : referredMember);
        }
        return expandedCandidates;
    }

    if (types.isAssignable(candidateType, symTable.errorType)) {
        expandedCandidates.add(referredType.tag == TypeTags.INTERSECTION
                ? ((BIntersectionType) referredType).effectiveType
                : candidateType);
    }
    return expandedCandidates;
}
/**
 * Type-checks an action invocation (e.g. a remote method call). A null attached expression
 * means a plain function call; otherwise the receiver is checked and dispatch happens on
 * its type.
 */
public void visit(BLangInvocation.BLangActionInvocation aInv, AnalyzerData data) {
    // For an action invocation, this only happens for client resource access actions
    if (aInv.expr == null) {
        checkFunctionInvocationExpr(aInv, data);
        return;
    }

    // Module-alias-qualified action calls are not allowed here.
    if (invalidModuleAliasUsage(aInv)) {
        return;
    }

    // Action invocation on an expression: check the receiver, then dispatch on its type.
    checkExpr(aInv.expr, symTable.noType, data);
    BLangExpression varRef = aInv.expr;

    checkActionInvocation(aInv, varRef.getBType(), data);
}
/**
 * Dispatches an action invocation based on the receiver's type. Only object (client)
 * types and record field function pointers are valid receivers; anything else is an error.
 */
private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BType type, AnalyzerData data) {
    switch (type.tag) {
        case TypeTags.OBJECT:
            checkActionInvocation(aInv, (BObjectType) type, data);
            break;
        case TypeTags.RECORD:
            checkFieldFunctionPointer(aInv, data);
            break;
        case TypeTags.NONE:
            dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name);
            data.resultType = symTable.semanticError;
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap type references and retry.
            checkActionInvocation(aInv, Types.getReferredType(type), data);
            break;
        case TypeTags.SEMANTIC_ERROR:
        default:
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, type);
            data.resultType = symTable.semanticError;
            break;
    }
}
// Reports an error and returns true when the invocation is qualified with a module alias,
// which is not allowed for method/action calls on an expression.
private boolean invalidModuleAliasUsage(BLangInvocation invocation) {
    Name pkgAlias = names.fromIdNode(invocation.pkgAlias);
    if (pkgAlias == Names.EMPTY) {
        return false;
    }
    dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE);
    return true;
}
/**
 * Type-checks a let expression: creates a dedicated scope for the let variable
 * declarations, analyzes each declaration in order, then checks the body expression
 * against the expected type within that scope.
 */
public void visit(BLangLetExpression letExpression, AnalyzerData data) {
    BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())),
            new Name(String.format("$let_symbol_%d$", data.letCount++)),
            data.env.enclPkg.symbol.pkgID, letExpression.getBType(), data.env.scope.owner,
            letExpression.pos);
    // The let expression's own environment; declarations and the body resolve within it.
    letExpression.env = SymbolEnv.createExprEnv(letExpression, data.env, letSymbol);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        semanticAnalyzer.analyzeNode((BLangNode) letVariable.definitionNode, letExpression.env);
    }
    BType exprType = checkExpr(letExpression.expr, letExpression.env, data.expType, data);
    types.checkType(letExpression, exprType, data.expType);
}
/**
 * Resolves a method-call on a non-object value as a langlib function, then validates that
 * the call does not mutate an immutable value or illegally change a list's size.
 */
private void checkInLangLib(BLangInvocation iExpr, BType varRefType, AnalyzerData data) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, Types.getReferredType(varRefType), data);
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value,
                iExpr.expr.getBType());
        data.resultType = symTable.semanticError;
        return;
    }

    // Reject mutating langlib calls on readonly values.
    if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol, data)) {
        return;
    }

    checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType, data);
}
/**
 * Returns true (and logs an error) when a langlib call would mutate a readonly value.
 * Only functions registered as modifiers for their langlib module count; {@code mergeJson}
 * and {@code strip} have carve-outs where they do not mutate the receiver.
 */
private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType,
                                                 BSymbol langLibMethodSymbol, AnalyzerData data) {
    if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) {
        return false;
    }

    String packageId = langLibMethodSymbol.pkgID.name.value;

    if (!modifierFunctions.containsKey(packageId)) {
        return false;
    }

    String funcName = langLibMethodSymbol.name.value;
    if (!modifierFunctions.get(packageId).contains(funcName)) {
        return false;
    }

    // mergeJson only mutates when the receiver is a map-shaped json.
    if (funcName.equals("mergeJson") && Types.getReferredType(varRefType).tag != TypeTags.MAP) {
        return false;
    }

    // xml:strip returns a new value rather than mutating the receiver.
    if (funcName.equals("strip") && TypeTags.isXMLTypeTag(Types.getReferredType(varRefType).tag)) {
        return false;
    }

    dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
    data.resultType = symTable.semanticError;
    return true;
}
// A list type is fixed-length when its size cannot change at runtime: a closed (non-open)
// array, a tuple without a rest type, or a union whose every member is fixed-length.
private boolean isFixedLengthList(BType type) {
    if (type.tag == TypeTags.ARRAY) {
        return ((BArrayType) type).state != BArrayState.OPEN;
    }
    if (type.tag == TypeTags.TUPLE) {
        return ((BTupleType) type).restType == null;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType member : ((BUnionType) type).getMemberTypes()) {
            if (!isFixedLengthList(member)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
/**
 * Rejects langlib calls that would change the length of a fixed-length list (e.g. push/pop
 * on a closed array) or reshape a tuple via {@code shift} when member types differ from the
 * rest type.
 */
private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType, AnalyzerData data) {
    String invocationName = iExpr.name.getValue();
    // Only length-modifying langlib functions are of interest here.
    if (!listLengthModifierFunctions.contains(invocationName)) {
        return;
    }

    if (isFixedLengthList(varRefType)) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName,
                varRefType);
        data.resultType = symTable.semanticError;
        return;
    }

    if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName,
                varRefType);
        data.resultType = symTable.semanticError;
        return;
    }
}
/**
 * Returns true when a {@code shift} call would illegally change a tuple's shape: the
 * receiver is a tuple (or a union consisting only of tuples) where some member type
 * differs from the rest type.
 */
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
    // Only `shift` can remove the leading member and hence change a tuple's shape.
    if (!invocationName.equals(FUNCTION_NAME_SHIFT)) {
        return false;
    }

    if (varRefType.tag == TypeTags.TUPLE) {
        return hasDifferentTypeThanRest((BTupleType) varRefType);
    }

    if (varRefType.tag == TypeTags.UNION) {
        // Illegal only when every member is a tuple whose members differ from its rest type.
        for (BType member : ((BUnionType) varRefType).getMemberTypes()) {
            if (member.tag != TypeTags.TUPLE || !hasDifferentTypeThanRest((BTupleType) member)) {
                return false;
            }
        }
        return true;
    }

    return false;
}
// Returns true when the tuple has a rest type and at least one fixed member's type is not
// the same as that rest type.
private boolean hasDifferentTypeThanRest(BTupleType tupleType) {
    BType restType = tupleType.restType;
    if (restType == null) {
        return false;
    }
    return tupleType.getTupleTypes().stream()
            .anyMatch(member -> !types.isSameType(restType, member));
}
/**
 * Checks an invocation on a record field: if the field holds a function pointer, the call
 * is checked as a function-pointer invocation; otherwise a matching langlib method is
 * looked up before erroring.
 */
private void checkFieldFunctionPointer(BLangInvocation iExpr, AnalyzerData data) {
    BType type = checkExpr(iExpr.expr, data);

    BLangIdentifier invocationIdentifier = iExpr.name;

    if (type == symTable.semanticError) {
        return;
    }

    BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, data.env,
            names.fromIdNode(invocationIdentifier),
            Types.getReferredType(type).tsymbol);

    if (fieldSymbol == symTable.notFoundSymbol) {
        // No such field: the name may still resolve to a langlib method.
        checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD, data,
                invocationIdentifier, type);
        return;
    }

    if (fieldSymbol.kind != SymbolKind.FUNCTION) {
        // Field exists but is not callable: again fall back to a langlib lookup.
        checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD,
                data, fieldSymbol.type);
        return;
    }

    iExpr.symbol = fieldSymbol;
    iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType);
    checkInvocationParamAndReturnType(iExpr, data);
    iExpr.functionPointerInvocation = true;
}
/**
 * Falls back to a langlib method lookup; logs the given error code when none exists,
 * otherwise validates the call does not mutate an immutable value.
 *
 * @param diagMsgArgs arguments for the diagnostic message when the lookup fails
 */
private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos,
                                        DiagnosticErrorCode errCode, AnalyzerData data, Object... diagMsgArgs) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType, data);
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, errCode, diagMsgArgs);
        data.resultType = symTable.semanticError;
    } else {
        checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol, data);
    }
}
/**
 * Type-checks an object constructor expression ({@code object { ... }}): defines the
 * synthetic class if needed, loads type-id sets from the expected type, analyzes the class
 * body (with readonly handling when applicable), and checks the implicit init invocation.
 */
@Override
public void visit(BLangObjectConstructorExpression objectCtorExpression, AnalyzerData data) {
    BLangClassDefinition classNode = objectCtorExpression.classNode;
    classNode.oceEnvData.capturedClosureEnv = data.env;

    BLangClassDefinition originalClass = classNode.oceEnvData.originalClass;
    if (originalClass.cloneRef != null && !objectCtorExpression.defined) {
        // Define the cloned class once per constructor expression.
        classNode = (BLangClassDefinition) originalClass.cloneRef;
        symbolEnter.defineClassDefinition(classNode, data.env);
        objectCtorExpression.defined = true;
    }

    BObjectType objectType;
    if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
        objectType = (BObjectType) objectCtorExpression.classNode.getBType();
        if (Types.getReferredType(objectCtorExpression.expectedType).tag == TypeTags.OBJECT) {
            // Inherit the expected object type's type-id set directly.
            BObjectType expObjType = (BObjectType) types
                    .getReferredType(objectCtorExpression.expectedType);
            objectType.typeIdSet = expObjType.typeIdSet;
        } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) {
            // Otherwise the expected type must denote exactly one object type to copy ids from.
            if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
                dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
                        objectCtorExpression.expectedType);
                data.resultType = symTable.semanticError;
                return;
            }
        }
    }

    BLangTypeInit cIExpr = objectCtorExpression.typeInit;
    BType actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, data.env);
    if (actualType == symTable.semanticError) {
        data.resultType = symTable.semanticError;
        return;
    }
    BObjectType actualObjectType = (BObjectType) actualType;
    List<BLangType> typeRefs = classNode.typeRefs;
    SymbolEnv typeDefEnv = SymbolEnv.createObjectConstructorObjectEnv(classNode, data.env);
    classNode.oceEnvData.typeInit = objectCtorExpression.typeInit;

    // NOTE(review): dlog.unmute() is called both here and after the analysis below —
    // this first call looks like it may have been intended as dlog.mute(); confirm.
    dlog.unmute();
    if (Symbols.isFlagOn(data.expType.flags, Flags.READONLY)) {
        handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, false, data);
    } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
            Flags.READONLY)) {
        handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, true, data);
    } else {
        semanticAnalyzer.analyzeNode(classNode, typeDefEnv);
    }
    dlog.unmute();

    markConstructedObjectIsolatedness(actualObjectType);

    if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
        cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
        checkInvocationParam(cIExpr.initInvocation, data);
        cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
    } else {
        // If the initializerFunc is null then this is a default constructor invocation; validate args.
        if (!isValidInitInvocation(cIExpr, (BObjectType) actualType, data)) {
            return;
        }
    }

    if (cIExpr.initInvocation.getBType() == null) {
        cIExpr.initInvocation.setBType(symTable.nilType);
    }
    BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType(), data);
    data.resultType = types.checkType(cIExpr, actualTypeInitType, data.expType);
}
/**
 * Returns true when the given type denotes exactly one object type-id set: it must be an
 * object type or a union whose object members collectively carry at most one type-id set.
 * Collected sets are accumulated into {@code typeIdSets} as a side effect.
 */
private boolean isDefiniteObjectType(BType bType, Set<BTypeIdSet> typeIdSets) {
    BType type = Types.getReferredType(bType);
    boolean isObjectOrUnion = type.tag == TypeTags.OBJECT || type.tag == TypeTags.UNION;
    if (!isObjectOrUnion) {
        return false;
    }

    if (!collectObjectTypeIds(type, typeIdSets, new HashSet<>())) {
        return false;
    }
    return typeIdSets.size() <= 1;
}
/**
 * Recursively collects the type-id sets of all object types reachable from {@code type}
 * into {@code typeIdSets}. Returns false when a non-object, non-union type is encountered.
 *
 * @param visitedTypes guards against cycles through recursive union types
 */
private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) {
    if (type.tag == TypeTags.OBJECT) {
        var objectType = (BObjectType) type;
        typeIdSets.add(objectType.typeIdSet);
        return true;
    }
    if (type.tag == TypeTags.UNION) {
        if (!visitedTypes.add(type)) {
            // Already visited this union (cyclic type); treat as handled.
            return true;
        }
        for (BType member : ((BUnionType) type).getMemberTypes()) {
            if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
/**
 * Loads the type-id set of the single object type denoted by {@code type} into
 * {@code objectType}. Returns false when the type is not a definite object type; on
 * success the object's type-id set is the collected set, or the empty set if none.
 */
private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) {
    Set<BTypeIdSet> typeIdSets = new HashSet<>();
    if (!isDefiniteObjectType(type, typeIdSets)) {
        return false;
    }
    // A definite object type contributes at most one type-id set.
    var typeIdIterator = typeIdSets.iterator();
    objectType.typeIdSet = typeIdIterator.hasNext() ? typeIdIterator.next() : BTypeIdSet.emptySet();
    return true;
}
/**
 * Type-checks a type-init (`new`) expression such as {@code new T(args)} or
 * {@code new (args)}. Dispatches on the constructed type: objects (classes),
 * streams, and unions of objects; anything else is an error. Sets
 * {@code data.resultType} to the checked type or {@code semanticError}.
 */
public void visit(BLangTypeInit cIExpr, AnalyzerData data) {
    // `new` without an explicit type cannot be inferred from `any`, and record
    // types are never constructed with `new`.
    if ((data.expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) ||
            data.expType.tag == TypeTags.RECORD) {
        dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, data.expType);
        data.resultType = symTable.semanticError;
        return;
    }
    BType actualType;
    if (cIExpr.userDefinedType != null) {
        // `new T(...)`: resolve the explicitly named type.
        actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, data.env);
    } else {
        // `new (...)`: infer the constructed type from the expected (LHS) type.
        actualType = data.expType;
    }
    if (actualType == symTable.semanticError) {
        data.resultType = symTable.semanticError;
        return;
    }
    actualType = Types.getReferredType(actualType);
    if (actualType.tag == TypeTags.INTERSECTION) {
        // e.g. `T & readonly` — construct against the effective type.
        actualType = ((BIntersectionType) actualType).effectiveType;
    }
    switch (actualType.tag) {
        case TypeTags.OBJECT:
            BObjectType actualObjectType = (BObjectType) actualType;
            // Only classes can be instantiated; abstract object types cannot.
            if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                        actualType.tsymbol);
                // Still check the args so errors nested inside them are reported.
                cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, symTable.noType, data));
                data.resultType = symTable.semanticError;
                return;
            }
            // Object-constructor expressions record this init on their class env data.
            if (actualObjectType.classDef != null && actualObjectType.classDef.flagSet.contains(Flag.OBJECT_CTOR)) {
                if (cIExpr.initInvocation != null && actualObjectType.classDef.oceEnvData.typeInit != null) {
                    actualObjectType.classDef.oceEnvData.typeInit = cIExpr;
                }
                markConstructedObjectIsolatedness(actualObjectType);
            }
            if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
                // Check the arguments against the user-defined init method and take
                // its return type (nil or error union) as the invocation type.
                cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
                checkInvocationParam(cIExpr.initInvocation, data);
                cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
            } else {
                // No init method: only a zero-argument `new` is valid.
                if (!isValidInitInvocation(cIExpr, (BObjectType) actualType, data)) {
                    return;
                }
            }
            break;
        case TypeTags.STREAM:
            // A stream constructor takes at most one argument (the iterator object).
            if (cIExpr.initInvocation.argExprs.size() > 1) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation);
                data.resultType = symTable.semanticError;
                return;
            }
            BStreamType actualStreamType = (BStreamType) actualType;
            if (actualStreamType.completionType != null) {
                // The completion type of stream<T, C> must be a subtype of error?.
                BType completionType = actualStreamType.completionType;
                if (!types.isAssignable(completionType, symTable.errorOrNilType)) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString());
                    data.resultType = symTable.semanticError;
                    return;
                }
            }
            BUnionType expectedNextReturnType =
                    createNextReturnType(cIExpr.pos, (BStreamType) actualType, data);
            if (cIExpr.initInvocation.argExprs.isEmpty()) {
                // `new stream<...>()` without an iterator is only allowed when the
                // stream can complete with nil (an unbounded stream would never end).
                if (!types.containsNilType(actualStreamType.completionType)) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_UNBOUNDED_STREAM_CONSTRUCTOR_ITERATOR,
                            expectedNextReturnType);
                    data.resultType = symTable.semanticError;
                    return;
                }
            } else {
                BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0);
                BType constructType = checkExpr(iteratorExpr, symTable.noType, data);
                // The iterator argument must be an object with a conforming next().
                if (constructType.tag != TypeTags.OBJECT) {
                    dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                            expectedNextReturnType, constructType);
                    data.resultType = symTable.semanticError;
                    return;
                }
                BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType,
                        BLangCompilerConstants.CLOSE_FUNC);
                if (closeFunc != null) {
                    // Has a close(): must satisfy the closeable-iterator abstract object.
                    BType closeableIteratorType = symTable.langQueryModuleSymbol.scope
                            .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type;
                    if (!types.isAssignable(constructType, closeableIteratorType)) {
                        dlog.error(iteratorExpr.pos,
                                DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR,
                                expectedNextReturnType, constructType);
                        data.resultType = symTable.semanticError;
                        return;
                    }
                } else {
                    // No close(): must satisfy the plain iterator abstract object.
                    BType iteratorType = symTable.langQueryModuleSymbol.scope
                            .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type;
                    if (!types.isAssignable(constructType, iteratorType)) {
                        dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                expectedNextReturnType, constructType);
                        data.resultType = symTable.semanticError;
                        return;
                    }
                }
                // next()'s return type must match record {| T value; |} | C.
                BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType);
                if (nextReturnType != null) {
                    types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType,
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                } else {
                    dlog.error(constructType.tsymbol.getPosition(),
                            DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType);
                }
            }
            if (data.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, data.expType)) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, data.expType,
                        actualType);
                data.resultType = symTable.semanticError;
                return;
            }
            data.resultType = actualType;
            return;
        case TypeTags.UNION:
            // Pick the union member whose init signature matches the given arguments.
            List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType, data);
            BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType, data);
            cIExpr.initInvocation.setBType(symTable.nilType);
            if (matchedType.tag == TypeTags.OBJECT) {
                if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
                    cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
                    checkInvocationParam(cIExpr.initInvocation, data);
                    cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                    actualType = matchedType;
                    // Fall through to the common tail below with the matched member.
                    break;
                } else {
                    if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType, data)) {
                        return;
                    }
                }
            }
            types.checkType(cIExpr, matchedType, data.expType);
            cIExpr.setBType(matchedType);
            data.resultType = matchedType;
            return;
        default:
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
            data.resultType = symTable.semanticError;
            return;
    }
    // Common tail for the object cases that `break` out of the switch.
    if (cIExpr.initInvocation.getBType() == null) {
        cIExpr.initInvocation.setBType(symTable.nilType);
    }
    BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType(), data);
    data.resultType = types.checkType(cIExpr, actualTypeInitType, data.expType);
}
/**
 * Builds the expected return type of a stream iterator's {@code next()} method:
 * {@code record {| T value; |} | C}, where T is the stream's constraint type and C
 * contains the members of its completion type (e.g. error/nil).
 */
private BUnionType createNextReturnType(Location pos, BStreamType streamType, AnalyzerData data) {
    // Anonymous closed record `record {| T value; |}`.
    BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS);
    recordType.restFieldType = symTable.noType;
    recordType.sealed = true;
    Name fieldName = Names.VALUE;
    BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC,
            fieldName, data.env.enclPkg.packageID,
            streamType.constraint, data.env.scope.owner, pos,
            VIRTUAL));
    field.type = streamType.constraint;
    recordType.fields.put(field.name.value, field);
    recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, data.env.enclPkg.packageID,
            recordType, data.env.scope.owner, pos, VIRTUAL);
    recordType.tsymbol.scope = new Scope(data.env.scope.owner);
    recordType.tsymbol.scope.define(fieldName, field.symbol);
    // Union the record with all members of the stream's completion type.
    LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
    retTypeMembers.add(recordType);
    retTypeMembers.addAll(types.getAllTypes(streamType.completionType, false));
    BUnionType unionType = BUnionType.create(null);
    unionType.addAll(retTypeMembers);
    unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
            data.env.enclPkg.symbol.pkgID, unionType, data.env.scope.owner, pos, VIRTUAL);
    return unionType;
}
/**
 * Validates a `new` invocation on an object type that has no user-defined init
 * method: such a type accepts no constructor arguments. Reports an error and sets
 * the result type to {@code semanticError} when arguments were supplied.
 * Returns true when the invocation is valid.
 */
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType, AnalyzerData data) {
    boolean hasArgs = !cIExpr.initInvocation.argExprs.isEmpty();
    boolean hasInitFunc = ((BObjectTypeSymbol) objType.tsymbol).initializerFunc != null;
    if (!hasArgs || hasInitFunc) {
        return true;
    }
    dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL,
            cIExpr.initInvocation.name.value);
    // Still type-check the arguments so errors nested inside them are reported.
    for (BLangExpression argExpr : cIExpr.initInvocation.argExprs) {
        checkExpr(argExpr, symTable.noType, data);
    }
    data.resultType = symTable.semanticError;
    return false;
}
/**
 * Computes the type of a `new` expression from the object type and the init
 * method's return type: nil yields the object itself; an error union yields
 * `objType | errors` (with nil stripped); anything else is a semantic error.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType, AnalyzerData data) {
    switch (initRetType.tag) {
        case TypeTags.NIL:
            // init() returning () means `new` produces exactly the object.
            return objType;
        case TypeTags.UNION:
            // init() may return error: `new` produces objType | <error members>.
            LinkedHashSet<BType> members = new LinkedHashSet<>();
            members.add(objType);
            members.addAll(((BUnionType) initRetType).getMemberTypes());
            members.remove(symTable.nilType);
            BUnionType constructorRetType = BUnionType.create(null, members);
            constructorRetType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0,
                    Names.EMPTY, data.env.enclPkg.symbol.pkgID, constructorRetType,
                    data.env.scope.owner, symTable.builtinPos, VIRTUAL);
            return constructorRetType;
        default:
            return symTable.semanticError;
    }
}
/**
 * Finds the object members of a union LHS whose init signature can accept the
 * given `new (...)` arguments. If the union contains exactly one object member
 * (counting object effective types of intersections), that member is returned
 * without argument matching. Abstract (non-class) object members are reported
 * as errors along the way.
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType,
                                                    AnalyzerData data) {
    // First pass: count object members, including objects behind intersections.
    int objectCount = 0;
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        BType referred = Types.getReferredType(memberType);
        if (referred.tag == TypeTags.OBJECT) {
            objectCount++;
        } else if (referred.tag == TypeTags.INTERSECTION
                && ((BIntersectionType) referred).effectiveType.tag == TypeTags.OBJECT) {
            objectCount++;
        }
    }
    boolean singleObjectInUnion = objectCount == 1;
    // Second pass: collect the object members whose init can take the arguments.
    List<BType> matchingLhsMemberTypes = new ArrayList<>();
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        BType referred = Types.getReferredType(memberType);
        if (referred.tag != TypeTags.OBJECT) {
            continue;
        }
        if ((referred.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
            // Abstract object types cannot be instantiated with `new`.
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                    lhsUnionType.tsymbol);
        }
        if (singleObjectInUnion) {
            // Exactly one object member: no ambiguity, skip argument matching.
            return Collections.singletonList(referred);
        }
        BAttachedFunction initializerFunc = ((BObjectTypeSymbol) referred.tsymbol).initializerFunc;
        if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc, data)) {
            matchingLhsMemberTypes.add(referred);
        }
    }
    return matchingLhsMemberTypes;
}
/**
 * Selects the single matching union member for a `new` expression. Exactly one
 * match yields that member's type; zero matches means the object type cannot be
 * inferred; more than one is ambiguous — both error cases set the result type
 * to {@code semanticError}.
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion,
                              AnalyzerData data) {
    int matchCount = matchingLhsMembers.size();
    if (matchCount == 1) {
        return matchingLhsMembers.get(0).tsymbol.type;
    }
    // Zero matches: nothing to infer; multiple matches: ambiguous.
    DiagnosticErrorCode errorCode = matchCount == 0
            ? DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS
            : DiagnosticErrorCode.AMBIGUOUS_TYPES;
    dlog.error(cIExpr.pos, errorCode, lhsUnion);
    data.resultType = symTable.semanticError;
    return symTable.semanticError;
}
/**
 * Checks whether the given invocation arguments could match the signature of
 * {@code function} (an object init method). Used to disambiguate which union
 * member a {@code new (args)} expression constructs.
 *
 * Fix: the named-argument assignability check had its operands reversed
 * ({@code isAssignable(param.type, argType)}), inconsistent with the
 * positional-argument check ({@code isAssignable(argType, param.type)}) a few
 * lines above; arguments must be assignable to their parameter types.
 */
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function,
                                      AnalyzerData data) {
    // Type-check all arguments up front so errors nested in them are reported.
    invocationArguments.forEach(expr -> checkExpr(expr, symTable.noType, data));
    if (function == null) {
        // No init method: only a zero-argument invocation can match.
        return invocationArguments.isEmpty();
    }
    if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
        return true;
    }
    // Split the arguments into positional and named.
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    List<BLangExpression> positionalArgs = new ArrayList<>();
    for (BLangExpression argument : invocationArguments) {
        if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            namedArgs.add((BLangNamedArgsExpression) argument);
        } else {
            positionalArgs.add(argument);
        }
    }
    List<BVarSymbol> requiredParams = function.symbol.params.stream()
            .filter(param -> !param.isDefaultable)
            .collect(Collectors.toList());
    // Quick reject: fewer arguments than required parameters can never match.
    if (requiredParams.size() > invocationArguments.size()) {
        return false;
    }
    List<BVarSymbol> defaultableParams = function.symbol.params.stream()
            .filter(param -> param.isDefaultable)
            .collect(Collectors.toList());
    // Number of declared parameters consumed positionally; named args may only
    // bind to parameters after this index.
    int givenRequiredParamCount = 0;
    for (int i = 0; i < positionalArgs.size(); i++) {
        if (function.symbol.params.size() > i) {
            givenRequiredParamCount++;
            BVarSymbol functionParam = function.symbol.params.get(i);
            if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) {
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
            continue;
        }
        // Extra positional arguments may be absorbed by a rest parameter.
        if (function.symbol.restParam != null) {
            BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
            if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) {
                return false;
            }
            continue;
        }
        return false;
    }
    for (BLangNamedArgsExpression namedArg : namedArgs) {
        boolean foundNamedArg = false;
        List<BVarSymbol> params = function.symbol.params;
        for (int i = givenRequiredParamCount; i < params.size(); i++) {
            BVarSymbol functionParam = params.get(i);
            if (!namedArg.name.value.equals(functionParam.name.value)) {
                continue;
            }
            foundNamedArg = true;
            BType namedArgExprType = checkExpr(namedArg.expr, data);
            // Fixed operand order: the argument must be assignable to the
            // parameter type, mirroring the positional check above.
            if (!types.isAssignable(namedArgExprType, functionParam.type)) {
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
        }
        if (!foundNamedArg) {
            return false;
        }
    }
    // Every required parameter must have been supplied.
    return requiredParams.isEmpty();
}
/**
 * Type-checks a multiple-wait expression `wait {a: f1, b: f2}` against the
 * expected type and records the result type on the expression, inserting an
 * implicit cast when the check succeeded.
 */
public void visit(BLangWaitForAllExpr waitForAllExpr, AnalyzerData data) {
    setResultTypeForWaitForAllExpr(waitForAllExpr, data.expType, data);
    BType resultType = data.resultType;
    waitForAllExpr.setBType(resultType);
    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), data.expType);
    }
}
/**
 * Determines the result type of a multiple-wait expression based on the expected
 * type: a record type is matched field-by-field; a map (or no/any expected type)
 * produces a map whose constraint is the union of the waited members' types;
 * type references are unwrapped and retried; anything else is incompatible.
 */
private void setResultTypeForWaitForAllExpr(BLangWaitForAllExpr waitForAllExpr, BType expType, AnalyzerData data) {
    switch (expType.tag) {
        case TypeTags.RECORD:
            checkTypesForRecords(waitForAllExpr, data);
            break;
        case TypeTags.MAP:
            // Check each member against the map's constraint, then build the
            // result map from the actual member types.
            checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint, data);
            LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypesForMap.size() == 1) {
                // Single distinct member type: no union wrapper needed.
                data.resultType = new BMapType(TypeTags.MAP,
                        memberTypesForMap.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap);
            data.resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol);
            break;
        case TypeTags.NONE:
        case TypeTags.ANY:
            // No usable expected type: infer map<union of member types>.
            checkTypesForMap(waitForAllExpr, expType, data);
            LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypes.size() == 1) {
                data.resultType =
                        new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintType = BUnionType.create(null, memberTypes);
            data.resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol);
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap the type reference and re-dispatch.
            setResultTypeForWaitForAllExpr(waitForAllExpr, Types.getReferredType(expType), data);
            break;
        default:
            dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos, data));
            data.resultType = symTable.semanticError;
            break;
    }
}
/**
 * Builds the record type a multiple-wait expression would naturally produce
 * (used for error reporting): one field per key-value pair, typed with the
 * waited future's constraint (or the symbol's own type when not a future).
 */
private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr,
                                                Location pos, AnalyzerData data) {
    BRecordType retType = new BRecordType(null, Flags.ANONYMOUS);
    List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs;
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BLangIdentifier fieldName;
        // For `{a: b}` where b is a simple var ref, the referenced variable's
        // symbol carries the future type; otherwise look up the key itself.
        if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
            fieldName = keyVal.key;
        } else {
            fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName;
        }
        BSymbol symbol = symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(fieldName));
        // Waiting on future<T> yields T.
        BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type;
        BField field = new BField(names.fromIdNode(keyVal.key), null,
                new BVarSymbol(0, names.fromIdNode(keyVal.key),
                        names.originalNameFromIdNode(keyVal.key),
                        data.env.enclPkg.packageID, fieldType, null, keyVal.pos, VIRTUAL));
        retType.fields.put(field.name.value, field);
    }
    retType.restFieldType = symTable.noType;
    retType.sealed = true;
    retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, data.env.enclPkg.packageID, retType,
            null, pos, VIRTUAL);
    return retType;
}
/**
 * Collects the distinct result types of the members of a multiple-wait
 * expression, unwrapping future constraints (waiting on future<T> yields T).
 * Insertion order is preserved via LinkedHashSet.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BLangExpression memberExpr = keyVal.keyExpr != null ? keyVal.keyExpr : keyVal.valueExpr;
        BType memberExprType = memberExpr.getBType();
        // A future member contributes its constraint type, not the future itself.
        memberTypes.add(memberExprType.tag == TypeTags.FUTURE
                ? ((BFutureType) memberExprType).constraint
                : memberExprType);
    }
    return memberTypes;
}
/**
 * Type-checks every key-value pair of a multiple-wait expression against the
 * same expected member type (the map constraint).
 */
private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType, AnalyzerData data) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : waitForAllExpr.keyValuePairs) {
        checkWaitKeyValExpr(keyVal, expType, data);
    }
}
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr, AnalyzerData data) {
List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
Map<String, BField> lhsFields = ((BRecordType) Types.getReferredType(data.expType)).fields;
if (((BRecordType) Types.getReferredType(data.expType)).sealed &&
rhsFields.size() > lhsFields.size()) {
dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, data.expType,
getWaitForAllExprReturnType(waitExpr, waitExpr.pos, data));
data.resultType = symTable.semanticError;
return;
}
for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
String key = keyVal.key.value;
if (!lhsFields.containsKey(key)) {
if (((BRecordType) Types.getReferredType(data.expType)).sealed) {
dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, data.expType);
data.resultType = symTable.semanticError;
} else {
BType restFieldType = ((BRecordType) Types.getReferredType(data.expType)).restFieldType;
checkWaitKeyValExpr(keyVal, restFieldType, data);
}
} else {
checkWaitKeyValExpr(keyVal, lhsFields.get(key).type, data);
keyVal.keySymbol = lhsFields.get(key).symbol;
}
}
checkMissingReqFieldsForWait(((BRecordType) Types.getReferredType(data.expType)),
rhsFields, waitExpr.pos);
if (symTable.semanticError != data.resultType) {
data.resultType = data.expType;
}
}
/**
 * Reports an error for every required field of the expected record type that is
 * not supplied by the multiple-wait expression's key-value pairs.
 */
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
                                          Location pos) {
    for (BField field : type.fields.values()) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            continue;
        }
        boolean fieldProvided = false;
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyValPairs) {
            if (field.name.value.equals(keyVal.key.value)) {
                fieldProvided = true;
                break;
            }
        }
        if (!fieldProvided) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Type-checks a single member of a multiple-wait expression against
 * {@code future<type>} and widens its eventual result to include error
 * (workers may complete with an error).
 */
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type, AnalyzerData data) {
    BLangExpression expr;
    if (keyVal.keyExpr == null) {
        expr = keyVal.valueExpr;
    } else {
        // Shorthand form `{a}`: resolve the key identifier and record its
        // resolved type on the key expression.
        BSymbol symbol = symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode
                (((BLangSimpleVarRef) keyVal.keyExpr).variableName));
        keyVal.keyExpr.setBType(symbol.type);
        expr = keyVal.keyExpr;
    }
    checkExpr(expr, new BFutureType(TypeTags.FUTURE, type, null), data);
    setEventualTypeForExpression(expr, type, data);
}
/**
 * Widens the future constraint of a waited (non-worker) expression to include
 * error, since the underlying computation may complete with an error, and
 * verifies that the widened type is still compatible with the expected type.
 * No-op for simple worker references and for constraints already containing error.
 */
private void setEventualTypeForExpression(BLangExpression expression,
                                          BType currentExpectedType, AnalyzerData data) {
    if (expression == null) {
        return;
    }
    // Direct worker references keep their declared result type.
    if (isSimpleWorkerReference(expression, data)) {
        return;
    }
    BFutureType futureType = (BFutureType) expression.expectedType;
    BType currentType = futureType.constraint;
    if (types.containsErrorType(currentType)) {
        // Already error-inclusive; nothing to widen.
        return;
    }
    // Eventual result is T | error.
    BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType);
    if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) &&
            !types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR,
                currentExpectedType, eventualType, expression);
    }
    futureType.constraint = eventualType;
}
/**
 * Widens the result type of a single-future wait expression to {@code T | error}
 * when the waited expression is not a simple worker reference and the result
 * does not already contain error. Errors out when the widened type is not
 * assignable to the (non-none, non-nil) expected constraint.
 */
private void setEventualTypeForWaitExpression(BLangExpression expression, Location pos, AnalyzerData data) {
    if ((data.resultType == symTable.semanticError) ||
            (types.containsErrorType(data.resultType))) {
        return;
    }
    // Direct worker references keep their declared result type.
    if (isSimpleWorkerReference(expression, data)) {
        return;
    }
    BType currentExpectedType = ((BFutureType) data.expType).constraint;
    BUnionType eventualType = BUnionType.create(null, data.resultType, symTable.errorType);
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        // No constraint to check against: just adopt the widened type.
        data.resultType = eventualType;
        return;
    }
    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        data.resultType = symTable.semanticError;
        return;
    }
    if (data.resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) data.resultType).constraint = eventualType;
    } else {
        data.resultType = eventualType;
    }
}
/**
 * Widens the result type of an alternate wait expression (`wait a | b`) to
 * {@code T | error} when any alternative references a non-worker, mirroring
 * {@link #setEventualTypeForWaitExpression}.
 *
 * Fix: the original checked {@code types.containsErrorType(data.resultType)}
 * twice — once inside the first guard and again immediately after — making the
 * second check dead code; the duplicate has been removed.
 */
private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos,
                                                       AnalyzerData data) {
    if ((data.resultType == symTable.semanticError) ||
            (expression.getKind() != NodeKind.BINARY_EXPR) ||
            (types.containsErrorType(data.resultType))) {
        return;
    }
    // Only widen when at least one alternative is not a simple worker reference.
    if (!isReferencingNonWorker((BLangBinaryExpr) expression, data)) {
        return;
    }
    BType currentExpectedType = ((BFutureType) data.expType).constraint;
    BUnionType eventualType = BUnionType.create(null, data.resultType, symTable.errorType);
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        // No constraint to check against: just adopt the widened type.
        data.resultType = eventualType;
        return;
    }
    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        data.resultType = symTable.semanticError;
        return;
    }
    if (data.resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) data.resultType).constraint = eventualType;
    } else {
        data.resultType = eventualType;
    }
}
/**
 * Returns true when the expression is a simple variable reference that resolves
 * to a worker in the current environment.
 */
private boolean isSimpleWorkerReference(BLangExpression expression, AnalyzerData data) {
    if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression;
    // An unresolved reference cannot denote a worker.
    if (simpleVarRef.symbol == null) {
        return false;
    }
    return workerExists(data.env, simpleVarRef.variableName.value);
}
/**
 * Returns true when either side of a binary alternate-wait expression
 * references a non-worker.
 */
private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr, AnalyzerData data) {
    return isReferencingNonWorker(binaryExpr.lhsExpr, data)
            || isReferencingNonWorker(binaryExpr.rhsExpr, data);
}
/**
 * Returns true when the expression references something other than a worker.
 * Binary expressions are checked recursively; a simple variable reference is a
 * worker only when its name resolves to one in the current environment;
 * any other expression kind counts as a non-worker reference.
 */
private boolean isReferencingNonWorker(BLangExpression expression, AnalyzerData data) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.BINARY_EXPR) {
        return isReferencingNonWorker((BLangBinaryExpr) expression, data);
    }
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol varRefSymbol = ((BLangSimpleVarRef) expression).symbol;
        String varRefSymbolName = varRefSymbol.getName().value;
        if (workerExists(data.env, varRefSymbolName)) {
            return false;
        }
    }
    return true;
}
/**
 * Type-checks a ternary conditional `cond ? thenExpr : elseExpr`. The condition
 * narrows types in each branch (truth for then, falsity for else). With no
 * expected type, the result is the wider of the branch types, or their union
 * when neither subsumes the other.
 */
public void visit(BLangTernaryExpr ternaryExpr, AnalyzerData data) {
    BType condExprType = checkExpr(ternaryExpr.expr, this.symTable.booleanType, data);
    // Narrow the condition's truth into the then-branch environment.
    SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, data.env);
    BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, data.expType, data);
    // Narrow the condition's falsity into the else-branch environment.
    SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, data.env, false);
    BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, data.expType, data);
    if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
            elseType == symTable.semanticError) {
        data.resultType = symTable.semanticError;
    } else if (data.expType == symTable.noType) {
        // No expected type: infer from the branches.
        if (types.isAssignable(elseType, thenType)) {
            data.resultType = thenType;
        } else if (types.isAssignable(thenType, elseType)) {
            data.resultType = elseType;
        } else {
            data.resultType = BUnionType.create(null, thenType, elseType);
        }
    } else {
        data.resultType = data.expType;
    }
}
/**
 * Type-checks a single wait expression `wait f` (or alternate wait `wait a | b`).
 * The operand is checked against future<expType>; the result is the future's
 * constraint (or union of constraints), widened with error via the eventual-type
 * helpers when non-worker references are involved.
 */
public void visit(BLangWaitExpr waitExpr, AnalyzerData data) {
    // The waited expression must be a future of the expected type.
    data.expType = new BFutureType(TypeTags.FUTURE, data.expType, null);
    checkExpr(waitExpr.getExpression(), data.expType, data);
    if (data.resultType.tag == TypeTags.UNION) {
        // Alternate wait: unwrap each future member's constraint.
        LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) data.resultType, new LinkedHashSet<>());
        if (memberTypes.size() == 1) {
            data.resultType = memberTypes.toArray(new BType[0])[0];
        } else {
            data.resultType = BUnionType.create(null, memberTypes);
        }
    } else if (data.resultType != symTable.semanticError) {
        // Waiting on future<T> yields T.
        data.resultType = ((BFutureType) data.resultType).constraint;
    }
    BLangExpression waitFutureExpression = waitExpr.getExpression();
    if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) {
        setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos, data);
    } else {
        setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos, data);
    }
    waitExpr.setBType(data.resultType);
    if (data.resultType != null && data.resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) data.expType).constraint);
    }
}
/**
 * Adds each member of the union to {@code memberTypes}, unwrapping future
 * members to their constraint type (waiting on future<T> yields T).
 */
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : unionType.getMemberTypes()) {
        memberTypes.add(memberType.tag == TypeTags.FUTURE
                ? ((BFutureType) memberType).constraint
                : memberType);
    }
    return memberTypes;
}
/**
 * Type-checks a trap expression. The result is the trapped expression's type
 * unioned with error. Worker-receive operands use a two-phase protocol: on the
 * first visit the check is deferred (isTypeChecked reset), and on the revisit
 * the types recorded on the nodes are reused.
 */
@Override
public void visit(BLangTrapExpr trapExpr, AnalyzerData data) {
    // A null inner type means this is the first time we see this node.
    boolean firstVisit = trapExpr.expr.getBType() == null;
    BType actualType;
    BType exprType = checkExpr(trapExpr.expr, data.expType, data);
    boolean definedWithVar = data.expType == symTable.noType;
    if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            // Defer: worker receive types are only known on the second pass.
            data.isTypeChecked = false;
            data.resultType = data.expType;
            return;
        } else {
            // Second pass: reuse the types already recorded on the nodes.
            data.expType = trapExpr.getBType();
            exprType = trapExpr.expr.getBType();
        }
    }
    if (data.expType == symTable.semanticError || exprType == symTable.semanticError) {
        actualType = symTable.semanticError;
    } else {
        // trap E has type typeof(E) | error, flattening an existing union.
        LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
        if (exprType.tag == TypeTags.UNION) {
            resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
        } else {
            resultTypes.add(exprType);
        }
        resultTypes.add(symTable.errorType);
        actualType = BUnionType.create(null, resultTypes);
    }
    data.resultType = types.checkType(trapExpr, actualType, data.expType);
    if (definedWithVar && data.resultType != null && data.resultType != symTable.semanticError) {
        types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), data.resultType);
    }
}
/**
 * Type-checks a binary expression. Handles the special alternate-wait form
 * (future | future), applies type narrowing for && and ||, pre-checks operands
 * against float/decimal expected types to pick the right numeric kind, and
 * resolves the operator symbol through a cascade of resolver lookups.
 */
public void visit(BLangBinaryExpr binaryExpr, AnalyzerData data) {
    // `a | b` inside a wait: both sides are futures; the result is their union.
    if (data.expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
        BType lhsResultType = checkExpr(binaryExpr.lhsExpr, data.expType, data);
        BType rhsResultType = checkExpr(binaryExpr.rhsExpr, data.expType, data);
        if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
            data.resultType = symTable.semanticError;
            return;
        }
        data.resultType = BUnionType.create(null, lhsResultType, rhsResultType);
        return;
    }
    SymbolEnv rhsExprEnv;
    BType lhsType;
    BType referredExpType = Types.getReferredType(binaryExpr.expectedType);
    // With a float/decimal(-ish) expected type, try checking the operand against
    // it first so numeric literals take the expected kind.
    if (referredExpType.tag == TypeTags.FLOAT || referredExpType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(referredExpType)) {
        lhsType = checkAndGetType(binaryExpr.lhsExpr, data.env, binaryExpr, data);
    } else {
        lhsType = checkExpr(binaryExpr.lhsExpr, data);
    }
    // && narrows the LHS truth into the RHS; || narrows its falsity.
    if (binaryExpr.opKind == OperatorKind.AND) {
        rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, data.env, true);
    } else if (binaryExpr.opKind == OperatorKind.OR) {
        rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, data.env, true);
    } else {
        rhsExprEnv = data.env;
    }
    BType rhsType;
    if (referredExpType.tag == TypeTags.FLOAT || referredExpType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(referredExpType)) {
        rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr, data);
    } else {
        rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv, data);
    }
    BType actualType = symTable.semanticError;
    switch (binaryExpr.opKind) {
        case ADD:
            // xml + xml concatenates; the result constrains to the union of
            // both sides' constituents. Falls through to operator resolution
            // for all non-xml additions.
            BType leftConstituent = getXMLConstituents(lhsType);
            BType rightConstituent = getXMLConstituents(rhsType);
            if (leftConstituent != null && rightConstituent != null) {
                actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null);
                break;
            }
            // fall through
        default:
            if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
                // Operator resolution cascade: exact binary op, then shift,
                // bitwise, arithmetic, equality, comparison, and range lookups.
                BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryBitwiseOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                            binaryExpr, data.env);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getRangeOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    // No operator found: pick a more specific message for
                    // int / float-or-decimal division and modulo.
                    DiagnosticErrorCode errorCode = DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES;
                    if ((binaryExpr.opKind == OperatorKind.DIV || binaryExpr.opKind == OperatorKind.MOD) &&
                            lhsType.tag == TypeTags.INT &&
                            (rhsType.tag == TypeTags.DECIMAL || rhsType.tag == TypeTags.FLOAT)) {
                        errorCode = DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES_INT_FLOAT_DIVISION;
                    }
                    dlog.error(binaryExpr.pos, errorCode, binaryExpr.opKind, lhsType, rhsType);
                } else {
                    binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
                    actualType = opSymbol.type.getReturnType();
                }
            }
    }
    data.resultType = types.checkType(binaryExpr, actualType, data.expType);
}
/**
 * Returns true when the expected type is a nullable union containing a float or
 * decimal member (e.g. {@code float?}), in which case binary-expression operands
 * are pre-checked against the expected type to pick the right numeric kind.
 *
 * Fix: the original additionally required {@code expectedType.tag != TypeTags.ANY},
 * which is vacuously true once {@code tag == TypeTags.UNION}; the dead condition
 * is removed and the explicit iterator replaced with a for-each.
 */
private boolean isOptionalFloatOrDecimal(BType expectedType) {
    if (expectedType.tag != TypeTags.UNION || !expectedType.isNullable()) {
        return false;
    }
    for (BType memberType : ((BUnionType) expectedType).getMemberTypes()) {
        if (memberType.tag == TypeTags.FLOAT || memberType.tag == TypeTags.DECIMAL) {
            return true;
        }
    }
    return false;
}
/**
 * Speculatively type-checks a clone of {@code expr} against the binary
 * expression's expected type with diagnostics muted; if that succeeds without
 * errors, the real expression is checked against the expected type, otherwise
 * it is checked with no expected type. Used to steer numeric operands toward a
 * float/decimal expected type without committing to spurious errors.
 */
private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr, AnalyzerData data) {
    // Mute the diagnostic log and snapshot the error count so the speculative
    // check leaves no trace.
    boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
    data.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    // Check a clone so the real node is not mutated by the trial run.
    expr.cloneAttempt++;
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType, data);
    data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    // Only unmute if we were the ones who muted (not a nested speculative check).
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    if (errorCount == 0 && exprCompatibleType != symTable.semanticError) {
        // The expected type fits: check the real expression against it.
        return checkExpr(expr, env, binaryExpr.expectedType, data);
    } else {
        // Fall back to checking with no expected type.
        return checkExpr(expr, env, data);
    }
}
/**
 * Walks outwards from {@code env} to the environment owned by {@code node} and
 * returns a clone of its enclosing environment — i.e. the scope that existed
 * just before the input clause was introduced. Falls back to a fresh
 * environment for {@code node} when no enclosing scope is found.
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
    SymbolEnv current = env;
    while (current != null && current.node != node) {
        current = current.enclEnv;
    }
    if (current == null || current.enclEnv == null) {
        return new SymbolEnv(node, null);
    }
    return current.enclEnv.createClone();
}
/**
 * Produces the environment visible after a query join clause: a clone of the
 * env chain up to {@code node}, re-rooted so that symbols introduced by the
 * last input (from/join) clause are excluded. Falls back to a fresh environment
 * when {@code node} is not found on the chain.
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
    SymbolEnv clone = env.createClone();
    while (clone != null && clone.node != node) {
        clone = clone.enclEnv;
    }
    if (clone == null) {
        return new SymbolEnv(node, null);
    }
    // Re-root the found env just before the most recent from/join clause.
    clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv));
    return clone;
}
/**
 * Walks outwards from {@code env} and returns the first node that is a query
 * input clause (from or join), or null when none exists on the chain.
 */
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        NodeKind kind = current.node.getKind();
        if (kind == NodeKind.FROM || kind == NodeKind.JOIN) {
            return current.node;
        }
    }
    return null;
}
/** A {@code transactional} expression is always of type boolean. */
public void visit(BLangTransactionalExpr transactionalExpr, AnalyzerData data) {
    BType actualType = symTable.booleanType;
    data.resultType = types.checkType(transactionalExpr, actualType, data.expType);
}
/** A {@code commit} expression yields {@code error?} (i.e. {@code error|()}). */
public void visit(BLangCommitExpr commitExpr, AnalyzerData data) {
    data.resultType = types.checkType(commitExpr,
            BUnionType.create(null, symTable.errorType, symTable.nilType), data.expType);
}
/**
 * Returns the constituent (item) type of an XML type: the constraint of {@code xml<T>},
 * the type itself for an XML non-sequence subtype, or {@code null} otherwise.
 */
private BType getXMLConstituents(BType bType) {
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.XML) {
        // xml<T> -> T
        return ((BXMLType) type).constraint;
    }
    if (TypeTags.isXMLNonSequenceType(type.tag)) {
        return type;
    }
    return null;
}
/**
 * Type-checks an elvis expression {@code lhs ?: rhs}: the LHS must be nullable; its non-nil
 * members form the LHS contribution, and the result combines LHS and RHS types.
 */
public void visit(BLangElvisExpr elvisExpr, AnalyzerData data) {
    BType lhsType = checkExpr(elvisExpr.lhsExpr, data);
    BType actualType = symTable.semanticError;
    if (lhsType != symTable.semanticError) {
        if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
            // Drop nil from the LHS union: `x ?: y` only produces the non-nil members of x.
            LinkedHashSet<BType> nonNilMembers = new LinkedHashSet<>();
            for (BType memberType : ((BUnionType) lhsType).getMemberTypes()) {
                if (memberType.tag != TypeTags.NIL) {
                    nonNilMembers.add(memberType);
                }
            }
            actualType = nonNilMembers.size() == 1
                    ? nonNilMembers.iterator().next()
                    : BUnionType.create(null, nonNilMembers);
        } else {
            // Elvis requires a nullable LHS.
            dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS,
                    lhsType);
        }
    }
    BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, data.expType, data);
    BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, data.expType,
            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
        data.resultType = symTable.semanticError;
        return;
    }
    if (data.expType != symTable.noType) {
        data.resultType = data.expType;
        return;
    }
    // No expected type: prefer the more general side, otherwise union both.
    if (types.isAssignable(rhsReturnType, lhsReturnType)) {
        data.resultType = lhsReturnType;
    } else if (types.isAssignable(lhsReturnType, rhsReturnType)) {
        data.resultType = rhsReturnType;
    } else {
        data.resultType = BUnionType.create(null, lhsReturnType, rhsReturnType);
    }
}
@Override
public void visit(BLangGroupExpr groupExpr, AnalyzerData data) {
    // A parenthesized expression has exactly the type of its inner expression.
    BLangExpression innerExpr = groupExpr.expression;
    data.resultType = checkExpr(innerExpr, data.expType, data);
}
/**
 * Type-checks a typedesc access expression, resolving its type node lazily and wrapping the
 * resolved type in {@code typedesc<T>} unless it is already a typedesc or is unresolved.
 */
public void visit(BLangTypedescExpr accessExpr, AnalyzerData data) {
    if (accessExpr.resolvedType == null) {
        accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, data.env);
    }
    BType resolvedType = accessExpr.resolvedType;
    int resolvedTag = resolvedType.tag;
    final BType actualType = (resolvedTag == TypeTags.TYPEDESC || resolvedTag == TypeTags.NONE)
            ? resolvedType
            : new BTypedescType(resolvedType, null);
    data.resultType = types.checkType(accessExpr, actualType, data.expType);
}
/**
 * Collects the basic numeric types represented by the values of a finite type. Integer subtypes
 * collapse to {@code int}; json/anydata/any contribute all three numeric types; nested finite
 * types are expanded recursively.
 */
public LinkedHashSet<BType> getBasicNumericTypesInFiniteType(BType referredType) {
    BFiniteType finiteType = (BFiniteType) referredType;
    Set<BLangExpression> valueSpace = finiteType.getValueSpace();
    LinkedHashSet<BType> numericTypes = new LinkedHashSet<>(valueSpace.size());
    for (BLangExpression value : valueSpace) {
        BType memberType = types.getReferredType(value.getBType());
        int typeTag = memberType.tag;
        if (TypeTags.isIntegerTypeTag(typeTag)) {
            // Every integer subtype maps to plain int.
            numericTypes.add(symTable.intType);
        } else if (typeTag == TypeTags.FLOAT || typeTag == TypeTags.DECIMAL) {
            numericTypes.add(value.getBType());
        } else if (typeTag == TypeTags.JSON || typeTag == TypeTags.ANYDATA || typeTag == TypeTags.ANY) {
            // These already cover all basic numeric types; nothing further can be added.
            numericTypes.add(symTable.intType);
            numericTypes.add(symTable.floatType);
            numericTypes.add(symTable.decimalType);
            break;
        } else if (typeTag == TypeTags.FINITE) {
            numericTypes.addAll(getBasicNumericTypesInFiniteType(memberType));
        }
    }
    return numericTypes;
}
/**
 * Collects the basic numeric types among the members of a union type. Integer subtypes collapse
 * to {@code int}; json/anydata/any contribute all three numeric types; finite members are
 * expanded via {@link #getBasicNumericTypesInFiniteType(BType)}.
 */
public LinkedHashSet<BType> getBasicNumericTypesInUnionType(BType referredType) {
    LinkedHashSet<BType> memberTypes = ((BUnionType) referredType).getMemberTypes();
    LinkedHashSet<BType> numericTypes = new LinkedHashSet<>(memberTypes.size());
    for (BType memberType : memberTypes) {
        BType referredMember = types.getReferredType(memberType);
        int typeTag = referredMember.tag;
        if (TypeTags.isIntegerTypeTag(typeTag)) {
            // Every integer subtype maps to plain int.
            numericTypes.add(symTable.intType);
        } else if (typeTag == TypeTags.FLOAT || typeTag == TypeTags.DECIMAL) {
            numericTypes.add(memberType);
        } else if (typeTag == TypeTags.JSON || typeTag == TypeTags.ANYDATA || typeTag == TypeTags.ANY) {
            // These already cover all basic numeric types; nothing further can be added.
            numericTypes.add(symTable.intType);
            numericTypes.add(symTable.floatType);
            numericTypes.add(symTable.decimalType);
            break;
        } else if (typeTag == TypeTags.FINITE) {
            numericTypes.addAll(getBasicNumericTypesInFiniteType(referredMember));
        }
    }
    return numericTypes;
}
/**
 * Builds a new numeric literal with the unary {@code +}/{@code -} sign folded into the literal
 * value, copying kind, position and type from the operand literal.
 */
public BLangNumericLiteral createNumericLiteralFromUnaryExpr(BLangUnaryExpr unaryExpr) {
    BLangNumericLiteral operandLiteral = (BLangNumericLiteral) unaryExpr.expr;
    Object signedValue = operandLiteral.value;
    String signedText = String.valueOf(signedValue);
    // Fold the unary sign into the literal text.
    if (OperatorKind.ADD.equals(unaryExpr.operator)) {
        signedText = "+" + signedText;
    } else if (OperatorKind.SUB.equals(unaryExpr.operator)) {
        signedText = "-" + signedText;
    }
    // Re-parse so the stored value carries the sign as well.
    if (signedValue instanceof Long) {
        signedValue = Long.parseLong(signedText);
    } else if (signedValue instanceof Double) {
        signedValue = Double.parseDouble(signedText);
    } else if (signedValue instanceof String) {
        signedValue = signedText;
    }
    BLangNumericLiteral signedLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
    signedLiteral.kind = operandLiteral.kind;
    signedLiteral.pos = operandLiteral.pos;
    signedLiteral.setBType(operandLiteral.getBType());
    signedLiteral.value = signedValue;
    signedLiteral.originalValue = signedText;
    signedLiteral.expectedType = operandLiteral.getBType();
    return signedLiteral;
}
/**
 * Creates a singleton finite type whose only value is the signed numeric literal derived from
 * the unary expression, and records an implicit cast to the expected type.
 */
public BType createFiniteTypeForNumericUnaryExpr(BLangUnaryExpr unaryExpr, AnalyzerData data) {
    BLangNumericLiteral signedLiteral = createNumericLiteralFromUnaryExpr(unaryExpr);
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, 0, Names.EMPTY,
            data.env.enclPkg.symbol.pkgID, null, data.env.scope.owner, unaryExpr.pos, SOURCE);
    // Singleton type containing exactly the signed literal value.
    BFiniteType singletonType = new BFiniteType(finiteTypeSymbol);
    singletonType.addValue(signedLiteral);
    finiteTypeSymbol.type = singletonType;
    types.setImplicitCastExpr(unaryExpr, unaryExpr.expr.getBType(), data.expType);
    return singletonType;
}
/**
 * Silently checks the unary expression against the given type; for a numeric-literal operand,
 * the sign is first folded into a constructed literal before checking.
 */
public BType checkCompatibilityWithConstructedNumericLiteral(BLangUnaryExpr unaryExpr, BType referredType,
                                                             AnalyzerData data) {
    if (unaryExpr.expr.getKind() == NodeKind.NUMERIC_LITERAL) {
        return silentTypeCheckExpr(createNumericLiteralFromUnaryExpr(unaryExpr), referredType, data);
    }
    // Non-literal operand: check the operand expression as-is.
    return silentTypeCheckExpr(unaryExpr.expr, referredType, data);
}
/**
 * Collects the proper integer subtypes (e.g. {@code int:Signed8}, {@code byte}) among the
 * members of the given union, skipping plain {@code int} members.
 *
 * <p>Fixed: the return type was a raw {@code LinkedHashSet}; it is now parameterized as
 * {@code LinkedHashSet<BType>}, which is what every element actually is.
 *
 * @param expectedType the union whose members are inspected
 * @return the integer subtypes found, in member order
 */
public LinkedHashSet<BType> getIntSubtypesInUnionType(BUnionType expectedType) {
    LinkedHashSet<BType> intTypesInUnion = new LinkedHashSet<>(expectedType.getMemberTypes().size());
    for (BType type : expectedType.getMemberTypes()) {
        BType referredType = types.getReferredType(type);
        // Skip plain `int` (tag INT on the member itself); keep only proper integer subtypes.
        if (type.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(referredType.tag)) {
            intTypesInUnion.add(referredType);
        }
    }
    return intTypesInUnion;
}
/**
 * With diagnostics muted, checks a clone of the unary expression against each member of the
 * union; returns true as soon as one check resolves to a finite type.
 */
public boolean silentCompatibleFiniteMembersInUnionTypeCheck(BLangUnaryExpr unaryExpr, BUnionType expectedType,
                                                             AnalyzerData data) {
    boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
    data.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    try {
        for (BType memberType : expectedType.getMemberTypes()) {
            // Always check a clone so the real node is never mutated by a failed attempt.
            BType checkedType = checkExpr(nodeCloner.cloneNode(unaryExpr), types.getReferredType(memberType),
                    data);
            if (checkedType.tag == TypeTags.FINITE) {
                return true;
            }
        }
        return false;
    } finally {
        // Restore the dlog state on every exit path.
        unmuteDlog(data, prevNonErrorLoggingCheck, prevErrorCount);
    }
}
/**
 * Restores the analyzer/diagnostic-log state saved before a silent (muted) check: the
 * non-error-logging flag and the error count, unmuting only if an enclosing silent check
 * had not already muted the log.
 */
private void unmuteDlog(AnalyzerData data, boolean prevNonErrorLoggingCheck, int prevErrorCount) {
    data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(prevErrorCount);
    if (prevNonErrorLoggingCheck) {
        // An outer silent check owns the mute; leave it in place.
        return;
    }
    this.dlog.unmute();
}
/**
 * Type-checks a clone of {@code expr} against {@code referredType} with diagnostics muted,
 * restoring the previous dlog state afterwards. The real node is left untouched.
 */
public BType silentTypeCheckExpr(BLangExpression expr, BType referredType, AnalyzerData data) {
    boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
    data.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    try {
        return checkExpr(nodeCloner.cloneNode(expr), referredType, data);
    } finally {
        unmuteDlog(data, prevNonErrorLoggingCheck, prevErrorCount);
    }
}
/**
 * Type-checks a unary expression: {@code untaint} keeps the operand type, {@code typeof}
 * produces a typedesc of it, and all other operators are resolved by the dedicated helper.
 */
public void visit(BLangUnaryExpr unaryExpr, AnalyzerData data) {
    BType actualType = symTable.semanticError;
    OperatorKind operator = unaryExpr.operator;
    if (OperatorKind.UNTAINT.equals(operator)) {
        BType operandType = checkExpr(unaryExpr.expr, data);
        if (operandType != symTable.semanticError) {
            actualType = operandType;
        }
    } else if (OperatorKind.TYPEOF.equals(operator)) {
        BType operandType = checkExpr(unaryExpr.expr, data);
        if (operandType != symTable.semanticError) {
            actualType = new BTypedescType(operandType, null);
        }
    } else {
        // Arithmetic / bitwise / logical unary operators.
        actualType = getActualTypeForOtherUnaryExpr(unaryExpr, data);
    }
    data.resultType = types.checkType(unaryExpr, actualType, data.expType);
}
/**
 * Type-checks a type cast/conversion expression {@code <T> expr}.
 * Analyzes annotations on the target type, silently probes the source expression against the
 * target type on a clone, then checks the real expression either against the target type (if
 * the probe succeeded or the expression requires inference) or with no expected type, and
 * finally validates that the cast itself is permitted.
 */
public void visit(BLangTypeConversionExpr conversionExpr, AnalyzerData data) {
    BType actualType = symTable.semanticError;
    // Annotations attached to the cast target are analyzed as TYPE attach points.
    for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) {
        annAttachment.attachPoints.add(AttachPoint.Point.TYPE);
        semanticAnalyzer.analyzeNode(annAttachment, data.env);
    }
    BLangExpression expr = conversionExpr.expr;
    if (conversionExpr.typeNode == null) {
        // No target type node: only annotation attachments; check the expression directly.
        if (!conversionExpr.annAttachments.isEmpty()) {
            data.resultType = checkExpr(expr, data.expType, data);
        }
        return;
    }
    if (conversionExpr.typeNode.getKind() == NodeKind.FINITE_TYPE_NODE) {
        semanticAnalyzer.analyzeNode(conversionExpr.typeNode, data.env);
    }
    BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos,
            symResolver.resolveTypeNode(conversionExpr.typeNode, data.env), data);
    conversionExpr.targetType = targetType;
    // Silent probe: mute diagnostics and check a clone against the target type.
    boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
    data.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), targetType, data);
    // Restore the dlog state; the probe's error count is captured first.
    data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) {
        // Probe succeeded (or inference is required): check the real node against the target.
        checkExpr(expr, targetType, data);
    } else {
        // Probe failed: check without an expected type so errors mention the inferred type.
        checkExpr(expr, symTable.noType, data);
    }
    BType exprType = expr.getBType();
    if (types.isTypeCastable(expr, exprType, targetType, data.env)) {
        actualType = targetType;
    } else if (exprType != symTable.semanticError && exprType != symTable.noType) {
        dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType);
    }
    data.resultType = types.checkType(conversionExpr, actualType, data.expType);
}
/**
 * Type-checks a lambda: its type is the type of the wrapped function, and the current
 * environment is captured as the closure environment.
 */
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction, AnalyzerData data) {
    BLangFunction function = bLangLambdaFunction.function;
    bLangLambdaFunction.setBType(function.getBType());
    // Capture the enclosing environment for later closure desugaring.
    bLangLambdaFunction.capturedClosureEnv = data.env.createClone();
    symResolver.checkRedeclaredSymbols(bLangLambdaFunction);
    // Only register the lambda when this is a real (non-silent) check.
    if (!data.nonErrorLoggingCheck) {
        data.env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    data.resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), data.expType);
}
/**
 * Type-checks an arrow function. Parameter and return types are inferred from the expected
 * (LHS) type, which must be a single concrete function type — possibly the sole invokable
 * member of a union.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction, AnalyzerData data) {
    BType expectedType = Types.getReferredType(data.expType);
    if (expectedType.tag == TypeTags.UNION) {
        // A union target works only if it contains exactly one invokable member.
        BType soleInvokable = null;
        int invokableCount = 0;
        for (BType memberType : ((BUnionType) expectedType).getMemberTypes()) {
            if (memberType.tag == TypeTags.INVOKABLE) {
                soleInvokable = memberType;
                invokableCount++;
            }
        }
        if (invokableCount == 1) {
            expectedType = soleInvokable;
        }
    }
    if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) {
        // No concrete function type to infer from.
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
        data.resultType = symTable.semanticError;
        return;
    }
    BInvokableType expectedInvocation = (BInvokableType) expectedType;
    populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes, data);
    bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction,
            expectedInvocation.retType, data));
    // If the expected return type is unspecified, infer it from the body expression.
    if (expectedInvocation.retType.tag == TypeTags.NONE) {
        expectedInvocation.retType = bLangArrowFunction.body.expr.getBType();
    }
    for (BLangSimpleVariable simpleVariable : bLangArrowFunction.params) {
        if (simpleVariable.symbol != null) {
            symResolver.checkForUniqueSymbol(simpleVariable.pos, data.env, simpleVariable.symbol);
        }
    }
    data.resultType = bLangArrowFunction.funcType = expectedInvocation;
}
/**
 * Type-checks an XML qualified name. The QName itself is string-typed; the prefix (if any)
 * is resolved to an XML namespace symbol, possibly via a string constant in another package.
 */
public void visit(BLangXMLQName bLangXMLQName, AnalyzerData data) {
    String prefix = bLangXMLQName.prefix.value;
    data.resultType = types.checkType(bLangXMLQName, symTable.stringType, data.expType);
    // `xmlns="..."` inside an attribute: a default namespace declaration.
    if (data.env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
            && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) data.env.node).isNamespaceDeclr = true;
        return;
    }
    // `xmlns:foo="..."` inside an attribute: a prefixed namespace declaration.
    if (data.env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) data.env.node).isNamespaceDeclr = true;
        return;
    }
    // `xmlns` used as a prefix anywhere else is illegal.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // No prefix: nothing to resolve.
    if (bLangXMLQName.prefix.value.isEmpty()) {
        return;
    }
    BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(data.env, names.fromIdNode(bLangXMLQName.prefix));
    if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        return;
    }
    if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        logUndefinedSymbolError(bLangXMLQName.pos, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // The prefix may name an imported package whose constant supplies the namespace.
    if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) {
        xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value,
                (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos, data);
    }
    if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) {
        data.resultType = symTable.semanticError;
        return;
    }
    bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
    bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI;
}
/**
 * Resolves an XML namespace from a string constant in another package. The constant's value
 * is expected in expanded-QName form {@code {namespaceURI}localPart}.
 *
 * <p>Fixed: the local part was extracted with {@code substring(e)}, which kept the closing
 * {@code '}'} as the first character of the extracted name; it now starts after the brace.
 *
 * @return the namespace symbol, or {@code null} (with an error logged) on failure
 */
private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix,
                                                 BPackageSymbol pkgSymbol, Location pos, AnalyzerData data) {
    BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, data.env,
            names.fromString(localname), SymTag.CONSTANT);
    if (constSymbol == symTable.notFoundSymbol) {
        // Suppress the error for recovery (missing) nodes.
        if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) {
            dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname);
        }
        return null;
    }
    BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol;
    if (constantSymbol.literalType.tag != TypeTags.STRING) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType);
        return null;
    }
    String constVal = (String) constantSymbol.value.value;
    int s = constVal.indexOf('{');
    int e = constVal.lastIndexOf('}');
    // A non-empty `{...}` segment must be present for a valid expanded QName.
    if (e > s + 1) {
        pkgSymbol.isUsed = true;
        String nsURI = constVal.substring(s + 1, e);
        // Skip the closing '}' so the local part does not include it.
        String local = constVal.substring(e + 1);
        return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos,
                SOURCE);
    }
    dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname);
    return null;
}
/**
 * Type-checks an XML attribute: both its name (a QName) and its value must be strings, and the
 * attribute is then defined as a symbol in the enclosing environment.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute, AnalyzerData data) {
    SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, data.env);
    BLangXMLQName attrName = (BLangXMLQName) bLangXMLAttribute.name;
    checkExpr(attrName, xmlAttributeEnv, symTable.stringType, data);
    // An unprefixed attribute belongs to no namespace.
    if (attrName.prefix.value.isEmpty()) {
        attrName.namespaceURI = null;
    }
    checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType, data);
    symbolEnter.defineNode(bLangXMLAttribute, data.env);
}
/**
 * Type-checks an XML element literal: namespace-declaring attributes are checked first (they
 * affect resolution of everything else), then the remaining attributes, then in-scope
 * namespaces are recorded, tags validated, children normalized, and the element type matched
 * against the expected type.
 */
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral, AnalyzerData data) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, data.env);
    // Track which namespace prefixes the element actually uses.
    Set<String> usedPrefixes = new HashSet<>();
    BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix;
    if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) {
        usedPrefixes.add(elemNamePrefix.value);
    }
    // First pass: check xmlns attributes so namespaces are in scope for the second pass.
    for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) {
        if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) {
            BLangXMLQuotedString value = attribute.value;
            // A namespace URI must be a single literal fragment; interpolation is not allowed.
            if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) {
                dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION);
            }
            checkExpr(attribute, xmlElementEnv, symTable.noType, data);
        }
        BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix;
        if (prefix != null && !prefix.value.isEmpty()) {
            usedPrefixes.add(prefix.value);
        }
    }
    // Second pass: check all non-namespace attributes.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) {
            checkExpr(attribute, xmlElementEnv, symTable.noType, data);
        }
    });
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
    Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
    // The default namespace is stored separately from the prefixed ones.
    if (namespaces.containsKey(defaultNs)) {
        bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
    }
    // Only record namespaces whose prefixes are actually used by this element.
    for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) {
        if (usedPrefixes.contains(nsEntry.getKey().value)) {
            bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue());
        }
    }
    validateTags(bLangXMLElementLiteral, xmlElementEnv, data);
    // Merge adjacent children of the same kind (e.g. consecutive text nodes).
    bLangXMLElementLiteral.modifiedChildren =
            concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv, data);
    if (data.expType == symTable.noType) {
        data.resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, data.expType);
        return;
    }
    data.resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType,
            data.expType, data);
    // A readonly result makes the whole element subtree immutable.
    if (Symbols.isFlagOn(data.resultType.flags, Flags.READONLY)) {
        markChildrenAsImmutable(bLangXMLElementLiteral, data);
    }
}
/**
 * Returns true if the attribute declares an XML namespace: either the default form
 * {@code xmlns="..."} or the prefixed form {@code xmlns:foo="..."}.
 */
private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) {
    BLangXMLQName attrName = (BLangXMLQName) attribute.name;
    String prefix = attrName.prefix.value;
    if (prefix.isEmpty()) {
        // Default namespace declaration: local name is exactly "xmlns".
        return attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
    }
    // Prefixed declaration: the prefix itself is "xmlns".
    return prefix.equals(XMLConstants.XMLNS_ATTRIBUTE);
}
/**
 * Maps an XML literal node kind to its XML subtype; anything that is not an element, text or
 * processing-instruction literal is treated as a comment.
 */
public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) {
    switch (childXMLExpressions.getKind()) {
        case XML_ELEMENT_LITERAL:
            return symTable.xmlElementType;
        case XML_TEXT_LITERAL:
            return symTable.xmlTextType;
        case XML_PI_LITERAL:
            return symTable.xmlPIType;
        default:
            return symTable.xmlCommentType;
    }
}
/**
 * Returns the sequence type for a single XML subtype: element/comment/PI items become
 * {@code xml<T>}, while text items stay {@code xml:Text} (a text sequence is still text).
 */
public BType getXMLSequenceType(BType xmlSubType) {
    if (xmlSubType.tag == TypeTags.XML_ELEMENT) {
        return new BXMLType(symTable.xmlElementType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_COMMENT) {
        return new BXMLType(symTable.xmlCommentType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_PI) {
        return new BXMLType(symTable.xmlPIType, null);
    }
    return symTable.xmlTextType;
}
/**
 * Type-checks an XML sequence literal. The expected type must be xml, xml:Text, a union of
 * those, or no type; each item is checked and the overall sequence type is derived from the
 * distinct item types.
 */
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral, AnalyzerData data) {
    BType expType = Types.getReferredType(data.expType);
    // Only xml, xml:Text, unions thereof, or "no expected type" are acceptable targets.
    if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT
            && expType != symTable.noType) {
        dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, data.expType,
                "XML Sequence");
        data.resultType = symTable.semanticError;
        return;
    }
    // Collect the distinct checked types of the sequence items.
    List<BType> xmlTypesInSequence = new ArrayList<>();
    for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) {
        data.resultType = checkExpr(expressionItem, data.expType, data);
        if (!xmlTypesInSequence.contains(data.resultType)) {
            xmlTypesInSequence.add(data.resultType);
        }
    }
    if (expType.tag == TypeTags.XML || expType == symTable.noType) {
        // A homogeneous sequence gets the precise sequence type; mixed sequences are plain xml.
        if (xmlTypesInSequence.size() == 1) {
            data.resultType = getXMLSequenceType(xmlTypesInSequence.get(0));
            return;
        }
        data.resultType = symTable.xmlType;
        return;
    }
    if (expType.tag == TypeTags.XML_TEXT) {
        data.resultType = symTable.xmlTextType;
        return;
    }
    // Union target: every member must be xml or xml:Text.
    for (BType item : ((BUnionType) expType).getMemberTypes()) {
        item = Types.getReferredType(item);
        if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) {
            dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    expType, symTable.xmlType);
            data.resultType = symTable.semanticError;
            return;
        }
    }
    data.resultType = symTable.xmlType;
}
/**
 * Type-checks an XML text literal. A literal consisting of a single empty string is the empty
 * sequence ({@code xml:Never}); anything else is {@code xml:Text}.
 */
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral, AnalyzerData data) {
    List<BLangExpression> textFragments = bLangXMLTextLiteral.textFragments;
    checkStringTemplateExprs(textFragments, data);
    BLangExpression firstFragment = textFragments.get(0);
    boolean isEmptyLiteral = textFragments.size() == 1
            && firstFragment.getKind() == NodeKind.LITERAL
            && ((String) ((BLangLiteral) firstFragment).value).isEmpty();
    BType literalType = isEmptyLiteral ? symTable.xmlNeverType : symTable.xmlTextType;
    data.resultType = types.checkType(bLangXMLTextLiteral, literalType, data.expType);
}
/** Type-checks an XML comment literal ({@code xml:Comment}). */
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral, AnalyzerData data) {
    checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments, data);
    if (data.expType != symTable.noType) {
        // Match the comment subtype against the expected XML type.
        data.resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos,
                symTable.xmlCommentType, data.expType, data);
        return;
    }
    data.resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, data.expType);
}
/** Type-checks an XML processing-instruction literal ({@code xml:ProcessingInstruction}). */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral, AnalyzerData data) {
    // The PI target must be a string; the data fragments must be string-compatible.
    checkExpr(bLangXMLProcInsLiteral.target, symTable.stringType, data);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments, data);
    if (data.expType != symTable.noType) {
        data.resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType,
                data.expType, data);
        return;
    }
    data.resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, data.expType);
}
/** An XML quoted string is string-typed; every fragment must be string-compatible. */
public void visit(BLangXMLQuotedString bLangXMLQuotedString, AnalyzerData data) {
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments, data);
    BType actualType = symTable.stringType;
    data.resultType = types.checkType(bLangXMLQuotedString, actualType, data.expType);
}
/** A string template literal is string-typed; every interpolation must be string-compatible. */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral, AnalyzerData data) {
    checkStringTemplateExprs(stringTemplateLiteral.exprs, data);
    BType actualType = symTable.stringType;
    data.resultType = types.checkType(stringTemplateLiteral, actualType, data.expType);
}
/**
 * Type-checks a raw template literal: determines the target object type, then validates the
 * literal's string fragments and insertions against that type's `strings` and `insertions`
 * fields.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral, AnalyzerData data) {
    BType type = determineRawTemplateLiteralType(rawTemplateLiteral, data.expType);
    if (type == symTable.semanticError) {
        data.resultType = type;
        return;
    }
    BObjectType literalType = (BObjectType) Types.getReferredType(type);
    // Both field checks always run so all diagnostics are reported, even if the first fails.
    boolean stringsErrored = evaluateRawTemplateExprs(rawTemplateLiteral.strings,
            literalType.fields.get("strings").type, INVALID_NUM_STRINGS, rawTemplateLiteral.pos, data);
    boolean insertionsErrored = evaluateRawTemplateExprs(rawTemplateLiteral.insertions,
            literalType.fields.get("insertions").type, INVALID_NUM_INSERTIONS, rawTemplateLiteral.pos, data);
    data.resultType = (stringsErrored || insertionsErrored) ? symTable.semanticError : type;
}
/**
 * Determines the object type a raw template literal should be checked against. With no
 * expected type (or an `any`-containing one) the default RawTemplate type is used; otherwise
 * the expected type must be a non-class RawTemplate subtype with at most two fields and no
 * methods.
 */
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
    // No constraint (or `any`): fall back to the built-in RawTemplate type.
    if (expType == symTable.noType || containsAnyType(expType)) {
        return symTable.rawTemplateType;
    }
    BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
    BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
            DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE);
    if (type == symTable.semanticError) {
        return type;
    }
    // Template literals can only target abstract object types, not classes.
    if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
        return symTable.semanticError;
    }
    BObjectType litObjType = (BObjectType) Types.getReferredType(type);
    BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;
    // Only the `strings` and `insertions` fields are permitted.
    if (litObjType.fields.size() > 2) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType);
        type = symTable.semanticError;
    }
    // The target type may not declare methods.
    if (!objTSymbol.attachedFuncs.isEmpty()) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType);
        type = symTable.semanticError;
    }
    return type;
}
/**
 * Checks the raw template's expressions against the list type of the corresponding field
 * (`strings` or `insertions`), which must be an array or tuple type (possibly behind an
 * intersection).
 *
 * @param code the diagnostic to report when the expression count does not match the list type
 * @return true if any expression failed its type check; false otherwise (including when only
 *         the count was wrong — that case reports an error but returns false)
 */
private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType,
                                         DiagnosticCode code, Location pos, AnalyzerData data) {
    BType listType = Types.getReferredType(fieldType);
    // Unwrap an intersection (e.g. `T & readonly`) to its effective type.
    listType = listType.tag != TypeTags.INTERSECTION ? listType :
            ((BIntersectionType) listType).effectiveType;
    boolean errored = false;
    if (listType.tag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) listType;
        // A fixed-length array pins the exact number of expressions.
        if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) {
            dlog.error(pos, code, arrayType.size, exprs.size());
            return false;
        }
        for (BLangExpression expr : exprs) {
            errored = (checkExpr(expr, arrayType.eType, data) == symTable.semanticError) || errored;
        }
    } else if (listType.tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) listType;
        final int size = exprs.size();
        final int requiredItems = tupleType.tupleTypes.size();
        // Too few members, or too many without a rest type, cannot match the tuple.
        if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
            dlog.error(pos, code, requiredItems, size);
            return false;
        }
        int i;
        List<BType> memberTypes = tupleType.tupleTypes;
        for (i = 0; i < requiredItems; i++) {
            errored = (checkExpr(exprs.get(i), memberTypes.get(i), data) == symTable.semanticError) ||
                    errored;
        }
        // Remaining expressions are checked against the tuple's rest type.
        if (size > requiredItems) {
            for (; i < size; i++) {
                errored = (checkExpr(exprs.get(i), tupleType.restType, data) == symTable.semanticError) ||
                        errored;
            }
        }
    } else {
        // determineRawTemplateLiteralType guarantees a list type here; anything else is a compiler bug.
        throw new IllegalStateException("Expected a list type, but found: " + listType);
    }
    return errored;
}
/**
 * Returns true if the referred type is {@code any} itself or a union containing {@code any}.
 */
private boolean containsAnyType(BType bType) {
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().contains(symTable.anyType);
    }
    return type == symTable.anyType;
}
/**
 * Picks the single RawTemplate-assignable member of a union expected type. A non-union type is
 * returned as-is; no compatible member yields the union itself (the later check will fail);
 * multiple compatible members are ambiguous and produce an error.
 */
private BType getCompatibleRawTemplateType(BType bType, Location pos) {
    BType expType = Types.getReferredType(bType);
    if (expType.tag != TypeTags.UNION) {
        return bType;
    }
    List<BType> compatibleTypes = new ArrayList<>();
    for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
        if (types.isAssignable(memberType, symTable.rawTemplateType)) {
            compatibleTypes.add(memberType);
        }
    }
    switch (compatibleTypes.size()) {
        case 0:
            return expType;
        case 1:
            return compatibleTypes.get(0);
        default:
            // More than one candidate: the assignment is ambiguous.
            dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
                    expType);
            return symTable.semanticError;
    }
}
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression, AnalyzerData data) {
    // The type of `...expr` is the type of the wrapped expression.
    BLangExpression innerExpr = bLangRestArgExpression.expr;
    data.resultType = checkExpr(innerExpr, data.expType, data);
}
/**
 * An inferred typedesc default (`<>`) is only valid where a typedesc is expected; its type is
 * that expected typedesc.
 */
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr, AnalyzerData data) {
    BType referredType = Types.getReferredType(data.expType);
    if (referredType.tag == TypeTags.TYPEDESC) {
        data.resultType = referredType;
        return;
    }
    dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, data.expType, symTable.typeDesc);
    data.resultType = symTable.semanticError;
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression, AnalyzerData data) {
    // A named argument `name = expr` takes the type of its value expression.
    BLangExpression valueExpr = bLangNamedArgsExpression.expr;
    data.resultType = checkExpr(valueExpr, data.env, data.expType, data);
    bLangNamedArgsExpression.setBType(valueExpr.getBType());
}
/**
 * Type-checks a match expression: each pattern's variable is defined in a dedicated block
 * scope, each clause body is checked against the expected type, and the overall type is the
 * union of all possible result types.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression, AnalyzerData data) {
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), data.env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv, data);
    bLangMatchExpression.patternClauses.forEach(pattern -> {
        // `_`-style ignore variables are not defined as symbols.
        if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(pattern.variable, matchExprEnv);
        }
        checkExpr(pattern.expr, matchExprEnv, data.expType, data);
        pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv));
    });
    LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
    BType actualType;
    if (matchExprTypes.contains(symTable.semanticError)) {
        actualType = symTable.semanticError;
    } else if (matchExprTypes.size() == 1) {
        actualType = matchExprTypes.iterator().next();
    } else {
        actualType = BUnionType.create(null, matchExprTypes);
    }
    data.resultType = types.checkType(bLangMatchExpression, actualType, data.expType);
}
@Override
public void visit(BLangCheckedExpr checkedExpr, AnalyzerData data) {
    // Record whether this `check` occurs inside a query before delegating to the shared logic.
    boolean withinQuery = isWithinQuery(data);
    data.checkWithinQueryExpr = withinQuery;
    visitCheckAndCheckPanicExpr(checkedExpr, data);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr, AnalyzerData data) {
    // checkpanic shares the checking logic with check; see visitCheckAndCheckPanicExpr.
    visitCheckAndCheckPanicExpr(checkedExpr, data);
}
/**
 * Type-checks a query expression. Pushes the appropriate environment and the final (select)
 * clause, visits every clause, resolves the overall query type from the select expression and
 * the target type, then pops the stacks in reverse order. The push/pop pairing here is
 * critical: the query env/clause stacks must be balanced on every path.
 */
@Override
public void visit(BLangQueryExpr queryExpr, AnalyzerData data) {
    if (data.breakToParallelQueryEnv) {
        // Nested/parallel query: reuse the environment saved by the enclosing query.
        data.queryEnvs.push(data.prevEnvs.peek());
    } else {
        data.queryEnvs.push(data.env);
        data.prevEnvs.push(data.env);
    }
    data.queryFinalClauses.push(queryExpr.getSelectClause());
    List<BLangNode> clauses = queryExpr.getQueryClauses();
    // The first clause is always the initial `from`, whose collection drives error typing.
    BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection();
    clauses.forEach(clause -> clause.accept(this, data));
    BType actualType = resolveQueryType(data.queryEnvs.peek(),
            ((BLangSelectClause) data.queryFinalClauses.peek()).expression,
            collectionNode.getBType(), data.expType, queryExpr, data);
    actualType = (actualType == symTable.semanticError) ? actualType :
            types.checkType(queryExpr.pos, actualType, data.expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    // Unwind in reverse order of the pushes above.
    data.queryFinalClauses.pop();
    data.queryEnvs.pop();
    if (!data.breakToParallelQueryEnv) {
        data.prevEnvs.pop();
    }
    if (actualType.tag == TypeTags.TABLE) {
        // Tables constructed by a query carry an inline-defined constraint that must validate.
        BTableType tableType = (BTableType) actualType;
        tableType.constraintPos = queryExpr.pos;
        tableType.isTypeInlineDefined = true;
        if (!validateTableType(tableType, data)) {
            data.resultType = symTable.semanticError;
            return;
        }
    }
    data.checkWithinQueryExpr = false;
    data.resultType = actualType;
}
/** True while query clauses are being analyzed: both query stacks are then non-empty. */
private boolean isWithinQuery(AnalyzerData data) {
    if (data.queryEnvs.isEmpty()) {
        return false;
    }
    return !data.queryFinalClauses.isEmpty();
}
private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
                               BType targetType, BLangQueryExpr queryExpr, AnalyzerData data) {
    // Resolves the overall type of a query expression by checking the select
    // expression against each candidate member of the expected type.
    // Candidates exclude error and nil members — those are produced by the
    // query machinery, not by the select clause.
    List<BType> resultTypes = types.getAllTypes(targetType, true).stream()
            .filter(t -> !types.isAssignable(t, symTable.errorType))
            .filter(t -> !types.isAssignable(t, symTable.nilType))
            .collect(Collectors.toList());
    // If every member was eliminated, infer with no contextual type.
    if (resultTypes.isEmpty()) {
        resultTypes.add(symTable.noType);
    }
    BType actualType = symTable.semanticError;
    List<BType> selectTypes = new ArrayList<>();
    List<BType> resolvedTypes = new ArrayList<>();
    BType selectType, resolvedType;
    for (BType type : resultTypes) {
        switch (type.tag) {
            case TypeTags.ARRAY:
                // Check the select expression against the array element type.
                selectType = checkExpr(selectExp, env, ((BArrayType) type).eType, data);
                resolvedType = new BArrayType(selectType);
                break;
            case TypeTags.TABLE:
                // Constraint is error-lifted before checking the select expr.
                selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint,
                        true, true), data);
                resolvedType = symTable.tableType;
                break;
            case TypeTags.STREAM:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint,
                        true, true), data);
                resolvedType = symTable.streamType;
                break;
            case TypeTags.STRING:
            case TypeTags.XML:
                selectType = checkExpr(selectExp, env, type, data);
                resolvedType = selectType;
                break;
            case TypeTags.NONE:
            default:
                // No contextual type: derive the query's basic type from the
                // iterated collection instead.
                selectType = checkExpr(selectExp, env, type, data);
                resolvedType = getNonContextualQueryType(selectType, collectionType);
                break;
        }
        if (selectType != symTable.semanticError) {
            if (resolvedType.tag == TypeTags.STREAM) {
                queryExpr.isStream = true;
            }
            if (resolvedType.tag == TypeTags.TABLE) {
                queryExpr.isTable = true;
            }
            selectTypes.add(selectType);
            resolvedTypes.add(resolvedType);
        }
    }
    if (selectTypes.size() == 1) {
        // Exactly one candidate type-checked: build the final query type,
        // folding in the collection's completion/error type where applicable.
        BType errorType = getErrorType(collectionType, queryExpr, data);
        selectType = selectTypes.get(0);
        if (queryExpr.isStream) {
            return new BStreamType(TypeTags.STREAM, selectType, errorType, null);
        } else if (queryExpr.isTable) {
            actualType = getQueryTableType(queryExpr, selectType);
        } else {
            actualType = resolvedTypes.get(0);
        }
        if (errorType != null && errorType.tag != TypeTags.NIL) {
            return BUnionType.create(null, actualType, errorType);
        } else {
            return actualType;
        }
    } else if (selectTypes.size() > 1) {
        // More than one member type admitted the select expression.
        dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes);
        return actualType;
    } else {
        // None type-checked; diagnostics were already logged by checkExpr.
        return actualType;
    }
}
/**
 * Builds the table type for a table-producing query. When a key specifier is
 * present the specified fields are validated/marked readonly and the result
 * is widened with error (key conflicts are possible at runtime).
 */
private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) {
    BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    if (queryExpr.fieldNameIdentifierList.isEmpty()) {
        return tableType;
    }
    validateKeySpecifier(queryExpr.fieldNameIdentifierList, constraintType);
    markReadOnlyForConstraintType(constraintType);
    List<String> fieldNames = new ArrayList<>();
    for (IdentifierNode identifier : queryExpr.fieldNameIdentifierList) {
        fieldNames.add(((BLangIdentifier) identifier).value);
    }
    tableType.fieldNameList = fieldNames;
    return BUnionType.create(null, tableType, symTable.errorType);
}
/**
 * Checks that every key-specifier field exists on the table constraint type;
 * fields that do exist are implicitly marked readonly.
 */
private void validateKeySpecifier(List<IdentifierNode> fieldList, BType constraintType) {
    for (IdentifierNode identifier : fieldList) {
        String fieldName = identifier.getValue();
        BField field = types.getTableConstraintField(constraintType, fieldName);
        if (field == null) {
            dlog.error(identifier.getPosition(), DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER,
                    fieldName, constraintType);
            continue;
        }
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            field.symbol.flags |= Flags.READONLY;
        }
    }
}
/**
 * Marks a sealed record constraint type readonly when every one of its
 * fields is already readonly; all other types are left untouched.
 */
private void markReadOnlyForConstraintType(BType constraintType) {
    if (constraintType.tag != TypeTags.RECORD) {
        return;
    }
    BRecordType recordType = (BRecordType) constraintType;
    for (BField field : recordType.fields.values()) {
        boolean fieldReadOnly = Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
        if (!fieldReadOnly) {
            // One mutable field is enough to disqualify the whole record.
            return;
        }
    }
    if (recordType.sealed) {
        recordType.flags |= Flags.READONLY;
        recordType.tsymbol.flags |= Flags.READONLY;
    }
}
private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr, AnalyzerData data) {
    // Determines the error/completion component contributed by iterating the
    // given collection type; returns null when there is no error component.
    if (collectionType.tag == TypeTags.SEMANTIC_ERROR) {
        return null;
    }
    BType returnType = null, errorType = null;
    switch (collectionType.tag) {
        case TypeTags.STREAM:
            // Streams carry their completion type directly.
            errorType = ((BStreamType) collectionType).completionType;
            break;
        case TypeTags.OBJECT:
            // Iterable objects: derive from the object's next() return type.
            returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType);
            break;
        default:
            // Otherwise resolve the langlib iterator() method and use the
            // result type of invoking next() on its return value.
            BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType,
                    names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC), data.env);
            if (itrSymbol == this.symTable.notFoundSymbol) {
                return null;
            }
            BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol;
            returnType = types.getResultTypeOfNextInvocation(
                    (BObjectType) Types.getReferredType(invokableSymbol.retType));
    }
    List<BType> errorTypes = new ArrayList<>();
    if (returnType != null) {
        // Keep only the error members of next()'s return type.
        types.getAllTypes(returnType, true).stream()
                .filter(t -> types.isAssignable(t, symTable.errorType))
                .forEach(errorTypes::add);
    }
    // A checked stream query inside another query expression may also
    // complete with an error propagated from the enclosing context.
    if (data.checkWithinQueryExpr && queryExpr.isStream) {
        if (errorTypes.isEmpty()) {
            errorTypes.add(symTable.nilType);
        }
        errorTypes.add(symTable.errorType);
    }
    if (!errorTypes.isEmpty()) {
        if (errorTypes.size() == 1) {
            errorType = errorTypes.get(0);
        } else {
            errorType = BUnionType.create(null, errorTypes.toArray(new BType[0]));
        }
    }
    return errorType;
}
/**
 * Derives a query's result type from the iterated collection's basic type
 * when there is no contextually expected type: table/stream keep their
 * generic forms, xml and arrays wrap the select type, strings stay string.
 */
private BType getNonContextualQueryType(BType staticType, BType basicType) {
    switch (basicType.tag) {
        case TypeTags.TABLE:
            return symTable.tableType;
        case TypeTags.STREAM:
            return symTable.streamType;
        case TypeTags.XML:
            return new BXMLType(staticType, null);
        case TypeTags.STRING:
            return symTable.stringType;
        default:
            return new BArrayType(staticType);
    }
}
@Override
public void visit(BLangQueryAction queryAction, AnalyzerData data) {
    // Type-checks a query action (`from ... do { ... }`). Same env-stack
    // handling as query expressions: reuse the previous env for nested
    // queries, otherwise push a fresh pair.
    if (data.breakToParallelQueryEnv) {
        data.queryEnvs.push(data.prevEnvs.peek());
    } else {
        data.queryEnvs.push(data.env);
        data.prevEnvs.push(data.env);
    }
    BLangDoClause doClause = queryAction.getDoClause();
    data.queryFinalClauses.push(doClause);
    List<BLangNode> clauses = queryAction.getQueryClauses();
    clauses.forEach(clause -> clause.accept(this, data));
    // The do-clause body is a statement block; analyze it in a block env
    // derived from the narrowed query env.
    semanticAnalyzer.analyzeNode(doClause.body, SymbolEnv.createBlockEnv(doClause.body, data.queryEnvs.peek()),
            data.prevEnvs);
    // A query action evaluates to error|().
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    data.resultType =
            types.checkType(doClause.pos, actualType, data.expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    // Unwind exactly the stacks pushed above.
    data.queryFinalClauses.pop();
    data.queryEnvs.pop();
    if (!data.breakToParallelQueryEnv) {
        data.prevEnvs.pop();
    }
}
@Override
public void visit(BLangFromClause fromClause, AnalyzerData data) {
    // If the collection is itself a query (possibly parenthesized), the
    // nested query must break out to the parallel env.
    boolean prevBreakToParallelEnv = data.breakToParallelQueryEnv;
    BLangExpression collection = fromClause.collection;
    NodeKind collectionKind = collection.getKind();
    boolean collectionIsQuery = collectionKind == NodeKind.QUERY_EXPR
            || (collectionKind == NodeKind.GROUP_EXPR
            && ((BLangGroupExpr) collection).expression.getKind() == NodeKind.QUERY_EXPR);
    if (collectionIsQuery) {
        data.breakToParallelQueryEnv = true;
    }
    // Swap the top query env for one narrowed to this clause.
    SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, data.queryEnvs.pop());
    fromClause.env = fromEnv;
    data.queryEnvs.push(fromEnv);
    checkExpr(fromClause.collection, fromEnv, data);
    types.setInputClauseTypedBindingPatternType(fromClause);
    handleInputClauseVariables(fromClause, fromEnv);
    data.breakToParallelQueryEnv = prevBreakToParallelEnv;
}
@Override
public void visit(BLangJoinClause joinClause, AnalyzerData data) {
    // Mirrors the from-clause handling, plus visiting the optional on-clause.
    boolean prevBreakEnv = data.breakToParallelQueryEnv;
    BLangExpression collection = joinClause.collection;
    NodeKind collectionKind = collection.getKind();
    boolean collectionIsQuery = collectionKind == NodeKind.QUERY_EXPR
            || (collectionKind == NodeKind.GROUP_EXPR
            && ((BLangGroupExpr) collection).expression.getKind() == NodeKind.QUERY_EXPR);
    if (collectionIsQuery) {
        data.breakToParallelQueryEnv = true;
    }
    SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, data.queryEnvs.pop());
    joinClause.env = joinEnv;
    data.queryEnvs.push(joinEnv);
    checkExpr(joinClause.collection, joinEnv, data);
    types.setInputClauseTypedBindingPatternType(joinClause);
    handleInputClauseVariables(joinClause, joinEnv);
    if (joinClause.onClause != null) {
        ((BLangOnClause) joinClause.onClause).accept(this, data);
    }
    data.breakToParallelQueryEnv = prevBreakEnv;
}
@Override
public void visit(BLangLetClause letClause, AnalyzerData data) {
    // Swap the top query env for one narrowed to the let clause, then
    // define each let variable inside it.
    SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, data.queryEnvs.pop());
    letClause.env = letEnv;
    data.queryEnvs.push(letEnv);
    for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
        semanticAnalyzer.analyzeNode((BLangNode) letVariable.definitionNode, letEnv);
    }
}
@Override
public void visit(BLangWhereClause whereClause, AnalyzerData data) {
    // A where clause is a filter: type-check against boolean and install the
    // truth-narrowed env for the remaining clauses.
    whereClause.env = handleFilterClauses(whereClause.expression, data);
}
@Override
public void visit(BLangSelectClause selectClause, AnalyzerData data) {
    // Swap the top query env for one narrowed to the select clause.
    SymbolEnv selectEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, data.queryEnvs.pop());
    selectClause.env = selectEnv;
    data.queryEnvs.push(selectEnv);
}
@Override
public void visit(BLangDoClause doClause, AnalyzerData data) {
    // Swap the top query env for one narrowed to the do clause.
    SymbolEnv doEnv = SymbolEnv.createTypeNarrowedEnv(doClause, data.queryEnvs.pop());
    doClause.env = doEnv;
    data.queryEnvs.push(doEnv);
}
@Override
public void visit(BLangOnConflictClause onConflictClause, AnalyzerData data) {
    // The on-conflict expression must be an error value.
    BType exprType = checkExpr(onConflictClause.expression, data.queryEnvs.peek(), symTable.errorType, data);
    boolean isError = types.isAssignable(exprType, symTable.errorType);
    if (!isError) {
        dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED,
                symTable.errorType, exprType);
    }
}
@Override
public void visit(BLangLimitClause limitClause, AnalyzerData data) {
    // The limit expression must evaluate to an int.
    BType exprType = checkExpr(limitClause.expression, data.queryEnvs.peek(), data);
    boolean isInt = types.isAssignable(exprType, symTable.intType);
    if (!isInt) {
        dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.intType, exprType);
    }
}
@Override
public void visit(BLangOnClause onClause, AnalyzerData data) {
    // The lhs expression sees only symbols defined before the join's input
    // node; the rhs sees those defined after it. lhs must be assignable to rhs.
    BLangNode joinNode = getLastInputNodeFromEnv(data.queryEnvs.peek());
    onClause.lhsEnv = getEnvBeforeInputNode(data.queryEnvs.peek(), joinNode);
    BType lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv, data);
    onClause.rhsEnv = getEnvAfterJoinNode(data.queryEnvs.peek(), joinNode);
    SymbolEnv rhsEnv = onClause.rhsEnv == null ? data.queryEnvs.peek() : onClause.rhsEnv;
    BType rhsType = checkExpr(onClause.rhsExpr, rhsEnv, data);
    if (!types.isAssignable(lhsType, rhsType)) {
        dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
    }
}
@Override
public void visit(BLangOrderByClause orderByClause, AnalyzerData data) {
    orderByClause.env = data.queryEnvs.peek();
    // Every order-by key must be of an ordered type.
    for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
        BLangExpression orderKeyExpr = (BLangExpression) orderKeyNode.getOrderKey();
        BType exprType = checkExpr(orderKeyExpr, orderByClause.env, data);
        if (!types.isOrderedType(exprType, false)) {
            dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED);
        }
    }
}
@Override
public void visit(BLangDo doNode, AnalyzerData data) {
    // Only the on-fail clause (when present) needs expression-level checking
    // here; the do body is handled by statement analysis.
    if (doNode.onFailClause == null) {
        return;
    }
    doNode.onFailClause.accept(this, data);
}
// Visits each statement of the on-fail body in order with this checker.
public void visit(BLangOnFailClause onFailClause, AnalyzerData data) {
    onFailClause.body.stmts.forEach(stmt -> stmt.accept(this, data));
}
/**
 * Type-checks a filter expression (where / on-condition) against boolean and
 * returns the env narrowed under the assumption that the filter is true.
 */
private SymbolEnv handleFilterClauses(BLangExpression filterExpression, AnalyzerData data) {
    checkExpr(filterExpression, data.queryEnvs.peek(), symTable.booleanType, data);
    BType actualType = filterExpression.getBType();
    if (actualType.tag == TypeTags.TUPLE) {
        dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.booleanType, actualType);
    }
    // Replace the top query env with the truth-narrowed one.
    SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, data.queryFinalClauses.peek(),
            data.queryEnvs.pop());
    data.queryEnvs.push(filterEnv);
    return filterEnv;
}
/**
 * Defines the binding variable of a from/join clause in the given env.
 * `var` declarations take the inferred element type; explicitly typed ones
 * must admit the inferred type, otherwise an incompatibility is reported and
 * the declared type is used to continue analysis.
 */
private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) {
    if (bLangInputClause.variableDefinitionNode == null) {
        // Not a variable-binding input clause; nothing to define.
        return;
    }
    BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable();
    if (bLangInputClause.isDeclaredWithVar) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv);
    if (types.isAssignable(bLangInputClause.varType, typeNodeType)) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    // Avoid cascading diagnostics when the type node itself failed to resolve.
    if (typeNodeType != symTable.semanticError) {
        dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                bLangInputClause.varType, typeNodeType);
    }
    semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv);
}
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, AnalyzerData data) {
    // Shared type-checking for `check` and `checkpanic`: types the
    // sub-expression, then splits its type into error and non-error parts.
    String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
    BLangExpression exprWithCheckingKeyword = checkedExpr.expr;
    boolean firstVisit = exprWithCheckingKeyword.getBType() == null;
    // Pick the type to check the sub-expression against: widen the expected
    // type with error only when a muted dry-run against it fails.
    BType checkExprCandidateType;
    if (data.expType == symTable.noType) {
        checkExprCandidateType = symTable.noType;
    } else {
        BType exprType = getCandidateType(checkedExpr, data.expType, data);
        if (exprType == symTable.semanticError) {
            checkExprCandidateType = BUnionType.create(null, data.expType, symTable.errorType);
        } else {
            checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(data.expType);
        }
    }
    // `check` with a union-of-simple-basic-types context may be rewritten to
    // an ensureType() call (see rewriteWithEnsureTypeFunc).
    if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(data.expType)) {
        rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType, data);
    }
    BType exprType = checkExpr(checkedExpr.expr, checkExprCandidateType, data);
    // Worker receive is typed over two passes; defer on the first visit.
    if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            data.isTypeChecked = false;
            data.resultType = data.expType;
            return;
        } else {
            data.expType = checkedExpr.getBType();
            exprType = checkedExpr.expr.getBType();
        }
    }
    boolean isErrorType = types.isAssignable(Types.getReferredType(exprType), symTable.errorType);
    if (Types.getReferredType(exprType).tag != TypeTags.UNION && !isErrorType) {
        if (exprType.tag == TypeTags.READONLY) {
            // readonly splits into error plus (any & readonly).
            checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{
                add(symTable.errorType);
            }};
            data.resultType = symTable.anyAndReadonly;
            return;
        } else if (exprType != symTable.semanticError) {
            // No error component at all: `check`/`checkpanic` is redundant.
            dlog.warning(checkedExpr.expr.pos,
                    DiagnosticWarningCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS,
                    operatorType);
            data.resultType = checkedExpr.expr.getBType();
        }
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // Partition the sub-expression's member types into error and non-error.
    List<BType> errorTypes = new ArrayList<>();
    List<BType> nonErrorTypes = new ArrayList<>();
    if (!isErrorType) {
        for (BType memberType : types.getAllTypes(exprType, true)) {
            if (memberType.tag == TypeTags.READONLY) {
                errorTypes.add(symTable.errorType);
                nonErrorTypes.add(symTable.anyAndReadonly);
                continue;
            }
            if (types.isAssignable(memberType, symTable.errorType)) {
                errorTypes.add(memberType);
                continue;
            }
            nonErrorTypes.add(memberType);
        }
    } else {
        errorTypes.add(exprType);
    }
    checkedExpr.equivalentErrorTypeList = errorTypes;
    if (errorTypes.isEmpty()) {
        // Union with no error member: warn and bail out.
        dlog.warning(checkedExpr.expr.pos,
                DiagnosticWarningCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // The checked expression's type is the union of the non-error members
    // (never, when the sub-expression can only produce errors).
    BType actualType;
    if (nonErrorTypes.size() == 0) {
        actualType = symTable.neverType;
    } else if (nonErrorTypes.size() == 1) {
        actualType = nonErrorTypes.get(0);
    } else {
        actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes));
    }
    data.resultType = types.checkType(checkedExpr, actualType, data.expType);
}
private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type, AnalyzerData data) {
    // Rewrites `check <laxExpr>` into `check <laxExpr>.ensureType(typedesc)`
    // so runtime conversion semantics apply for lax types (e.g. json access).
    BType rhsType = getCandidateType(checkedExpr, type, data);
    if (rhsType == symTable.semanticError) {
        // Retry the dry run with no contextual type.
        rhsType = getCandidateType(checkedExpr, rhsType, data);
    }
    BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType);
    if (!types.isLax(candidateLaxType)) {
        // Only lax-typed sub-expressions are rewritten.
        return;
    }
    // Build the ensureType(typedesc<expType>) invocation node and splice it
    // in as the checked sub-expression.
    ArrayList<BLangExpression> argExprs = new ArrayList<>();
    BType typedescType = new BTypedescType(data.expType, null);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = data.expType;
    typedescExpr.setBType(typedescType);
    argExprs.add(typedescExpr);
    BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE,
            argExprs, checkedExpr.expr, checkedExpr.pos);
    invocation.symbol = symResolver.lookupLangLibMethod(type, names.fromString(invocation.name.value), data.env);
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    checkedExpr.expr = invocation;
}
/**
 * For a field-based access expression (lax access such as `m.x`), returns
 * the error-lifted form of the given type; otherwise returns it unchanged.
 */
private BType getCandidateLaxType(BLangNode expr, BType rhsType) {
    if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR) {
        return rhsType;
    }
    return types.getSafeType(rhsType, false, true);
}
private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType, AnalyzerData data) {
    // Dry-run type check of the checked sub-expression on a clone, with
    // diagnostics muted so speculative failures are never reported.
    boolean prevNonErrorLoggingCheck = data.nonErrorLoggingCheck;
    data.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    checkedExpr.expr.cloneAttempt++;
    BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr);
    BType rhsType;
    if (checkExprCandidateType == symTable.semanticError) {
        rhsType = checkExpr(clone, data);
    } else {
        rhsType = checkExpr(clone, checkExprCandidateType, data);
    }
    // Restore the logging state exactly as it was before the dry run;
    // only unmute if this call was the one that muted.
    data.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    return rhsType;
}
/**
 * Returns the type unchanged when some member is already an error;
 * otherwise widens it with the default error type.
 */
private BType addDefaultErrorIfNoErrorComponentFound(BType type) {
    boolean hasErrorComponent = false;
    for (BType memberType : types.getAllTypes(type, false)) {
        if (types.isAssignable(memberType, symTable.errorType)) {
            hasErrorComponent = true;
            break;
        }
    }
    return hasErrorComponent ? type : BUnionType.create(null, type, symTable.errorType);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr, AnalyzerData data) {
    // A service constructor's type is simply the declared service's type.
    data.resultType = serviceConstructorExpr.serviceNode.symbol.type;
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr, AnalyzerData data) {
    // Resolve the tested type, type-check the tested expression, and type
    // the whole `is` expression as boolean.
    BType testedType = symResolver.resolveTypeNode(typeTestExpr.typeNode, data.env);
    typeTestExpr.typeNode.setBType(testedType);
    checkExpr(typeTestExpr.expr, data);
    data.resultType = types.checkType(typeTestExpr, symTable.booleanType, data.expType);
}
/**
 * Types an annotation access expression: the subject must be a typedesc and
 * the result is the annotation's attached type (or `true`) union nil.
 */
public void visit(BLangAnnotAccessExpr annotAccessExpr, AnalyzerData data) {
    checkExpr(annotAccessExpr.expr, symTable.typeDesc, data);
    BType actualType = symTable.semanticError;
    Name pkgAlias = names.fromString(annotAccessExpr.pkgAlias.getValue());
    Name annotName = names.fromString(annotAccessExpr.annotationName.getValue());
    BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, data.env, pkgAlias, annotName);
    if (symbol == this.symTable.notFoundSymbol) {
        this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION,
                annotAccessExpr.annotationName.getValue());
    } else {
        BAnnotationSymbol annotationSymbol = (BAnnotationSymbol) symbol;
        annotAccessExpr.annotationSymbol = annotationSymbol;
        BType annotType = annotationSymbol.attachedType == null ? symTable.trueType
                : annotationSymbol.attachedType;
        // nil accounts for the annotation being absent at runtime.
        actualType = BUnionType.create(null, annotType, symTable.nilType);
    }
    data.resultType = this.types.checkType(annotAccessExpr, actualType, data.expType);
}
/**
 * Returns true when the expression kind is valid as a binding-pattern
 * target; otherwise logs an invalid-binding-pattern error and returns false.
 */
private boolean isValidVariableReference(BLangExpression varRef) {
    switch (varRef.getKind()) {
        case SIMPLE_VARIABLE_REF:
        case RECORD_VARIABLE_REF:
        case TUPLE_VARIABLE_REF:
        case ERROR_VARIABLE_REF:
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
        case XML_ATTRIBUTE_ACCESS_EXPR:
            return true;
        default:
            dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType());
            return false;
    }
}
private BType getEffectiveReadOnlyType(Location pos, BType type, AnalyzerData data) {
    // Replaces a contextual `readonly` target (alone or as a union member)
    // with the immutable intersection of the actually expected type, when
    // such an intersection exists.
    BType origTargetType = Types.getReferredType(type);
    if (origTargetType == symTable.readonlyType) {
        // Inherently immutable or not selectively immutable: keep `readonly`.
        if (types.isInherentlyImmutableType(data.expType) || !types.isSelectivelyImmutableType(data.expType)) {
            return origTargetType;
        }
        return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, data.expType, data.env, symTable,
                anonymousModelHelper, names, new HashSet<>());
    }
    if (origTargetType.tag != TypeTags.UNION) {
        return origTargetType;
    }
    // For unions: strip the `readonly` member (if any) ...
    boolean hasReadOnlyType = false;
    LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) {
        if (memberType == symTable.readonlyType) {
            hasReadOnlyType = true;
            continue;
        }
        nonReadOnlyTypes.add(memberType);
    }
    if (!hasReadOnlyType) {
        return origTargetType;
    }
    if (types.isInherentlyImmutableType(data.expType) || !types.isSelectivelyImmutableType(data.expType)) {
        return origTargetType;
    }
    // ... and re-add it as the immutable intersection of the expected type.
    BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes);
    nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types, data.expType, data.env,
            symTable, anonymousModelHelper, names, new HashSet<>()));
    return nonReadOnlyUnion;
}
/**
 * Defines the arrow function's parameters in a fresh env, then infers the
 * body expression's type against the expected return type.
 */
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType,
                                      AnalyzerData data) {
    SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, data.env);
    for (BLangSimpleVariable param : bLangArrowFunction.params) {
        symbolEnter.defineNode(param, arrowFunctionEnv);
    }
    return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType, data);
}
/**
 * Attaches the expected parameter types (and synthetic type nodes) to an
 * arrow function's parameters; reports an arity mismatch and poisons all
 * parameters with semanticError when the counts differ.
 */
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes,
                                         AnalyzerData data) {
    int expectedCount = paramTypes.size();
    int actualCount = bLangArrowFunction.params.size();
    if (expectedCount != actualCount) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                expectedCount, actualCount);
        data.resultType = symTable.semanticError;
        for (BLangSimpleVariable param : bLangArrowFunction.params) {
            param.setBType(symTable.semanticError);
        }
        return;
    }
    for (int i = 0; i < actualCount; i++) {
        BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i);
        BType bType = paramTypes.get(i);
        // Synthesize a value-type node so later phases see a typed parameter.
        BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        valueTypeNode.setTypeKind(bType.getKind());
        valueTypeNode.pos = symTable.builtinPos;
        paramIdentifier.setTypeNode(valueTypeNode);
        paramIdentifier.setBType(bType);
    }
}
/**
 * Reports an error when a variable's initializer references the variable
 * currently being defined.
 */
public void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) {
    boolean selfReference = env.enclVarSym == varSymbol;
    if (selfReference) {
        dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name);
    }
}
/**
 * Builds a mutable list containing `count` semantic-error placeholders.
 */
public List<BType> getListWithErrorTypes(int count) {
    List<BType> list = new ArrayList<>(count);
    int remaining = count;
    while (remaining-- > 0) {
        list.add(symTable.semanticError);
    }
    return list;
}
private void checkFunctionInvocationExpr(BLangInvocation iExpr, AnalyzerData data) {
    // Resolves the invoked symbol (main symbol space first, then the
    // constructor space) and delegates param/return type checking.
    Name funcName = names.fromIdNode(iExpr.name);
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    BSymbol funcSymbol = symTable.notFoundSymbol;
    BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(data.env, pkgAlias, getCurrentCompUnit(iExpr));
    if (pkgSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias);
    } else {
        if (funcSymbol == symTable.notFoundSymbol) {
            BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, data.env, pkgAlias, funcName);
            // Variables can hold function pointers.
            if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
                funcSymbol = symbol;
            }
            // Root-package names also resolve in the VARIABLE_NAME space.
            if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) &&
                    (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
                funcSymbol = symbol;
            }
        }
        if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) {
            // Fall back to constructors (e.g. error constructors).
            BSymbol ctor =
                    symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, data.env, pkgAlias, funcName);
            funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol;
        }
    }
    if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
        if (!missingNodesHelper.isMissingNode(funcName)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName);
        }
        // Still check the arguments so their own diagnostics are reported.
        iExpr.argExprs.forEach(arg -> checkExpr(arg, data));
        data.resultType = symTable.semanticError;
        return;
    }
    if (isFunctionPointer(funcSymbol)) {
        iExpr.functionPointerInvocation = true;
        markAndRegisterClosureVariable(funcSymbol, iExpr.pos, data.env, data);
    }
    // Remote/resource functions cannot be invoked with plain call syntax.
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID);
    if (langLibPackageID) {
        data.env = SymbolEnv.createInvocationEnv(iExpr, data.env);
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr, data);
    // Langlib calls on the first argument must not mutate immutable values.
    if (langLibPackageID && !iExpr.argExprs.isEmpty()) {
        checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol, data);
    }
}
protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env, AnalyzerData data) {
    // Marks `symbol` as a closure-captured variable and registers it with the
    // capturing construct (lambda, arrow function, or object-ctor class).
    BLangInvokableNode encInvokable = env.enclInvokable;
    BLangNode bLangNode = env.node;
    // Module-level symbols referenced outside lambdas/arrow exprs/object
    // ctors never need closure treatment.
    if ((symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
            bLangNode.getKind() != NodeKind.ARROW_EXPR && bLangNode.getKind() != NodeKind.EXPR_FUNCTION_BODY &&
            encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA) &&
            !encInvokable.flagSet.contains(Flag.OBJECT_CTOR)) {
        return;
    }
    if (!symbol.closure) {
        if (searchClosureVariableInExpressions(symbol, pos, env, encInvokable, bLangNode)) {
            return;
        }
    }
    BLangNode node = bLangNode;
    // Direct reference from within an object-ctor class body.
    if (isObjectCtorClass(node)) {
        BLangClassDefinition classDef = (BLangClassDefinition) node;
        OCEDynamicEnvironmentData oceData = classDef.oceEnvData;
        BLangFunction currentFunc = (BLangFunction) encInvokable;
        if ((currentFunc != null) && !currentFunc.attachedFunction &&
                !(currentFunc.symbol.receiverSymbol == symbol)) {
            BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(oceData.capturedClosureEnv, symbol.name,
                    SymTag.VARIABLE);
            if (resolvedSymbol != symTable.notFoundSymbol && !resolvedSymbol.closure) {
                // Only non-package-level symbols become OCE closures.
                if (resolvedSymbol.owner.getKind() != SymbolKind.PACKAGE) {
                    updateObjectCtorClosureSymbols(pos, currentFunc, resolvedSymbol, classDef, data);
                    return;
                }
            }
        }
    }
    // Walk outwards through enclosing envs looking for the capture site.
    SymbolEnv cEnv = env;
    while (node != null) {
        if (node.getKind() == NodeKind.FUNCTION) {
            BLangFunction function = (BLangFunction) node;
            // Stop at a plain (non-ctor, non-attached) function boundary.
            if (!function.flagSet.contains(Flag.OBJECT_CTOR) && !function.flagSet.contains(Flag.ATTACHED)) {
                break;
            }
        }
        if (!symbol.closure) {
            if (searchClosureVariableInExpressions(symbol, pos, env, encInvokable, node)) {
                return;
            }
        }
        if (isObjectCtorClass(node)) {
            BLangFunction currentFunction = (BLangFunction) encInvokable;
            // `self` within an attached function is not a closure variable.
            if ((currentFunction != null) && currentFunction.attachedFunction &&
                    (currentFunction.symbol.receiverSymbol == symbol)) {
                return;
            }
            SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
            BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                    SymTag.VARIABLE);
            BLangClassDefinition classDef = (BLangClassDefinition) node;
            if (resolvedSymbol != symTable.notFoundSymbol) {
                if (resolvedSymbol.owner.getKind() == SymbolKind.PACKAGE) {
                    break;
                }
                updateObjectCtorClosureSymbols(pos, currentFunction, resolvedSymbol, classDef, data);
                return;
            }
            break;
        }
        SymbolEnv enclEnv = cEnv.enclEnv;
        if (enclEnv == null) {
            break;
        }
        cEnv = enclEnv;
        node = cEnv.node;
    }
}
/**
 * True for class definitions generated for object-constructor expressions.
 */
private boolean isObjectCtorClass(BLangNode node) {
    if (node.getKind() != NodeKind.CLASS_DEFN) {
        return false;
    }
    return ((BLangClassDefinition) node).flagSet.contains(Flag.OBJECT_CTOR);
}
private boolean searchClosureVariableInExpressions(BSymbol symbol, Location pos, SymbolEnv env,
                                                   BLangInvokableNode encInvokable, BLangNode bLangNode) {
    // Tries three capture contexts in order; returns true once the symbol
    // has been marked as a closure and registered with its capturer.
    // Case 1: referenced inside a lambda, and not one of its own parameters.
    if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA)
            && !isFunctionArgument(symbol, encInvokable.requiredParams)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol =
                symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
            return true;
        }
    }
    // Case 2: referenced inside an arrow function, excluding its parameters.
    if (bLangNode.getKind() == NodeKind.ARROW_EXPR
            && !isFunctionArgument(symbol, ((BLangArrowFunction) bLangNode).params)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol =
                symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol) {
            resolvedSymbol.closure = true;
            ((BLangArrowFunction) bLangNode).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
            return true;
        }
    }
    // Case 3: referenced from within a record type's default-value context.
    if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType);
        BSymbol resolvedSymbol =
                symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null &&
                !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
            return true;
        }
    }
    return false;
}
/**
 * Registers a symbol captured by an object-constructor class, distinguishing
 * symbols that are parameters of the enclosing function (incl. the rest
 * parameter) from block-level symbols, then propagates the capture to any
 * enclosing object-ctor classes.
 */
private void updateObjectCtorClosureSymbols(Location pos, BLangFunction currentFunction, BSymbol resolvedSymbol,
                                            BLangClassDefinition classDef, AnalyzerData data) {
    classDef.hasClosureVars = true;
    resolvedSymbol.closure = true;
    if (currentFunction != null) {
        currentFunction.closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
    }
    OCEDynamicEnvironmentData oceEnvData = classDef.oceEnvData;
    boolean isFunctionParam = currentFunction != null
            && (currentFunction.symbol.params.contains(resolvedSymbol)
            || currentFunction.symbol.restParam == resolvedSymbol);
    if (isFunctionParam) {
        oceEnvData.closureFuncSymbols.add(resolvedSymbol);
    } else {
        oceEnvData.closureBlockSymbols.add(resolvedSymbol);
    }
    updateProceedingClasses(data.env.enclEnv, oceEnvData, classDef);
}
/**
 * Walks outwards through enclosing envs (stopping at the package level) and
 * propagates the captured closure symbols to every enclosing class other
 * than the originating one.
 */
private void updateProceedingClasses(SymbolEnv envArg, OCEDynamicEnvironmentData oceEnvData,
                                     BLangClassDefinition origClassDef) {
    for (SymbolEnv localEnv = envArg; localEnv != null; localEnv = localEnv.enclEnv) {
        BLangNode node = localEnv.node;
        if (node.getKind() == NodeKind.PACKAGE) {
            break;
        }
        if (node.getKind() != NodeKind.CLASS_DEFN) {
            continue;
        }
        BLangClassDefinition classDef = (BLangClassDefinition) node;
        if (classDef == origClassDef) {
            continue;
        }
        classDef.hasClosureVars = true;
        OCEDynamicEnvironmentData parentOceData = classDef.oceEnvData;
        oceEnvData.parents.push(classDef);
        parentOceData.closureFuncSymbols.addAll(oceEnvData.closureFuncSymbols);
        parentOceData.closureBlockSymbols.addAll(oceEnvData.closureBlockSymbols);
    }
}
/**
 * A symbol is callable when it is a function, a constructor, or a function
 * pointer; anything else counts as "not a function".
 */
private boolean isNotFunction(BSymbol funcSymbol) {
    boolean isFunction = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION;
    boolean isConstructor = (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR;
    return !(isFunction || isConstructor || isFunctionPointer(funcSymbol));
}
private boolean isFunctionPointer(BSymbol funcSymbol) {
    // A declared function symbol is never treated as a function pointer.
    if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
        return false;
    }
    // NOTE(review): this masks with SymTag.FUNCTION but compares against
    // SymTag.VARIABLE — presumably FUNCTION's bit mask includes VARIABLE's,
    // making this a variable-symbol test; confirm against the SymTag
    // definitions before touching it.
    // A non-native variable of function kind is a function pointer.
    return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE
            && funcSymbol.kind == SymbolKind.FUNCTION
            && !Symbols.isNative(funcSymbol);
}
/**
 * Type-checks the named detail args of an error constructor. Each arg is
 * first dry-run against its target detail-field type on a clone so a failed
 * targeted check does not poison the real node; on failure the real node is
 * checked without a contextual type so its own diagnostics surface.
 */
private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr,
                                                                 BType expectedType, AnalyzerData data) {
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(errorConstructorExpr.namedArgs.size());
    for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) {
        BType target = checkErrCtrTargetTypeAndSetSymbol(namedArgsExpression, expectedType);
        BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression);
        BType cloneType = checkExpr(clone, target, data);
        if (cloneType == symTable.semanticError) {
            checkExpr(namedArgsExpression, data);
        } else {
            checkExpr(namedArgsExpression, target, data);
        }
        namedArgs.add(namedArgsExpression);
    }
    return namedArgs;
}
/**
 * Determines the expected type for one named detail argument of an error constructor, and, when
 * the detail type is a record declaring that field, records the field symbol on the argument.
 * Unknown fields on open records resolve to the rest field type; on sealed records an
 * invalid-rest-detail error is not applicable and no-type is returned.
 */
private BType checkErrCtrTargetTypeAndSetSymbol(BLangNamedArgsExpression namedArgsExpression, BType expectedType) {
    BType referredType = Types.getReferredType(expectedType);
    if (referredType == symTable.semanticError) {
        return symTable.semanticError;
    }
    if (referredType.tag == TypeTags.MAP) {
        // Map detail type: every field shares the map constraint.
        return ((BMapType) referredType).constraint;
    }
    if (referredType.tag != TypeTags.RECORD) {
        return symTable.semanticError;
    }
    BRecordType detailRecord = (BRecordType) referredType;
    BField declaredField = detailRecord.fields.get(namedArgsExpression.name.value);
    if (declaredField != null) {
        namedArgsExpression.varSymbol = declaredField.symbol;
        return declaredField.type;
    }
    // Unknown field: flag it on open records that declare at least one field.
    if (!detailRecord.sealed && !detailRecord.fields.isEmpty()) {
        dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name,
                detailRecord);
    }
    return detailRecord.sealed ? symTable.noType : detailRecord.restFieldType;
}
/**
 * Type-checks a method invocation on an object-typed expression ({@code obj.foo(...)}).
 * Resolves the attached method (falling back to an invocable object field used as a function
 * pointer, then to a lang-lib method), reports invalid invocation forms (service methods
 * outside {@code self}, {@code init} outside {@code self}, remote/resource methods invoked
 * with plain call syntax), and finally checks arguments and the return type.
 * The result is published via {@code data.resultType}.
 */
private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType, AnalyzerData data) {
    // Service object methods may only be invoked through `self`.
    if (objectType.getKind() == TypeKind.SERVICE &&
            !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
            (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION);
        return;
    }
    // Attached methods are stored under a mangled "<typeName>.<funcName>" symbol name.
    Name funcName =
            names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
    BSymbol funcSymbol =
            symResolver.resolveObjectMethod(iExpr.pos, data.env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
    if (funcSymbol == symTable.notFoundSymbol) {
        // No attached method: fall back to an object field of function type (pointer call).
        BSymbol invocableField = symResolver.resolveInvocableObjectField(
                iExpr.pos, data.env, names.fromIdNode(iExpr.name), (BObjectTypeSymbol) objectType.tsymbol);
        if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
            funcSymbol = invocableField;
            iExpr.functionPointerInvocation = true;
        }
    }
    if (funcSymbol == symTable.notFoundSymbol || Types.getReferredType(funcSymbol.type).tag != TypeTags.INVOKABLE) {
        // Last resort: a lang-lib method applicable to the object type.
        if (!checkLangLibMethodInvocationExpr(iExpr, objectType, data)) {
            dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value,
                    objectType);
            data.resultType = symTable.semanticError;
            return;
        }
    } else {
        iExpr.symbol = funcSymbol;
    }
    // `init` may only be invoked through `self`.
    if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) &&
            !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
            (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        // Remote methods require the action-invocation (`->`) syntax.
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    checkInvocationParamAndReturnType(iExpr, data);
}
/**
 * Type-checks an action invocation ({@code obj->foo(...)} or async start) on an object type.
 * Resolves the remote method (falling back to an invocable field, then lang-lib methods),
 * validates that action syntax is used only for remote methods (or async calls), and flags
 * client remote methods whose return type contains a required `never` member. The resolved
 * symbol is stored on {@code aInv}; the result type is published via {@code data.resultType}.
 */
private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType,
                                   AnalyzerData data) {
    // Action syntax on a non-endpoint, non-async receiver is invalid outright.
    if (checkInvalidActionInvocation(aInv)) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, aInv.expr.getBType());
        data.resultType = symTable.semanticError;
        aInv.symbol = symTable.notFoundSymbol;
        return;
    }
    // Remote methods are stored under a mangled "<typeName>.<funcName>" symbol name.
    Name remoteMethodQName = names
            .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value));
    Name actionName = names.fromIdNode(aInv.name);
    BSymbol remoteFuncSymbol = symResolver.resolveObjectMethod(aInv.pos, data.env,
            remoteMethodQName, (BObjectTypeSymbol) Types.getReferredType(expType).tsymbol);
    if (remoteFuncSymbol == symTable.notFoundSymbol) {
        // No method: fall back to an object field of function type (pointer call).
        BSymbol invocableField = symResolver.resolveInvocableObjectField(
                aInv.pos, data.env, names.fromIdNode(aInv.name), (BObjectTypeSymbol) expType.tsymbol);
        if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
            remoteFuncSymbol = invocableField;
            aInv.functionPointerInvocation = true;
        }
    }
    if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType, data)) {
        dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType);
        data.resultType = symTable.semanticError;
        return;
    }
    // Only remote methods (or async invocations) may use the `->` syntax.
    if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName);
        data.resultType = symTable.semanticError;
        return;
    }
    // Client remote methods must not return a type containing a required `never` member.
    if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) &&
            Symbols.isFlagOn(expType.flags, Flags.CLIENT) &&
            types.isNeverTypeOrStructureTypeWithARequiredNeverMember
                    ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL);
    }
    aInv.symbol = remoteFuncSymbol;
    checkInvocationParamAndReturnType(aInv, data);
}
/**
 * Returns {@code true} when the action invocation's receiver is a simple variable reference
 * that is neither an endpoint nor part of an async invocation — i.e. the `->` syntax is invalid.
 */
private boolean checkInvalidActionInvocation(BLangInvocation.BLangActionInvocation aInv) {
    if (aInv.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    BSymbol receiverSymbol = ((BLangSimpleVarRef) aInv.expr).symbol;
    boolean isEndpoint = (receiverSymbol.tag & SymTag.ENDPOINT) == SymTag.ENDPOINT;
    return !isEndpoint && !aInv.async;
}
/**
 * Returns {@code true} when the invocation can be resolved (and has been checked) as a
 * lang-lib method on the given type.
 */
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType, AnalyzerData data) {
    BSymbol langLibMethod = getLangLibMethod(iExpr, bType, data);
    return langLibMethod != symTable.notFoundSymbol;
}
/**
 * Resolves a lang-lib method for the given receiver type and, when found, type-checks the
 * invocation with the receiver inserted as the first argument. Returns the resolved symbol,
 * or the not-found sentinel when no lang-lib method matches.
 */
private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType, AnalyzerData data) {
    Name methodName = names.fromString(iExpr.name.value);
    BSymbol methodSymbol = symResolver.lookupLangLibMethod(bType, methodName, data.env);
    if (methodSymbol == symTable.notFoundSymbol) {
        return symTable.notFoundSymbol;
    }
    iExpr.symbol = methodSymbol;
    iExpr.langLibInvocation = true;
    // Lang-lib methods take the receiver as the first positional argument; check the
    // invocation inside a dedicated invocation env, then restore the previous env.
    SymbolEnv prevEnv = data.env;
    data.env = SymbolEnv.createInvocationEnv(iExpr, data.env);
    iExpr.argExprs.add(0, iExpr.expr);
    checkInvocationParamAndReturnType(iExpr, data);
    data.env = prevEnv;
    return methodSymbol;
}
/**
 * Checks the invocation's arguments, then validates the inferred return type against the
 * expected type, publishing the result via {@code data.resultType}.
 */
private void checkInvocationParamAndReturnType(BLangInvocation iExpr, AnalyzerData data) {
    BType inferredType = checkInvocationParam(iExpr, data);
    data.resultType = types.checkType(iExpr, inferredType, data.expType);
}
/**
 * Returns the single open included-record parameter that can absorb additional named args,
 * or {@code null} when there is not exactly one such parameter or it does not declare every
 * required parameter name as a field.
 */
private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams,
                                                       Set<String> requiredParamNames) {
    if (openIncRecordParams.size() != 1) {
        return null;
    }
    BVarSymbol candidate = openIncRecordParams.get(0);
    LinkedHashMap<String, BField> candidateFields =
            ((BRecordType) Types.getReferredType(candidate.type)).fields;
    for (String requiredName : requiredParamNames) {
        if (!candidateFields.containsKey(requiredName)) {
            return null;
        }
    }
    return candidate;
}
/**
 * Scans the invokable's parameters, collecting the non-`never` fields of included-record
 * parameters into {@code incRecordParams} (out-param) and the names of all other parameters.
 * Returns the single open included-record parameter allowed to take additional named args,
 * or {@code null} (see {@code incRecordParamAllowAdditionalFields}).
 */
private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol,
                                                               List<BVarSymbol> incRecordParams) {
    Set<String> requiredParamNames = new HashSet<>();
    List<BVarSymbol> openIncRecordParams = new ArrayList<>();
    for (BVarSymbol param : invokableSymbol.params) {
        BType referredParamType = Types.getReferredType(param.type);
        boolean includedRecordParam = Symbols.isFlagOn(Flags.asMask(param.getFlags()), Flags.INCLUDED)
                && referredParamType.getKind() == TypeKind.RECORD;
        if (!includedRecordParam) {
            requiredParamNames.add(param.name.value);
            continue;
        }
        // Expose each usable (non-`never`) field of the included record as a named parameter.
        boolean onlyNeverFields = true;
        LinkedHashMap<String, BField> recFields = ((BRecordType) referredParamType).fields;
        for (String recFieldName : recFields.keySet()) {
            BField recField = recFields.get(recFieldName);
            if (recField.symbol.type.tag != TypeTags.NEVER) {
                onlyNeverFields = false;
                incRecordParams.add(recField.symbol);
                requiredParamNames.add(recFieldName);
            }
        }
        // An included record with only `never` fields but an open rest type may still absorb
        // arbitrary additional named args.
        if (onlyNeverFields && ((BRecordType) referredParamType).restFieldType != symTable.noType) {
            openIncRecordParams.add(param);
        }
    }
    return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames);
}
/**
 * Partitions an invocation's argument expressions into required (positional/named) args and
 * rest args, validates argument ordering (no positional/rest arg after a named arg), and
 * delegates the actual type checking to {@code checkInvocationArgs}.
 *
 * @return the inferred return type, or a semantic-error / no-type marker on invalid callees
 */
private BType checkInvocationParam(BLangInvocation iExpr, AnalyzerData data) {
    // A value of the `function` (any-function) type cannot be called directly.
    if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
        return symTable.semanticError;
    }
    BType invocableType = Types.getReferredType(iExpr.symbol.type);
    if (invocableType.tag != TypeTags.INVOKABLE) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
        return symTable.noType;
    }
    BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
    List<BType> paramTypes = ((BInvokableType) invocableType).getParameterTypes();
    // Fields of included-record params can be passed as named args, raising the named-arg
    // parameter budget beyond the positional one.
    List<BVarSymbol> incRecordParams = new ArrayList<>();
    BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
            incRecordParams);
    int parameterCountForPositionalArgs = paramTypes.size();
    int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
    iExpr.requiredArgs = new ArrayList<>();
    // An included-record param with at least one non-`never` field cannot itself be passed by
    // name, so subtract one from the named-arg budget per such param.
    for (BVarSymbol symbol : invokableSymbol.params) {
        if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
                Types.getReferredType(symbol.type).tag != TypeTags.RECORD) {
            continue;
        }
        LinkedHashMap<String, BField> fields =
                ((BRecordType) Types.getReferredType(symbol.type)).fields;
        if (fields.isEmpty()) {
            continue;
        }
        for (String field : fields.keySet()) {
            if (Types.getReferredType(fields.get(field).type).tag != TypeTags.NEVER) {
                parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
                break;
            }
        }
    }
    // Split the given arguments, enforcing ordering rules as we go.
    int i = 0;
    BLangExpression vararg = null;
    boolean foundNamedArg = false;
    for (BLangExpression expr : iExpr.argExprs) {
        switch (expr.getKind()) {
            case NAMED_ARGS_EXPR:
                foundNamedArg = true;
                if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
                }
                i++;
                break;
            case REST_ARGS_EXPR:
                // A spread (`...x`) arg must not follow a named arg.
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
                    continue;
                }
                vararg = expr;
                break;
            default: // positional argument
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
                }
                if (i < parameterCountForPositionalArgs) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    // Overflow positionals are candidates for the rest parameter.
                    iExpr.restArgs.add(expr);
                }
                i++;
                break;
        }
    }
    return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
            incRecordParamAllowAdditionalFields, data);
}
/**
 * Core argument/return type checking for an invocation after the args have been partitioned.
 * Validates positional and named args against the parameter list, tracks which required
 * parameters received values, handles vararg spreading (building a tuple/record view of the
 * unfilled tail when needed), checks rest args against the rest parameter, and computes the
 * return type (building parameterized-type and future types where applicable).
 *
 * @return the invocation's return type, or {@code symTable.semanticError} on any arg error
 */
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg,
                                  List<BVarSymbol> incRecordParams,
                                  BVarSymbol incRecordParamAllowAdditionalFields, AnalyzerData data) {
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
    BInvokableType bInvokableType = (BInvokableType) Types.getReferredType(invokableSymbol.type);
    BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol;
    List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params);
    List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
    List<BVarSymbol> valueProvidedParams = new ArrayList<>();
    int nonRestArgCount = nonRestArgs.size();
    // Parameters that MUST receive a value: non-defaultable params plus required
    // included-record fields. Entries are removed as args are matched.
    List<BVarSymbol> requiredParams = new ArrayList<>(nonRestParams.size() + nonRestArgCount);
    List<BVarSymbol> requiredIncRecordParams = new ArrayList<>(incRecordParams.size() + nonRestArgCount);
    for (BVarSymbol nonRestParam : nonRestParams) {
        if (nonRestParam.isDefaultable) {
            continue;
        }
        requiredParams.add(nonRestParam);
    }
    for (BVarSymbol incRecordParam : incRecordParams) {
        if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) {
            requiredIncRecordParams.add(incRecordParam);
        }
    }
    // Match the provided non-rest args (positionals first, then named) against parameters.
    int i = 0;
    for (; i < nonRestArgCount; i++) {
        BLangExpression arg = nonRestArgs.get(i);
        // Lang-lib style calls pre-check the receiver arg; re-validate it against the first
        // parameter type (with a silent re-check for char-string refinement).
        if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
            BType expectedType = paramTypes.get(i);
            BType actualType = arg.getBType();
            if (Types.getReferredType(expectedType) == symTable.charStringType) {
                arg.cloneAttempt++;
                BLangExpression clonedArg = nodeCloner.cloneNode(arg);
                BType argType = checkExprSilent(clonedArg, expectedType, data);
                if (argType != symTable.semanticError) {
                    actualType = argType;
                }
            }
            types.checkType(arg.pos, actualType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            types.setImplicitCastExpr(arg, arg.getBType(), expectedType);
        }
        if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional arg: consume the next parameter in order.
            if (i < nonRestParams.size()) {
                BVarSymbol param = nonRestParams.get(i);
                checkTypeParamExpr(arg, param.type, iExpr.langLibInvocation, data);
                valueProvidedParams.add(param);
                requiredParams.remove(param);
                continue;
            }
            break;
        }
        if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            // Named arg: resolve the target parameter (or included-record field) by name.
            BLangIdentifier argName = ((NamedArgNode) arg).getName();
            BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr,
                    nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields, data);
            if (varSym == null) {
                dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName);
                break;
            }
            requiredParams.remove(varSym);
            requiredIncRecordParams.remove(varSym);
            if (valueProvidedParams.contains(varSym)) {
                dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
                continue;
            }
            checkTypeParamExpr(arg, varSym.type, iExpr.langLibInvocation, data);
            ((BLangNamedArgsExpression) arg).varSymbol = varSym;
            valueProvidedParams.add(varSym);
        }
    }
    // Report required parameters that never received a value (a vararg may still fill them).
    BVarSymbol restParam = invokableTypeSymbol.restParam;
    boolean errored = false;
    if (!requiredParams.isEmpty() && vararg == null) {
        for (BVarSymbol requiredParam : requiredParams) {
            if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) {
                dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name,
                        iExpr.name.value);
                errored = true;
            }
        }
    }
    // Required fields of an unfilled included-record parameter are also missing.
    if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) {
        for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) {
            for (BVarSymbol requiredParam : requiredParams) {
                if (Types.getReferredType(requiredParam.type) ==
                        Types.getReferredType(requiredIncRecordParam.owner.type)) {
                    dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER,
                            requiredIncRecordParam.name, iExpr.name.value);
                    errored = true;
                }
            }
        }
    }
    if (restParam == null &&
            (!iExpr.restArgs.isEmpty() ||
                    (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        errored = true;
    }
    if (errored) {
        return symTable.semanticError;
    }
    // When a vararg must cover unfilled non-rest parameters, build a tuple type (list form)
    // and a record type (mapping form) describing the remaining tail of the parameter list.
    BType listTypeRestArg = restParam == null ? null : restParam.type;
    BRecordType mappingTypeRestArg = null;
    if (vararg != null && nonRestArgs.size() < nonRestParams.size()) {
        PackageID pkgID = data.env.enclPkg.symbol.pkgID;
        List<BType> tupleMemberTypes = new ArrayList<>();
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL, data);
        mappingTypeRestArg = new BRecordType(recordSymbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        BType tupleRestType = null;
        BVarSymbol fieldSymbol;
        for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) {
            BType paramType = paramTypes.get(j);
            BVarSymbol nonRestParam = nonRestParams.get(j);
            Name paramName = nonRestParam.name;
            tupleMemberTypes.add(paramType);
            boolean required = requiredParams.contains(nonRestParam);
            fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
                add(required ? Flag.REQUIRED : Flag.OPTIONAL); }}), paramName,
                    nonRestParam.getOriginalName(), pkgID, paramType, recordSymbol,
                    symTable.builtinPos, VIRTUAL);
            fields.put(paramName.value, new BField(paramName, null, fieldSymbol));
        }
        // Append the rest parameter's element/member types to the tuple view.
        if (listTypeRestArg != null) {
            if (listTypeRestArg.tag == TypeTags.ARRAY) {
                tupleRestType = ((BArrayType) listTypeRestArg).eType;
            } else if (listTypeRestArg.tag == TypeTags.TUPLE) {
                BTupleType restTupleType = (BTupleType) listTypeRestArg;
                tupleMemberTypes.addAll(restTupleType.tupleTypes);
                if (restTupleType.restType != null) {
                    tupleRestType = restTupleType.restType;
                }
            }
        }
        BTupleType tupleType = new BTupleType(tupleMemberTypes);
        tupleType.restType = tupleRestType;
        listTypeRestArg = tupleType;
        mappingTypeRestArg.sealed = true;
        mappingTypeRestArg.restFieldType = symTable.noType;
        mappingTypeRestArg.fields = fields;
        recordSymbol.type = mappingTypeRestArg;
        mappingTypeRestArg.tsymbol = recordSymbol;
    }
    if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        return symTable.semanticError;
    }
    // Check rest args and/or the vararg against the (possibly synthesized) rest type.
    BType restType = null;
    if (vararg != null && !iExpr.restArgs.isEmpty()) {
        // Individual rest args followed by a spread arg: both must fit the rest array type.
        BType elementType = ((BArrayType) listTypeRestArg).eType;
        for (BLangExpression restArg : iExpr.restArgs) {
            checkTypeParamExpr(restArg, elementType, true, data);
        }
        checkTypeParamExpr(vararg, listTypeRestArg, iExpr.langLibInvocation, data);
        iExpr.restArgs.add(vararg);
        restType = data.resultType;
    } else if (vararg != null) {
        iExpr.restArgs.add(vararg);
        if (mappingTypeRestArg != null) {
            // The spread may be either a list or a mapping covering the unfilled params.
            LinkedHashSet<BType> restTypes = new LinkedHashSet<>();
            restTypes.add(listTypeRestArg);
            restTypes.add(mappingTypeRestArg);
            BType actualType = BUnionType.create(null, restTypes);
            checkTypeParamExpr(vararg, actualType, iExpr.langLibInvocation, data);
        } else {
            checkTypeParamExpr(vararg, listTypeRestArg, iExpr.langLibInvocation, data);
        }
        restType = data.resultType;
    } else if (!iExpr.restArgs.isEmpty()) {
        if (listTypeRestArg.tag == TypeTags.ARRAY) {
            BType elementType = ((BArrayType) listTypeRestArg).eType;
            for (BLangExpression restArg : iExpr.restArgs) {
                checkTypeParamExpr(restArg, elementType, true, data);
                // Remember the first semantic error without overwriting it on later args.
                if (restType != symTable.semanticError && data.resultType == symTable.semanticError) {
                    restType = data.resultType;
                }
            }
        } else if (listTypeRestArg.tag == TypeTags.TUPLE) {
            // Tuple rest type: each rest arg checks against its member slot (or tuple rest).
            BTupleType tupleType = (BTupleType) listTypeRestArg;
            List<BType> tupleMemberTypes = tupleType.tupleTypes;
            BType tupleRestType = tupleType.restType;
            int tupleMemCount = tupleMemberTypes.size();
            for (int j = 0; j < iExpr.restArgs.size(); j++) {
                BLangExpression restArg = iExpr.restArgs.get(j);
                BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType;
                checkTypeParamExpr(restArg, memType, true, data);
                if (restType != symTable.semanticError && data.resultType == symTable.semanticError) {
                    restType = data.resultType;
                }
            }
        } else {
            // Unexpected rest type shape: still check args (for cascading errors) then fail.
            for (BLangExpression restArg : iExpr.restArgs) {
                checkExpr(restArg, symTable.semanticError, data);
            }
            data.resultType = symTable.semanticError;
        }
    }
    // Compute the return type; parameterized returns of interface/native functions are
    // resolved against the expected type.
    BType retType = typeParamAnalyzer.getReturnTypeParams(data.env, bInvokableType.getReturnType());
    long invokableSymbolFlags = invokableSymbol.flags;
    if (restType != symTable.semanticError && (Symbols.isFlagOn(invokableSymbolFlags, Flags.INTERFACE)
            || Symbols.isFlagOn(invokableSymbolFlags, Flags.NATIVE)) &&
            Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) {
        retType = unifier.build(retType, data.expType, iExpr, types, symTable, dlog);
    }
    // lang.array `sort` gets extra validation of its key function and member ordering.
    boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID);
    String sortFuncName = "sort";
    if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) {
        checkArrayLibSortFuncArgs(iExpr);
    }
    // Async action invocations (`start ...`) wrap the return type in a future.
    if (iExpr instanceof ActionNode && (iExpr).async) {
        return this.generateFutureType(invokableSymbol, retType);
    } else {
        return retType;
    }
}
/**
 * Extra validation for lang.array `sort` invocations: when no key function is given (or it is
 * nil), the array's member type must itself be ordered; when a key function is given (named
 * `key` or the third positional arg), its return type must be an ordered type.
 */
private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) {
    // Locate the key function: a named arg `key`, or the third positional argument.
    List<BLangExpression> argExprs = iExpr.argExprs;
    BLangExpression keyFunction = null;
    for (int i = 0; i < argExprs.size(); i++) {
        BLangExpression arg = argExprs.get(i);
        if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            BLangNamedArgsExpression argExpr = (BLangNamedArgsExpression) arg;
            if (argExpr.name.value.equals("key")) {
                keyFunction = argExpr.expr;
                break;
            }
        } else if (i == 2) {
            keyFunction = arg;
            break;
        }
    }
    BLangExpression arrExpr = argExprs.get(0);
    BType arrType = arrExpr.getBType();
    boolean isOrderedType = types.isOrderedType(arrType, false);
    if (keyFunction == null) {
        // No key function: the members themselves must be orderable.
        if (!isOrderedType) {
            dlog.error(arrExpr.pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, arrType);
        }
        return;
    }
    BType keyFunctionType = keyFunction.getBType();
    if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) {
        return;
    }
    if (keyFunctionType.tag == TypeTags.NIL) {
        // Explicit nil key function behaves the same as no key function.
        if (!isOrderedType) {
            dlog.error(arrExpr.pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE, arrType);
        }
        return;
    }
    // Extract the key function's return type; position depends on the expression form.
    Location pos;
    BType returnType;
    if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        pos = keyFunction.pos;
        returnType = keyFunction.getBType().getReturnType();
    } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) {
        BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction);
        pos = arrowFunction.body.expr.pos;
        returnType = arrowFunction.body.expr.getBType();
        if (returnType.tag == TypeTags.SEMANTIC_ERROR) {
            return;
        }
    } else {
        BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction;
        pos = keyLambdaFunction.function.pos;
        returnType = keyLambdaFunction.function.getBType().getReturnType();
    }
    if (!types.isOrderedType(returnType, false)) {
        dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType);
    }
}
/**
 * Resolves a named argument's target: a non-rest parameter, an included-record field, or —
 * when a single open included-record param may absorb extra fields — a fresh virtual symbol
 * (after checking the expression against that record's rest field type).
 * Returns {@code null} when the name matches nothing.
 */
private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr,
                                                        List<BVarSymbol> nonRestParams,
                                                        List<BVarSymbol> incRecordParams,
                                                        BVarSymbol incRecordParamAllowAdditionalFields,
                                                        AnalyzerData data) {
    String name = argName.value;
    for (BVarSymbol param : nonRestParams) {
        if (param.getName().value.equals(name)) {
            return param;
        }
    }
    for (BVarSymbol includedField : incRecordParams) {
        if (includedField.getName().value.equals(name)) {
            return includedField;
        }
    }
    if (incRecordParamAllowAdditionalFields != null) {
        BRecordType incRecordType =
                (BRecordType) Types.getReferredType(incRecordParamAllowAdditionalFields.type);
        checkExpr(expr, incRecordType.restFieldType, data);
        if (!incRecordType.fields.containsKey(name)) {
            // Additional field absorbed via the open record's rest type.
            return new BVarSymbol(0, names.fromIdNode(argName), names.originalNameFromIdNode(argName),
                    null, symTable.noType, null, argName.pos, VIRTUAL);
        }
    }
    return null;
}
/**
 * Builds the future type produced by an async invocation of the given symbol; futures that
 * originate from `start <worker>` lambdas are tagged as worker-derivative.
 */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
    boolean workerDerivative = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX);
    return new BFutureType(TypeTags.FUTURE, retType, null, workerDerivative);
}
/**
 * Convenience overload: checks the argument using its own position for diagnostics.
 */
private void checkTypeParamExpr(BLangExpression arg, BType expectedType,
                                boolean inferTypeForNumericLiteral, AnalyzerData data) {
    checkTypeParamExpr(arg.pos, arg, expectedType, inferTypeForNumericLiteral, data);
}
/**
 * Type-checks an argument expression in a context that may involve type parameters. In a
 * type-param-sensitive environment the expression is either inferred against the bound type
 * (and then re-validated against the declared expected type) or checked directly, with the
 * inferred/actual type recorded for type-param resolution.
 */
private void checkTypeParamExpr(Location pos, BLangExpression arg, BType expectedType,
                                boolean inferTypeForNumericLiteral, AnalyzerData data) {
    SymbolEnv env = data.env;
    if (typeParamAnalyzer.notRequireTypeParams(env)) {
        // No type parameters in scope: a plain check suffices.
        checkExpr(arg, expectedType, data);
    } else if (requireTypeInference(arg, inferTypeForNumericLiteral)) {
        // Infer against the matching bound type, then validate against the declared type.
        BType boundType = typeParamAnalyzer.getMatchingBoundType(expectedType, env);
        BType inferredType = checkExpr(arg, boundType, data);
        typeParamAnalyzer.checkForTypeParamsInArg(arg, pos, inferredType, data.env, expectedType);
        types.checkType(arg.pos, inferredType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    } else {
        checkExpr(arg, expectedType, data);
        typeParamAnalyzer.checkForTypeParamsInArg(arg, pos, arg.getBType(), data.env, expectedType);
    }
}
/**
 * Decides whether the expression's type should be inferred from context rather than checked
 * directly: constructor-like expressions always are; elvis/ternary/numeric literals only when
 * {@code inferTypeForNumericLiteral} is set; grouped expressions delegate to their inner
 * expression.
 */
private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.GROUP_EXPR) {
        return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral);
    }
    if (kind == NodeKind.ARROW_EXPR || kind == NodeKind.LIST_CONSTRUCTOR_EXPR
            || kind == NodeKind.RECORD_LITERAL_EXPR) {
        return true;
    }
    if (kind == NodeKind.ELVIS_EXPR || kind == NodeKind.TERNARY_EXPR || kind == NodeKind.NUMERIC_LITERAL) {
        return inferTypeForNumericLiteral;
    }
    return false;
}
/**
 * Type-checks one field of a mapping constructor (record or map literal): key-value fields,
 * var-name shorthand fields, and spread-operator fields. Determines the field's expected
 * type from the mapping type, applies readonly-field immutability rules, and finally checks
 * the value expression against that type.
 *
 * @return the checked value type, or {@code symTable.semanticError} on failure
 */
private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType, AnalyzerData data) {
    BType fieldType = symTable.semanticError;
    boolean keyValueField = field.isKeyValueField();
    boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;
    boolean readOnlyConstructorField = false;
    String fieldName = null;
    Location pos = null;
    // The expression whose type will be checked at the end (spread fields set it later).
    BLangExpression valueExpr = null;
    if (keyValueField) {
        valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
    } else if (!spreadOpField) {
        valueExpr = (BLangRecordVarNameField) field;
    }
    switch (mappingType.tag) {
        case TypeTags.RECORD:
            if (keyValueField) {
                // Resolve the key against the record's fields (or rest type).
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey,
                        (BRecordType) mappingType, data);
                fieldType = typeSymbolPair.determinedType;
                key.fieldSymbol = typeSymbolPair.fieldSymbol;
                readOnlyConstructorField = keyValField.readonly;
                pos = key.expr.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else if (spreadOpField) {
                // Spread into a record: the spread expression must be a map (whose constraint
                // fits every field) or a record whose fields individually fit.
                BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                checkExpr(spreadExpr, data);
                BType spreadExprType = Types.getReferredType(spreadExpr.getBType());
                if (spreadExprType.tag == TypeTags.MAP) {
                    return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
                            getAllFieldType((BRecordType) mappingType),
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                }
                if (spreadExprType.tag != TypeTags.RECORD) {
                    dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                            spreadExprType);
                    return symTable.semanticError;
                }
                // Check each spread record field against the target record's field type.
                boolean errored = false;
                for (BField bField : ((BRecordType) spreadExprType).fields.values()) {
                    BType specFieldType = bField.type;
                    BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, data.env, bField.name,
                            mappingType.tsymbol);
                    BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name,
                            (BRecordType) mappingType);
                    if (expectedFieldType != symTable.semanticError &&
                            !types.isAssignable(specFieldType, expectedFieldType)) {
                        dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
                                expectedFieldType, bField.name, specFieldType);
                        if (!errored) {
                            errored = true;
                        }
                    }
                }
                return errored ? symTable.semanticError : symTable.noType;
            } else {
                // Shorthand `{x}` field: the variable name doubles as the key.
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false,
                        (BRecordType) mappingType, data);
                fieldType = typeSymbolPair.determinedType;
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            break;
        case TypeTags.MAP:
            if (spreadOpField) {
                // Spread into a map: the spread's member type must fit the map constraint.
                BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                BType spreadOpType = checkExpr(spreadExp, data);
                BType spreadOpMemberType = checkSpreadFieldWithMapType(spreadOpType);
                if (spreadOpMemberType.tag == symTable.semanticError.tag) {
                    dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                            spreadOpType);
                    return symTable.semanticError;
                }
                return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
                        DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            }
            // For map literals every valid key maps to the constraint type.
            boolean validMapKey;
            if (keyValueField) {
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey, data);
                readOnlyConstructorField = keyValField.readonly;
                pos = key.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false, data);
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
            break;
    }
    // A `readonly` field requires an immutable (or immutable-convertible) type.
    if (readOnlyConstructorField) {
        if (types.isSelectivelyImmutableType(fieldType)) {
            fieldType =
                    ImmutableTypeCloner.getImmutableIntersectionType(pos, types, fieldType, data.env, symTable,
                            anonymousModelHelper, names, new HashSet<>());
        } else if (!types.isInherentlyImmutableType(fieldType)) {
            dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
            fieldType = symTable.semanticError;
        }
    }
    if (spreadOpField) {
        // valueExpr is only assigned for spread fields that fell through the switch above.
        valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
    }
    // In non-error-logging (silent) mode check a clone so the real node stays untouched.
    BLangExpression exprToCheck = valueExpr;
    if (data.nonErrorLoggingCheck) {
        exprToCheck = nodeCloner.cloneNode(valueExpr);
    } else {
        ((BLangNode) field).setBType(fieldType);
    }
    return checkExpr(exprToCheck, data.env, fieldType, data);
}
/**
 * Computes the member type contributed by a spread-operator expression inside a map
 * constructor: the broad type of a record's field (and rest) types, a map's constraint, the
 * referred type's result for type references, and a semantic error for anything else.
 */
private BType checkSpreadFieldWithMapType(BType spreadOpType) {
    if (spreadOpType.tag == TypeTags.RECORD) {
        BRecordType recordType = (BRecordType) spreadOpType;
        List<BType> memberTypes = new ArrayList<>();
        for (BField recField : recordType.fields.values()) {
            memberTypes.add(recField.type);
        }
        if (!recordType.sealed) {
            // Open records also contribute their rest field type.
            memberTypes.add(recordType.restFieldType);
        }
        return getRepresentativeBroadType(memberTypes);
    }
    if (spreadOpType.tag == TypeTags.MAP) {
        return ((BMapType) spreadOpType).constraint;
    }
    if (spreadOpType.tag == TypeTags.TYPEREFDESC) {
        return checkSpreadFieldWithMapType(Types.getReferredType(spreadOpType));
    }
    return symTable.semanticError;
}
/**
 * Resolves the key of a record-literal field against the record type. A computed key is
 * checked as a string and yields the union of all possible field (and rest) types; an
 * identifier or string-literal key resolves to the corresponding field's symbol and type.
 */
private TypeSymbolPair checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey,
                                                 BRecordType recordType, AnalyzerData data) {
    if (computedKey) {
        checkExpr(keyExpr, symTable.stringType, data);
        if (keyExpr.getBType() == symTable.semanticError) {
            return new TypeSymbolPair(null, symTable.semanticError);
        }
        // Any field could be targeted at runtime, so the value type is the union of all.
        LinkedHashSet<BType> possibleFieldTypes = new LinkedHashSet<>();
        for (BField recField : recordType.fields.values()) {
            possibleFieldTypes.add(recField.type);
        }
        if (recordType.restFieldType.tag != TypeTags.NONE) {
            possibleFieldTypes.add(recordType.restFieldType);
        }
        return new TypeSymbolPair(null, BUnionType.create(null, possibleFieldTypes));
    }
    Name fieldName;
    if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        fieldName = names.fromIdNode(((BLangSimpleVarRef) keyExpr).variableName);
    } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) {
        fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
    } else {
        dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
        return new TypeSymbolPair(null, symTable.semanticError);
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, data.env, fieldName, recordType.tsymbol);
    BType fieldType = checkRecordLiteralKeyByName(keyExpr.pos, fieldSymbol, fieldName, recordType);
    return new TypeSymbolPair(fieldSymbol instanceof BVarSymbol ? (BVarSymbol) fieldSymbol : null, fieldType);
}
/**
 * Maps a resolved field symbol (or the not-found sentinel) to the field's type: declared
 * fields use their own type, unknown keys fall back to the rest field type on open records,
 * and unknown keys on sealed records are reported as errors.
 */
private BType checkRecordLiteralKeyByName(Location location, BSymbol fieldSymbol, Name key,
                                          BRecordType recordType) {
    if (fieldSymbol != symTable.notFoundSymbol) {
        return fieldSymbol.type;
    }
    if (!recordType.sealed) {
        return recordType.restFieldType;
    }
    dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key,
            recordType.tsymbol.type.getKind().typeName(), recordType);
    return symTable.semanticError;
}
/**
 * Builds the union of every possible value type of the record: all declared field types plus
 * the rest field type when one exists.
 */
private BType getAllFieldType(BRecordType recordType) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    recordType.fields.values().forEach(recField -> memberTypes.add(recField.type));
    BType restType = recordType.restFieldType;
    if (restType != null && restType != symTable.noType) {
        memberTypes.add(restType);
    }
    return BUnionType.create(null, memberTypes);
}
/**
 * Validates a map/JSON literal key: a computed key must type-check as string; otherwise the
 * key must be an identifier or a string literal. Reports an error for any other form.
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, AnalyzerData data) {
    if (computedKey) {
        checkExpr(keyExpr, symTable.stringType, data);
        return keyExpr.getBType() != symTable.semanticError;
    }
    boolean identifierKey = keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF;
    boolean stringLiteralKey = keyExpr.getKind() == NodeKind.LITERAL
            && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING;
    if (identifierKey || stringLiteralKey) {
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}
/**
 * Makes the given type nillable for optional-access results; types that already include nil
 * are returned unchanged.
 */
private BType addNilForNillableAccessType(BType actualType) {
    return actualType.isNullable() ? actualType : BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves access to a required (non-optional) record field, recording the field symbol on
 * the access expression. Returns a semantic error when the field is missing or optional.
 */
private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType, AnalyzerData data) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, data.env, fieldName, recordType.tsymbol);
    if (Symbols.isOptional(fieldSymbol) || fieldSymbol == symTable.notFoundSymbol) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves access to an optional record field.
 * Succeeds only when the field exists and is marked optional; the resolved
 * symbol is then recorded on the access expression.
 */
private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType, AnalyzerData data) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, data.env, fieldName, recordType.tsymbol);

    if (fieldSymbol != symTable.notFoundSymbol && Symbols.isOptional(fieldSymbol)) {
        varReferExpr.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    return symTable.semanticError;
}
/**
 * Resolves access through the rest field of an open record.
 * Applies only when the name does not match any declared field and the record
 * is not sealed; otherwise semanticError.
 */
private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                         BRecordType recordType, AnalyzerData data) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, data.env, fieldName, recordType.tsymbol);

    // Only an undeclared name on an open record falls through to the rest field type.
    if (fieldSymbol == symTable.notFoundSymbol && !recordType.sealed) {
        return recordType.restFieldType;
    }
    return symTable.semanticError;
}
/**
 * Resolves a field access on an object type.
 * First tries to resolve the name as an object field; failing that, tries an
 * attached function using the mangled {@code <object>.<name>} symbol name.
 * Logs a diagnostic and returns semanticError when neither resolves.
 */
private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess,
                                     Name fieldName, BObjectType objectType, AnalyzerData data) {
    BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos,
            data.env, fieldName, objectType.tsymbol);

    if (fieldSymbol != symTable.notFoundSymbol) {
        // Name resolved to a plain object field.
        bLangFieldBasedAccess.symbol = fieldSymbol;
        return fieldSymbol.type;
    }

    // Not a field: look for an attached method with the mangled name.
    Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value,
            fieldName.value));
    fieldSymbol =
            symResolver.resolveObjectField(bLangFieldBasedAccess.pos, data.env, objFuncName, objectType.tsymbol);

    if (fieldSymbol == symTable.notFoundSymbol) {
        dlog.error(bLangFieldBasedAccess.field.pos,
                DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
        return symTable.semanticError;
    }

    if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) &&
            !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) {
        // An isolated method accessed via a non-isolated object: clear ISOLATED on a
        // duplicated symbol so the original declaration is not mutated.
        fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol);

        fieldSymbol.flags &= ~Flags.ISOLATED;
        fieldSymbol.type.flags &= ~Flags.ISOLATED;
    }

    bLangFieldBasedAccess.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Returns the member type of a tuple at the given index: a fixed member type
 * when in range, the rest type for indices past the fixed members of a tuple
 * with a rest descriptor, and semanticError for any other out-of-range index.
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType tuple = (BTupleType) tupleType;
    int memberCount = tuple.tupleTypes.size();

    if (indexValue >= memberCount) {
        // Past the fixed members: valid only when the tuple has a rest type.
        return tuple.restType != null ? tuple.restType : symTable.semanticError;
    }
    return indexValue < 0 ? symTable.semanticError : tuple.tupleTypes.get(indexValue);
}
/**
 * Type checks the start/end tag names of an XML element literal and reports a
 * mismatch when exactly one side is a QName, or both are QNames that differ.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv,
                          AnalyzerData data) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType, data);

    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element: nothing further to match.
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType, data);

    boolean startIsQName = startTagName.getKind() == NodeKind.XML_QNAME;
    boolean endIsQName = endTagName.getKind() == NodeKind.XML_QNAME;

    if (startIsQName && endIsQName && startTagName.equals(endTagName)) {
        return; // matching literal tags
    }
    if (!startIsQName && !endIsQName) {
        return; // both dynamic: matching is deferred to runtime
    }
    dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH);
}
/**
 * Type checks every interpolation in a string template, requiring each to be a
 * non-nil simple basic type or string; logs an incompatible-types diagnostic
 * for each violation (semantic errors are skipped to avoid cascades).
 */
private void checkStringTemplateExprs(List<? extends BLangExpression> exprs, AnalyzerData data) {
    for (BLangExpression expr : exprs) {
        checkExpr(expr, data);

        BType exprType = expr.getBType();
        if (exprType == symTable.semanticError || types.isNonNilSimpleBasicTypeOrString(exprType)) {
            continue;
        }

        dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                BUnionType.create(null, symTable.intType, symTable.floatType,
                        symTable.decimalType, symTable.stringType,
                        symTable.booleanType), exprType);
    }
}
/**
 * Concatenates consecutive non-XML (text-producing) child expressions into
 * single XML text literal nodes, yielding the reduced list of children.
 * XML-typed children are kept as-is and flush any pending text run; children
 * of a type that can never contribute text are reported and dropped.
 *
 * @param exprs         child nodes of the XML element literal
 * @param xmlElementEnv environment in which each child is type checked
 * @return reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv,
                                                        AnalyzerData data) {
    List<BLangExpression> newChildren = new ArrayList<>();
    // Pending run of consecutive text-producing expressions awaiting concatenation.
    List<BLangExpression> tempConcatExpressions = new ArrayList<>();

    for (BLangExpression expr : exprs) {
        BType exprType;
        if (expr.getKind() == NodeKind.QUERY_EXPR) {
            // Query expressions are checked against the expected type of the literal.
            exprType = checkExpr(expr, xmlElementEnv, data.expType, data);
        } else {
            exprType = checkExpr(expr, xmlElementEnv, data);
        }
        if (TypeTags.isXMLTypeTag(exprType.tag)) {
            // An XML child terminates the current text run.
            if (!tempConcatExpressions.isEmpty()) {
                newChildren.add(getXMLTextLiteral(tempConcatExpressions));
                tempConcatExpressions = new ArrayList<>();
            }
            newChildren.add(expr);
            continue;
        }

        BType type = expr.getBType();
        // Tags >= JSON (excluding int/string tag families) cannot produce text content.
        if (type.tag >= TypeTags.JSON &&
                !TypeTags.isIntegerTypeTag(type.tag) && !TypeTags.isStringTypeTag(type.tag)) {
            if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) {
                dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                        BUnionType.create(null, symTable.intType, symTable.floatType,
                                symTable.decimalType, symTable.stringType,
                                symTable.booleanType, symTable.xmlType), type);
            }
            continue;
        }

        tempConcatExpressions.add(expr);
    }

    // Flush the trailing text run, if any.
    if (!tempConcatExpressions.isEmpty()) {
        newChildren.add(getXMLTextLiteral(tempConcatExpressions));
    }

    return newChildren;
}
/**
 * Wraps a non-empty run of text fragment expressions into a single XML text
 * literal node positioned at the first fragment.
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.textFragments = exprs;
    textLiteral.pos = exprs.get(0).pos;
    textLiteral.setBType(symTable.xmlType);
    return textLiteral;
}
/**
 * Computes the final type of an access expression, lifting nil when navigation
 * may short-circuit and error when error-safe navigation applies. Also records
 * the unlifted type as the expression's original type.
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    accessExpr.originalType = actualType;

    BUnionType resultUnion = BUnionType.create(null, actualType);
    if (returnsNull(accessExpr)) {
        resultUnion.add(symTable.nilType);
    }

    BType parentType = accessExpr.expr.getBType();
    boolean parentMayBeError = parentType.tag == TypeTags.SEMANTIC_ERROR
            || (parentType.tag == TypeTags.UNION
                    && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType));
    if (accessExpr.errorSafeNavigation && parentMayBeError) {
        resultUnion.add(symTable.errorType);
    }

    Set<BType> members = resultUnion.getMemberTypes();
    if (members.size() == 1) {
        // No lifting happened: collapse the singleton union back to the plain type.
        return members.iterator().next();
    }
    return resultUnion;
}
/**
 * Decides whether an access expression may evaluate to nil: true when the
 * accessed type is nilable (except json), or for a member access on a map
 * whose constraint is neither any nor json.
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.getBType();

    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }

    // Only a map member access can still produce nil, depending on the constraint.
    if (parentType.tag != TypeTags.MAP || accessExpr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }

    BType constraintType = ((BMapType) accessExpr.expr.getBType()).constraint;
    return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
}
/**
 * Resolves a field access on an object type or a union of object types.
 * For a union, the access must resolve in every member; the result is the
 * single common type or a union of the per-member field types.
 */
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName,
                                         AnalyzerData data) {
    if (varRefType.tag == TypeTags.OBJECT) {
        return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType, data);
    }

    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memberType, data);
        if (fieldType == symTable.semanticError) {
            // Any member failing the access fails the whole union.
            return fieldType;
        }
        memberFieldTypes.add(fieldType);
    }

    return memberFieldTypes.size() == 1 ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a (non-optional) field access on a record type or a union of records.
 * For a single record, an optional non-nilable field read is nil-lifted;
 * otherwise the field must be required. For a union, the field must resolve in
 * every member and optional-in-any-member nilable fields are rejected.
 */
private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType type, Name fieldName,
                                         AnalyzerData data) {
    BType varRefType = Types.getReferredType(type);
    if (varRefType.tag == TypeTags.RECORD) {
        BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, data.env,
                fieldName, varRefType.tsymbol);

        if (Symbols.isOptional(fieldSymbol) && !fieldSymbol.type.isNullable() && !fieldAccessExpr.isLValue) {
            // Reading an optional, non-nilable field: the value may be absent, so nil is added.
            fieldAccessExpr.symbol = fieldSymbol;
            return addNilForNillableAccessType(fieldSymbol.type);
        }

        return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType, data);
    }

    // Union of records.
    Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();

    // First pass: reject when a nilable field is optional in any member —
    // nil would be ambiguous between "absent" and the field's own nil value.
    for (BType memType : memberTypes) {
        BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, data.env,
                fieldName, memType.tsymbol);
        if (fieldSymbol.type.isNullable() &&
                isFieldOptionalInRecords(((BUnionType) varRefType), fieldName, fieldAccessExpr, data)) {
            return symTable.semanticError;
        }
    }

    // Second pass: the access must succeed in every member of the union.
    LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

    for (BType memType : memberTypes) {
        BType individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName, data);

        if (individualFieldType == symTable.semanticError) {
            return individualFieldType;
        }

        fieldTypeMembers.add(individualFieldType);
    }

    if (fieldTypeMembers.size() == 1) {
        return fieldTypeMembers.iterator().next();
    }

    return BUnionType.create(null, fieldTypeMembers);
}
/**
 * Returns true when the named field is declared optional in at least one
 * member record of the given union.
 */
private boolean isFieldOptionalInRecords(BUnionType unionType, Name fieldName,
                                         BLangFieldBasedAccess fieldAccessExpr, AnalyzerData data) {
    for (BType memberType : unionType.getMemberTypes()) {
        BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, data.env, fieldName,
                memberType.tsymbol);
        if (Symbols.isOptional(fieldSymbol)) {
            return true;
        }
    }
    return false;
}
/**
 * Resolves a field access appearing as an assignment target (lvalue) on a
 * record or union of records. For a record, required fields are preferred and
 * optional fields accepted as a fallback; for a union, the access must succeed
 * in every member.
 */
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                            Name fieldName, AnalyzerData data) {
    if (varRefType.tag == TypeTags.RECORD) {
        BRecordType recordType = (BRecordType) varRefType;
        BType requiredFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, recordType, data);
        if (requiredFieldType == symTable.semanticError) {
            // Not a required field: fall back to an optional field of the same name.
            return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, recordType, data);
        }
        return requiredFieldType;
    }

    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memberType, fieldName, data);
        if (fieldType == symTable.semanticError) {
            return symTable.semanticError;
        }
        memberFieldTypes.add(fieldType);
    }

    return memberFieldTypes.size() == 1 ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves an optional field access ({@code x?.f}) on a record or union of
 * records. For a record, a required field keeps its type while an optional
 * field is nil-lifted. For a union, members where the field is missing are
 * tolerated, but their presence makes the overall result nilable.
 */
private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                                 Name fieldName, AnalyzerData data) {
    BType refType = Types.getReferredType(varRefType);
    if (refType.tag == TypeTags.RECORD) {
        // A required field is always present: no nil lifting needed.
        BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) refType, data);
        if (fieldType != symTable.semanticError) {
            return fieldType;
        }

        // An optional field may be absent: the result is nil-lifted.
        fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) refType, data);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        return addNilForNillableAccessType(fieldType);
    }

    // Union of records.
    Set<BType> memberTypes = ((BUnionType) refType).getMemberTypes();

    BType fieldType;

    boolean nonMatchedRecordExists = false;

    LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

    for (BType memType : memberTypes) {
        BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName, data);

        if (individualFieldType == symTable.semanticError) {
            // Field absent in this member: tolerated, but forces a nilable result below.
            nonMatchedRecordExists = true;
            continue;
        }

        fieldTypeMembers.add(individualFieldType);
    }

    if (fieldTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }

    if (fieldTypeMembers.size() == 1) {
        fieldType = fieldTypeMembers.iterator().next();
    } else {
        fieldType = BUnionType.create(null, fieldTypeMembers);
    }

    return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
/**
 * Classifies each record member of a union for field-access error reporting:
 * records where the field is undeclared, and records where it is declared but
 * nilable.
 */
private RecordUnionDiagnostics checkRecordUnion(BLangFieldBasedAccess fieldAccessExpr, Set<BType> memberTypes,
                                                Name fieldName, AnalyzerData data) {
    RecordUnionDiagnostics diagnostics = new RecordUnionDiagnostics();

    for (BType memberType : memberTypes) {
        BRecordType recordMember = (BRecordType) Types.getReferredType(memberType);

        if (!recordMember.getFields().containsKey(fieldName.getValue())) {
            diagnostics.undeclaredInRecords.add(recordMember);
        } else if (isNilableType(fieldAccessExpr, memberType, fieldName, data)) {
            diagnostics.nilableInRecords.add(recordMember);
        }
    }

    return diagnostics;
}
/**
 * Returns true when the named field, resolved against the given member type,
 * has a nilable type.
 */
private boolean isNilableType(BLangFieldBasedAccess fieldAccessExpr, BType memberType,
                              Name fieldName, AnalyzerData data) {
    BSymbol resolved = symResolver.resolveStructField(fieldAccessExpr.pos, data.env,
            fieldName, memberType.tsymbol);
    return resolved.type.isNullable();
}
/**
 * Logs the most specific diagnostic for a failed rvalue field access on a
 * record or union of records: optional-field misuse, undeclared field on a
 * sealed record, invalid access on an open record, or the union-specific
 * undeclared/nilable combinations.
 */
private void logRhsFieldAccExprErrors(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName,
                                      AnalyzerData data) {
    if (varRefType.tag != TypeTags.RECORD) {
        // Union of records: report the exact combination of problems found.
        LinkedHashSet<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
        RecordUnionDiagnostics unionDiag = checkRecordUnion(fieldAccessExpr, memberTypes, fieldName, data);

        if (unionDiag.hasNilableAndUndeclared()) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.UNDECLARED_AND_NILABLE_FIELDS_IN_UNION_OF_RECORDS, fieldName,
                    unionDiag.recordsToString(unionDiag.undeclaredInRecords),
                    unionDiag.recordsToString(unionDiag.nilableInRecords));
        } else if (unionDiag.hasUndeclared()) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_UNION_OF_RECORDS, fieldName,
                    unionDiag.recordsToString(unionDiag.undeclaredInRecords));
        } else if (unionDiag.hasNilable()) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.NILABLE_FIELD_IN_UNION_OF_RECORDS, fieldName,
                    unionDiag.recordsToString(unionDiag.nilableInRecords));
        }
        return;
    }

    BRecordType recordVarRefType = (BRecordType) varRefType;
    if (recordVarRefType.getFields().containsKey(fieldName.getValue())) {
        // The field exists but is optional: plain field access is not allowed.
        dlog.error(fieldAccessExpr.pos,
                DiagnosticErrorCode.FIELD_ACCESS_CANNOT_BE_USED_TO_ACCESS_OPTIONAL_FIELDS);
    } else if (recordVarRefType.sealed) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_RECORD, fieldName, varRefType);
    } else {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_IN_RECORD_TYPE, fieldName,
                varRefType);
    }
}
/**
 * Type checks a field access ({@code x.f}) against the accessed expression's
 * type: objects, records (rvalue and lvalue paths), lax types (json/maps of
 * json), accesses chained off a lax original type, and xml. Records the
 * original (unlifted) type on the expression and logs diagnostics on failure.
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName,
                                   AnalyzerData data) {
    BType actualType = symTable.semanticError;
    varRefType = Types.getReferredType(varRefType);

    if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
        actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName, data);
        fieldAccessExpr.originalType = actualType;
    } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
        actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName, data);

        if (actualType != symTable.semanticError) {
            fieldAccessExpr.originalType = actualType;
            return actualType;
        }

        if (!fieldAccessExpr.isLValue) {
            // Rvalue access failed: log the most specific diagnostic and stop.
            logRhsFieldAccExprErrors(fieldAccessExpr, varRefType, fieldName, data);
            return actualType;
        }

        // Assignment target: optional fields are also valid as lvalues.
        actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName, data);
        fieldAccessExpr.originalType = actualType;
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
                    fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType);
        }
    } else if (types.isLax(varRefType)) {
        if (fieldAccessExpr.isLValue) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT,
                    varRefType);
            return symTable.semanticError;
        }
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr, data);
        }
        // Lax access may fail at runtime, so error is added to the result type.
        BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained access off an expression whose original type was lax.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr, data);
        }
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        if (fieldAccessExpr.isLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
        }
        actualType = symTable.xmlType;
        fieldAccessExpr.originalType = actualType;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
                varRefType);
    }

    return actualType;
}
/**
 * Resolves the XML namespace prefix of an ns-prefixed field access and stores
 * the namespace symbol on the expression. The prefix may resolve directly to
 * an xmlns declaration or, via a package, to a namespace constant.
 */
private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr,
                                 AnalyzerData data) {
    String nsPrefix = fieldAccessExpr.nsPrefix.value;
    BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(data.env, names.fromString(nsPrefix));

    if (nsSymbol == symTable.notFoundSymbol) {
        dlog.error(fieldAccessExpr.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE,
                fieldAccessExpr.nsPrefix);
    } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) {
        // Prefix names a package: look up an XML namespace constant inside it.
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst(
                fieldAccessExpr.field.value, fieldAccessExpr.nsPrefix.value,
                (BPackageSymbol) nsSymbol, fieldAccessExpr.pos, data);
    } else {
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) nsSymbol;
    }
}
/**
 * Returns true when the access expression has a recorded original type that is
 * a lax type.
 */
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
    BType originalType = fieldBasedAccess.originalType;
    return originalType != null && types.isLax(originalType);
}
/**
 * Computes the member type produced by a lax field access on the given type:
 * json stays json, xml yields string, a map yields its constraint, and unions
 * are handled member-wise (a json-equivalent union collapses to json).
 */
private BType getLaxFieldAccessType(BType exprType) {
    switch (exprType.tag) {
        case TypeTags.JSON:
            return symTable.jsonType;
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
            return symTable.stringType;
        case TypeTags.MAP:
            return ((BMapType) exprType).constraint;
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) exprType;
            if (types.isSameType(symTable.jsonType, unionType)) {
                return symTable.jsonType;
            }
            LinkedHashSet<BType> laxMemberTypes = new LinkedHashSet<>();
            for (BType memberType : unionType.getMemberTypes()) {
                laxMemberTypes.add(getLaxFieldAccessType(memberType));
            }
            return laxMemberTypes.size() == 1 ? laxMemberTypes.iterator().next()
                    : BUnionType.create(null, laxMemberTypes);
        case TypeTags.TYPEREFDESC:
            return getLaxFieldAccessType(((BTypeReferenceType) exprType).referredType);
        default:
            return symTable.semanticError;
    }
}
/**
 * Type checks an optional field access ({@code x?.f}). Nil is first stripped
 * from a nilable accessed type (and remembered), then the access is checked
 * against records, lax types, or a chained lax original type; nil is re-added
 * to the result when the accessed expression or the access itself is nilable.
 */
private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName,
                                           AnalyzerData data) {
    BType actualType = symTable.semanticError;

    BType referredType = Types.getReferredType(varRefType);
    boolean nillableExprType = false;
    BType effectiveType = varRefType;

    if (referredType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) referredType).getMemberTypes();

        if (memTypes.contains(symTable.nilType)) {
            // Strip nil and remember that the accessed expression may be nil.
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }

            effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                    BUnionType.create(null, nilRemovedSet);
        }
    }

    if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
        actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName, data);
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
                    varRefType, fieldName);
        }
        fieldAccessExpr.nilSafeNavigation = nillableExprType;
        // For non-leaf accesses in a navigation chain, the original type excludes nil.
        fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (types.isLax(effectiveType)) {
        BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
        // Error is added only when the lax access could fail at runtime.
        actualType = accessCouldResultInError(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr, data);
        }
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained optional access off an expression whose original type was lax.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        actualType = accessCouldResultInError(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr, data);
        }
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos,
                DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType);
    }

    // Re-add nil when the accessed expression was nilable or the access is nil-safe.
    if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }

    return actualType;
}
/**
 * Returns true when a lax access on the given type could produce an error at
 * runtime: json and xml can, maps cannot, and a union can when any member can.
 */
private boolean accessCouldResultInError(BType bType) {
    BType type = Types.getReferredType(bType);
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
            return true;
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (accessCouldResultInError(memberType)) {
                    return true;
                }
            }
            return false;
        default:
            // Includes maps, whose access returns nil (not error) on a missing key.
            return false;
    }
}
/**
 * Type checks an index-based member access ({@code x[i]}) against the accessed
 * expression's type: mappings, lists, strings, xml, and tables. Nil is first
 * stripped from a nilable accessed type (allowed only on mappings, and never
 * as an lvalue) and re-added to the result at the end.
 */
private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr, AnalyzerData data) {
    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);

    boolean nillableExprType = false;

    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

        if (memTypes.contains(symTable.nilType)) {
            // Strip nil from the accessed type; remember it to nil-lift the result.
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }

            if (nillableExprType) {
                varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                        BUnionType.create(null, nilRemovedSet);

                // Nil-lifted member access is only allowed on mappings.
                if (!types.isSubTypeOfMapping(varRefType)) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                            indexBasedAccessExpr.expr.getBType());
                    return symTable.semanticError;
                }

                // ... and never as an assignment target.
                if (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                            indexBasedAccessExpr.expr.getBType());
                    return symTable.semanticError;
                }
            }
        }
    }

    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    BType actualType = symTable.semanticError;

    if (types.isSubTypeOfMapping(varRefType)) {
        // Mapping access: the index must be a string key.
        checkExpr(indexExpr, symTable.stringType, data);

        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }

        actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType, data);

        if (actualType == symTable.semanticError) {
            if (isConstExpr(indexExpr)) {
                // A constant key lets us report the exact undefined field name.
                String fieldName = getConstFieldName(indexExpr);
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD,
                        fieldName, indexBasedAccessExpr.expr.getBType());
                return actualType;
            }

            dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType());
            return actualType;
        }

        indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
        // Non-leaf accesses in a navigation chain record the nil-free original type.
        indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (types.isSubTypeOfList(varRefType)) {
        // List (array/tuple) access: the index must be an int.
        checkExpr(indexExpr, symTable.intType, data);

        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }

        actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
        indexBasedAccessExpr.originalType = actualType;

        if (actualType == symTable.semanticError) {
            if (isConstExpr(indexExpr)) {
                dlog.error(indexBasedAccessExpr.indexExpr.pos,
                        DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr));
                return actualType;
            }
            dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType());
            return actualType;
        }
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String access: read-only; yields a single character string.
        if (indexBasedAccessExpr.isLValue) {
            dlog.error(indexBasedAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                    indexBasedAccessExpr.expr.getBType());
            return symTable.semanticError;
        }

        checkExpr(indexExpr, symTable.intType, data);

        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }

        indexBasedAccessExpr.originalType = symTable.charStringType;
        actualType = symTable.charStringType;
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        if (indexBasedAccessExpr.isLValue) {
            indexExpr.setBType(symTable.semanticError);
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
            return actualType;
        }

        BType type = checkExpr(indexExpr, symTable.intType, data);
        if (type == symTable.semanticError) {
            return type;
        }
        // Indexing an xml sequence yields the same xml type.
        indexBasedAccessExpr.originalType = varRefType;
        actualType = varRefType;
    } else if (varRefType.tag == TypeTags.TABLE) {
        if (indexBasedAccessExpr.isLValue) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS,
                    varRefType);
            return symTable.semanticError;
        }
        BTableType tableType = (BTableType) Types.getReferredType(indexBasedAccessExpr.expr.getBType());
        BType keyTypeConstraint = tableType.keyTypeConstraint;
        if (tableType.keyTypeConstraint == null) {
            // Derive the key constraint from the key field names; keyless tables
            // do not support member access.
            keyTypeConstraint = createTableKeyConstraint(tableType.fieldNameList, tableType.constraint);

            if (keyTypeConstraint == symTable.semanticError) {
                dlog.error(indexBasedAccessExpr.pos,
                        DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE,
                        indexBasedAccessExpr.expr);
                return symTable.semanticError;
            }
        }

        if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) {
            checkExpr(indexExpr, keyTypeConstraint, data);
            if (indexExpr.getBType() == symTable.semanticError) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                        keyTypeConstraint);
                return symTable.semanticError;
            }
        } else {
            // Multi-key access: each key expression must match the corresponding
            // member of the tuple-typed key constraint.
            List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr)
                    indexBasedAccessExpr.indexExpr).multiKeyIndexExprs;
            List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes;
            if (keyConstraintTypes.size() != multiKeyExpressionList.size()) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                        keyTypeConstraint);
                return symTable.semanticError;
            }

            for (int i = 0; i < multiKeyExpressionList.size(); i++) {
                BLangExpression keyExpr = multiKeyExpressionList.get(i);
                checkExpr(keyExpr, keyConstraintTypes.get(i), data);
                if (keyExpr.getBType() == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }
            }
        }

        if (data.expType.tag != TypeTags.NONE) {
            BType resultType = checkExpr(indexBasedAccessExpr.expr, data.expType, data);
            if (resultType == symTable.semanticError) {
                return symTable.semanticError;
            }
        }

        // A table lookup may miss: the row type is always nil-lifted.
        BType constraint = tableType.constraint;
        actualType = addNilForNillableAccessType(constraint);
        indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (varRefType == symTable.semanticError) {
        indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
        return symTable.semanticError;
    } else {
        indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                indexBasedAccessExpr.expr.getBType());
        return symTable.semanticError;
    }

    // Re-add nil stripped from the accessed type at the start.
    if (nillableExprType && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }

    return actualType;
}
/**
 * Extracts the compile-time integer value of a constant index expression:
 * a (possibly grouped) numeric literal, a unary expression on one, or a
 * reference to an integer constant.
 */
private Long getConstIndex(BLangExpression indexExpr) {
    NodeKind kind = indexExpr.getKind();
    if (kind == NodeKind.GROUP_EXPR) {
        // Unwrap parentheses and retry.
        return getConstIndex(((BLangGroupExpr) indexExpr).expression);
    }
    if (kind == NodeKind.NUMERIC_LITERAL) {
        return (Long) ((BLangLiteral) indexExpr).value;
    }
    if (kind == NodeKind.UNARY_EXPR) {
        BLangNumericLiteral numericLiteral =
                types.constructNumericLiteralFromUnaryExpr((BLangUnaryExpr) indexExpr);
        return (Long) numericLiteral.value;
    }
    return (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Extracts the compile-time string value of a constant key expression:
 * a (possibly grouped) string literal or a reference to a string constant.
 */
private String getConstFieldName(BLangExpression indexExpr) {
    NodeKind kind = indexExpr.getKind();
    if (kind == NodeKind.GROUP_EXPR) {
        // Unwrap parentheses and retry.
        return getConstFieldName(((BLangGroupExpr) indexExpr).expression);
    }
    if (kind == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) indexExpr).value;
    }
    return (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Checks an index access on an array against the index expression's type.
 * Integer indices are range-checked only when constant and the array is
 * closed; finite index types need at least one in-range value; a union index
 * is flattened to the combined finite value space.
 */
private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                         BArrayType arrayType) {
    BType actualType = symTable.semanticError;

    int tag = indexExprType.tag;

    if (tag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(tag)) {
        BLangExpression indexExpr = indexBasedAccess.indexExpr;
        // Non-constant indices and open arrays cannot be range-checked statically.
        if (!isConstExpr(indexExpr) || arrayType.state == BArrayState.OPEN) {
            return arrayType.eType;
        }
        Long indexVal = getConstIndex(indexExpr);
        return indexVal >= arrayType.size || indexVal < 0 ? symTable.semanticError : arrayType.eType;
    }

    switch (tag) {
        case TypeTags.FINITE:
            // At least one value in the finite index type must be a valid index.
            BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
            boolean validIndexExists = false;
            for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                if (indexValue >= 0 &&
                        (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) {
                    validIndexExists = true;
                    break;
                }
            }
            if (!validIndexExists) {
                return symTable.semanticError;
            }
            actualType = arrayType.eType;
            break;
        case TypeTags.UNION:
            // Merge the finite members of the union into a single finite type
            // and re-check against the combined value space.
            List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                    .filter(memType -> Types.getReferredType(memType).tag == TypeTags.FINITE)
                    .map(matchedType -> (BFiniteType) Types.getReferredType(matchedType))
                    .collect(Collectors.toList());

            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                finiteType = new BFiniteType(null, valueSpace);
            }

            BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
            if (elementType == symTable.semanticError) {
                return symTable.semanticError;
            }
            actualType = arrayType.eType;
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap the type reference and retry.
            return checkArrayIndexBasedAccess(indexBasedAccess, Types.getReferredType(indexExprType),
                    arrayType);
    }
    return actualType;
}
    /**
     * Determines the result type of an index-based access on a list value: an array, a tuple, or
     * a union of such list types.
     * For a union, the result is the (deduplicated) union of the per-member results; the access is
     * an error only if it is invalid for every member.
     */
    private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
        if (type.tag == TypeTags.ARRAY) {
            return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type);
        }
        if (type.tag == TypeTags.TUPLE) {
            return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType());
        }
        // Remaining case: a union of list types. Collect valid member results, skipping the
        // members for which the access is erroneous.
        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
        for (BType memType : ((BUnionType) type).getMemberTypes()) {
            BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType);
            if (individualFieldType == symTable.semanticError) {
                continue;
            }
            fieldTypeMembers.add(individualFieldType);
        }
        if (fieldTypeMembers.size() == 0) {
            return symTable.semanticError;
        }
        if (fieldTypeMembers.size() == 1) {
            return fieldTypeMembers.iterator().next();
        }
        return BUnionType.create(null, fieldTypeMembers);
    }
    /**
     * Determines the result type of an index-based access on a tuple, driven by the type of the
     * index expression ({@code currentType}).
     *
     * @param accessExpr  the index access expression
     * @param tuple       the tuple type being indexed
     * @param currentType the (possibly finite/union/type-reference) type of the index expression
     * @return the selected member type (or a union of candidates), or
     *         {@code symTable.semanticError} when no valid member can be selected
     */
    private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        int tag = currentType.tag;
        if (tag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(tag)) {
            if (isConstExpr(indexExpr)) {
                // A constant index selects exactly one tuple member type.
                return checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
            }
            // A non-constant integer index may select any member: the result is the union of all
            // (flattened) tuple member types.
            BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType();
            LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
            return tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
        }
        switch (tag) {
            case TypeTags.FINITE:
                // Each in-range value of the finite index contributes its member type; values that
                // select no member are skipped.
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    BType fieldType = checkTupleFieldType(tuple, indexValue);
                    if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                        possibleTypes.add(fieldType);
                    }
                }
                if (possibleTypes.size() == 0) {
                    return symTable.semanticError;
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                // Recurse on each non-finite member individually; merge all finite members into a
                // single finite type so their combined value space is validated together.
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    memType = Types.getReferredType(memType);
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }
                // A single erroneous member invalidates the whole access.
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
                break;
            case TypeTags.TYPEREFDESC:
                // Unwrap the type reference and retry with the referred type.
                return checkTupleIndexBasedAccess(accessExpr, tuple, Types.getReferredType(currentType));
        }
        return actualType;
    }
    /**
     * Collects the member types of the given tuple into {@code memberTypes}, flattening any
     * union-typed member into its constituent types.
     *
     * @return the same {@code memberTypes} set, for convenient chaining
     */
    private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
        tupleType.tupleTypes
                .forEach(memberType -> {
                    if (memberType.tag == TypeTags.UNION) {
                        // Flatten unions so the caller sees only non-union constituents.
                        collectMemberTypes((BUnionType) memberType, memberTypes);
                    } else {
                        memberTypes.add(memberType);
                    }
                });
        return memberTypes;
    }
    /**
     * Determines the result type of an index-based access on a mapping value: a map, a record, or
     * a union of mappings.
     * For a union, the result is the union of the valid member results; when the access is
     * invalid for some (but not all) members, the combined result additionally becomes nilable.
     */
    private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType bType, AnalyzerData data) {
        BType type = Types.getReferredType(bType);
        if (type.tag == TypeTags.MAP) {
            BType constraint = Types.getReferredType(((BMapType) type).constraint);
            // Member access on a map is nilable (the key may be absent) except when used as an
            // lvalue, where the assignment creates the entry.
            return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint);
        }
        if (type.tag == TypeTags.RECORD) {
            return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType(), data);
        }
        // Remaining case: a union of mapping types.
        BType fieldType;
        boolean nonMatchedRecordExists = false;
        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
        for (BType memType : ((BUnionType) type).getMemberTypes()) {
            BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType, data);
            if (individualFieldType == symTable.semanticError) {
                // Remember that at least one member rejected the access; handled below.
                nonMatchedRecordExists = true;
                continue;
            }
            fieldTypeMembers.add(individualFieldType);
        }
        if (fieldTypeMembers.size() == 0) {
            return symTable.semanticError;
        }
        if (fieldTypeMembers.size() == 1) {
            fieldType = fieldTypeMembers.iterator().next();
        } else {
            fieldType = BUnionType.create(null, fieldTypeMembers);
        }
        return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
    }
    /**
     * Determines the result type of an index-based access on a record, driven by the type of the
     * index (key) expression: a constant string selects a single field; a non-constant string may
     * select any field; finite and union key types combine the per-value results.
     *
     * @param accessExpr  the index access expression
     * @param record      the record type being accessed
     * @param currentType the type of the key expression
     * @param data        analyzer state threaded through type checking
     * @return the field type (made nilable where the field may be absent), or
     *         {@code symTable.semanticError} when no field can match
     */
    private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType,
                                              AnalyzerData data) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.STRING:
            case TypeTags.CHAR_STRING:
                if (isConstExpr(indexExpr)) {
                    String fieldName = Utils.escapeSpecialCharacters(getConstFieldName(indexExpr));
                    // Lookup order: required field, then optional field, then rest field.
                    actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record, data);
                    if (actualType != symTable.semanticError) {
                        return actualType;
                    }
                    actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record, data);
                    if (actualType == symTable.semanticError) {
                        actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record, data);
                        if (actualType == symTable.semanticError) {
                            return actualType;
                        }
                        if (actualType == symTable.neverType) {
                            return actualType;
                        }
                        // A rest-field access is nilable: the key may be absent at runtime.
                        return addNilForNillableAccessType(actualType);
                    }
                    if (accessExpr.isLValue) {
                        return actualType;
                    }
                    // Reading an optional field is nilable: the field may be unset.
                    return addNilForNillableAccessType(actualType);
                }
                // Non-constant string key: any field (or the rest field) may be selected, and the
                // key may match nothing, so the result is the union of all field types plus nil.
                LinkedHashSet<BType> fieldTypes = record.fields.values().stream()
                        .map(field -> field.type)
                        .collect(Collectors.toCollection(LinkedHashSet::new));
                if (record.restFieldType.tag != TypeTags.NONE) {
                    fieldTypes.add(record.restFieldType);
                }
                if (fieldTypes.stream().noneMatch(BType::isNullable)) {
                    fieldTypes.add(symTable.nilType);
                }
                actualType = BUnionType.create(null, fieldTypes);
                break;
            case TypeTags.FINITE:
                // Each string value in the finite key type contributes the type of the field it
                // selects (required, optional, or rest — optional/rest made nilable).
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    String fieldName = (String) ((BLangLiteral) finiteMember).value;
                    BType fieldType =
                            checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record, data);
                    if (fieldType == symTable.semanticError) {
                        fieldType =
                                checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record, data);
                        if (fieldType == symTable.semanticError) {
                            fieldType =
                                    checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record, data);
                        }
                        if (fieldType != symTable.semanticError) {
                            fieldType = addNilForNillableAccessType(fieldType);
                        }
                    }
                    if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
                        continue;
                    }
                    possibleTypes.add(fieldType);
                }
                if (possibleTypes.isEmpty()) {
                    return symTable.semanticError;
                }
                if (possibleTypes.stream().noneMatch(BType::isNullable)) {
                    possibleTypes.add(symTable.nilType);
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                // Recurse on non-finite members individually; merge all finite members into one
                // finite type so their value spaces are validated together.
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                types.getAllTypes(currentType, true).forEach(memType -> {
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType, data);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType, data);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }
                // A single erroneous member invalidates the whole access.
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
                break;
            case TypeTags.TYPEREFDESC:
                // Unwrap the type reference and retry with the referred type.
                return checkRecordIndexBasedAccess(accessExpr, record,
                        Types.getReferredType(currentType), data);
        }
        return actualType;
    }
private List<BType> getTypesList(BType type) {
if (type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) type;
return new ArrayList<>(unionType.getMemberTypes());
} else {
return Lists.of(type);
}
}
    /**
     * Computes the set of possible result types of a match expression: the pattern expression
     * types of every clause, plus any matched-expression member type that no pattern variable can
     * accept (such values flow through unchanged).
     * Any semantic error in the involved types short-circuits to a singleton error set.
     */
    private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
        List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType());
        LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
        for (BType type : exprTypes) {
            boolean assignable = false;
            for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
                BType patternExprType = pattern.expr.getBType();
                // Every pattern's expression type is a possible result.
                matchExprTypes.addAll(getTypesList(patternExprType));
                if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
                    return new LinkedHashSet<BType>() {
                        {
                            add(symTable.semanticError);
                        }
                    };
                }
                assignable = this.types.isAssignable(type, pattern.variable.getBType());
                if (assignable) {
                    // Some clause handles this member type; no fall-through needed for it.
                    break;
                }
            }
            if (!assignable) {
                // No clause matches this member type, so the value passes through as-is.
                matchExprTypes.add(type);
            }
        }
        return matchExprTypes;
    }
    /**
     * Returns whether a value of the given type could (transitively) contain table values, by
     * structurally walking unions, maps, records, arrays, and tuples.
     * {@code encounteredTypes} records visited types so recursion terminates on recursive type
     * definitions; a revisited type answers {@code false} since it is already being examined.
     * NOTE(review): no case here returns true directly — presumably a TypeTags.TABLE case exists
     * in an overload or caller outside this view; confirm before relying on a true result.
     */
    private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
        if (encounteredTypes.contains(type)) {
            return false;
        }
        encounteredTypes.add(type);
        switch (type.tag) {
            case TypeTags.UNION:
                for (BType bType1 : ((BUnionType) type).getMemberTypes()) {
                    if (couldHoldTableValues(bType1, encounteredTypes)) {
                        return true;
                    }
                }
                return false;
            case TypeTags.MAP:
                return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
            case TypeTags.RECORD:
                BRecordType recordType = (BRecordType) type;
                for (BField field : recordType.fields.values()) {
                    if (couldHoldTableValues(field.type, encounteredTypes)) {
                        return true;
                    }
                }
                // An open record may also hold tables via its rest field.
                return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
            case TypeTags.ARRAY:
                return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
            case TypeTags.TUPLE:
                for (BType bType : ((BTupleType) type).getTupleTypes()) {
                    if (couldHoldTableValues(bType, encounteredTypes)) {
                        return true;
                    }
                }
                return false;
        }
        return false;
    }
private boolean isConstExpr(BLangExpression expression) {
switch (expression.getKind()) {
case LITERAL:
case NUMERIC_LITERAL:
return true;
case GROUP_EXPR:
BLangGroupExpr groupExpr = (BLangGroupExpr) expression;
return isConstExpr(groupExpr.expression);
case SIMPLE_VARIABLE_REF:
return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
case UNARY_EXPR:
if (((BLangUnaryExpr) expression).expr.getKind() == NodeKind.NUMERIC_LITERAL &&
(OperatorKind.ADD.equals(((BLangUnaryExpr) expression).operator)) ||
(OperatorKind.SUB.equals(((BLangUnaryExpr) expression).operator))) {
return isConstExpr(((BLangUnaryExpr) expression).expr);
} else {
return false;
}
default:
return false;
}
}
    /**
     * Returns the name of the compilation unit (source file path) the given node belongs to.
     */
    private Name getCurrentCompUnit(BLangNode node) {
        return names.fromString(node.pos.lineRange().filePath());
    }
    /**
     * Reduces the given list to a representative broad type: every type that is assignable to
     * another type in the list is removed, then the single survivor — or a union of all
     * survivors — is returned. Any semantic error in the list short-circuits to that error.
     * NOTE: this mutates {@code inferredTypeList} in place.
     */
    private BType getRepresentativeBroadType(List<BType> inferredTypeList) {
        for (int i = 0; i < inferredTypeList.size(); i++) {
            BType type = inferredTypeList.get(i);
            if (type.tag == TypeTags.SEMANTIC_ERROR) {
                return type;
            }
            for (int j = i + 1; j < inferredTypeList.size(); j++) {
                BType otherType = inferredTypeList.get(j);
                if (otherType.tag == TypeTags.SEMANTIC_ERROR) {
                    return otherType;
                }
                if (types.isAssignable(otherType, type)) {
                    // otherType is subsumed by type: drop it and re-examine index j.
                    inferredTypeList.remove(j);
                    j -= 1;
                    continue;
                }
                if (types.isAssignable(type, otherType)) {
                    // type is subsumed by otherType: drop it and restart the outer scan at i.
                    inferredTypeList.remove(i);
                    i -= 1;
                    break;
                }
            }
        }
        if (inferredTypeList.size() == 1) {
            return inferredTypeList.get(0);
        }
        return BUnionType.create(null, inferredTypeList.toArray(new BType[0]));
    }
    /**
     * Infers and defines an anonymous record type for a record literal checked against a broad
     * expected type (e.g. {@code map} or {@code readonly}).
     * Key-value and var-name fields become non-rest fields; computed keys and spread-operator
     * contributions (map constraints, open-record rest types) accumulate into the rest field type.
     *
     * @return the newly defined record type, or {@code symTable.semanticError} if any contributing
     *         field type is erroneous
     */
    private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType, AnalyzerData data) {
        SymbolEnv env = data.env;
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL, data);
        // Pass 1: gather per-field type info (non-rest) and candidate rest field types.
        Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>();
        List<BType> restFieldTypes = new ArrayList<>();
        for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
            if (field.isKeyValueField()) {
                BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValue.key;
                BLangExpression expression = keyValue.valueExpr;
                BLangExpression keyExpr = key.expr;
                if (key.computedKey) {
                    // A computed key is unknown at compile time, so its value type can only
                    // contribute to the rest field type.
                    checkExpr(keyExpr, symTable.stringType, data);
                    BType exprType = checkExpr(expression, expType, data);
                    if (isUniqueType(restFieldTypes, exprType)) {
                        restFieldTypes.add(exprType);
                    }
                } else {
                    addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr),
                            keyValue.readonly ? checkExpr(expression, symTable.readonlyType, data) :
                                    checkExpr(expression, expType, data),
                            true, keyValue.readonly);
                }
            } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                BType spreadOpType = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr,
                        expType, data);
                BType type = Types.getReferredType(spreadOpType);
                if (type.tag == TypeTags.MAP) {
                    // Spreading a map may introduce arbitrary keys of the constraint type.
                    BType constraintType = ((BMapType) type).constraint;
                    if (isUniqueType(restFieldTypes, constraintType)) {
                        restFieldTypes.add(constraintType);
                    }
                }
                if (type.tag != TypeTags.RECORD) {
                    continue;
                }
                // Spreading a record contributes each of its fields (required unless optional in
                // the source), plus its rest type when the record is open.
                BRecordType recordType = (BRecordType) type;
                for (BField recField : recordType.fields.values()) {
                    addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type,
                            !Symbols.isOptional(recField.symbol), false);
                }
                if (!recordType.sealed) {
                    BType restFieldType = recordType.restFieldType;
                    if (isUniqueType(restFieldTypes, restFieldType)) {
                        restFieldTypes.add(restFieldType);
                    }
                }
            } else {
                // Shorthand var-name field, e.g. `{x}`.
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField), varNameField.readonly ?
                                checkExpr(varNameField, symTable.readonlyType, data) :
                                checkExpr(varNameField, expType, data),
                        true, varNameField.readonly);
            }
        }
        // Pass 2: materialize field symbols from the gathered info.
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        boolean allReadOnlyNonRestFields = true;
        for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) {
            FieldInfo fieldInfo = entry.getValue();
            List<BType> types = fieldInfo.types;
            if (types.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            String key = entry.getKey();
            Name fieldName = names.fromString(key);
            BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0]));
            Set<Flag> flags = new HashSet<>();
            if (fieldInfo.required) {
                flags.add(Flag.REQUIRED);
            } else {
                flags.add(Flag.OPTIONAL);
            }
            if (fieldInfo.readonly) {
                flags.add(Flag.READONLY);
            } else if (allReadOnlyNonRestFields) {
                allReadOnlyNonRestFields = false;
            }
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol,
                    symTable.builtinPos, VIRTUAL);
            fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordType = new BRecordType(recordSymbol);
        recordType.fields = fields;
        if (restFieldTypes.contains(symTable.semanticError)) {
            return symTable.semanticError;
        }
        // No rest contributions means the inferred record is sealed (closed).
        if (restFieldTypes.isEmpty()) {
            recordType.sealed = true;
            recordType.restFieldType = symTable.noType;
        } else if (restFieldTypes.size() == 1) {
            recordType.restFieldType = restFieldTypes.get(0);
        } else {
            recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0]));
        }
        recordSymbol.type = recordType;
        recordType.tsymbol = recordSymbol;
        // A sealed record whose fields are all readonly — or one checked against `readonly` — is
        // itself readonly.
        if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) {
            recordType.flags |= Flags.READONLY;
            recordSymbol.flags |= Flags.READONLY;
        }
        // Register the anonymous type with a synthetic type definition so later phases can see it.
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                recordLiteral.pos);
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                names, symTable);
        TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, env);
        return recordType;
    }
    /**
     * Creates an anonymous record type symbol with a synthetic nil-returning {@code init}
     * function registered in its scope.
     *
     * @param pkgID    package the anonymous type belongs to
     * @param location source location to attach to the symbol
     * @param origin   symbol origin (e.g. VIRTUAL for compiler-generated symbols)
     * @param data     analyzer state providing the current environment
     */
    private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location,
                                                     SymbolOrigin origin, AnalyzerData data) {
        SymbolEnv env = data.env;
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(Flags.ANONYMOUS,
                        names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)),
                        pkgID, null, env.scope.owner, location, origin);
        // Synthesize a public `init` that takes no parameters and returns nil.
        BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
        BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
                Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
                false, symTable.builtinPos, VIRTUAL);
        initFuncSymbol.retType = symTable.nilType;
        recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                bInvokableType, location);
        recordSymbol.scope = new Scope(recordSymbol);
        // The init function is registered under "<recordName>.<init>".
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        return recordSymbol;
    }
private String getKeyName(BLangExpression key) {
return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value;
}
private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString,
BType exprType, boolean required, boolean readonly) {
if (!nonRestFieldTypes.containsKey(keyString)) {
nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required,
readonly));
return;
}
FieldInfo fieldInfo = nonRestFieldTypes.get(keyString);
List<BType> typeList = fieldInfo.types;
if (isUniqueType(typeList, exprType)) {
typeList.add(exprType);
}
if (required && !fieldInfo.required) {
fieldInfo.required = true;
}
}
private boolean isUniqueType(Iterable<BType> typeList, BType type) {
boolean isRecord = type.tag == TypeTags.RECORD;
for (BType bType : typeList) {
if (isRecord) {
if (type == bType) {
return false;
}
} else if (types.isSameType(type, bType)) {
return false;
}
}
return true;
}
    /**
     * Determines the type of an XML sub-type literal against the expected type, considering both
     * the mutable sub-type and its effective immutable counterpart.
     * For a non-union expected type the first assignable of {mutable, immutable} wins; for a
     * union, exactly one compatible member must exist — zero yields an incompatible-types error
     * and more than one an ambiguous-types error.
     */
    private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType,
                                                      BType expType, AnalyzerData data) {
        if (expType == symTable.semanticError) {
            return expType;
        }
        boolean unionExpType = expType.tag == TypeTags.UNION;
        if (expType == mutableXmlSubType) {
            return expType;
        }
        if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) {
            return mutableXmlSubType;
        }
        // Try the immutable (readonly) flavor of the same XML sub-type.
        BXMLSubType immutableXmlSubType = (BXMLSubType)
                ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, data.env, symTable,
                        anonymousModelHelper, names);
        if (expType == immutableXmlSubType) {
            return expType;
        }
        if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) {
            return immutableXmlSubType;
        }
        if (!unionExpType) {
            dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
            return symTable.semanticError;
        }
        // Union expected type: collect every distinct compatible candidate.
        List<BType> compatibleTypes = new ArrayList<>();
        for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
            if (compatibleTypes.contains(memberType)) {
                continue;
            }
            if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) {
                compatibleTypes.add(memberType);
                continue;
            }
            if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) {
                compatibleTypes.add(mutableXmlSubType);
                continue;
            }
            if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) {
                compatibleTypes.add(immutableXmlSubType);
            }
        }
        if (compatibleTypes.isEmpty()) {
            dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
            return symTable.semanticError;
        }
        if (compatibleTypes.size() == 1) {
            return compatibleTypes.get(0);
        }
        // More than one union member could accept the literal: the choice is ambiguous.
        dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType);
        return symTable.semanticError;
    }
    /**
     * Recursively retypes the children of an XML element literal to their effective immutable
     * types. Children that are already readonly, or not selectively immutable, are left as-is.
     */
    private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral, AnalyzerData data) {
        for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) {
            BType childType = modifiedChild.getBType();
            if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) {
                continue;
            }
            modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types, childType,
                    data.env, symTable, anonymousModelHelper, names));
            // Nested element literals carry their own children: recurse into them.
            if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
                markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild, data);
            }
        }
    }
private void logUndefinedSymbolError(Location pos, String name) {
if (!missingNodesHelper.isMissingNode(name)) {
dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name);
}
}
    /**
     * Flags both the given type and its type symbol as isolated.
     */
    private void markTypeAsIsolated(BType actualType) {
        actualType.flags |= Flags.ISOLATED;
        actualType.tsymbol.flags |= Flags.ISOLATED;
    }
    /**
     * Validates an object-constructor expression used in a readonly context and, when every field
     * can be immutable, marks the constructed class/type readonly.
     *
     * @param logErrors when true, an error is reported for each field that can never be readonly;
     *                  when false, processing stops at the first such field
     */
    private void handleObjectConstrExprForReadOnly(
            BLangObjectConstructorExpression objectCtorExpr, BObjectType actualObjectType, SymbolEnv env,
            boolean logErrors, AnalyzerData data) {
        BLangClassDefinition classDefForConstructor = objectCtorExpr.classNode;
        boolean hasNeverReadOnlyField = false;
        for (BField field : actualObjectType.fields.values()) {
            BType fieldType = field.type;
            if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) {
                // The field can never be readonly: analyze the class as-is (no readonly marking).
                analyzeObjectConstructor(classDefForConstructor, env, data);
                hasNeverReadOnlyField = true;
                if (!logErrors) {
                    return;
                }
                dlog.error(field.pos,
                        DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE,
                        fieldType);
            }
        }
        if (hasNeverReadOnlyField) {
            return;
        }
        // All fields can be immutable: mark the class and type readonly before analyzing.
        classDefForConstructor.flagSet.add(Flag.READONLY);
        actualObjectType.flags |= Flags.READONLY;
        actualObjectType.tsymbol.flags |= Flags.READONLY;
        ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types,
                anonymousModelHelper, symTable, names, objectCtorExpr.pos);
        analyzeObjectConstructor(classDefForConstructor, env, data);
    }
    /**
     * Marks a constructed object type as isolated when it qualifies: it is readonly, or every
     * field is final and a subtype of readonly or an isolated object.
     * NOTE(review): {@code markedIsolatedness} is only set on the all-fields path, not on the
     * readonly fast path — readonly types redo the (idempotent) marking on each call; confirm
     * whether that is intentional.
     */
    private void markConstructedObjectIsolatedness(BObjectType actualObjectType) {
        if (actualObjectType.markedIsolatedness) {
            return;
        }
        if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) {
            markTypeAsIsolated(actualObjectType);
            return;
        }
        for (BField field : actualObjectType.fields.values()) {
            // Any non-final or non-readonly/non-isolated-object field disqualifies the type.
            if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) ||
                    !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) {
                return;
            }
        }
        markTypeAsIsolated(actualObjectType);
        actualObjectType.markedIsolatedness = true;
    }
private void markLeafNode(BLangAccessExpression accessExpression) {
BLangNode parent = accessExpression.parent;
if (parent == null) {
accessExpression.leafNode = true;
return;
}
NodeKind kind = parent.getKind();
while (kind == NodeKind.GROUP_EXPR) {
parent = parent.parent;
if (parent == null) {
accessExpression.leafNode = true;
break;
}
kind = parent.getKind();
}
if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
accessExpression.leafNode = true;
}
}
    /**
     * Accumulated information about a single non-rest field while inferring a record type from a
     * record literal.
     */
    private static class FieldInfo {
        // Distinct types observed for this field across all contributions.
        List<BType> types;
        // Whether the field must be present (required) in the inferred record.
        boolean required;
        // Whether the field is readonly in the inferred record.
        boolean readonly;

        private FieldInfo(List<BType> types, boolean required, boolean readonly) {
            this.types = types;
            this.required = required;
            this.readonly = readonly;
        }
    }
    /**
     * Pairs a field's variable symbol with the type determined for it during checking.
     */
    private static class TypeSymbolPair {
        private BVarSymbol fieldSymbol;
        private BType determinedType;

        public TypeSymbolPair(BVarSymbol fieldSymbol, BType determinedType) {
            this.fieldSymbol = fieldSymbol;
            this.determinedType = determinedType;
        }
    }
    /**
     * Collects, for a field access on a union of records, which member records leave the field
     * undeclared and in which it is nilable, for composing the resulting diagnostics.
     */
    private static class RecordUnionDiagnostics {
        // Member records of the union that do not declare the accessed field.
        Set<BRecordType> undeclaredInRecords = new LinkedHashSet<>();
        // Member records of the union in which the accessed field is nilable.
        Set<BRecordType> nilableInRecords = new LinkedHashSet<>();

        boolean hasUndeclared() {
            return undeclaredInRecords.size() > 0;
        }

        boolean hasNilable() {
            return nilableInRecords.size() > 0;
        }

        boolean hasNilableAndUndeclared() {
            return nilableInRecords.size() > 0 && undeclaredInRecords.size() > 0;
        }

        /**
         * Joins the record type names for interpolation inside a quoted diagnostic message,
         * producing e.g. {@code A', 'B', and 'C} (outer quotes supplied by the message template).
         */
        String recordsToString(Set<BRecordType> recordTypeSet) {
            StringBuilder recordNames = new StringBuilder();
            int recordSetSize = recordTypeSet.size();
            int index = 0;
            for (BRecordType recordType : recordTypeSet) {
                index++;
                recordNames.append(recordType.tsymbol.getName().getValue());
                if (recordSetSize > 1) {
                    if (index == recordSetSize - 1) {
                        recordNames.append("', and '");
                    } else if (index < recordSetSize) {
                        recordNames.append("', '");
                    }
                }
            }
            return recordNames.toString();
        }
    }
    /**
     * Mutable analyzer state threaded through expression type checking.
     *
     * @since 2.0.0
     */
    public static class AnalyzerData {
        public SymbolEnv env;                     // current symbol environment
        boolean isTypeChecked;
        Stack<SymbolEnv> queryEnvs, prevEnvs;     // environment stacks for (nested) query checking
        Stack<BLangNode> queryFinalClauses;
        boolean nonErrorLoggingCheck = false;     // true while speculatively checking without logging errors
        boolean checkWithinQueryExpr = false;
        boolean breakToParallelQueryEnv = false;
        int letCount = 0;
        DiagnosticCode diagCode;
        BType expType;                            // expected type of the expression being checked
        BType resultType;                         // determined type of the expression being checked
    }
}
List<BType> fixedMemberTypes = new ArrayList<>();
List<BType> restMemberTypes = new ArrayList<>();
} |
I move this to another PR. | private void configureCheckpointing() {
CheckpointConfig cfg = streamGraph.getCheckpointConfig();
long interval = cfg.getCheckpointInterval();
if (interval < MINIMAL_CHECKPOINT_TIME) {
interval = Long.MAX_VALUE;
}
List<JobVertexID> triggerVertices = new ArrayList<>();
List<JobVertexID> ackVertices = new ArrayList<>(jobVertices.size());
List<JobVertexID> commitVertices = new ArrayList<>(jobVertices.size());
for (JobVertex vertex : jobVertices.values()) {
if (vertex.isInputVertex()) {
triggerVertices.add(vertex.getID());
}
commitVertices.add(vertex.getID());
ackVertices.add(vertex.getID());
}
CheckpointRetentionPolicy retentionAfterTermination;
if (cfg.isExternalizedCheckpointsEnabled()) {
CheckpointConfig.ExternalizedCheckpointCleanup cleanup = cfg.getExternalizedCheckpointCleanup();
if (cleanup == null) {
throw new IllegalStateException("Externalized checkpoints enabled, but no cleanup mode configured.");
}
retentionAfterTermination = cleanup.deleteOnCancellation() ?
CheckpointRetentionPolicy.RETAIN_ON_FAILURE :
CheckpointRetentionPolicy.RETAIN_ON_CANCELLATION;
} else {
retentionAfterTermination = CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION;
}
CheckpointingMode mode = cfg.getCheckpointingMode();
boolean isExactlyOnce = cfg.isCheckpointingEnabled() && mode == CheckpointingMode.EXACTLY_ONCE;
final ArrayList<MasterTriggerRestoreHook.Factory> hooks = new ArrayList<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
if (node.getOperatorFactory() instanceof UdfStreamOperatorFactory) {
Function f = ((UdfStreamOperatorFactory) node.getOperatorFactory()).getUserFunction();
if (f instanceof WithMasterCheckpointHook) {
hooks.add(new FunctionMasterCheckpointHookFactory((WithMasterCheckpointHook<?>) f));
}
}
}
final SerializedValue<MasterTriggerRestoreHook.Factory[]> serializedHooks;
if (hooks.isEmpty()) {
serializedHooks = null;
} else {
try {
MasterTriggerRestoreHook.Factory[] asArray =
hooks.toArray(new MasterTriggerRestoreHook.Factory[hooks.size()]);
serializedHooks = new SerializedValue<>(asArray);
}
catch (IOException e) {
throw new FlinkRuntimeException("Trigger/restore hook is not serializable", e);
}
}
final SerializedValue<StateBackend> serializedStateBackend;
if (streamGraph.getStateBackend() == null) {
serializedStateBackend = null;
} else {
try {
serializedStateBackend =
new SerializedValue<StateBackend>(streamGraph.getStateBackend());
}
catch (IOException e) {
throw new FlinkRuntimeException("State backend is not serializable", e);
}
}
JobCheckpointingSettings settings = new JobCheckpointingSettings(
triggerVertices,
ackVertices,
commitVertices,
new CheckpointCoordinatorConfiguration(
interval,
cfg.getCheckpointTimeout(),
cfg.getMinPauseBetweenCheckpoints(),
cfg.getMaxConcurrentCheckpoints(),
retentionAfterTermination,
isExactlyOnce,
cfg.isPreferCheckpointForRecovery(),
cfg.getTolerableCheckpointFailureNumber()),
serializedStateBackend,
serializedHooks);
jobGraph.setSnapshotSettings(settings);
} | boolean isExactlyOnce = cfg.isCheckpointingEnabled() && mode == CheckpointingMode.EXACTLY_ONCE; | private void configureCheckpointing() {
CheckpointConfig cfg = streamGraph.getCheckpointConfig();
long interval = cfg.getCheckpointInterval();
if (interval < MINIMAL_CHECKPOINT_TIME) {
interval = Long.MAX_VALUE;
}
List<JobVertexID> triggerVertices = new ArrayList<>();
List<JobVertexID> ackVertices = new ArrayList<>(jobVertices.size());
List<JobVertexID> commitVertices = new ArrayList<>(jobVertices.size());
for (JobVertex vertex : jobVertices.values()) {
if (vertex.isInputVertex()) {
triggerVertices.add(vertex.getID());
}
commitVertices.add(vertex.getID());
ackVertices.add(vertex.getID());
}
CheckpointRetentionPolicy retentionAfterTermination;
if (cfg.isExternalizedCheckpointsEnabled()) {
CheckpointConfig.ExternalizedCheckpointCleanup cleanup = cfg.getExternalizedCheckpointCleanup();
if (cleanup == null) {
throw new IllegalStateException("Externalized checkpoints enabled, but no cleanup mode configured.");
}
retentionAfterTermination = cleanup.deleteOnCancellation() ?
CheckpointRetentionPolicy.RETAIN_ON_FAILURE :
CheckpointRetentionPolicy.RETAIN_ON_CANCELLATION;
} else {
retentionAfterTermination = CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION;
}
final ArrayList<MasterTriggerRestoreHook.Factory> hooks = new ArrayList<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
if (node.getOperatorFactory() instanceof UdfStreamOperatorFactory) {
Function f = ((UdfStreamOperatorFactory) node.getOperatorFactory()).getUserFunction();
if (f instanceof WithMasterCheckpointHook) {
hooks.add(new FunctionMasterCheckpointHookFactory((WithMasterCheckpointHook<?>) f));
}
}
}
final SerializedValue<MasterTriggerRestoreHook.Factory[]> serializedHooks;
if (hooks.isEmpty()) {
serializedHooks = null;
} else {
try {
MasterTriggerRestoreHook.Factory[] asArray =
hooks.toArray(new MasterTriggerRestoreHook.Factory[hooks.size()]);
serializedHooks = new SerializedValue<>(asArray);
}
catch (IOException e) {
throw new FlinkRuntimeException("Trigger/restore hook is not serializable", e);
}
}
final SerializedValue<StateBackend> serializedStateBackend;
if (streamGraph.getStateBackend() == null) {
serializedStateBackend = null;
} else {
try {
serializedStateBackend =
new SerializedValue<StateBackend>(streamGraph.getStateBackend());
}
catch (IOException e) {
throw new FlinkRuntimeException("State backend is not serializable", e);
}
}
JobCheckpointingSettings settings = new JobCheckpointingSettings(
triggerVertices,
ackVertices,
commitVertices,
new CheckpointCoordinatorConfiguration(
interval,
cfg.getCheckpointTimeout(),
cfg.getMinPauseBetweenCheckpoints(),
cfg.getMaxConcurrentCheckpoints(),
retentionAfterTermination,
getCheckpointingMode(cfg) == CheckpointingMode.EXACTLY_ONCE,
cfg.isUnalignedCheckpointsEnabled(),
cfg.isPreferCheckpointForRecovery(),
cfg.getTolerableCheckpointFailureNumber()),
serializedStateBackend,
serializedHooks);
jobGraph.setSnapshotSettings(settings);
} | class StreamingJobGraphGenerator {
// Logger shared by all generator instances.
private static final Logger LOG = LoggerFactory.getLogger(StreamingJobGraphGenerator.class);
// Number of decimal digits kept when computing managed memory fractions (see getFractionRoundedDown).
private static final int MANAGED_MEMORY_FRACTION_SCALE = 16;
/**
 * Translates the given {@link StreamGraph} into a {@link JobGraph}, letting the
 * {@link JobGraph} pick its own job id.
 */
public static JobGraph createJobGraph(StreamGraph streamGraph) {
return createJobGraph(streamGraph, null);
}
/**
 * Translates the given {@link StreamGraph} into a {@link JobGraph}.
 *
 * @param streamGraph the stream graph to translate
 * @param jobID the job id to use for the resulting graph; may be {@code null}
 */
public static JobGraph createJobGraph(StreamGraph streamGraph, @Nullable JobID jobID) {
return new StreamingJobGraphGenerator(streamGraph, jobID).createJobGraph();
}
// The stream graph being translated.
private final StreamGraph streamGraph;
// Chain-head stream node id -> the JobVertex created for that chain.
private final Map<Integer, JobVertex> jobVertices;
// The job graph under construction.
private final JobGraph jobGraph;
// Stream node ids for which a vertex/chain has already been built (chain heads).
private final Collection<Integer> builtVertices;
// Physical (cross-vertex) edges, in the order they were connected.
private final List<StreamEdge> physicalEdgesInOrder;
// Chain-head id -> (chained node id -> that node's StreamConfig).
private final Map<Integer, Map<Integer, StreamConfig>> chainedConfigs;
// Stream node id -> its StreamConfig (head and chained nodes alike).
private final Map<Integer, StreamConfig> vertexConfigs;
// Stream node id -> display name of the chain starting at that node.
private final Map<Integer, String> chainedNames;
// Stream node id -> accumulated min / preferred resources of its chain.
private final Map<Integer, ResourceSpec> chainedMinResources;
private final Map<Integer, ResourceSpec> chainedPreferredResources;
// Chain-head id -> container holding the input/output formats used within that chain.
private final Map<Integer, InputOutputFormatContainer> chainedInputOutputFormats;
// Hashers producing stable per-node hashes: the default one plus legacy/user-hash variants.
private final StreamGraphHasher defaultStreamGraphHasher;
private final List<StreamGraphHasher> legacyStreamGraphHashers;
/**
 * Creates a generator for the given stream graph.
 *
 * @param streamGraph the stream graph to translate
 * @param jobID job id for the resulting {@link JobGraph}; may be {@code null}
 */
private StreamingJobGraphGenerator(StreamGraph streamGraph, @Nullable JobID jobID) {
this.streamGraph = streamGraph;
this.defaultStreamGraphHasher = new StreamGraphHasherV2();
this.legacyStreamGraphHashers = Arrays.asList(new StreamGraphUserHashHasher());
this.jobVertices = new HashMap<>();
this.builtVertices = new HashSet<>();
this.chainedConfigs = new HashMap<>();
this.vertexConfigs = new HashMap<>();
this.chainedNames = new HashMap<>();
this.chainedMinResources = new HashMap<>();
this.chainedPreferredResources = new HashMap<>();
this.chainedInputOutputFormats = new HashMap<>();
this.physicalEdgesInOrder = new ArrayList<>();
// The JobGraph is created up front; its contents are filled in by createJobGraph().
jobGraph = new JobGraph(jobID, streamGraph.getJobName());
}
/**
 * Builds the {@link JobGraph}: validates the stream graph, chains operators into job
 * vertices, wires the physical edges, assigns slot sharing / co-location groups,
 * distributes managed memory fractions and configures checkpointing.
 *
 * @return the fully assembled job graph
 */
private JobGraph createJobGraph() {
    preValidate();

    jobGraph.setScheduleMode(streamGraph.getScheduleMode());

    // Generate deterministic hashes for the nodes so they keep their identity across
    // resubmissions of an unmodified job.
    Map<Integer, byte[]> hashes = defaultStreamGraphHasher.traverseStreamGraphAndGenerateHashes(streamGraph);

    // Also generate the legacy (e.g. user-specified) hashes for backwards compatibility.
    List<Map<Integer, byte[]>> legacyHashes = new ArrayList<>(legacyStreamGraphHashers.size());
    for (StreamGraphHasher hasher : legacyStreamGraphHashers) {
        legacyHashes.add(hasher.traverseStreamGraphAndGenerateHashes(streamGraph));
    }

    Map<Integer, List<Tuple2<byte[], byte[]>>> chainedOperatorHashes = new HashMap<>();
    setChaining(hashes, legacyHashes, chainedOperatorHashes);
    setPhysicalEdges();
    setSlotSharingAndCoLocation();

    setManagedMemoryFraction(
        Collections.unmodifiableMap(jobVertices),
        Collections.unmodifiableMap(vertexConfigs),
        Collections.unmodifiableMap(chainedConfigs),
        id -> streamGraph.getStreamNode(id).getMinResources(),
        id -> streamGraph.getStreamNode(id).getManagedMemoryWeight());

    configureCheckpointing();

    jobGraph.setSavepointRestoreSettings(streamGraph.getSavepointRestoreSettings());

    JobGraphUtils.addUserArtifactEntries(streamGraph.getUserArtifacts(), jobGraph);

    try {
        jobGraph.setExecutionConfig(streamGraph.getExecutionConfig());
    }
    catch (IOException e) {
        // The original message concatenated two sentences without a space
        // ("...ExecutionConfig.This indicates...") and dropped the cause; both fixed here.
        throw new IllegalConfigurationException("Could not serialize the ExecutionConfig. " +
            "This indicates that non-serializable types (like custom serializers) were registered", e);
    }

    return jobGraph;
}
@SuppressWarnings("deprecation")
/**
 * Rejects stream graphs the runtime cannot execute with checkpointing enabled:
 * iterative jobs without forced checkpointing, and jobs containing operators that
 * implement {@link InputSelectable}.
 */
private void preValidate() {
    CheckpointConfig checkpointConfig = streamGraph.getCheckpointConfig();
    if (!checkpointConfig.isCheckpointingEnabled()) {
        return;
    }

    // Iterations + checkpointing only with the user's explicit opt-in.
    if (streamGraph.isIterative() && !checkpointConfig.isForceCheckpointing()) {
        throw new UnsupportedOperationException(
            "Checkpointing is currently not supported by default for iterative jobs, as we cannot guarantee exactly once semantics. "
            + "State checkpoints happen normally, but records in-transit during the snapshot will be lost upon failure. "
            + "\nThe user can force enable state checkpoints with the reduced guarantees by calling: env.enableCheckpointing(interval,true)");
    }

    ClassLoader userClassLoader = Thread.currentThread().getContextClassLoader();
    for (StreamNode streamNode : streamGraph.getStreamNodes()) {
        StreamOperatorFactory factory = streamNode.getOperatorFactory();
        if (factory == null) {
            continue;
        }
        Class<?> operatorClass = factory.getStreamOperatorClass(userClassLoader);
        if (InputSelectable.class.isAssignableFrom(operatorClass)) {
            throw new UnsupportedOperationException(
                "Checkpointing is currently not supported for operators that implement InputSelectable:"
                + operatorClass.getName());
        }
    }
}
/**
 * Groups the physical (non-chained) edges by target vertex, preserving creation order,
 * and stores each group in the target vertex's {@link StreamConfig}.
 */
private void setPhysicalEdges() {
    Map<Integer, List<StreamEdge>> inEdgesByTarget = new HashMap<>();
    for (StreamEdge physicalEdge : physicalEdgesInOrder) {
        inEdgesByTarget
            .computeIfAbsent(physicalEdge.getTargetId(), unused -> new ArrayList<>())
            .add(physicalEdge);
    }
    inEdgesByTarget.forEach(
        (targetVertexId, orderedInEdges) ->
            vertexConfigs.get(targetVertexId).setInPhysicalEdges(orderedInEdges));
}
/**
 * Sets up task chains from the source {@link StreamNode} instances.
 *
 * <p>This will recursively create all {@link JobVertex} instances.
 */
private void setChaining(Map<Integer, byte[]> hashes, List<Map<Integer, byte[]>> legacyHashes, Map<Integer, List<Tuple2<byte[], byte[]>>> chainedOperatorHashes) {
// Start one chain at every source node; createChain walks the graph recursively.
for (Integer sourceNodeId : streamGraph.getSourceIDs()) {
createChain(sourceNodeId, sourceNodeId, hashes, legacyHashes, 0, chainedOperatorHashes);
}
}
/**
 * Recursively builds the operator chain that starts at {@code startNodeId}.
 *
 * <p>{@code currentNodeId} is the node currently appended to the chain. When it equals
 * {@code startNodeId}, a new {@link JobVertex} is created for the chain head; otherwise
 * the node's config is registered as a chained (in-vertex) task config of the head.
 *
 * @return the transitive outgoing edges of the chain that need physical connections
 */
private List<StreamEdge> createChain(
Integer startNodeId,
Integer currentNodeId,
Map<Integer, byte[]> hashes,
List<Map<Integer, byte[]>> legacyHashes,
int chainIndex,
Map<Integer, List<Tuple2<byte[], byte[]>>> chainedOperatorHashes) {
if (!builtVertices.contains(startNodeId)) {
List<StreamEdge> transitiveOutEdges = new ArrayList<StreamEdge>();
List<StreamEdge> chainableOutputs = new ArrayList<StreamEdge>();
List<StreamEdge> nonChainableOutputs = new ArrayList<StreamEdge>();
StreamNode currentNode = streamGraph.getStreamNode(currentNodeId);
// Partition the outgoing edges into chain continuations and physical connections.
for (StreamEdge outEdge : currentNode.getOutEdges()) {
if (isChainable(outEdge, streamGraph)) {
chainableOutputs.add(outEdge);
} else {
nonChainableOutputs.add(outEdge);
}
}
// Chainable targets stay in this chain (same start node, next chain index).
for (StreamEdge chainable : chainableOutputs) {
transitiveOutEdges.addAll(
createChain(startNodeId, chainable.getTargetId(), hashes, legacyHashes, chainIndex + 1, chainedOperatorHashes));
}
// Non-chainable targets become physical edges and start chains of their own.
for (StreamEdge nonChainable : nonChainableOutputs) {
transitiveOutEdges.add(nonChainable);
createChain(nonChainable.getTargetId(), nonChainable.getTargetId(), hashes, legacyHashes, 0, chainedOperatorHashes);
}
// Record this operator's (primary hash, legacy hash) pairs under the chain head.
List<Tuple2<byte[], byte[]>> operatorHashes =
chainedOperatorHashes.computeIfAbsent(startNodeId, k -> new ArrayList<>());
byte[] primaryHashBytes = hashes.get(currentNodeId);
OperatorID currentOperatorId = new OperatorID(primaryHashBytes);
for (Map<Integer, byte[]> legacyHash : legacyHashes) {
operatorHashes.add(new Tuple2<>(primaryHashBytes, legacyHash.get(currentNodeId)));
}
// Accumulate chain name and resources bottom-up (downstream values already computed).
chainedNames.put(currentNodeId, createChainedName(currentNodeId, chainableOutputs));
chainedMinResources.put(currentNodeId, createChainedMinResources(currentNodeId, chainableOutputs));
chainedPreferredResources.put(currentNodeId, createChainedPreferredResources(currentNodeId, chainableOutputs));
// Input/output formats are collected per chain head.
if (currentNode.getInputFormat() != null) {
getOrCreateFormatContainer(startNodeId).addInputFormat(currentOperatorId, currentNode.getInputFormat());
}
if (currentNode.getOutputFormat() != null) {
getOrCreateFormatContainer(startNodeId).addOutputFormat(currentOperatorId, currentNode.getOutputFormat());
}
// Chain heads get a real JobVertex; chained members get a fresh, empty config.
StreamConfig config = currentNodeId.equals(startNodeId)
? createJobVertex(startNodeId, hashes, legacyHashes, chainedOperatorHashes)
: new StreamConfig(new Configuration());
setVertexConfig(currentNodeId, config, chainableOutputs, nonChainableOutputs);
if (currentNodeId.equals(startNodeId)) {
// Head of the chain: mark it, wire all transitive outputs as physical edges.
config.setChainStart();
config.setChainIndex(0);
config.setOperatorName(streamGraph.getStreamNode(currentNodeId).getOperatorName());
config.setOutEdgesInOrder(transitiveOutEdges);
config.setOutEdges(streamGraph.getStreamNode(currentNodeId).getOutEdges());
for (StreamEdge edge : transitiveOutEdges) {
connect(startNodeId, edge);
}
config.setTransitiveChainedTaskConfigs(chainedConfigs.get(startNodeId));
} else {
// Chained member: register its config under the chain head.
chainedConfigs.computeIfAbsent(startNodeId, k -> new HashMap<Integer, StreamConfig>());
config.setChainIndex(chainIndex);
StreamNode node = streamGraph.getStreamNode(currentNodeId);
config.setOperatorName(node.getOperatorName());
chainedConfigs.get(startNodeId).put(currentNodeId, config);
}
config.setOperatorID(currentOperatorId);
// No chainable outputs means this operator terminates the chain.
if (chainableOutputs.isEmpty()) {
config.setChainEnd();
}
return transitiveOutEdges;
} else {
// Chain head already built (graph has converging paths): nothing more to do.
return new ArrayList<>();
}
}
/** Returns the format container for the given chain head, creating it on first use. */
private InputOutputFormatContainer getOrCreateFormatContainer(Integer startNodeId) {
    InputOutputFormatContainer container = chainedInputOutputFormats.get(startNodeId);
    if (container == null) {
        container = new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader());
        chainedInputOutputFormats.put(startNodeId, container);
    }
    return container;
}
/**
 * Builds the display name for the chain starting at the given vertex, e.g.
 * {@code "src -> (map, filter)"} when there are multiple chained outputs.
 */
private String createChainedName(Integer vertexID, List<StreamEdge> chainedOutputs) {
    String operatorName = streamGraph.getStreamNode(vertexID).getOperatorName();
    if (chainedOutputs.isEmpty()) {
        return operatorName;
    }
    if (chainedOutputs.size() == 1) {
        return operatorName + " -> " + chainedNames.get(chainedOutputs.get(0).getTargetId());
    }
    List<String> downstreamNames = new ArrayList<>();
    for (StreamEdge chainedOutput : chainedOutputs) {
        downstreamNames.add(chainedNames.get(chainedOutput.getTargetId()));
    }
    return operatorName + " -> (" + StringUtils.join(downstreamNames, ", ") + ")";
}
/** Accumulates the minimum resources of the whole chain rooted at the given vertex. */
private ResourceSpec createChainedMinResources(Integer vertexID, List<StreamEdge> chainedOutputs) {
    ResourceSpec accumulated = streamGraph.getStreamNode(vertexID).getMinResources();
    for (StreamEdge chainedOutput : chainedOutputs) {
        // Downstream chain members were processed first, so their totals are available.
        accumulated = accumulated.merge(chainedMinResources.get(chainedOutput.getTargetId()));
    }
    return accumulated;
}
/** Accumulates the preferred resources of the whole chain rooted at the given vertex. */
private ResourceSpec createChainedPreferredResources(Integer vertexID, List<StreamEdge> chainedOutputs) {
    ResourceSpec accumulated = streamGraph.getStreamNode(vertexID).getPreferredResources();
    for (StreamEdge chainedOutput : chainedOutputs) {
        // Downstream chain members were processed first, so their totals are available.
        accumulated = accumulated.merge(chainedPreferredResources.get(chainedOutput.getTargetId()));
    }
    return accumulated;
}
/**
 * Creates the {@link JobVertex} for the chain starting at {@code streamNodeId}, adds it
 * to the job graph and returns a {@link StreamConfig} view on its configuration.
 *
 * @throws IllegalStateException if no hash was generated for the node
 */
private StreamConfig createJobVertex(
Integer streamNodeId,
Map<Integer, byte[]> hashes,
List<Map<Integer, byte[]>> legacyHashes,
Map<Integer, List<Tuple2<byte[], byte[]>>> chainedOperatorHashes) {
JobVertex jobVertex;
StreamNode streamNode = streamGraph.getStreamNode(streamNodeId);
byte[] hash = hashes.get(streamNodeId);
if (hash == null) {
throw new IllegalStateException("Cannot find node hash. " +
"Did you generate them before calling this method?");
}
// The primary hash becomes the JobVertexID, keeping it stable across submissions.
JobVertexID jobVertexId = new JobVertexID(hash);
// Collect the alternative ids produced by the legacy hashers, if any.
List<JobVertexID> legacyJobVertexIds = new ArrayList<>(legacyHashes.size());
for (Map<Integer, byte[]> legacyHash : legacyHashes) {
hash = legacyHash.get(streamNodeId);
if (null != hash) {
legacyJobVertexIds.add(new JobVertexID(hash));
}
}
// Operator ids of everything chained into this vertex (primary + user-defined hashes).
List<Tuple2<byte[], byte[]>> chainedOperators = chainedOperatorHashes.get(streamNodeId);
List<OperatorID> chainedOperatorVertexIds = new ArrayList<>();
List<OperatorID> userDefinedChainedOperatorVertexIds = new ArrayList<>();
if (chainedOperators != null) {
for (Tuple2<byte[], byte[]> chainedOperator : chainedOperators) {
chainedOperatorVertexIds.add(new OperatorID(chainedOperator.f0));
userDefinedChainedOperatorVertexIds.add(chainedOperator.f1 != null ? new OperatorID(chainedOperator.f1) : null);
}
}
// Chains carrying input/output formats need the specialized vertex that writes them
// into the task configuration.
if (chainedInputOutputFormats.containsKey(streamNodeId)) {
jobVertex = new InputOutputFormatVertex(
chainedNames.get(streamNodeId),
jobVertexId,
legacyJobVertexIds,
chainedOperatorVertexIds,
userDefinedChainedOperatorVertexIds);
chainedInputOutputFormats
.get(streamNodeId)
.write(new TaskConfig(jobVertex.getConfiguration()));
} else {
jobVertex = new JobVertex(
chainedNames.get(streamNodeId),
jobVertexId,
legacyJobVertexIds,
chainedOperatorVertexIds,
userDefinedChainedOperatorVertexIds);
}
// Resources of the whole chain were accumulated in createChain.
jobVertex.setResources(chainedMinResources.get(streamNodeId), chainedPreferredResources.get(streamNodeId));
jobVertex.setInvokableClass(streamNode.getJobVertexClass());
// Non-positive parallelism means "use the vertex default".
int parallelism = streamNode.getParallelism();
if (parallelism > 0) {
jobVertex.setParallelism(parallelism);
} else {
parallelism = jobVertex.getParallelism();
}
jobVertex.setMaxParallelism(streamNode.getMaxParallelism());
if (LOG.isDebugEnabled()) {
LOG.debug("Parallelism set: {} for {}", parallelism, streamNodeId);
}
jobVertex.setInputDependencyConstraint(streamGraph.getExecutionConfig().getDefaultInputDependencyConstraint());
jobVertices.put(streamNodeId, jobVertex);
builtVertices.add(streamNodeId);
jobGraph.addVertex(jobVertex);
return new StreamConfig(jobVertex.getConfiguration());
}
@SuppressWarnings("unchecked")
/**
 * Fills the given {@link StreamConfig} with everything a task needs to run the operator
 * at {@code vertexID}: serializers, outputs, checkpointing, state and iteration settings.
 */
private void setVertexConfig(Integer vertexID, StreamConfig config,
List<StreamEdge> chainableOutputs, List<StreamEdge> nonChainableOutputs) {
StreamNode vertex = streamGraph.getStreamNode(vertexID);
config.setVertexID(vertexID);
config.setBufferTimeout(vertex.getBufferTimeout());
config.setTypeSerializersIn(vertex.getTypeSerializersIn());
config.setTypeSerializerOut(vertex.getTypeSerializerOut());
// Register side-output serializers for tagged edges, chained and non-chained alike.
for (StreamEdge edge : chainableOutputs) {
if (edge.getOutputTag() != null) {
config.setTypeSerializerSideOut(
edge.getOutputTag(),
edge.getOutputTag().getTypeInfo().createSerializer(streamGraph.getExecutionConfig())
);
}
}
for (StreamEdge edge : nonChainableOutputs) {
if (edge.getOutputTag() != null) {
config.setTypeSerializerSideOut(
edge.getOutputTag(),
edge.getOutputTag().getTypeInfo().createSerializer(streamGraph.getExecutionConfig())
);
}
}
config.setStreamOperatorFactory(vertex.getOperatorFactory());
config.setOutputSelectors(vertex.getOutputSelectors());
// Only non-chained outputs count as real (network) outputs.
config.setNumberOfOutputs(nonChainableOutputs.size());
config.setNonChainedOutputs(nonChainableOutputs);
config.setChainedOutputs(chainableOutputs);
config.setTimeCharacteristic(streamGraph.getTimeCharacteristic());
final CheckpointConfig checkpointCfg = streamGraph.getCheckpointConfig();
config.setStateBackend(streamGraph.getStateBackend());
config.setCheckpointingEnabled(checkpointCfg.isCheckpointingEnabled());
if (checkpointCfg.isCheckpointingEnabled()) {
config.setCheckpointMode(checkpointCfg.getCheckpointingMode());
}
else {
// Checkpointing disabled: a mode is still set here (AT_LEAST_ONCE); presumably a
// placeholder read by downstream code -- TODO confirm against the task code.
config.setCheckpointMode(CheckpointingMode.AT_LEAST_ONCE);
}
for (int i = 0; i < vertex.getStatePartitioners().length; i++) {
config.setStatePartitioner(i, vertex.getStatePartitioners()[i]);
}
config.setStateKeySerializer(vertex.getStateKeySerializer());
// Iteration head/tail tasks additionally need the feedback channel coordinates.
Class<? extends AbstractInvokable> vertexClass = vertex.getJobVertexClass();
if (vertexClass.equals(StreamIterationHead.class)
|| vertexClass.equals(StreamIterationTail.class)) {
config.setIterationId(streamGraph.getBrokerID(vertexID));
config.setIterationWaitTime(streamGraph.getLoopTimeout(vertexID));
}
vertexConfigs.put(vertexID, config);
}
/**
 * Creates the physical {@link JobEdge} for a stream edge leaving the chain headed by
 * {@code headOfChain}, choosing the distribution pattern from the partitioner and the
 * result partition type from the edge's shuffle mode.
 */
private void connect(Integer headOfChain, StreamEdge edge) {
physicalEdgesInOrder.add(edge);
Integer downStreamVertexID = edge.getTargetId();
JobVertex headVertex = jobVertices.get(headOfChain);
JobVertex downStreamVertex = jobVertices.get(downStreamVertexID);
StreamConfig downStreamConfig = new StreamConfig(downStreamVertex.getConfiguration());
// The receiver gains one more physical input.
downStreamConfig.setNumberOfInputs(downStreamConfig.getNumberOfInputs() + 1);
StreamPartitioner<?> partitioner = edge.getPartitioner();
ResultPartitionType resultPartitionType;
switch (edge.getShuffleMode()) {
case PIPELINED:
resultPartitionType = ResultPartitionType.PIPELINED_BOUNDED;
break;
case BATCH:
resultPartitionType = ResultPartitionType.BLOCKING;
break;
case UNDEFINED:
// No explicit mode on the edge: fall back to the graph-wide setting.
resultPartitionType = streamGraph.isBlockingConnectionsBetweenChains() ?
ResultPartitionType.BLOCKING : ResultPartitionType.PIPELINED_BOUNDED;
break;
default:
throw new UnsupportedOperationException("Data exchange mode " +
edge.getShuffleMode() + " is not supported yet.");
}
JobEdge jobEdge;
// Forward/rescale partitioners connect subtask-to-subtask; everything else is all-to-all.
if (partitioner instanceof ForwardPartitioner || partitioner instanceof RescalePartitioner) {
jobEdge = downStreamVertex.connectNewDataSetAsInput(
headVertex,
DistributionPattern.POINTWISE,
resultPartitionType);
} else {
jobEdge = downStreamVertex.connectNewDataSetAsInput(
headVertex,
DistributionPattern.ALL_TO_ALL,
resultPartitionType);
}
jobEdge.setShipStrategyName(partitioner.toString());
if (LOG.isDebugEnabled()) {
LOG.debug("CONNECTED: {} - {} -> {}", partitioner.getClass().getSimpleName(),
headOfChain, downStreamVertexID);
}
}
/**
 * An edge is chainable when: the downstream vertex has exactly one input, both ends share
 * the slot sharing group and parallelism, the operators themselves are chainable, the
 * partitioner is a local forward, the exchange is not a batch shuffle, and chaining is
 * enabled on the graph.
 */
public static boolean isChainable(StreamEdge edge, StreamGraph streamGraph) {
    StreamNode upStreamVertex = streamGraph.getSourceVertex(edge);
    StreamNode downStreamVertex = streamGraph.getTargetVertex(edge);

    // Checks are performed in the same order as the original conjunction so that
    // short-circuit behavior is preserved exactly.
    if (downStreamVertex.getInEdges().size() != 1) {
        return false;
    }
    if (!upStreamVertex.isSameSlotSharingGroup(downStreamVertex)) {
        return false;
    }
    if (!areOperatorsChainable(upStreamVertex, downStreamVertex, streamGraph)) {
        return false;
    }
    if (!(edge.getPartitioner() instanceof ForwardPartitioner)) {
        return false;
    }
    if (edge.getShuffleMode() == ShuffleMode.BATCH) {
        return false;
    }
    if (upStreamVertex.getParallelism() != downStreamVertex.getParallelism()) {
        return false;
    }
    return streamGraph.isChainingEnabled();
}
@VisibleForTesting
/**
 * Decides whether the two operators' chaining strategies allow them to be chained.
 * The upstream operator must not be NEVER, the downstream must be ALWAYS, and a
 * yielding downstream operator may not be chained directly behind a source chain head.
 */
static boolean areOperatorsChainable(
StreamNode upStreamVertex,
StreamNode downStreamVertex,
StreamGraph streamGraph) {
StreamOperatorFactory<?> upStreamOperator = upStreamVertex.getOperatorFactory();
StreamOperatorFactory<?> downStreamOperator = downStreamVertex.getOperatorFactory();
// Virtual nodes without an operator can never be chained.
if (downStreamOperator == null || upStreamOperator == null) {
return false;
}
if (upStreamOperator.getChainingStrategy() == ChainingStrategy.NEVER ||
downStreamOperator.getChainingStrategy() != ChainingStrategy.ALWAYS) {
return false;
}
// Yielding operators may not sit behind a chain whose head is a stream source.
if (downStreamOperator instanceof YieldingOperatorFactory) {
return !getHeadOperator(upStreamVertex, streamGraph).isStreamSource();
}
return true;
}
/**
 * Backtraces the head of an operator chain.
 */
private static StreamOperatorFactory<?> getHeadOperator(StreamNode upStreamVertex, StreamGraph streamGraph) {
    // Iterative form of the original tail recursion: walk single, chainable input
    // edges upstream until the chain head is reached.
    StreamNode current = upStreamVertex;
    while (current.getInEdges().size() == 1 && isChainable(current.getInEdges().get(0), streamGraph)) {
        current = streamGraph.getSourceVertex(current.getInEdges().get(0));
    }
    return current.getOperatorFactory();
}
/** Assigns slot sharing groups first, then co-location constraints (which require them). */
private void setSlotSharingAndCoLocation() {
setSlotSharing();
setCoLocation();
}
/**
 * Assigns each job vertex its slot sharing group: {@code null} for a null group key,
 * the per-region group for the default key, or a shared group per user-specified key.
 */
private void setSlotSharing() {
final Map<String, SlotSharingGroup> specifiedSlotSharingGroups = new HashMap<>();
final Map<JobVertexID, SlotSharingGroup> vertexRegionSlotSharingGroups = buildVertexRegionSlotSharingGroups();
for (Entry<Integer, JobVertex> entry : jobVertices.entrySet()) {
final JobVertex vertex = entry.getValue();
final String slotSharingGroupKey = streamGraph.getStreamNode(entry.getKey()).getSlotSharingGroup();
final SlotSharingGroup effectiveSlotSharingGroup;
if (slotSharingGroupKey == null) {
effectiveSlotSharingGroup = null;
} else if (slotSharingGroupKey.equals(StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP)) {
// Default group: use the pipelined-region based assignment.
effectiveSlotSharingGroup = vertexRegionSlotSharingGroups.get(vertex.getID());
} else {
// User-specified key: all vertices with the same key share one group.
effectiveSlotSharingGroup = specifiedSlotSharingGroups.computeIfAbsent(
slotSharingGroupKey, k -> new SlotSharingGroup());
}
vertex.setSlotSharingGroup(effectiveSlotSharingGroup);
}
}
/**
 * Maps a vertex to its region slot sharing group.
 * If {@link StreamGraph#isAllVerticesInSameSlotSharingGroupByDefault()}
 * returns true, all regions will be in the same slot sharing group.
 */
private Map<JobVertexID, SlotSharingGroup> buildVertexRegionSlotSharingGroups() {
final Map<JobVertexID, SlotSharingGroup> vertexRegionSlotSharingGroups = new HashMap<>();
final SlotSharingGroup defaultSlotSharingGroup = new SlotSharingGroup();
final boolean allRegionsInSameSlotSharingGroup = streamGraph.isAllVerticesInSameSlotSharingGroupByDefault();
// Pipelined regions are derived from the job graph's logical topology.
final Set<LogicalPipelinedRegion> regions = new DefaultLogicalTopology(jobGraph).getLogicalPipelinedRegions();
for (LogicalPipelinedRegion region : regions) {
final SlotSharingGroup regionSlotSharingGroup;
if (allRegionsInSameSlotSharingGroup) {
regionSlotSharingGroup = defaultSlotSharingGroup;
} else {
regionSlotSharingGroup = new SlotSharingGroup();
}
for (JobVertexID jobVertexID : region.getVertexIDs()) {
vertexRegionSlotSharingGroups.put(jobVertexID, regionSlotSharingGroup);
}
}
return vertexRegionSlotSharingGroups;
}
/**
 * Applies co-location constraints: all vertices with the same co-location group key
 * must be in the same slot sharing group and are put into one {@link CoLocationGroup}.
 *
 * @throws IllegalStateException if a constrained vertex has no slot sharing group, or
 *         constrained vertices span different slot sharing groups
 */
private void setCoLocation() {
final Map<String, Tuple2<SlotSharingGroup, CoLocationGroup>> coLocationGroups = new HashMap<>();
for (Entry<Integer, JobVertex> entry : jobVertices.entrySet()) {
final StreamNode node = streamGraph.getStreamNode(entry.getKey());
final JobVertex vertex = entry.getValue();
final SlotSharingGroup sharingGroup = vertex.getSlotSharingGroup();
final String coLocationGroupKey = node.getCoLocationGroup();
if (coLocationGroupKey != null) {
if (sharingGroup == null) {
throw new IllegalStateException("Cannot use a co-location constraint without a slot sharing group");
}
// The first vertex seen for a key fixes the (sharing group, co-location group) pair.
Tuple2<SlotSharingGroup, CoLocationGroup> constraint = coLocationGroups.computeIfAbsent(
coLocationGroupKey, k -> new Tuple2<>(sharingGroup, new CoLocationGroup()));
if (constraint.f0 != sharingGroup) {
throw new IllegalStateException("Cannot co-locate operators from different slot sharing groups");
}
vertex.updateCoLocationGroup(constraint.f1);
constraint.f1.addVertex(vertex);
}
}
}
/**
 * Distributes managed memory fractions to all operators, one slot sharing group at a time.
 *
 * <p>First indexes, per job vertex, its head operator id and the full set of operator
 * (stream node) ids chained into it, then delegates each distinct slot sharing group to
 * {@link #setManagedMemoryFractionForSlotSharingGroup}.
 */
private static void setManagedMemoryFraction(
final Map<Integer, JobVertex> jobVertices,
final Map<Integer, StreamConfig> operatorConfigs,
final Map<Integer, Map<Integer, StreamConfig>> vertexChainedConfigs,
final java.util.function.Function<Integer, ResourceSpec> operatorResourceRetriever,
final java.util.function.Function<Integer, Integer> operatorManagedMemoryWeightRetriever) {
// Identity set: distinct SlotSharingGroup instances, regardless of their equals().
final Set<SlotSharingGroup> slotSharingGroups = Collections.newSetFromMap(new IdentityHashMap<>());
final Map<JobVertexID, Integer> vertexHeadOperators = new HashMap<>();
final Map<JobVertexID, Set<Integer>> vertexOperators = new HashMap<>();
for (Entry<Integer, JobVertex> entry : jobVertices.entrySet()) {
final int headOperatorId = entry.getKey();
final JobVertex jobVertex = entry.getValue();
final SlotSharingGroup jobVertexSlotSharingGroup = jobVertex.getSlotSharingGroup();
checkState(jobVertexSlotSharingGroup != null, "JobVertex slot sharing group must not be null");
slotSharingGroups.add(jobVertexSlotSharingGroup);
vertexHeadOperators.put(jobVertex.getID(), headOperatorId);
// A vertex's operators are its head operator plus all chained operators.
final Set<Integer> operatorIds = new HashSet<>();
operatorIds.add(headOperatorId);
operatorIds.addAll(vertexChainedConfigs.getOrDefault(headOperatorId, Collections.emptyMap()).keySet());
vertexOperators.put(jobVertex.getID(), operatorIds);
}
for (SlotSharingGroup slotSharingGroup : slotSharingGroups) {
setManagedMemoryFractionForSlotSharingGroup(
slotSharingGroup,
vertexHeadOperators,
vertexOperators,
operatorConfigs,
vertexChainedConfigs,
operatorResourceRetriever,
operatorManagedMemoryWeightRetriever);
}
}
/**
 * Computes the group-wide managed memory weight, sets each operator's fraction relative
 * to it, and re-writes the chained task configs of every vertex in the group so the
 * updated fractions are propagated.
 */
private static void setManagedMemoryFractionForSlotSharingGroup(
final SlotSharingGroup slotSharingGroup,
final Map<JobVertexID, Integer> vertexHeadOperators,
final Map<JobVertexID, Set<Integer>> vertexOperators,
final Map<Integer, StreamConfig> operatorConfigs,
final Map<Integer, Map<Integer, StreamConfig>> vertexChainedConfigs,
final java.util.function.Function<Integer, ResourceSpec> operatorResourceRetriever,
final java.util.function.Function<Integer, Integer> operatorManagedMemoryWeightRetriever) {
// Total weight over every operator of every vertex in this slot sharing group.
final int groupManagedMemoryWeight = slotSharingGroup.getJobVertexIds().stream()
.flatMap(vid -> vertexOperators.get(vid).stream())
.mapToInt(operatorManagedMemoryWeightRetriever::apply)
.sum();
for (JobVertexID jobVertexID : slotSharingGroup.getJobVertexIds()) {
for (int operatorNodeId : vertexOperators.get(jobVertexID)) {
final StreamConfig operatorConfig = operatorConfigs.get(operatorNodeId);
final ResourceSpec operatorResourceSpec = operatorResourceRetriever.apply(operatorNodeId);
final int operatorManagedMemoryWeight = operatorManagedMemoryWeightRetriever.apply(operatorNodeId);
setManagedMemoryFractionForOperator(
operatorResourceSpec,
slotSharingGroup.getResourceSpec(),
operatorManagedMemoryWeight,
groupManagedMemoryWeight,
operatorConfig);
}
// Re-serialize the chained configs so the fractions set above take effect.
final int headOperatorNodeId = vertexHeadOperators.get(jobVertexID);
final StreamConfig vertexConfig = operatorConfigs.get(headOperatorNodeId);
vertexConfig.setTransitiveChainedTaskConfigs(vertexChainedConfigs.get(headOperatorNodeId));
}
}
/**
 * Sets one operator's managed memory fraction.
 *
 * <p>If the group's resources are UNKNOWN, the fraction is weight based (operator weight
 * over group weight); otherwise it is byte based (operator managed memory over group
 * managed memory). A zero denominator yields a fraction of 0.0.
 */
private static void setManagedMemoryFractionForOperator(
final ResourceSpec operatorResourceSpec,
final ResourceSpec groupResourceSpec,
final int operatorManagedMemoryWeight,
final int groupManagedMemoryWeight,
final StreamConfig operatorConfig) {
final double managedMemoryFraction;
if (groupResourceSpec.equals(ResourceSpec.UNKNOWN)) {
managedMemoryFraction = groupManagedMemoryWeight > 0
? getFractionRoundedDown(operatorManagedMemoryWeight, groupManagedMemoryWeight)
: 0.0;
} else {
final long groupManagedMemoryBytes = groupResourceSpec.getManagedMemory().getBytes();
managedMemoryFraction = groupManagedMemoryBytes > 0
? getFractionRoundedDown(operatorResourceSpec.getManagedMemory().getBytes(), groupManagedMemoryBytes)
: 0.0;
}
operatorConfig.setManagedMemoryFraction(managedMemoryFraction);
}
/**
 * Computes {@code dividend / divisor} with {@code MANAGED_MEMORY_FRACTION_SCALE} decimal
 * digits, rounding towards zero so that per-operator fractions never over-commit the group.
 */
private static double getFractionRoundedDown(final long dividend, final long divisor) {
    return BigDecimal.valueOf(dividend)
        // RoundingMode.DOWN replaces the deprecated BigDecimal.ROUND_DOWN int constant;
        // fully qualified so no new import is required.
        .divide(BigDecimal.valueOf(divisor), MANAGED_MEMORY_FRACTION_SCALE, java.math.RoundingMode.DOWN)
        .doubleValue();
}
} | class StreamingJobGraphGenerator {
// Logger shared by all generator instances.
private static final Logger LOG = LoggerFactory.getLogger(StreamingJobGraphGenerator.class);
// Number of decimal digits kept when computing managed memory fractions.
private static final int MANAGED_MEMORY_FRACTION_SCALE = 16;
/** Translates the stream graph into a {@link JobGraph}, letting the graph pick its own job id. */
public static JobGraph createJobGraph(StreamGraph streamGraph) {
return createJobGraph(streamGraph, null);
}
/**
 * Translates the given {@link StreamGraph} into a {@link JobGraph}.
 *
 * @param jobID the job id to use for the resulting graph; may be {@code null}
 */
public static JobGraph createJobGraph(StreamGraph streamGraph, @Nullable JobID jobID) {
return new StreamingJobGraphGenerator(streamGraph, jobID).createJobGraph();
}
// The stream graph being translated.
private final StreamGraph streamGraph;
// Chain-head stream node id -> the JobVertex created for that chain.
private final Map<Integer, JobVertex> jobVertices;
// The job graph under construction.
private final JobGraph jobGraph;
// Stream node ids for which a vertex/chain has already been built (chain heads).
private final Collection<Integer> builtVertices;
// Physical (cross-vertex) edges, in the order they were connected.
private final List<StreamEdge> physicalEdgesInOrder;
// Chain-head id -> (chained node id -> that node's StreamConfig).
private final Map<Integer, Map<Integer, StreamConfig>> chainedConfigs;
// Stream node id -> its StreamConfig (head and chained nodes alike).
private final Map<Integer, StreamConfig> vertexConfigs;
// Stream node id -> display name of the chain starting at that node.
private final Map<Integer, String> chainedNames;
// Stream node id -> accumulated min / preferred resources of its chain.
private final Map<Integer, ResourceSpec> chainedMinResources;
private final Map<Integer, ResourceSpec> chainedPreferredResources;
// Chain-head id -> container holding the input/output formats used within that chain.
private final Map<Integer, InputOutputFormatContainer> chainedInputOutputFormats;
// Hashers producing stable per-node hashes: the default one plus legacy/user-hash variants.
private final StreamGraphHasher defaultStreamGraphHasher;
private final List<StreamGraphHasher> legacyStreamGraphHashers;
/**
 * Creates a generator for the given stream graph.
 *
 * @param jobID job id for the resulting {@link JobGraph}; may be {@code null}
 */
private StreamingJobGraphGenerator(StreamGraph streamGraph, @Nullable JobID jobID) {
this.streamGraph = streamGraph;
this.defaultStreamGraphHasher = new StreamGraphHasherV2();
this.legacyStreamGraphHashers = Arrays.asList(new StreamGraphUserHashHasher());
this.jobVertices = new HashMap<>();
this.builtVertices = new HashSet<>();
this.chainedConfigs = new HashMap<>();
this.vertexConfigs = new HashMap<>();
this.chainedNames = new HashMap<>();
this.chainedMinResources = new HashMap<>();
this.chainedPreferredResources = new HashMap<>();
this.chainedInputOutputFormats = new HashMap<>();
this.physicalEdgesInOrder = new ArrayList<>();
// The JobGraph is created up front; its contents are filled in by createJobGraph().
jobGraph = new JobGraph(jobID, streamGraph.getJobName());
}
/**
 * Builds the {@link JobGraph}: validates the stream graph, chains operators into job
 * vertices, wires the physical edges, assigns slot sharing / co-location groups,
 * distributes managed memory fractions and configures checkpointing.
 *
 * @return the fully assembled job graph
 */
private JobGraph createJobGraph() {
    preValidate();

    jobGraph.setScheduleMode(streamGraph.getScheduleMode());

    // Generate deterministic hashes for the nodes so they keep their identity across
    // resubmissions of an unmodified job.
    Map<Integer, byte[]> hashes = defaultStreamGraphHasher.traverseStreamGraphAndGenerateHashes(streamGraph);

    // Also generate the legacy (e.g. user-specified) hashes for backwards compatibility.
    List<Map<Integer, byte[]>> legacyHashes = new ArrayList<>(legacyStreamGraphHashers.size());
    for (StreamGraphHasher hasher : legacyStreamGraphHashers) {
        legacyHashes.add(hasher.traverseStreamGraphAndGenerateHashes(streamGraph));
    }

    Map<Integer, List<Tuple2<byte[], byte[]>>> chainedOperatorHashes = new HashMap<>();
    setChaining(hashes, legacyHashes, chainedOperatorHashes);
    setPhysicalEdges();
    setSlotSharingAndCoLocation();

    setManagedMemoryFraction(
        Collections.unmodifiableMap(jobVertices),
        Collections.unmodifiableMap(vertexConfigs),
        Collections.unmodifiableMap(chainedConfigs),
        id -> streamGraph.getStreamNode(id).getMinResources(),
        id -> streamGraph.getStreamNode(id).getManagedMemoryWeight());

    configureCheckpointing();

    jobGraph.setSavepointRestoreSettings(streamGraph.getSavepointRestoreSettings());

    JobGraphUtils.addUserArtifactEntries(streamGraph.getUserArtifacts(), jobGraph);

    try {
        jobGraph.setExecutionConfig(streamGraph.getExecutionConfig());
    }
    catch (IOException e) {
        // The original message concatenated two sentences without a space
        // ("...ExecutionConfig.This indicates...") and dropped the cause; both fixed here.
        throw new IllegalConfigurationException("Could not serialize the ExecutionConfig. " +
            "This indicates that non-serializable types (like custom serializers) were registered", e);
    }

    return jobGraph;
}
@SuppressWarnings("deprecation")
/**
 * Rejects stream graphs the runtime cannot execute with checkpointing enabled:
 * iterative jobs without forced checkpointing, and jobs containing operators that
 * implement {@link InputSelectable}.
 */
private void preValidate() {
CheckpointConfig checkpointConfig = streamGraph.getCheckpointConfig();
if (checkpointConfig.isCheckpointingEnabled()) {
// Iterations + checkpointing only with the user's explicit opt-in.
if (streamGraph.isIterative() && !checkpointConfig.isForceCheckpointing()) {
throw new UnsupportedOperationException(
"Checkpointing is currently not supported by default for iterative jobs, as we cannot guarantee exactly once semantics. "
+ "State checkpoints happen normally, but records in-transit during the snapshot will be lost upon failure. "
+ "\nThe user can force enable state checkpoints with the reduced guarantees by calling: env.enableCheckpointing(interval,true)");
}
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
for (StreamNode node : streamGraph.getStreamNodes()) {
StreamOperatorFactory operatorFactory = node.getOperatorFactory();
if (operatorFactory != null) {
Class<?> operatorClass = operatorFactory.getStreamOperatorClass(classLoader);
if (InputSelectable.class.isAssignableFrom(operatorClass)) {
throw new UnsupportedOperationException(
"Checkpointing is currently not supported for operators that implement InputSelectable:"
+ operatorClass.getName());
}
}
}
}
}
/**
 * Groups the physical (non-chained) edges by target vertex, preserving creation order,
 * and stores each group in the target vertex's {@link StreamConfig}.
 */
private void setPhysicalEdges() {
Map<Integer, List<StreamEdge>> physicalInEdgesInOrder = new HashMap<Integer, List<StreamEdge>>();
for (StreamEdge edge : physicalEdgesInOrder) {
int target = edge.getTargetId();
List<StreamEdge> inEdges = physicalInEdgesInOrder.computeIfAbsent(target, k -> new ArrayList<>());
inEdges.add(edge);
}
for (Map.Entry<Integer, List<StreamEdge>> inEdges : physicalInEdgesInOrder.entrySet()) {
int vertex = inEdges.getKey();
List<StreamEdge> edgeList = inEdges.getValue();
vertexConfigs.get(vertex).setInPhysicalEdges(edgeList);
}
}
/**
 * Sets up task chains from the source {@link StreamNode} instances.
 *
 * <p>This will recursively create all {@link JobVertex} instances.
 */
private void setChaining(Map<Integer, byte[]> hashes, List<Map<Integer, byte[]>> legacyHashes, Map<Integer, List<Tuple2<byte[], byte[]>>> chainedOperatorHashes) {
// Start one chain at every source node; createChain walks the graph recursively.
for (Integer sourceNodeId : streamGraph.getSourceIDs()) {
createChain(sourceNodeId, sourceNodeId, hashes, legacyHashes, 0, chainedOperatorHashes);
}
}
/**
 * Builds the operator chain that starts at {@code startNodeId} by walking the
 * stream graph depth-first along chainable output edges.
 *
 * <p>{@code currentNodeId} is the node currently being visited inside the
 * chain (equal to {@code startNodeId} at the chain head). Non-chainable out
 * edges terminate the chain and recursively start new chains at their targets.
 *
 * @return the transitive out edges of this chain, i.e. all non-chainable
 *         edges reachable from the head without leaving the chain
 */
private List<StreamEdge> createChain(
        Integer startNodeId,
        Integer currentNodeId,
        Map<Integer, byte[]> hashes,
        List<Map<Integer, byte[]>> legacyHashes,
        int chainIndex,
        Map<Integer, List<Tuple2<byte[], byte[]>>> chainedOperatorHashes) {
    if (!builtVertices.contains(startNodeId)) {
        List<StreamEdge> transitiveOutEdges = new ArrayList<StreamEdge>();
        List<StreamEdge> chainableOutputs = new ArrayList<StreamEdge>();
        List<StreamEdge> nonChainableOutputs = new ArrayList<StreamEdge>();
        StreamNode currentNode = streamGraph.getStreamNode(currentNodeId);
        // Partition out edges into those that can stay in this chain and
        // those that must cross a task boundary.
        for (StreamEdge outEdge : currentNode.getOutEdges()) {
            if (isChainable(outEdge, streamGraph)) {
                chainableOutputs.add(outEdge);
            } else {
                nonChainableOutputs.add(outEdge);
            }
        }
        // Continue the current chain along chainable edges.
        for (StreamEdge chainable : chainableOutputs) {
            transitiveOutEdges.addAll(
                    createChain(startNodeId, chainable.getTargetId(), hashes, legacyHashes, chainIndex + 1, chainedOperatorHashes));
        }
        // Non-chainable edges leave this chain; their targets head new chains
        // (chain index restarts at 0).
        for (StreamEdge nonChainable : nonChainableOutputs) {
            transitiveOutEdges.add(nonChainable);
            createChain(nonChainable.getTargetId(), nonChainable.getTargetId(), hashes, legacyHashes, 0, chainedOperatorHashes);
        }
        List<Tuple2<byte[], byte[]>> operatorHashes =
                chainedOperatorHashes.computeIfAbsent(startNodeId, k -> new ArrayList<>());
        byte[] primaryHashBytes = hashes.get(currentNodeId);
        OperatorID currentOperatorId = new OperatorID(primaryHashBytes);
        // Pair the primary hash with every legacy hash for this node
        // (legacy entries may be null).
        for (Map<Integer, byte[]> legacyHash : legacyHashes) {
            operatorHashes.add(new Tuple2<>(primaryHashBytes, legacyHash.get(currentNodeId)));
        }
        // Names and resources are aggregated bottom-up, so the successors'
        // entries already exist at this point.
        chainedNames.put(currentNodeId, createChainedName(currentNodeId, chainableOutputs));
        chainedMinResources.put(currentNodeId, createChainedMinResources(currentNodeId, chainableOutputs));
        chainedPreferredResources.put(currentNodeId, createChainedPreferredResources(currentNodeId, chainableOutputs));
        if (currentNode.getInputFormat() != null) {
            getOrCreateFormatContainer(startNodeId).addInputFormat(currentOperatorId, currentNode.getInputFormat());
        }
        if (currentNode.getOutputFormat() != null) {
            getOrCreateFormatContainer(startNodeId).addOutputFormat(currentOperatorId, currentNode.getOutputFormat());
        }
        // Only the chain head materializes a JobVertex; chained operators get
        // a fresh config that is stored under the head below.
        StreamConfig config = currentNodeId.equals(startNodeId)
                ? createJobVertex(startNodeId, hashes, legacyHashes, chainedOperatorHashes)
                : new StreamConfig(new Configuration());
        setVertexConfig(currentNodeId, config, chainableOutputs, nonChainableOutputs);
        if (currentNodeId.equals(startNodeId)) {
            config.setChainStart();
            config.setChainIndex(0);
            config.setOperatorName(streamGraph.getStreamNode(currentNodeId).getOperatorName());
            config.setOutEdgesInOrder(transitiveOutEdges);
            config.setOutEdges(streamGraph.getStreamNode(currentNodeId).getOutEdges());
            // Wire the chain head to every downstream chain it feeds.
            for (StreamEdge edge : transitiveOutEdges) {
                connect(startNodeId, edge);
            }
            config.setTransitiveChainedTaskConfigs(chainedConfigs.get(startNodeId));
        } else {
            chainedConfigs.computeIfAbsent(startNodeId, k -> new HashMap<Integer, StreamConfig>());
            config.setChainIndex(chainIndex);
            StreamNode node = streamGraph.getStreamNode(currentNodeId);
            config.setOperatorName(node.getOperatorName());
            chainedConfigs.get(startNodeId).put(currentNodeId, config);
        }
        config.setOperatorID(currentOperatorId);
        if (chainableOutputs.isEmpty()) {
            config.setChainEnd();
        }
        return transitiveOutEdges;
    } else {
        // The chain head was already built; nothing more to contribute.
        return new ArrayList<>();
    }
}
private InputOutputFormatContainer getOrCreateFormatContainer(Integer startNodeId) {
    // Lazily create one format container per chain head, using the context
    // class loader so user-code format classes can be resolved.
    InputOutputFormatContainer container = chainedInputOutputFormats.get(startNodeId);
    if (container == null) {
        container = new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader());
        chainedInputOutputFormats.put(startNodeId, container);
    }
    return container;
}
private String createChainedName(Integer vertexID, List<StreamEdge> chainedOutputs) {
    String operatorName = streamGraph.getStreamNode(vertexID).getOperatorName();
    // No chained successor: the operator keeps its plain name.
    if (chainedOutputs.isEmpty()) {
        return operatorName;
    }
    // Exactly one successor: append its already-computed chained name.
    if (chainedOutputs.size() == 1) {
        return operatorName + " -> " + chainedNames.get(chainedOutputs.get(0).getTargetId());
    }
    // Several successors: list all of their chained names in parentheses.
    List<String> successorNames = new ArrayList<>();
    for (StreamEdge output : chainedOutputs) {
        successorNames.add(chainedNames.get(output.getTargetId()));
    }
    return operatorName + " -> (" + StringUtils.join(successorNames, ", ") + ")";
}
private ResourceSpec createChainedMinResources(Integer vertexID, List<StreamEdge> chainedOutputs) {
    // Start from this vertex's own minimum resources and fold in the
    // previously computed minimum resources of every chained successor.
    ResourceSpec merged = streamGraph.getStreamNode(vertexID).getMinResources();
    for (StreamEdge output : chainedOutputs) {
        merged = merged.merge(chainedMinResources.get(output.getTargetId()));
    }
    return merged;
}
private ResourceSpec createChainedPreferredResources(Integer vertexID, List<StreamEdge> chainedOutputs) {
    // Start from this vertex's own preferred resources and fold in the
    // previously computed preferred resources of every chained successor.
    ResourceSpec merged = streamGraph.getStreamNode(vertexID).getPreferredResources();
    for (StreamEdge output : chainedOutputs) {
        merged = merged.merge(chainedPreferredResources.get(output.getTargetId()));
    }
    return merged;
}
/**
 * Creates the {@link JobVertex} for the chain headed by {@code streamNodeId}
 * and registers it with the job graph.
 *
 * @return a {@link StreamConfig} wrapping the new vertex's configuration
 * @throws IllegalStateException if no hash was generated for the node
 */
private StreamConfig createJobVertex(
        Integer streamNodeId,
        Map<Integer, byte[]> hashes,
        List<Map<Integer, byte[]>> legacyHashes,
        Map<Integer, List<Tuple2<byte[], byte[]>>> chainedOperatorHashes) {
    JobVertex jobVertex;
    StreamNode streamNode = streamGraph.getStreamNode(streamNodeId);
    byte[] hash = hashes.get(streamNodeId);
    if (hash == null) {
        throw new IllegalStateException("Cannot find node hash. " +
                "Did you generate them before calling this method?");
    }
    // The vertex id is derived deterministically from the node hash.
    JobVertexID jobVertexId = new JobVertexID(hash);
    List<JobVertexID> legacyJobVertexIds = new ArrayList<>(legacyHashes.size());
    for (Map<Integer, byte[]> legacyHash : legacyHashes) {
        hash = legacyHash.get(streamNodeId);
        if (null != hash) {
            legacyJobVertexIds.add(new JobVertexID(hash));
        }
    }
    List<Tuple2<byte[], byte[]>> chainedOperators = chainedOperatorHashes.get(streamNodeId);
    List<OperatorID> chainedOperatorVertexIds = new ArrayList<>();
    List<OperatorID> userDefinedChainedOperatorVertexIds = new ArrayList<>();
    if (chainedOperators != null) {
        for (Tuple2<byte[], byte[]> chainedOperator : chainedOperators) {
            chainedOperatorVertexIds.add(new OperatorID(chainedOperator.f0));
            // The user-defined (f1) hash may be absent for an operator.
            userDefinedChainedOperatorVertexIds.add(chainedOperator.f1 != null ? new OperatorID(chainedOperator.f1) : null);
        }
    }
    // Chains containing input/output formats use the specialized vertex type
    // that carries the serialized formats in its task configuration.
    if (chainedInputOutputFormats.containsKey(streamNodeId)) {
        jobVertex = new InputOutputFormatVertex(
                chainedNames.get(streamNodeId),
                jobVertexId,
                legacyJobVertexIds,
                chainedOperatorVertexIds,
                userDefinedChainedOperatorVertexIds);
        chainedInputOutputFormats
                .get(streamNodeId)
                .write(new TaskConfig(jobVertex.getConfiguration()));
    } else {
        jobVertex = new JobVertex(
                chainedNames.get(streamNodeId),
                jobVertexId,
                legacyJobVertexIds,
                chainedOperatorVertexIds,
                userDefinedChainedOperatorVertexIds);
    }
    jobVertex.setResources(chainedMinResources.get(streamNodeId), chainedPreferredResources.get(streamNodeId));
    jobVertex.setInvokableClass(streamNode.getJobVertexClass());
    int parallelism = streamNode.getParallelism();
    if (parallelism > 0) {
        jobVertex.setParallelism(parallelism);
    } else {
        // Node did not specify a parallelism; read the vertex's current value
        // so the debug log below reports what is actually in effect.
        parallelism = jobVertex.getParallelism();
    }
    jobVertex.setMaxParallelism(streamNode.getMaxParallelism());
    if (LOG.isDebugEnabled()) {
        LOG.debug("Parallelism set: {} for {}", parallelism, streamNodeId);
    }
    jobVertex.setInputDependencyConstraint(streamGraph.getExecutionConfig().getDefaultInputDependencyConstraint());
    jobVertices.put(streamNodeId, jobVertex);
    builtVertices.add(streamNodeId);
    jobGraph.addVertex(jobVertex);
    return new StreamConfig(jobVertex.getConfiguration());
}
/**
 * Populates the {@link StreamConfig} of a single stream node with its
 * serializers, outputs, checkpoint settings and state configuration, and
 * registers the config in {@code vertexConfigs}.
 *
 * @param vertexID            id of the stream node being configured
 * @param config              the config to populate
 * @param chainableOutputs    out edges that stay within the operator chain
 * @param nonChainableOutputs out edges that leave the operator chain
 */
@SuppressWarnings("unchecked")
private void setVertexConfig(Integer vertexID, StreamConfig config,
        List<StreamEdge> chainableOutputs, List<StreamEdge> nonChainableOutputs) {
    StreamNode vertex = streamGraph.getStreamNode(vertexID);
    config.setVertexID(vertexID);
    config.setBufferTimeout(vertex.getBufferTimeout());
    config.setTypeSerializersIn(vertex.getTypeSerializersIn());
    config.setTypeSerializerOut(vertex.getTypeSerializerOut());
    // Side-output serializers are registered for chained and non-chained
    // outputs alike; previously this was two byte-identical loops.
    registerSideOutputSerializers(config, chainableOutputs);
    registerSideOutputSerializers(config, nonChainableOutputs);
    config.setStreamOperatorFactory(vertex.getOperatorFactory());
    config.setOutputSelectors(vertex.getOutputSelectors());
    config.setNumberOfOutputs(nonChainableOutputs.size());
    config.setNonChainedOutputs(nonChainableOutputs);
    config.setChainedOutputs(chainableOutputs);
    config.setTimeCharacteristic(streamGraph.getTimeCharacteristic());
    final CheckpointConfig checkpointCfg = streamGraph.getCheckpointConfig();
    config.setStateBackend(streamGraph.getStateBackend());
    config.setCheckpointingEnabled(checkpointCfg.isCheckpointingEnabled());
    config.setUnalignedCheckpointsEnabled(checkpointCfg.isUnalignedCheckpointsEnabled());
    config.setCheckpointMode(getCheckpointingMode(checkpointCfg));
    for (int i = 0; i < vertex.getStatePartitioners().length; i++) {
        config.setStatePartitioner(i, vertex.getStatePartitioners()[i]);
    }
    config.setStateKeySerializer(vertex.getStateKeySerializer());
    // Iteration head/tail tasks additionally need the feedback channel
    // identifier and the loop timeout.
    Class<? extends AbstractInvokable> vertexClass = vertex.getJobVertexClass();
    if (vertexClass.equals(StreamIterationHead.class)
            || vertexClass.equals(StreamIterationTail.class)) {
        config.setIterationId(streamGraph.getBrokerID(vertexID));
        config.setIterationWaitTime(streamGraph.getLoopTimeout(vertexID));
    }
    vertexConfigs.put(vertexID, config);
}

/**
 * Registers a side-output serializer for every edge that carries an output
 * tag, using the stream graph's execution config.
 */
private void registerSideOutputSerializers(StreamConfig config, List<StreamEdge> edges) {
    for (StreamEdge edge : edges) {
        if (edge.getOutputTag() != null) {
            config.setTypeSerializerSideOut(
                    edge.getOutputTag(),
                    edge.getOutputTag().getTypeInfo().createSerializer(streamGraph.getExecutionConfig()));
        }
    }
}
private CheckpointingMode getCheckpointingMode(CheckpointConfig checkpointConfig) {
    CheckpointingMode checkpointingMode = checkpointConfig.getCheckpointingMode();
    checkArgument(checkpointingMode == CheckpointingMode.EXACTLY_ONCE ||
        checkpointingMode == CheckpointingMode.AT_LEAST_ONCE, "Unexpected checkpointing mode.");
    // With checkpointing disabled the configured mode is not used; fall back
    // to AT_LEAST_ONCE. NOTE(review): presumably this avoids barrier
    // alignment overhead in downstream tasks — confirm.
    return checkpointConfig.isCheckpointingEnabled()
            ? checkpointingMode
            : CheckpointingMode.AT_LEAST_ONCE;
}
/**
 * Connects the chain headed by {@code headOfChain} to the downstream job
 * vertex targeted by {@code edge}, creating the intermediate data set and
 * {@link JobEdge}, and increments the consumer's input count.
 */
private void connect(Integer headOfChain, StreamEdge edge) {
    physicalEdgesInOrder.add(edge);
    Integer downStreamVertexID = edge.getTargetId();
    JobVertex headVertex = jobVertices.get(headOfChain);
    JobVertex downStreamVertex = jobVertices.get(downStreamVertexID);
    StreamConfig downStreamConfig = new StreamConfig(downStreamVertex.getConfiguration());
    downStreamConfig.setNumberOfInputs(downStreamConfig.getNumberOfInputs() + 1);
    StreamPartitioner<?> partitioner = edge.getPartitioner();
    // Map the edge's shuffle mode to a result partition type; UNDEFINED
    // defers to the stream graph's global data exchange mode.
    ResultPartitionType resultPartitionType;
    switch (edge.getShuffleMode()) {
        case PIPELINED:
            resultPartitionType = ResultPartitionType.PIPELINED_BOUNDED;
            break;
        case BATCH:
            resultPartitionType = ResultPartitionType.BLOCKING;
            break;
        case UNDEFINED:
            resultPartitionType = determineResultPartitionType(partitioner);
            break;
        default:
            throw new UnsupportedOperationException("Data exchange mode " +
                    edge.getShuffleMode() + " is not supported yet.");
    }
    // Pointwise partitioners (forward/rescale) connect producer subtasks to
    // a subset of consumers; everything else is all-to-all.
    JobEdge jobEdge;
    if (isPointwisePartitioner(partitioner)) {
        jobEdge = downStreamVertex.connectNewDataSetAsInput(
                headVertex,
                DistributionPattern.POINTWISE,
                resultPartitionType);
    } else {
        jobEdge = downStreamVertex.connectNewDataSetAsInput(
                headVertex,
                DistributionPattern.ALL_TO_ALL,
                resultPartitionType);
    }
    jobEdge.setShipStrategyName(partitioner.toString());
    if (LOG.isDebugEnabled()) {
        LOG.debug("CONNECTED: {} - {} -> {}", partitioner.getClass().getSimpleName(),
                headOfChain, downStreamVertexID);
    }
}
private static boolean isPointwisePartitioner(StreamPartitioner<?> partitioner) {
    // Forward and rescale partitioners connect subtasks pointwise rather
    // than all-to-all.
    if (partitioner instanceof ForwardPartitioner) {
        return true;
    }
    return partitioner instanceof RescalePartitioner;
}
/**
 * Picks the result partition type for an edge whose shuffle mode is
 * UNDEFINED, based on the stream graph's global data exchange mode.
 */
private ResultPartitionType determineResultPartitionType(StreamPartitioner<?> partitioner) {
    switch (streamGraph.getGlobalDataExchangeMode()) {
        case ALL_EDGES_BLOCKING:
            return ResultPartitionType.BLOCKING;
        case FORWARD_EDGES_PIPELINED:
            // Only forward edges stay pipelined; everything else blocks.
            if (partitioner instanceof ForwardPartitioner) {
                return ResultPartitionType.PIPELINED_BOUNDED;
            } else {
                return ResultPartitionType.BLOCKING;
            }
        case POINTWISE_EDGES_PIPELINED:
            // Forward and rescale (pointwise) edges stay pipelined.
            if (isPointwisePartitioner(partitioner)) {
                return ResultPartitionType.PIPELINED_BOUNDED;
            } else {
                return ResultPartitionType.BLOCKING;
            }
        case ALL_EDGES_PIPELINED:
            return ResultPartitionType.PIPELINED_BOUNDED;
        default:
            throw new RuntimeException("Unrecognized global data exchange mode " + streamGraph.getGlobalDataExchangeMode());
    }
}
public static boolean isChainable(StreamEdge edge, StreamGraph streamGraph) {
    StreamNode upStreamVertex = streamGraph.getSourceVertex(edge);
    StreamNode downStreamVertex = streamGraph.getTargetVertex(edge);
    // Guard clauses, evaluated in the same order as the original
    // short-circuiting conjunction.
    if (downStreamVertex.getInEdges().size() != 1) {
        return false;
    }
    if (!upStreamVertex.isSameSlotSharingGroup(downStreamVertex)) {
        return false;
    }
    if (!areOperatorsChainable(upStreamVertex, downStreamVertex, streamGraph)) {
        return false;
    }
    if (!(edge.getPartitioner() instanceof ForwardPartitioner)) {
        return false;
    }
    if (edge.getShuffleMode() == ShuffleMode.BATCH) {
        return false;
    }
    if (upStreamVertex.getParallelism() != downStreamVertex.getParallelism()) {
        return false;
    }
    return streamGraph.isChainingEnabled();
}
/**
 * Checks whether two adjacent operators may be fused into one chain, based
 * on their chaining strategies.
 */
@VisibleForTesting
static boolean areOperatorsChainable(
        StreamNode upStreamVertex,
        StreamNode downStreamVertex,
        StreamGraph streamGraph) {
    StreamOperatorFactory<?> upStreamOperator = upStreamVertex.getOperatorFactory();
    StreamOperatorFactory<?> downStreamOperator = downStreamVertex.getOperatorFactory();
    if (downStreamOperator == null || upStreamOperator == null) {
        return false;
    }
    // The upstream operator must allow a chained successor, and the
    // downstream operator must always chain to its predecessor.
    if (upStreamOperator.getChainingStrategy() == ChainingStrategy.NEVER ||
            downStreamOperator.getChainingStrategy() != ChainingStrategy.ALWAYS) {
        return false;
    }
    // A yielding operator is only chainable when the head of the prospective
    // chain is not a stream source.
    if (downStreamOperator instanceof YieldingOperatorFactory) {
        return !getHeadOperator(upStreamVertex, streamGraph).isStreamSource();
    }
    return true;
}
/**
 * Backtraces the head of an operator chain: walks upstream along single,
 * chainable input edges until the chain head is reached.
 */
private static StreamOperatorFactory<?> getHeadOperator(StreamNode upStreamVertex, StreamGraph streamGraph) {
    // Iterative form of the original tail recursion.
    StreamNode current = upStreamVertex;
    while (current.getInEdges().size() == 1 && isChainable(current.getInEdges().get(0), streamGraph)) {
        current = streamGraph.getSourceVertex(current.getInEdges().get(0));
    }
    return current.getOperatorFactory();
}
private void setSlotSharingAndCoLocation() {
    // Order matters: setCoLocation() reads each vertex's slot sharing group
    // and throws if it is missing, so groups must be assigned first.
    setSlotSharing();
    setCoLocation();
}
/**
 * Assigns a slot sharing group to every job vertex: explicitly named groups
 * are shared by all vertices declaring the same key, vertices in the default
 * group use their pipelined region's group, and vertices without a key get
 * no group at all.
 */
private void setSlotSharing() {
    final Map<String, SlotSharingGroup> specifiedSlotSharingGroups = new HashMap<>();
    final Map<JobVertexID, SlotSharingGroup> vertexRegionSlotSharingGroups = buildVertexRegionSlotSharingGroups();
    for (Entry<Integer, JobVertex> entry : jobVertices.entrySet()) {
        final JobVertex vertex = entry.getValue();
        final String slotSharingGroupKey = streamGraph.getStreamNode(entry.getKey()).getSlotSharingGroup();
        final SlotSharingGroup effectiveSlotSharingGroup;
        if (slotSharingGroupKey == null) {
            // No group declared for this node.
            effectiveSlotSharingGroup = null;
        } else if (slotSharingGroupKey.equals(StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP)) {
            // Default group: use the group computed for the vertex's region.
            effectiveSlotSharingGroup = vertexRegionSlotSharingGroups.get(vertex.getID());
        } else {
            // Explicitly named group: one shared instance per key.
            effectiveSlotSharingGroup = specifiedSlotSharingGroups.computeIfAbsent(
                    slotSharingGroupKey, k -> new SlotSharingGroup());
        }
        vertex.setSlotSharingGroup(effectiveSlotSharingGroup);
    }
}
/**
 * Maps a vertex to its region slot sharing group.
 * If {@link StreamGraph#isAllVerticesInSameSlotSharingGroupByDefault()}
 * returns true, all regions will be in the same slot sharing group.
 */
private Map<JobVertexID, SlotSharingGroup> buildVertexRegionSlotSharingGroups() {
    final Map<JobVertexID, SlotSharingGroup> vertexRegionSlotSharingGroups = new HashMap<>();
    final SlotSharingGroup defaultSlotSharingGroup = new SlotSharingGroup();
    final boolean allRegionsInSameSlotSharingGroup = streamGraph.isAllVerticesInSameSlotSharingGroupByDefault();
    final Set<DefaultLogicalPipelinedRegion> regions = new DefaultLogicalTopology(jobGraph).getLogicalPipelinedRegions();
    for (DefaultLogicalPipelinedRegion region : regions) {
        // Either share one group across all regions, or create one per region.
        final SlotSharingGroup regionSlotSharingGroup;
        if (allRegionsInSameSlotSharingGroup) {
            regionSlotSharingGroup = defaultSlotSharingGroup;
        } else {
            regionSlotSharingGroup = new SlotSharingGroup();
        }
        for (JobVertexID jobVertexID : region.getVertexIDs()) {
            vertexRegionSlotSharingGroups.put(jobVertexID, regionSlotSharingGroup);
        }
    }
    return vertexRegionSlotSharingGroups;
}
/**
 * Groups vertices that declare the same co-location key into a common
 * {@link CoLocationGroup}. All members of one co-location group must belong
 * to the same slot sharing group.
 *
 * @throws IllegalStateException if a co-located vertex has no slot sharing
 *         group, or if the key's members span different slot sharing groups
 */
private void setCoLocation() {
    final Map<String, Tuple2<SlotSharingGroup, CoLocationGroup>> coLocationGroups = new HashMap<>();
    for (Entry<Integer, JobVertex> entry : jobVertices.entrySet()) {
        final StreamNode node = streamGraph.getStreamNode(entry.getKey());
        final JobVertex vertex = entry.getValue();
        final SlotSharingGroup sharingGroup = vertex.getSlotSharingGroup();
        final String coLocationGroupKey = node.getCoLocationGroup();
        if (coLocationGroupKey != null) {
            if (sharingGroup == null) {
                throw new IllegalStateException("Cannot use a co-location constraint without a slot sharing group");
            }
            // The first vertex seen for a key fixes the slot sharing group
            // (by identity) that all later members must match.
            Tuple2<SlotSharingGroup, CoLocationGroup> constraint = coLocationGroups.computeIfAbsent(
                    coLocationGroupKey, k -> new Tuple2<>(sharingGroup, new CoLocationGroup()));
            if (constraint.f0 != sharingGroup) {
                throw new IllegalStateException("Cannot co-locate operators from different slot sharing groups");
            }
            vertex.updateCoLocationGroup(constraint.f1);
            constraint.f1.addVertex(vertex);
        }
    }
}
/**
 * Computes and stores the managed memory fraction of every operator,
 * partitioned by slot sharing group.
 *
 * @param jobVertices maps head operator node id to its job vertex
 * @param operatorConfigs maps operator node id to its stream config
 * @param vertexChainedConfigs maps head operator node id to the configs of its chained operators
 * @param operatorResourceRetriever resolves an operator's declared resources
 * @param operatorManagedMemoryWeightRetriever resolves an operator's managed memory weight
 */
private static void setManagedMemoryFraction(
        final Map<Integer, JobVertex> jobVertices,
        final Map<Integer, StreamConfig> operatorConfigs,
        final Map<Integer, Map<Integer, StreamConfig>> vertexChainedConfigs,
        final java.util.function.Function<Integer, ResourceSpec> operatorResourceRetriever,
        final java.util.function.Function<Integer, Integer> operatorManagedMemoryWeightRetriever) {
    // Identity-based set: distinct SlotSharingGroup instances matter here,
    // not object equality.
    final Set<SlotSharingGroup> slotSharingGroups = Collections.newSetFromMap(new IdentityHashMap<>());
    final Map<JobVertexID, Integer> vertexHeadOperators = new HashMap<>();
    final Map<JobVertexID, Set<Integer>> vertexOperators = new HashMap<>();
    for (Entry<Integer, JobVertex> entry : jobVertices.entrySet()) {
        final int headOperatorId = entry.getKey();
        final JobVertex jobVertex = entry.getValue();
        final SlotSharingGroup jobVertexSlotSharingGroup = jobVertex.getSlotSharingGroup();
        checkState(jobVertexSlotSharingGroup != null, "JobVertex slot sharing group must not be null");
        slotSharingGroups.add(jobVertexSlotSharingGroup);
        vertexHeadOperators.put(jobVertex.getID(), headOperatorId);
        // A vertex's operator set is its head plus all chained operators.
        final Set<Integer> operatorIds = new HashSet<>();
        operatorIds.add(headOperatorId);
        operatorIds.addAll(vertexChainedConfigs.getOrDefault(headOperatorId, Collections.emptyMap()).keySet());
        vertexOperators.put(jobVertex.getID(), operatorIds);
    }
    for (SlotSharingGroup slotSharingGroup : slotSharingGroups) {
        setManagedMemoryFractionForSlotSharingGroup(
                slotSharingGroup,
                vertexHeadOperators,
                vertexOperators,
                operatorConfigs,
                vertexChainedConfigs,
                operatorResourceRetriever,
                operatorManagedMemoryWeightRetriever);
    }
}
/**
 * Computes the managed memory fraction of every operator within one slot
 * sharing group, then re-sets each head operator's transitive chained
 * configs.
 */
private static void setManagedMemoryFractionForSlotSharingGroup(
        final SlotSharingGroup slotSharingGroup,
        final Map<JobVertexID, Integer> vertexHeadOperators,
        final Map<JobVertexID, Set<Integer>> vertexOperators,
        final Map<Integer, StreamConfig> operatorConfigs,
        final Map<Integer, Map<Integer, StreamConfig>> vertexChainedConfigs,
        final java.util.function.Function<Integer, ResourceSpec> operatorResourceRetriever,
        final java.util.function.Function<Integer, Integer> operatorManagedMemoryWeightRetriever) {
    // Total managed memory weight over all operators of all vertices in the group.
    final int groupManagedMemoryWeight = slotSharingGroup.getJobVertexIds().stream()
            .flatMap(vid -> vertexOperators.get(vid).stream())
            .mapToInt(operatorManagedMemoryWeightRetriever::apply)
            .sum();
    for (JobVertexID jobVertexID : slotSharingGroup.getJobVertexIds()) {
        for (int operatorNodeId : vertexOperators.get(jobVertexID)) {
            final StreamConfig operatorConfig = operatorConfigs.get(operatorNodeId);
            final ResourceSpec operatorResourceSpec = operatorResourceRetriever.apply(operatorNodeId);
            final int operatorManagedMemoryWeight = operatorManagedMemoryWeightRetriever.apply(operatorNodeId);
            setManagedMemoryFractionForOperator(
                    operatorResourceSpec,
                    slotSharingGroup.getResourceSpec(),
                    operatorManagedMemoryWeight,
                    groupManagedMemoryWeight,
                    operatorConfig);
        }
        // NOTE(review): re-setting the chained configs presumably re-serializes
        // them so the fractions written above take effect — confirm.
        final int headOperatorNodeId = vertexHeadOperators.get(jobVertexID);
        final StreamConfig vertexConfig = operatorConfigs.get(headOperatorNodeId);
        vertexConfig.setTransitiveChainedTaskConfigs(vertexChainedConfigs.get(headOperatorNodeId));
    }
}
/**
 * Writes a single operator's managed memory fraction into its config. With
 * UNKNOWN group resources the fraction is the operator's share of the group's
 * total weight; otherwise it is the operator's share of the group's declared
 * managed memory bytes. Zero totals yield a fraction of 0.0.
 */
private static void setManagedMemoryFractionForOperator(
        final ResourceSpec operatorResourceSpec,
        final ResourceSpec groupResourceSpec,
        final int operatorManagedMemoryWeight,
        final int groupManagedMemoryWeight,
        final StreamConfig operatorConfig) {
    double fraction = 0.0;
    if (groupResourceSpec.equals(ResourceSpec.UNKNOWN)) {
        if (groupManagedMemoryWeight > 0) {
            fraction = getFractionRoundedDown(operatorManagedMemoryWeight, groupManagedMemoryWeight);
        }
    } else {
        final long groupManagedMemoryBytes = groupResourceSpec.getManagedMemory().getBytes();
        if (groupManagedMemoryBytes > 0) {
            fraction = getFractionRoundedDown(
                    operatorResourceSpec.getManagedMemory().getBytes(), groupManagedMemoryBytes);
        }
    }
    operatorConfig.setManagedMemoryFraction(fraction);
}
/**
 * Divides {@code dividend} by {@code divisor} with BigDecimal precision,
 * rounding down to MANAGED_MEMORY_FRACTION_SCALE decimal places.
 */
private static double getFractionRoundedDown(final long dividend, final long divisor) {
    final BigDecimal quotient = BigDecimal.valueOf(dividend).divide(
            BigDecimal.valueOf(divisor), MANAGED_MEMORY_FRACTION_SCALE, BigDecimal.ROUND_DOWN);
    return quotient.doubleValue();
}
} |
I think we should not save Unknown stats into statsTable. | private void doPreHeat() {
// Block until the internal statistics table becomes available, polling every 100 ms.
List<ResultRow> recentStatsUpdatedCols = null;
long retryTimes = 0;
while (!StatisticsUtil.statsTblAvailable()) {
    try {
        Thread.sleep(100L);
    } catch (InterruptedException e) {
        // NOTE(review): the interrupt is swallowed and polling continues;
        // consider restoring the interrupt flag — confirm intent.
    }
}
// Best-effort fetch of recently updated column stats, with a bounded retry
// budget; failures are deliberately ignored between attempts.
while (retryTimes < StatisticConstants.PRELOAD_RETRY_TIMES) {
    try {
        recentStatsUpdatedCols = StatisticsRepository.fetchRecentStatsUpdatedCol();
        break;
    } catch (Throwable t) {
        // Ignored: retried below until the retry budget is exhausted.
    }
    retryTimes++;
    try {
        // NOTE(review): the constant name says "IN_SECONDS" but
        // Thread.sleep takes milliseconds — confirm the intended interval.
        Thread.sleep(StatisticConstants.PRELOAD_RETRY_INTERVAL_IN_SECONDS);
    } catch (Throwable t) {
    }
}
if (CollectionUtils.isEmpty(recentStatsUpdatedCols)) {
    return;
}
// Wrap each fetched row in an already-completed future and seed the cache.
for (ResultRow r : recentStatsUpdatedCols) {
    try {
        String tblId = r.getColumnValue("tbl_id");
        String idxId = r.getColumnValue("idx_id");
        String colId = r.getColumnValue("col_id");
        final StatisticsCacheKey k =
                new StatisticsCacheKey(Long.parseLong(tblId), Long.parseLong(idxId), colId);
        final ColumnStatistic c = ColumnStatistic.fromResultRow(r);
        // Pre-completed future so cache reads on this entry never block.
        CompletableFuture<Optional<ColumnStatistic>> f = new CompletableFuture<Optional<ColumnStatistic>>() {
            @Override
            public Optional<ColumnStatistic> get() throws InterruptedException, ExecutionException {
                return Optional.of(c);
            }
            @Override
            public boolean isDone() {
                return true;
            }
            @Override
            public boolean complete(Optional<ColumnStatistic> value) {
                return true;
            }
            @Override
            public Optional<ColumnStatistic> join() {
                return Optional.of(c);
            }
        };
        // Skip UNKNOWN statistics: they carry no information worth caching.
        if (c == ColumnStatistic.UNKNOWN) {
            continue;
        }
        columnStatisticsCache.put(k, f);
    } catch (Throwable t) {
        LOG.warn("Error when preheating stats cache", t);
    }
}
} | if (c == ColumnStatistic.UNKNOWN) { | private void doPreHeat() {
List<ResultRow> recentStatsUpdatedCols = null;
long retryTimes = 0;
while (!StatisticsUtil.statsTblAvailable()) {
try {
Thread.sleep(100L);
} catch (InterruptedException e) {
}
}
while (retryTimes < StatisticConstants.PRELOAD_RETRY_TIMES) {
try {
recentStatsUpdatedCols = StatisticsRepository.fetchRecentStatsUpdatedCol();
break;
} catch (Throwable t) {
}
retryTimes++;
try {
Thread.sleep(StatisticConstants.PRELOAD_RETRY_INTERVAL_IN_SECONDS);
} catch (Throwable t) {
}
}
if (CollectionUtils.isEmpty(recentStatsUpdatedCols)) {
return;
}
for (ResultRow r : recentStatsUpdatedCols) {
try {
String tblId = r.getColumnValue("tbl_id");
String idxId = r.getColumnValue("idx_id");
String colId = r.getColumnValue("col_id");
final StatisticsCacheKey k =
new StatisticsCacheKey(Long.parseLong(tblId), Long.parseLong(idxId), colId);
final ColumnStatistic c = ColumnStatistic.fromResultRow(r);
CompletableFuture<Optional<ColumnStatistic>> f = new CompletableFuture<Optional<ColumnStatistic>>() {
@Override
public Optional<ColumnStatistic> get() throws InterruptedException, ExecutionException {
return Optional.of(c);
}
@Override
public boolean isDone() {
return true;
}
@Override
public boolean complete(Optional<ColumnStatistic> value) {
return true;
}
@Override
public Optional<ColumnStatistic> join() {
return Optional.of(c);
}
};
if (c == ColumnStatistic.UNKNOWN) {
continue;
}
columnStatisticsCache.put(k, f);
} catch (Throwable t) {
LOG.warn("Error when preheating stats cache", t);
}
}
} | class StatisticsCache {
private static final Logger LOG = LogManager.getLogger(StatisticsCache.class);
/**
 * Use a standalone thread pool to avoid interference between this and any other jdk function
 * that use the thread of ForkJoinPool
 */
private final ThreadPoolExecutor threadPool
        = ThreadPoolManager.newDaemonFixedThreadPool(
        10, Integer.MAX_VALUE, "STATS_FETCH", true);
// Loaders that fetch column statistics / histograms on a cache miss.
private final ColumnStatisticsCacheLoader columnStatisticsCacheLoader = new ColumnStatisticsCacheLoader();
private final HistogramCacheLoader histogramCacheLoader = new HistogramCacheLoader();
// Async cache of per-column statistics, size-bounded and refreshed/expired
// on the configured schedule; loads run on the dedicated thread pool.
private final AsyncLoadingCache<StatisticsCacheKey, Optional<ColumnStatistic>> columnStatisticsCache =
        Caffeine.newBuilder()
                .maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)
                .expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))
                .refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))
                .executor(threadPool)
                .buildAsync(columnStatisticsCacheLoader);
// Async cache of per-column histograms with the same sizing/refresh policy.
private final AsyncLoadingCache<StatisticsCacheKey, Optional<Histogram>> histogramCache =
        Caffeine.newBuilder()
                .maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)
                .expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))
                .refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))
                .executor(threadPool)
                .buildAsync(histogramCacheLoader);
// Background task: every 15 minutes, drop expired in-progress markers from
// both loaders; errors are swallowed so the loop never dies.
{
    threadPool.submit(() -> {
        while (true) {
            try {
                columnStatisticsCacheLoader.removeExpiredInProgressing();
                histogramCacheLoader.removeExpiredInProgressing();
            } catch (Throwable t) {
                // Ignored: best-effort cleanup.
            }
            Thread.sleep(TimeUnit.MINUTES.toMillis(15));
        }
    });
}
/**
 * Returns the cached statistics for the column, or
 * {@code ColumnStatistic.UNKNOWN} when nothing is available yet.
 */
public ColumnStatistic getColumnStatistics(long tblId, String colName) {
    return getColumnStatistics(tblId, -1, colName).orElse(ColumnStatistic.UNKNOWN);
}
/**
 * Non-blocking lookup of column statistics. Internal sessions are excluded,
 * and an empty Optional is returned while the async load is still in flight.
 */
public Optional<ColumnStatistic> getColumnStatistics(long tblId, long idxId, String colName) {
    ConnectContext ctx = ConnectContext.get();
    if (ctx != null && ctx.getSessionVariable().internalSession) {
        return Optional.empty();
    }
    StatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);
    try {
        CompletableFuture<Optional<ColumnStatistic>> f = columnStatisticsCache.get(k);
        // Only return a value once the async load has finished; otherwise
        // fall through and report "not available yet".
        if (f.isDone()) {
            return f.get();
        }
    } catch (Exception e) {
        LOG.warn("Unexpected exception while returning ColumnStatistic", e);
    }
    return Optional.empty();
}
/** Returns the cached histogram for the column, or null when absent. */
public Histogram getHistogram(long tblId, String colName) {
    return getHistogram(tblId, -1, colName).orElse(null);
}
/**
 * Non-blocking histogram lookup; mirrors
 * {@link #getColumnStatistics(long, long, String)}.
 */
public Optional<Histogram> getHistogram(long tblId, long idxId, String colName) {
    ConnectContext ctx = ConnectContext.get();
    if (ctx != null && ctx.getSessionVariable().internalSession) {
        return Optional.empty();
    }
    StatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);
    try {
        CompletableFuture<Optional<Histogram>> f = histogramCache.get(k);
        if (f.isDone()) {
            return f.get();
        }
    } catch (Exception e) {
        LOG.warn("Unexpected exception while returning Histogram", e);
    }
    return Optional.empty();
}
// Drops the cached statistics entry for the column. (The name "invidate"
// is a typo of "invalidate", kept for API compatibility.)
public void invidate(long tblId, long idxId, String colName) {
    columnStatisticsCache.synchronous().invalidate(new StatisticsCacheKey(tblId, idxId, colName));
}
/** Overwrites the cached statistics entry for the column. */
public void updateColStatsCache(long tblId, long idxId, String colName, ColumnStatistic statistic) {
    columnStatisticsCache.synchronous().put(new StatisticsCacheKey(tblId, idxId, colName), Optional.of(statistic));
}
/** Triggers a reload of the column's statistics cache entry. */
public void refreshColStatsSync(long tblId, long idxId, String colName) {
    columnStatisticsCache.synchronous().refresh(new StatisticsCacheKey(tblId, idxId, colName));
}
/** Triggers a reload of the column's histogram cache entry. */
public void refreshHistogramSync(long tblId, long idxId, String colName) {
    histogramCache.synchronous().refresh(new StatisticsCacheKey(tblId, idxId, colName));
}
/** Asynchronously warms the cache with recently updated statistics. */
public void preHeat() {
    threadPool.submit(this::doPreHeat);
}
} | class StatisticsCache {
private static final Logger LOG = LogManager.getLogger(StatisticsCache.class);
/**
* Use a standalone thread pool to avoid interference between this and any other jdk function
* that use the thread of ForkJoinPool
*/
private final ThreadPoolExecutor threadPool
= ThreadPoolManager.newDaemonFixedThreadPool(
10, Integer.MAX_VALUE, "STATS_FETCH", true);
private final ColumnStatisticsCacheLoader columnStatisticsCacheLoader = new ColumnStatisticsCacheLoader();
private final HistogramCacheLoader histogramCacheLoader = new HistogramCacheLoader();
private final AsyncLoadingCache<StatisticsCacheKey, Optional<ColumnStatistic>> columnStatisticsCache =
Caffeine.newBuilder()
.maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)
.expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))
.refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))
.executor(threadPool)
.buildAsync(columnStatisticsCacheLoader);
private final AsyncLoadingCache<StatisticsCacheKey, Optional<Histogram>> histogramCache =
Caffeine.newBuilder()
.maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)
.expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))
.refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))
.executor(threadPool)
.buildAsync(histogramCacheLoader);
{
threadPool.submit(() -> {
while (true) {
try {
columnStatisticsCacheLoader.removeExpiredInProgressing();
histogramCacheLoader.removeExpiredInProgressing();
} catch (Throwable t) {
}
Thread.sleep(TimeUnit.MINUTES.toMillis(15));
}
});
}
public ColumnStatistic getColumnStatistics(long tblId, String colName) {
return getColumnStatistics(tblId, -1, colName).orElse(ColumnStatistic.UNKNOWN);
}
public Optional<ColumnStatistic> getColumnStatistics(long tblId, long idxId, String colName) {
ConnectContext ctx = ConnectContext.get();
if (ctx != null && ctx.getSessionVariable().internalSession) {
return Optional.empty();
}
StatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);
try {
CompletableFuture<Optional<ColumnStatistic>> f = columnStatisticsCache.get(k);
if (f.isDone()) {
return f.get();
}
} catch (Exception e) {
LOG.warn("Unexpected exception while returning ColumnStatistic", e);
}
return Optional.empty();
}
public Histogram getHistogram(long tblId, String colName) {
return getHistogram(tblId, -1, colName).orElse(null);
}
public Optional<Histogram> getHistogram(long tblId, long idxId, String colName) {
ConnectContext ctx = ConnectContext.get();
if (ctx != null && ctx.getSessionVariable().internalSession) {
return Optional.empty();
}
StatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);
try {
CompletableFuture<Optional<Histogram>> f = histogramCache.get(k);
if (f.isDone()) {
return f.get();
}
} catch (Exception e) {
LOG.warn("Unexpected exception while returning Histogram", e);
}
return Optional.empty();
}
public void invidate(long tblId, long idxId, String colName) {
columnStatisticsCache.synchronous().invalidate(new StatisticsCacheKey(tblId, idxId, colName));
}
public void updateColStatsCache(long tblId, long idxId, String colName, ColumnStatistic statistic) {
columnStatisticsCache.synchronous().put(new StatisticsCacheKey(tblId, idxId, colName), Optional.of(statistic));
}
public void refreshColStatsSync(long tblId, long idxId, String colName) {
columnStatisticsCache.synchronous().refresh(new StatisticsCacheKey(tblId, idxId, colName));
}
public void refreshHistogramSync(long tblId, long idxId, String colName) {
histogramCache.synchronous().refresh(new StatisticsCacheKey(tblId, idxId, colName));
}
/** Kicks off cache pre-heating on the background thread pool (fire-and-forget). */
public void preHeat() {
    threadPool.submit(this::doPreHeat);
}
} |
/**
 * Translates a PhysicalSchemaScanOperator into a SchemaScanNode plan fragment:
 * builds tuple/slot descriptors, attaches every conjunct to the scan node, and
 * additionally pushes simple "col <op> const" predicates down as scan parameters
 * (schema db/table, ids, log level/pattern, TIMESTAMP range, ...).
 *
 * <p>Review note on conflicting TIMESTAMP predicates such as
 * "TIMESTAMP = x AND TIMESTAMP = y" or "TIMESTAMP > x AND TIMESTAMP > y" (x != y):
 * previously the last conjunct visited simply overwrote the earlier pushed
 * start/end bound, so the pushed range was order-dependent. Because every
 * conjunct is still attached to the scan node, results presumably stayed correct
 * (the pushed range was always a superset of the true range), but pruning was
 * imprecise. Now all TIMESTAMP bounds are intersected — max of lower bounds,
 * min of upper bounds — so an always-false conjunction yields an empty range.
 */
public PlanFragment visitPhysicalSchemaScan(OptExpression optExpression, ExecPlan context) {
    PhysicalSchemaScanOperator node = (PhysicalSchemaScanOperator) optExpression.getOp();
    context.getDescTbl().addReferencedTable(node.getTable());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(node.getTable());
    // One slot per referenced column, reusing the optimizer's column-ref ids.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    tupleDescriptor.computeMemLayout();
    SchemaScanNode scanNode = new SchemaScanNode(context.getNextNodeId(), tupleDescriptor);
    scanNode.setFrontendIP(FrontendOptions.getLocalHostAddress());
    scanNode.setFrontendPort(Config.rpc_port);
    scanNode.setUser(context.getConnectContext().getQualifiedUser());
    scanNode.setUserIp(context.getConnectContext().getRemoteIP());
    scanNode.setLimit(node.getLimit());
    // Intersection of all pushed TIMESTAMP bounds, as the half-open range [start, end).
    Long logStartTs = null;
    Long logEndTs = null;
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        // Every conjunct is kept as an exec conjunct; the push-downs below are
        // only scan-parameter hints, never the sole filter.
        scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        // Only simple "<column> <op> <constant>" conjuncts are eligible for push-down.
        if (!(predicate.getChildren().size() == 2 && predicate.getChildren().get(0) instanceof ColumnRefOperator &&
                predicate.getChildren().get(1) instanceof ConstantOperator)) {
            continue;
        }
        ColumnRefOperator columnRefOperator = (ColumnRefOperator) predicate.getChildren().get(0);
        ConstantOperator constantOperator = (ConstantOperator) predicate.getChildren().get(1);
        if (predicate instanceof BinaryPredicateOperator) {
            BinaryPredicateOperator binaryPredicateOperator = (BinaryPredicateOperator) predicate;
            if (binaryPredicateOperator.getBinaryType() == BinaryPredicateOperator.BinaryType.EQ) {
                switch (columnRefOperator.getName()) {
                    case "TABLE_SCHEMA":
                    case "DATABASE_NAME":
                        scanNode.setSchemaDb(constantOperator.getVarchar());
                        break;
                    case "TABLE_NAME":
                        scanNode.setSchemaTable(constantOperator.getVarchar());
                        break;
                    case "BE_ID":
                        scanNode.setBeId(constantOperator.getBigint());
                        break;
                    case "TABLE_ID":
                        scanNode.setTableId(constantOperator.getBigint());
                        break;
                    case "PARTITION_ID":
                        scanNode.setPartitionId(constantOperator.getBigint());
                        break;
                    case "TABLET_ID":
                        scanNode.setTabletId(constantOperator.getBigint());
                        break;
                    case "TXN_ID":
                        scanNode.setTxnId(constantOperator.getBigint());
                        break;
                    case "LABEL":
                        scanNode.setLabel(constantOperator.getVarchar());
                        break;
                    case "JOB_ID":
                        scanNode.setJobId(constantOperator.getBigint());
                        break;
                    case "TYPE":
                        scanNode.setType(constantOperator.getVarchar());
                        break;
                    case "STATE":
                        scanNode.setState(constantOperator.getVarchar());
                        break;
                    case "LOG":
                        // Equality on LOG becomes an anchored exact-match pattern.
                        scanNode.setLogPattern("^" + constantOperator.getVarchar() + "$");
                        break;
                    case "LEVEL":
                        scanNode.setLogLevel(constantOperator.getVarchar());
                        break;
                    default:
                        break;
                }
            }
            if (columnRefOperator.getName().equals("TIMESTAMP")) {
                BinaryPredicateOperator.BinaryType opType = binaryPredicateOperator.getBinaryType();
                Long lowerBound = null; // inclusive
                Long upperBound = null; // exclusive
                if (opType == BinaryPredicateOperator.BinaryType.EQ) {
                    lowerBound = constantOperator.getBigint();
                    upperBound = constantOperator.getBigint() + 1;
                } else if (opType == BinaryPredicateOperator.BinaryType.GT) {
                    lowerBound = constantOperator.getBigint() + 1;
                } else if (opType == BinaryPredicateOperator.BinaryType.GE) {
                    lowerBound = constantOperator.getBigint();
                } else if (opType == BinaryPredicateOperator.BinaryType.LT) {
                    upperBound = constantOperator.getBigint();
                } else if (opType == BinaryPredicateOperator.BinaryType.LE) {
                    upperBound = constantOperator.getBigint() + 1;
                }
                // Intersect with bounds gathered from earlier TIMESTAMP conjuncts.
                if (lowerBound != null) {
                    logStartTs = logStartTs == null ? lowerBound : Math.max(logStartTs, lowerBound);
                }
                if (upperBound != null) {
                    logEndTs = logEndTs == null ? upperBound : Math.min(logEndTs, upperBound);
                }
            }
        } else if (predicate instanceof LikePredicateOperator) {
            LikePredicateOperator like = (LikePredicateOperator) predicate;
            if (columnRefOperator.getName().equals("LOG")) {
                if (like.getLikeType() == LikePredicateOperator.LikeType.REGEXP) {
                    scanNode.setLogPattern(((ConstantOperator) like.getChildren().get(1)).getVarchar());
                } else {
                    throw UnsupportedException.unsupportedException("only support `regexp` or `rlike` for log grep");
                }
            }
        }
    }
    // Apply the intersected TIMESTAMP range (if any) before scan ranges are computed.
    if (logStartTs != null) {
        scanNode.setLogStartTs(logStartTs);
    }
    if (logEndTs != null) {
        scanNode.setLogEndTs(logEndTs);
    }
    if (scanNode.isBeSchemaTable()) {
        scanNode.computeBeScanRanges();
    }
    // Cap the per-node grep-log limit by the query limit when one is present.
    if (scanNode.getLimit() > 0) {
        scanNode.setLogLimit(Math.min(scanNode.getLimit(), Config.max_per_node_grep_log_limit));
    } else {
        scanNode.setLogLimit(Config.max_per_node_grep_log_limit);
    }
    context.getScanNodes().add(scanNode);
    PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode,
            scanNode.isBeSchemaTable() ? DataPartition.RANDOM : DataPartition.UNPARTITIONED);
    context.getFragments().add(fragment);
    return fragment;
} | if (columnRefOperator.getName().equals("TIMESTAMP")) { | public PlanFragment visitPhysicalSchemaScan(OptExpression optExpression, ExecPlan context) {
    // Duplicate definition of visitPhysicalSchemaScan present in this file:
    // translates a PhysicalSchemaScanOperator into a SchemaScanNode fragment.
    PhysicalSchemaScanOperator node = (PhysicalSchemaScanOperator) optExpression.getOp();
    context.getDescTbl().addReferencedTable(node.getTable());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(node.getTable());
    // One slot per referenced column, reusing the optimizer's column-ref ids.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    tupleDescriptor.computeMemLayout();
    SchemaScanNode scanNode = new SchemaScanNode(context.getNextNodeId(), tupleDescriptor);
    scanNode.setFrontendIP(FrontendOptions.getLocalHostAddress());
    scanNode.setFrontendPort(Config.rpc_port);
    scanNode.setUser(context.getConnectContext().getQualifiedUser());
    scanNode.setUserIp(context.getConnectContext().getRemoteIP());
    scanNode.setLimit(node.getLimit());
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        // Every conjunct is attached to the scan node; push-downs below are hints.
        scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        // Only simple "<column> <op> <constant>" conjuncts are eligible for push-down.
        if (!(predicate.getChildren().size() == 2 && predicate.getChildren().get(0) instanceof ColumnRefOperator &&
                predicate.getChildren().get(1) instanceof ConstantOperator)) {
            continue;
        }
        ColumnRefOperator columnRefOperator = (ColumnRefOperator) predicate.getChildren().get(0);
        ConstantOperator constantOperator = (ConstantOperator) predicate.getChildren().get(1);
        if (predicate instanceof BinaryPredicateOperator) {
            BinaryPredicateOperator binaryPredicateOperator = (BinaryPredicateOperator) predicate;
            if (binaryPredicateOperator.getBinaryType() == BinaryPredicateOperator.BinaryType.EQ) {
                switch (columnRefOperator.getName()) {
                    case "TABLE_SCHEMA":
                    case "DATABASE_NAME":
                        scanNode.setSchemaDb(constantOperator.getVarchar());
                        break;
                    case "TABLE_NAME":
                        scanNode.setSchemaTable(constantOperator.getVarchar());
                        break;
                    case "BE_ID":
                        scanNode.setBeId(constantOperator.getBigint());
                        break;
                    case "TABLE_ID":
                        scanNode.setTableId(constantOperator.getBigint());
                        break;
                    case "PARTITION_ID":
                        scanNode.setPartitionId(constantOperator.getBigint());
                        break;
                    case "TABLET_ID":
                        scanNode.setTabletId(constantOperator.getBigint());
                        break;
                    case "TXN_ID":
                        scanNode.setTxnId(constantOperator.getBigint());
                        break;
                    case "LABEL":
                        scanNode.setLabel(constantOperator.getVarchar());
                        break;
                    case "JOB_ID":
                        scanNode.setJobId(constantOperator.getBigint());
                        break;
                    case "TYPE":
                        scanNode.setType(constantOperator.getVarchar());
                        break;
                    case "STATE":
                        scanNode.setState(constantOperator.getVarchar());
                        break;
                    case "LOG":
                        // Equality on LOG becomes an anchored exact-match pattern.
                        scanNode.setLogPattern("^" + constantOperator.getVarchar() + "$");
                        break;
                    case "LEVEL":
                        scanNode.setLogLevel(constantOperator.getVarchar());
                        break;
                    default:
                        break;
                }
            }
            // NOTE(review): with several TIMESTAMP conjuncts (e.g. "TIMESTAMP > x AND
            // TIMESTAMP > y") the last one visited overwrites the earlier pushed bound
            // instead of intersecting; the pushed range is order-dependent but always a
            // superset of the true range, and the conjuncts above presumably re-filter
            // rows — TODO confirm the BE applies these conjuncts to the log scan.
            if (columnRefOperator.getName().equals("TIMESTAMP")) {
                BinaryPredicateOperator.BinaryType opType = binaryPredicateOperator.getBinaryType();
                if (opType == BinaryPredicateOperator.BinaryType.EQ) {
                    // Half-open range [v, v + 1) for equality.
                    scanNode.setLogStartTs(constantOperator.getBigint());
                    scanNode.setLogEndTs(constantOperator.getBigint() + 1);
                } else if (opType == BinaryPredicateOperator.BinaryType.GT) {
                    scanNode.setLogStartTs(constantOperator.getBigint() + 1);
                } else if (opType == BinaryPredicateOperator.BinaryType.GE) {
                    scanNode.setLogStartTs(constantOperator.getBigint());
                } else if (opType == BinaryPredicateOperator.BinaryType.LT) {
                    scanNode.setLogEndTs(constantOperator.getBigint());
                } else if (opType == BinaryPredicateOperator.BinaryType.LE) {
                    scanNode.setLogEndTs(constantOperator.getBigint() + 1);
                }
            }
        } else if (predicate instanceof LikePredicateOperator) {
            LikePredicateOperator like = (LikePredicateOperator) predicate;
            if (columnRefOperator.getName().equals("LOG")) {
                if (like.getLikeType() == LikePredicateOperator.LikeType.REGEXP) {
                    scanNode.setLogPattern(((ConstantOperator) like.getChildren().get(1)).getVarchar());
                } else {
                    throw UnsupportedException.unsupportedException("only support `regexp` or `rlike` for log grep");
                }
            }
        }
    }
    if (scanNode.isBeSchemaTable()) {
        scanNode.computeBeScanRanges();
    }
    // Cap the per-node grep-log limit by the query limit when one is present.
    if (scanNode.getLimit() > 0) {
        scanNode.setLogLimit(Math.min(scanNode.getLimit(), Config.max_per_node_grep_log_limit));
    } else {
        scanNode.setLogLimit(Config.max_per_node_grep_log_limit);
    }
    context.getScanNodes().add(scanNode);
    PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode,
            scanNode.isBeSchemaTable() ? DataPartition.RANDOM : DataPartition.UNPARTITIONED);
    context.getFragments().add(fragment);
    return fragment;
} | class PhysicalPlanTranslator extends OptExpressionVisitor<PlanFragment, ExecPlan> {
// Factory that owns the optimizer's column refs; supplied by the caller.
private final ColumnRefFactory columnRefFactory;
// Allocates unique ids for runtime filters created during translation.
private final IdGenerator<RuntimeFilterId> runtimeFilterIdIdGenerator = RuntimeFilterId.createGenerator();
// Stays true only while every visited node has at most one child (see visit()).
private boolean canUseLocalShuffleAgg = true;
/**
 * @param columnRefFactory factory that owns the column refs used by the plan
 */
public PhysicalPlanTranslator(ColumnRefFactory columnRefFactory) {
    this.columnRefFactory = columnRefFactory;
}
/** Entry point: translates the optimized expression tree into plan fragments. */
public PlanFragment translate(OptExpression optExpression, ExecPlan context) {
    return visit(optExpression, context);
}
/**
 * Dispatches to the operator-specific visitor, then wraps the resulting
 * fragment with a project node when the operator carries a projection.
 */
@Override
public PlanFragment visit(OptExpression optExpression, ExecPlan context) {
    // Any node with more than one child ends the chance of local shuffle aggregation.
    canUseLocalShuffleAgg &= optExpression.arity() <= 1;
    PlanFragment translated = optExpression.getOp().accept(this, optExpression, context);
    Projection projection = optExpression.getOp().getProjection();
    if (projection == null) {
        return translated;
    }
    // Put a project node on top of the translated fragment.
    return buildProjectNode(optExpression, projection, translated, context);
}
/**
 * Marks scan output columns that are referenced only by simple, strict predicates
 * (and are not part of the query output or any complex predicate) as unused, so
 * the storage layer can skip materializing them after filtering.
 */
private void setUnUsedOutputColumns(PhysicalOlapScanOperator node, OlapScanNode scanNode,
                                    List<ScalarOperator> predicates, OlapTable referenceTable) {
    // Feature is session-gated.
    if (!ConnectContext.get().getSessionVariable().isEnableFilterUnusedColumnsInScanStage()) {
        return;
    }
    MaterializedIndexMeta materializedIndexMeta =
            referenceTable.getIndexMetaByIndexId(node.getSelectedIndexId());
    // An aggregate-keys index without pre-aggregation cannot drop columns safely.
    if (materializedIndexMeta.getKeysType().isAggregationFamily() && !node.isPreAggregation()) {
        return;
    }
    List<ColumnRefOperator> outputColumns = node.getOutputColumns();
    if (outputColumns.isEmpty()) {
        return;
    }
    Set<Integer> outputColumnIds = new HashSet<Integer>();
    for (ColumnRefOperator colref : outputColumns) {
        outputColumnIds.add(colref.getId());
    }
    // Partition predicate columns into "simple strict" vs "complex" usages.
    Set<Integer> singlePredColumnIds = new HashSet<Integer>();
    Set<Integer> complexPredColumnIds = new HashSet<Integer>();
    Set<String> aggOrPrimaryKeyTableValueColumnNames = new HashSet<String>();
    if (materializedIndexMeta.getKeysType().isAggregationFamily() ||
            materializedIndexMeta.getKeysType() == KeysType.PRIMARY_KEYS) {
        // Value (non-key) columns of agg/primary-key tables are reported separately.
        aggOrPrimaryKeyTableValueColumnNames =
                materializedIndexMeta.getSchema().stream()
                        .filter(col -> !col.isKey())
                        .map(Column::getName)
                        .collect(Collectors.toSet());
    }
    for (ScalarOperator predicate : predicates) {
        ColumnRefSet usedColumns = predicate.getUsedColumns();
        if (DecodeVisitor.isSimpleStrictPredicate(predicate)) {
            for (int cid : usedColumns.getColumnIds()) {
                singlePredColumnIds.add(cid);
            }
        } else {
            for (int cid : usedColumns.getColumnIds()) {
                complexPredColumnIds.add(cid);
            }
        }
    }
    Set<Integer> unUsedOutputColumnIds = new HashSet<Integer>();
    Map<Integer, Integer> dictStringIdToIntIds = node.getDictStringIdToIntIds();
    for (Integer cid : singlePredColumnIds) {
        // Map a dict-encoded string column id to its int-code column id when present.
        Integer newCid = cid;
        if (dictStringIdToIntIds.containsKey(cid)) {
            newCid = dictStringIdToIntIds.get(cid);
        }
        // Unused = only touched by simple strict predicates, never output or complex.
        if (!complexPredColumnIds.contains(newCid) && !outputColumnIds.contains(newCid)) {
            unUsedOutputColumnIds.add(newCid);
        }
    }
    scanNode.setUnUsedOutputStringColumns(unUsedOutputColumnIds, aggOrPrimaryKeyTableValueColumnNames);
}
/**
 * Translates a PhysicalProjectOperator: builds a new tuple whose slots carry the
 * projected expressions (plus non-materialized slots for common sub-expressions)
 * and stacks a ProjectNode on the child fragment.
 */
@Override
public PlanFragment visitPhysicalProject(OptExpression optExpr, ExecPlan context) {
    PhysicalProjectOperator node = (PhysicalProjectOperator) optExpr.getOp();
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    Preconditions.checkState(!node.getColumnRefMap().isEmpty());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    // Common sub-expressions: evaluated once, exposed as non-materialized slots.
    Map<SlotId, Expr> commonSubOperatorMap = Maps.newHashMap();
    for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getCommonSubOperatorMap().entrySet()) {
        Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(),
                        node.getCommonSubOperatorMap()));
        commonSubOperatorMap.put(new SlotId(entry.getKey().getId()), expr);
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setIsNullable(expr.isNullable());
        slotDescriptor.setIsMaterialized(false);
        slotDescriptor.setType(expr.getType());
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    // Output projection: one materialized slot per projected column ref.
    Map<SlotId, Expr> projectMap = Maps.newHashMap();
    for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getColumnRefMap().entrySet()) {
        Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getColumnRefMap()));
        projectMap.put(new SlotId(entry.getKey().getId()), expr);
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setIsNullable(expr.isNullable());
        slotDescriptor.setIsMaterialized(true);
        slotDescriptor.setType(expr.getType());
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    ProjectNode projectNode =
            new ProjectNode(context.getNextNodeId(),
                    tupleDescriptor,
                    inputFragment.getPlanRoot(),
                    projectMap,
                    commonSubOperatorMap);
    projectNode.setHasNullableGenerateChild();
    projectNode.computeStatistics(optExpr.getStatistics());
    // A nullable generating child forces all output slots nullable.
    for (SlotId sid : projectMap.keySet()) {
        SlotDescriptor slotDescriptor = tupleDescriptor.getSlot(sid.asInt());
        slotDescriptor.setIsNullable(slotDescriptor.getIsNullable() | projectNode.isHasNullableGenerateChild());
    }
    tupleDescriptor.computeMemLayout();
    projectNode.setLimit(inputFragment.getPlanRoot().getLimit());
    inputFragment.setPlanRoot(projectNode);
    return inputFragment;
}
/**
 * Wraps {@code inputFragment} with a ProjectNode built from an operator-attached
 * {@link Projection}. Mirrors visitPhysicalProject, but prunes the statistics to
 * the projection's output columns before attaching them to the node.
 */
public PlanFragment buildProjectNode(OptExpression optExpression, Projection node, PlanFragment inputFragment,
                                     ExecPlan context) {
    if (node == null) {
        return inputFragment;
    }
    Preconditions.checkState(!node.getColumnRefMap().isEmpty());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    // Common sub-expressions: evaluated once, exposed as non-materialized slots.
    Map<SlotId, Expr> commonSubOperatorMap = Maps.newHashMap();
    for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getCommonSubOperatorMap().entrySet()) {
        Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(),
                        node.getCommonSubOperatorMap()));
        commonSubOperatorMap.put(new SlotId(entry.getKey().getId()), expr);
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setIsNullable(expr.isNullable());
        slotDescriptor.setIsMaterialized(false);
        slotDescriptor.setType(expr.getType());
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    // Output projection: one materialized slot per projected column ref.
    Map<SlotId, Expr> projectMap = Maps.newHashMap();
    for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getColumnRefMap().entrySet()) {
        Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getColumnRefMap()));
        projectMap.put(new SlotId(entry.getKey().getId()), expr);
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setIsNullable(expr.isNullable());
        slotDescriptor.setIsMaterialized(true);
        slotDescriptor.setType(expr.getType());
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    ProjectNode projectNode =
            new ProjectNode(context.getNextNodeId(),
                    tupleDescriptor,
                    inputFragment.getPlanRoot(),
                    projectMap,
                    commonSubOperatorMap);
    projectNode.setHasNullableGenerateChild();
    // Restrict statistics to the projection's output column set.
    Statistics statistics = optExpression.getStatistics();
    Statistics.Builder b = Statistics.builder();
    b.setOutputRowCount(statistics.getOutputRowCount());
    b.addColumnStatisticsFromOtherStatistic(statistics, new ColumnRefSet(node.getOutputColumns()));
    projectNode.computeStatistics(b.build());
    // A nullable generating child forces all output slots nullable.
    for (SlotId sid : projectMap.keySet()) {
        SlotDescriptor slotDescriptor = tupleDescriptor.getSlot(sid.asInt());
        slotDescriptor.setIsNullable(slotDescriptor.getIsNullable() | projectNode.isHasNullableGenerateChild());
    }
    tupleDescriptor.computeMemLayout();
    projectNode.setLimit(inputFragment.getPlanRoot().getLimit());
    inputFragment.setPlanRoot(projectNode);
    return inputFragment;
}
/**
 * Translates a PhysicalDecodeOperator: builds a tuple where dict-encoded int
 * slots are replaced by their VARCHAR string slots, keeps all other child slots
 * as-is, and stacks a DecodeNode (with any string-function rewrites) on top.
 */
@Override
public PlanFragment visitPhysicalDecode(OptExpression optExpression, ExecPlan context) {
    PhysicalDecodeOperator node = (PhysicalDecodeOperator) optExpression.getOp();
    PlanFragment inputFragment = visit(optExpression.inputAt(0), context);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    for (TupleId tupleId : inputFragment.getPlanRoot().getTupleIds()) {
        TupleDescriptor childTuple = context.getDescTbl().getTupleDesc(tupleId);
        ArrayList<SlotDescriptor> slots = childTuple.getSlots();
        for (SlotDescriptor slot : slots) {
            int slotId = slot.getId().asInt();
            boolean isNullable = slot.getIsNullable();
            if (node.getDictToStrings().containsKey(slotId)) {
                // Dict-code slot: emit the decoded VARCHAR slot instead.
                Integer stringSlotId = node.getDictToStrings().get(slotId);
                SlotDescriptor slotDescriptor =
                        context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(stringSlotId));
                slotDescriptor.setIsNullable(isNullable);
                slotDescriptor.setIsMaterialized(true);
                slotDescriptor.setType(Type.VARCHAR);
                context.getColRefToExpr().put(new ColumnRefOperator(stringSlotId, Type.VARCHAR,
                                "<dict-code>", slotDescriptor.getIsNullable()),
                        new SlotRef(stringSlotId.toString(), slotDescriptor));
            } else {
                // Pass-through slot, copied into the new tuple unchanged.
                SlotDescriptor slotDescriptor = new SlotDescriptor(slot.getId(), tupleDescriptor, slot);
                tupleDescriptor.addSlot(slotDescriptor);
            }
        }
    }
    // String functions that must be re-evaluated on the decoded values.
    Map<SlotId, Expr> projectMap = Maps.newHashMap();
    for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getStringFunctions().entrySet()) {
        Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(),
                        node.getStringFunctions()));
        projectMap.put(new SlotId(entry.getKey().getId()), expr);
        Preconditions.checkState(context.getColRefToExpr().containsKey(entry.getKey()));
    }
    tupleDescriptor.computeMemLayout();
    DecodeNode decodeNode = new DecodeNode(context.getNextNodeId(),
            tupleDescriptor,
            inputFragment.getPlanRoot(),
            node.getDictToStrings(), projectMap);
    decodeNode.computeStatistics(optExpression.getStatistics());
    decodeNode.setLimit(node.getLimit());
    inputFragment.setPlanRoot(decodeNode);
    return inputFragment;
}
/**
 * Translates a PhysicalOlapScanOperator into an OlapScanNode fragment: resolves
 * the selected partitions/tablets into scan ranges, builds slots (including
 * low-cardinality dict slots), attaches conjuncts, and configures pre-aggregation,
 * dict rewriting, and bucket-column information.
 */
@Override
public PlanFragment visitPhysicalOlapScan(OptExpression optExpr, ExecPlan context) {
    PhysicalOlapScanOperator node = (PhysicalOlapScanOperator) optExpr.getOp();
    OlapTable referenceTable = (OlapTable) node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);
    OlapScanNode scanNode = new OlapScanNode(context.getNextNodeId(), tupleDescriptor, "OlapScanNode");
    scanNode.setLimit(node.getLimit());
    scanNode.computeStatistics(optExpr.getStatistics());
    try {
        scanNode.updateScanInfo(node.getSelectedPartitionId(),
                node.getSelectedTabletId(),
                node.getSelectedIndexId());
        long selectedIndexId = node.getSelectedIndexId();
        long totalTabletsNum = 0;
        // When enabled, prefer replicas co-located with this FE host.
        long localBeId = -1;
        if (Config.enable_local_replica_selection) {
            localBeId = GlobalStateMgr.getCurrentSystemInfo()
                    .getBackendIdByHost(FrontendOptions.getLocalHostAddress());
        }
        // Drop partitions whose tablet selection is empty before building ranges.
        List<Long> selectedNonEmptyPartitionIds = node.getSelectedPartitionId().stream().filter(p -> {
            List<Long> selectTabletIds = scanNode.getPartitionToScanTabletMap().get(p);
            return selectTabletIds != null && !selectTabletIds.isEmpty();
        }).collect(Collectors.toList());
        scanNode.setSelectedPartitionIds(selectedNonEmptyPartitionIds);
        for (Long partitionId : scanNode.getSelectedPartitionIds()) {
            List<Long> selectTabletIds = scanNode.getPartitionToScanTabletMap().get(partitionId);
            Preconditions.checkState(selectTabletIds != null && !selectTabletIds.isEmpty());
            final Partition partition = referenceTable.getPartition(partitionId);
            final MaterializedIndex selectedTable = partition.getIndex(selectedIndexId);
            List<Long> allTabletIds = selectedTable.getTabletIdsInOrder();
            // Map each tablet id to its bucket sequence within the index.
            Map<Long, Integer> tabletId2BucketSeq = Maps.newHashMap();
            for (int i = 0; i < allTabletIds.size(); i++) {
                tabletId2BucketSeq.put(allTabletIds.get(i), i);
            }
            totalTabletsNum += selectedTable.getTablets().size();
            scanNode.setTabletId2BucketSeq(tabletId2BucketSeq);
            List<Tablet> tablets =
                    selectTabletIds.stream().map(selectedTable::getTablet).collect(Collectors.toList());
            scanNode.addScanRangeLocations(partition, selectedTable, tablets, localBeId);
        }
        scanNode.setTotalTabletsNum(totalTabletsNum);
    } catch (UserException e) {
        throw new StarRocksPlannerException(
                "Build Exec OlapScanNode fail, scan info is invalid," + e.getMessage(),
                INTERNAL_ERROR);
    }
    // Materialized slots for referenced columns; complex types keep the ref's type.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        if (slotDescriptor.getOriginType().isComplexType()) {
            slotDescriptor.setOriginType(entry.getKey().getType());
            slotDescriptor.setType(entry.getKey().getType());
        }
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    // Non-materialized slots for string columns backing global dictionaries.
    for (ColumnRefOperator entry : node.getGlobalDictStringColumns()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getId()));
        slotDescriptor.setIsNullable(entry.isNullable());
        slotDescriptor.setType(entry.getType());
        slotDescriptor.setIsMaterialized(false);
        context.getColRefToExpr().put(entry, new SlotRef(entry.toString(), slotDescriptor));
    }
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }
    for (ScalarOperator predicate : node.getPrunedPartitionPredicates()) {
        scanNode.getPrunedPartitionPredicates()
                .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }
    tupleDescriptor.computeMemLayout();
    setUnUsedOutputColumns(node, scanNode, predicates, referenceTable);
    scanNode.setIsSortedByKeyPerTablet(node.needSortedByKeyPerTablet());
    scanNode.setIsPreAggregation(node.isPreAggregation(), node.getTurnOffReason());
    scanNode.setDictStringIdToIntIds(node.getDictStringIdToIntIds());
    scanNode.updateAppliedDictStringColumns(node.getGlobalDicts().stream().
            map(entry -> entry.first).collect(Collectors.toSet()));
    // Bucket exprs are only usable when every bucket column is actually scanned.
    List<ColumnRefOperator> bucketColumns = getShuffleColumns(node.getDistributionSpec());
    boolean useAllBucketColumns =
            bucketColumns.stream().allMatch(c -> node.getColRefToColumnMetaMap().containsKey(c));
    if (useAllBucketColumns) {
        List<Expr> bucketExprs = bucketColumns.stream()
                .map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                .collect(Collectors.toList());
        scanNode.setBucketExprs(bucketExprs);
        scanNode.setBucketColumns(bucketColumns);
    }
    context.getScanNodes().add(scanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM);
    fragment.setQueryGlobalDicts(node.getGlobalDicts());
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Translates a PhysicalMetaScanOperator into a MetaScanNode fragment
 * (scans table metadata / per-column aggregates rather than row data).
 */
@Override
public PlanFragment visitPhysicalMetaScan(OptExpression optExpression, ExecPlan context) {
    PhysicalMetaScanOperator scan = (PhysicalMetaScanOperator) optExpression.getOp();
    context.getDescTbl().addReferencedTable(scan.getTable());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(scan.getTable());
    MetaScanNode scanNode =
            new MetaScanNode(context.getNextNodeId(),
                    tupleDescriptor, (OlapTable) scan.getTable(), scan.getAggColumnIdToNames());
    scanNode.computeRangeLocations();
    scanNode.computeStatistics(optExpression.getStatistics());
    // One materialized slot per referenced column, keyed by the ref's id.
    for (Map.Entry<ColumnRefOperator, Column> entry : scan.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor));
    }
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(scanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Creates one materialized slot per referenced column of a scan operator and
 * registers the corresponding SlotRef in the plan's column-ref-to-expr map.
 */
private void prepareContextSlots(PhysicalScanOperator node, ExecPlan context, TupleDescriptor tupleDescriptor) {
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        // For complex types, prefer the optimizer ref's (possibly pruned) type.
        if (slotDescriptor.getOriginType().isComplexType()) {
            slotDescriptor.setOriginType(entry.getKey().getType());
            slotDescriptor.setType(entry.getKey().getType());
        }
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
}
/**
 * Converts the three predicate groups of an HDFS-style scan (partition,
 * no-eval partition, non-partition) from scalar operators into exec Exprs
 * on the scan node's predicate holder.
 */
private void prepareCommonExpr(HDFSScanNodePredicates scanNodePredicates,
                               ScanOperatorPredicates predicates, ExecPlan context) {
    List<ScalarOperator> noEvalPartitionConjuncts = predicates.getNoEvalPartitionConjuncts();
    List<ScalarOperator> nonPartitionConjuncts = predicates.getNonPartitionConjuncts();
    List<ScalarOperator> partitionConjuncts = predicates.getPartitionConjuncts();
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator partitionConjunct : partitionConjuncts) {
        scanNodePredicates.getPartitionConjuncts().
                add(ScalarOperatorToExpr.buildExecExpression(partitionConjunct, formatterContext));
    }
    for (ScalarOperator noEvalPartitionConjunct : noEvalPartitionConjuncts) {
        scanNodePredicates.getNoEvalPartitionConjuncts().
                add(ScalarOperatorToExpr.buildExecExpression(noEvalPartitionConjunct, formatterContext));
    }
    for (ScalarOperator nonPartitionConjunct : nonPartitionConjuncts) {
        scanNodePredicates.getNonPartitionConjuncts().
                add(ScalarOperatorToExpr.buildExecExpression(nonPartitionConjunct, formatterContext));
    }
}
/**
 * Builds the min/max pruning tuple for an HDFS-style scan: one slot per column
 * referenced by a min/max conjunct, then converts those conjuncts into exec
 * Exprs that point at the new tuple.
 */
private void prepareMinMaxExpr(HDFSScanNodePredicates scanNodePredicates,
                               ScanOperatorPredicates predicates, ExecPlan context) {
    /*
     * populates 'minMaxTuple' with slots for statistics values,
     * and populates 'minMaxConjuncts' with conjuncts pointing into the 'minMaxTuple'
     */
    List<ScalarOperator> minMaxConjuncts = predicates.getMinMaxConjuncts();
    TupleDescriptor minMaxTuple = context.getDescTbl().createTupleDescriptor();
    for (ScalarOperator minMaxConjunct : minMaxConjuncts) {
        for (ColumnRefOperator columnRefOperator : Utils.extractColumnRef(minMaxConjunct)) {
            SlotDescriptor slotDescriptor =
                    context.getDescTbl()
                            .addSlotDescriptor(minMaxTuple, new SlotId(columnRefOperator.getId()));
            Column column = predicates.getMinMaxColumnRefMap().get(columnRefOperator);
            slotDescriptor.setColumn(column);
            slotDescriptor.setIsNullable(column.isAllowNull());
            slotDescriptor.setIsMaterialized(true);
            // Later buildExecExpression calls resolve refs through this mapping.
            context.getColRefToExpr()
                    .put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDescriptor));
        }
    }
    minMaxTuple.computeMemLayout();
    scanNodePredicates.setMinMaxTuple(minMaxTuple);
    ScalarOperatorToExpr.FormatterContext minMaxFormatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator minMaxConjunct : minMaxConjuncts) {
        scanNodePredicates.getMinMaxConjuncts().
                add(ScalarOperatorToExpr.buildExecExpression(minMaxConjunct, minMaxFormatterContext));
    }
}
/**
 * Translates a PhysicalHudiScanOperator into a HudiScanNode fragment: builds
 * slots, resolves scan-range locations, and prepares partition/min-max predicates.
 *
 * <p>Logging fix: the previous code logged the exception twice — once via string
 * concatenation (losing the stack trace) and once as a bare object. A single
 * parameterized {@code warn(String, Throwable)} keeps the full stack trace.
 */
@Override
public PlanFragment visitPhysicalHudiScan(OptExpression optExpression, ExecPlan context) {
    PhysicalHudiScanOperator node = (PhysicalHudiScanOperator) optExpression.getOp();
    ScanOperatorPredicates predicates = node.getScanOperatorPredicates();
    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);
    prepareContextSlots(node, context, tupleDescriptor);
    HudiScanNode hudiScanNode =
            new HudiScanNode(context.getNextNodeId(), tupleDescriptor, "HudiScanNode");
    hudiScanNode.computeStatistics(optExpression.getStatistics());
    try {
        HDFSScanNodePredicates scanNodePredicates = hudiScanNode.getScanNodePredicates();
        scanNodePredicates.setSelectedPartitionIds(predicates.getSelectedPartitionIds());
        scanNodePredicates.setIdToPartitionKey(predicates.getIdToPartitionKey());
        hudiScanNode.setupScanRangeLocations(context.getDescTbl());
        prepareCommonExpr(scanNodePredicates, predicates, context);
        prepareMinMaxExpr(scanNodePredicates, predicates, context);
    } catch (Exception e) {
        // Log once with the throwable so the stack trace is preserved.
        LOG.warn("Hudi scan node get scan range locations failed", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }
    hudiScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(hudiScanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), hudiScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Translates a PhysicalHiveScanOperator into an HdfsScanNode fragment: builds
 * slots, resolves scan-range locations, and prepares partition/min-max predicates.
 *
 * <p>Logging fix: the previous code concatenated the exception into the message
 * (losing the stack trace); a parameterized {@code warn(String, Throwable)}
 * keeps the full stack trace.
 */
@Override
public PlanFragment visitPhysicalHiveScan(OptExpression optExpression, ExecPlan context) {
    PhysicalHiveScanOperator node = (PhysicalHiveScanOperator) optExpression.getOp();
    ScanOperatorPredicates predicates = node.getScanOperatorPredicates();
    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);
    prepareContextSlots(node, context, tupleDescriptor);
    HdfsScanNode hdfsScanNode =
            new HdfsScanNode(context.getNextNodeId(), tupleDescriptor, "HdfsScanNode");
    hdfsScanNode.computeStatistics(optExpression.getStatistics());
    try {
        HDFSScanNodePredicates scanNodePredicates = hdfsScanNode.getScanNodePredicates();
        scanNodePredicates.setSelectedPartitionIds(predicates.getSelectedPartitionIds());
        scanNodePredicates.setIdToPartitionKey(predicates.getIdToPartitionKey());
        hdfsScanNode.setupScanRangeLocations(context.getDescTbl());
        prepareCommonExpr(scanNodePredicates, predicates, context);
        prepareMinMaxExpr(scanNodePredicates, predicates, context);
    } catch (Exception e) {
        // Log with the throwable so the stack trace is preserved.
        LOG.warn("Hdfs scan node get scan range locations failed", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }
    hdfsScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(hdfsScanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), hdfsScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Translates a physical file scan operator into a {@link FileTableScanNode} wrapped in
 * its own RANDOM-partitioned fragment. Unlike the Hive/Hudi variants, no partition
 * pruning information is copied: file tables have no partition map here.
 */
@Override
public PlanFragment visitPhysicalFileScan(OptExpression optExpression, ExecPlan context) {
    PhysicalFileScanOperator node = (PhysicalFileScanOperator) optExpression.getOp();
    ScanOperatorPredicates predicates = node.getScanOperatorPredicates();
    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);
    // Materialize the slots referenced by this scan into the tuple descriptor.
    prepareContextSlots(node, context, tupleDescriptor);
    FileTableScanNode fileTableScanNode =
            new FileTableScanNode(context.getNextNodeId(), tupleDescriptor, "FileTableScanNode");
    fileTableScanNode.computeStatistics(optExpression.getStatistics());
    try {
        HDFSScanNodePredicates scanNodePredicates = fileTableScanNode.getScanNodePredicates();
        fileTableScanNode.setupScanRangeLocations();
        prepareCommonExpr(scanNodePredicates, predicates, context);
        prepareMinMaxExpr(scanNodePredicates, predicates, context);
    } catch (Exception e) {
        // Fixed copy-pasted message: this is the file table scan node, not the Hdfs one.
        LOG.warn("File table scan node get scan range locations failed : ", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }
    fileTableScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(fileTableScanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), fileTableScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Translates a physical Delta Lake scan operator into a {@link DeltaLakeScanNode}
 * wrapped in its own RANDOM-partitioned fragment. Conjuncts from the operator's
 * predicate are attached to the node, then scan ranges and min/max exprs are prepared.
 */
@Override
public PlanFragment visitPhysicalDeltaLakeScan(OptExpression optExpression, ExecPlan context) {
    PhysicalDeltaLakeScanOperator node = (PhysicalDeltaLakeScanOperator) optExpression.getOp();
    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);
    // One materialized slot per referenced column; register SlotRefs for expr translation.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    DeltaLakeScanNode deltaLakeScanNode =
            new DeltaLakeScanNode(context.getNextNodeId(), tupleDescriptor, "DeltaLakeScanNode");
    deltaLakeScanNode.computeStatistics(optExpression.getStatistics());
    try {
        // Push each conjunct of the predicate down to the scan node.
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        for (ScalarOperator predicate : predicates) {
            deltaLakeScanNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        deltaLakeScanNode.setupScanRangeLocations(context.getDescTbl());
        HDFSScanNodePredicates scanNodePredicates = deltaLakeScanNode.getScanNodePredicates();
        prepareMinMaxExpr(scanNodePredicates, node.getScanOperatorPredicates(), context);
    } catch (AnalysisException e) {
        // Pass the throwable as a separate argument so the stack trace is preserved;
        // string concatenation only records e.toString().
        LOG.warn("Delta lake scan node get scan range locations failed", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }
    deltaLakeScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(deltaLakeScanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), deltaLakeScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Translates a physical Iceberg scan operator into an {@link IcebergScanNode} wrapped
 * in its own RANDOM-partitioned fragment. Conjuncts are attached, the Iceberg-native
 * predicate is pre-processed, and equality-delete columns are appended before scan
 * ranges are resolved.
 */
@Override
public PlanFragment visitPhysicalIcebergScan(OptExpression optExpression, ExecPlan context) {
    PhysicalIcebergScanOperator node = (PhysicalIcebergScanOperator) optExpression.getOp();
    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);
    // Materialize the slots referenced by this scan into the tuple descriptor.
    prepareContextSlots(node, context, tupleDescriptor);
    IcebergScanNode icebergScanNode =
            new IcebergScanNode(context.getNextNodeId(), tupleDescriptor, "IcebergScanNode");
    icebergScanNode.computeStatistics(optExpression.getStatistics());
    try {
        // Push each conjunct of the predicate down to the scan node.
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        for (ScalarOperator predicate : predicates) {
            icebergScanNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        icebergScanNode.preProcessIcebergPredicate(node.getPredicate());
        icebergScanNode.setupScanRangeLocations();
        icebergScanNode.appendEqualityColumns(node, columnRefFactory, context);
        HDFSScanNodePredicates scanNodePredicates = icebergScanNode.getScanNodePredicates();
        prepareMinMaxExpr(scanNodePredicates, node.getScanOperatorPredicates(), context);
    } catch (UserException e) {
        // Pass the throwable as a separate argument so the stack trace is preserved;
        // string concatenation only records e.toString().
        LOG.warn("Iceberg scan node get scan range locations failed", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }
    icebergScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(icebergScanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), icebergScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Translates a physical MySQL scan operator into a {@link MysqlScanNode} wrapped in an
 * UNPARTITIONED fragment. Predicates are pushed to the scan with implicit casts
 * enabled since they are rendered back to SQL for the remote MySQL server.
 */
// Fix: the original carried a duplicated @Override annotation, which is not a
// repeatable annotation and therefore does not compile.
@Override
public PlanFragment visitPhysicalMysqlScan(OptExpression optExpression, ExecPlan context) {
    PhysicalMysqlScanOperator node = (PhysicalMysqlScanOperator) optExpression.getOp();
    context.getDescTbl().addReferencedTable(node.getTable());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(node.getTable());
    // One materialized slot per referenced column; SlotRefs use the column name so
    // they can be rendered into the remote SQL statement.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor));
    }
    tupleDescriptor.computeMemLayout();
    MysqlScanNode scanNode = new MysqlScanNode(context.getNextNodeId(), tupleDescriptor,
            (MysqlTable) node.getTable());
    if (node.getTemporalClause() != null) {
        scanNode.setTemporalClause(node.getTemporalClause());
    }
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    formatterContext.setImplicitCast(true);
    for (ScalarOperator predicate : predicates) {
        scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }
    scanNode.setLimit(node.getLimit());
    scanNode.computeColumnsAndFilters();
    scanNode.computeStatistics(optExpression.getStatistics());
    context.getScanNodes().add(scanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Translates a physical Elasticsearch scan operator into an {@link EsScanNode}
 * wrapped in its own RANDOM-partitioned fragment.
 */
@Override
public PlanFragment visitPhysicalEsScan(OptExpression optExpression, ExecPlan context) {
    PhysicalEsScanOperator esScanOp = (PhysicalEsScanOperator) optExpression.getOp();
    context.getDescTbl().addReferencedTable(esScanOp.getTable());

    // Build the tuple descriptor: one materialized slot per referenced column,
    // registering a SlotRef for each so later expressions can resolve the column.
    TupleDescriptor tupleDesc = context.getDescTbl().createTupleDescriptor();
    tupleDesc.setTable(esScanOp.getTable());
    for (Map.Entry<ColumnRefOperator, Column> colEntry : esScanOp.getColRefToColumnMetaMap().entrySet()) {
        ColumnRefOperator colRef = colEntry.getKey();
        Column column = colEntry.getValue();
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(tupleDesc, new SlotId(colRef.getId()));
        slotDesc.setColumn(column);
        slotDesc.setIsNullable(column.isAllowNull());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr().put(colRef, new SlotRef(colRef.toString(), slotDesc));
    }
    tupleDesc.computeMemLayout();

    EsScanNode esScanNode = new EsScanNode(context.getNextNodeId(), tupleDesc, "EsScanNode");
    // Attach each conjunct of the operator's predicate to the scan node.
    ScalarOperatorToExpr.FormatterContext exprContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator conjunct : Utils.extractConjuncts(esScanOp.getPredicate())) {
        esScanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(conjunct, exprContext));
    }
    esScanNode.setLimit(esScanOp.getLimit());
    esScanNode.computeStatistics(optExpression.getStatistics());
    try {
        esScanNode.assignBackends();
    } catch (UserException e) {
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }
    esScanNode.setShardScanRanges(esScanNode.computeShardLocations(esScanOp.getSelectedIndex()));
    context.getScanNodes().add(esScanNode);

    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), esScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Translates a physical JDBC scan operator into a {@link JDBCScanNode} wrapped in an
 * UNPARTITIONED fragment.
 */
@Override
public PlanFragment visitPhysicalJDBCScan(OptExpression optExpression, ExecPlan context) {
    PhysicalJDBCScanOperator jdbcScanOp = (PhysicalJDBCScanOperator) optExpression.getOp();
    context.getDescTbl().addReferencedTable(jdbcScanOp.getTable());

    // Build the tuple descriptor: one materialized slot per referenced column.
    // SlotRefs use the column name so they can be rendered into the remote SQL.
    TupleDescriptor tupleDesc = context.getDescTbl().createTupleDescriptor();
    tupleDesc.setTable(jdbcScanOp.getTable());
    for (Map.Entry<ColumnRefOperator, Column> colEntry : jdbcScanOp.getColRefToColumnMetaMap().entrySet()) {
        ColumnRefOperator colRef = colEntry.getKey();
        Column column = colEntry.getValue();
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(tupleDesc, new SlotId(colRef.getId()));
        slotDesc.setColumn(column);
        slotDesc.setIsNullable(column.isAllowNull());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr().put(colRef, new SlotRef(colRef.getName(), slotDesc));
    }
    tupleDesc.computeMemLayout();

    JDBCScanNode jdbcScanNode = new JDBCScanNode(context.getNextNodeId(), tupleDesc,
            (JDBCTable) jdbcScanOp.getTable());
    // Predicates pushed to the scan may need implicit casts when rendered as SQL.
    ScalarOperatorToExpr.FormatterContext exprContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    exprContext.setImplicitCast(true);
    for (ScalarOperator conjunct : Utils.extractConjuncts(jdbcScanOp.getPredicate())) {
        jdbcScanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(conjunct, exprContext));
    }
    jdbcScanNode.setLimit(jdbcScanOp.getLimit());
    jdbcScanNode.computeColumnsAndFilters();
    jdbcScanNode.computeStatistics(optExpression.getStatistics());
    context.getScanNodes().add(jdbcScanNode);

    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), jdbcScanNode, DataPartition.UNPARTITIONED);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Builds a fragment for a VALUES operator (constant rows).
 * An empty row set becomes an EmptySetNode; otherwise a UnionNode materializes each
 * constant row as a list of exprs. The result fragment is always UNPARTITIONED.
 */
@Override
public PlanFragment visitPhysicalValues(OptExpression optExpr, ExecPlan context) {
    PhysicalValuesOperator valuesOperator = (PhysicalValuesOperator) optExpr.getOp();
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    // One materialized slot per output column; register a SlotRef for each so
    // later expression translation can resolve the column refs.
    for (ColumnRefOperator columnRefOperator : valuesOperator.getColumnRefSet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(columnRefOperator.getId()));
        slotDescriptor.setIsNullable(columnRefOperator.isNullable());
        slotDescriptor.setIsMaterialized(true);
        slotDescriptor.setType(columnRefOperator.getType());
        context.getColRefToExpr()
                .put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDescriptor));
    }
    tupleDescriptor.computeMemLayout();
    if (valuesOperator.getRows().isEmpty()) {
        // No rows at all: an EmptySetNode produces the empty result directly.
        EmptySetNode emptyNode = new EmptySetNode(context.getNextNodeId(),
                Lists.newArrayList(tupleDescriptor.getId()));
        emptyNode.computeStatistics(optExpr.getStatistics());
        PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), emptyNode,
                DataPartition.UNPARTITIONED);
        context.getFragments().add(fragment);
        return fragment;
    } else {
        UnionNode unionNode = new UnionNode(context.getNextNodeId(), tupleDescriptor.getId());
        unionNode.setLimit(valuesOperator.getLimit());
        // Translate every constant row into a list of executable exprs.
        List<List<Expr>> consts = new ArrayList<>();
        for (List<ScalarOperator> row : valuesOperator.getRows()) {
            List<Expr> exprRow = new ArrayList<>();
            for (ScalarOperator field : row) {
                exprRow.add(ScalarOperatorToExpr.buildExecExpression(
                        field, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
            }
            consts.add(exprRow);
        }
        unionNode.setMaterializedConstExprLists_(consts);
        unionNode.computeStatistics(optExpr.getStatistics());
        /*
         * TODO(lhy):
         * The plain const expr lists below are not used by the vectorized execution
         * engine, but they still appear in EXPLAIN output. Remove this duplication
         * when the explain code is refactored.
         */
        consts.forEach(unionNode::addConstExprList);
        PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), unionNode,
                DataPartition.UNPARTITIONED);
        context.getFragments().add(fragment);
        return fragment;
    }
}
/**
 * Returns true if the plan tree rooted at {@code root} contains no ExchangeNode.
 */
public static boolean hasNoExchangeNodes(PlanNode root) {
    if (root instanceof ExchangeNode) {
        return false;
    }
    // Every child subtree must also be exchange-free.
    return root.getChildren().stream().allMatch(child -> hasNoExchangeNodes(child));
}
/**
* Whether all the nodes of the plan tree only contain the specific node types.
*
* @param root The plan tree root.
* @param requiredNodeTypes The allowed node types.
* @return true if all the nodes belong to the node types, otherwise false.
*/
private boolean onlyContainNodeTypes(PlanNode root, List<Class<? extends PlanNode>> requiredNodeTypes) {
    // The root itself must be an instance of one of the allowed node types.
    boolean rootMatched = false;
    for (Class<? extends PlanNode> nodeType : requiredNodeTypes) {
        if (nodeType.isInstance(root)) {
            rootMatched = true;
            break;
        }
    }
    if (!rootMatched) {
        return false;
    }
    // And so must every node below it.
    return root.getChildren().stream()
            .allMatch(child -> onlyContainNodeTypes(child, requiredNodeTypes));
}
/**
* Remove ExchangeNode between AggNode and ScanNode for the single backend.
* <p>
* This is used to generate "ScanNode->LocalShuffle->OnePhaseLocalAgg" for the single backend,
* which contains two steps:
* 1. Ignore the network cost for ExchangeNode when estimating cost model.
* 2. Remove ExchangeNode between AggNode and ScanNode when building fragments.
* <p>
* Specifically, transfer
* (AggNode->ExchangeNode)->([ProjectNode->]ScanNode)
* - *inputFragment sourceFragment
* to
* (AggNode->[ProjectNode->]ScanNode)
* - *sourceFragment
* That is, when matching this fragment pattern, remove inputFragment and return sourceFragment.
*
* @param inputFragment The input fragment to match the above pattern.
* @param context The context of building fragment, which contains all the fragments.
* @return SourceFragment if it matches the pattern, otherwise the original inputFragment.
*/
private PlanFragment removeExchangeNodeForLocalShuffleAgg(PlanFragment inputFragment, ExecPlan context) {
    // No session: cannot read the controlling session variables, keep the fragment as-is.
    if (ConnectContext.get() == null) {
        return inputFragment;
    }
    // The builder has decided local-shuffle agg is not applicable for this plan.
    if (!canUseLocalShuffleAgg) {
        return inputFragment;
    }
    // The optimization requires the feature flags and a single backend/compute node.
    SessionVariable sessionVariable = ConnectContext.get().getSessionVariable();
    boolean enableLocalShuffleAgg = sessionVariable.isEnableLocalShuffleAgg()
            && sessionVariable.isEnablePipelineEngine()
            && GlobalStateMgr.getCurrentSystemInfo().isSingleBackendAndComputeNode();
    if (!enableLocalShuffleAgg) {
        return inputFragment;
    }
    // Pattern guard: inputFragment's root must be the ExchangeNode to be removed.
    if (!(inputFragment.getPlanRoot() instanceof ExchangeNode)) {
        return inputFragment;
    }
    // Pattern guard: below the exchange there may only be scan/project nodes.
    PlanNode sourceFragmentRoot = inputFragment.getPlanRoot().getChild(0);
    if (!onlyContainNodeTypes(sourceFragmentRoot, ImmutableList.of(ScanNode.class, ProjectNode.class))) {
        return inputFragment;
    }
    // Multi-cast fragments feed several consumers and must not be collapsed.
    PlanFragment sourceFragment = sourceFragmentRoot.getFragment();
    if (sourceFragment instanceof MultiCastPlanFragment) {
        return inputFragment;
    }
    // Drop inputFragment from the plan; search from the back, where it was appended last.
    ArrayList<PlanFragment> fragments = context.getFragments();
    for (int i = fragments.size() - 1; i >= 0; --i) {
        if (fragments.get(i).equals(inputFragment)) {
            fragments.remove(i);
            break;
        }
    }
    // Clear bucket keys so BE uses the aggregation's own keys for local shuffle
    // (see clearOlapScanNodePartitions), and detach the now-removed exchange.
    clearOlapScanNodePartitions(sourceFragment.getPlanRoot());
    sourceFragment.clearDestination();
    sourceFragment.clearOutputPartition();
    return sourceFragment;
}
/**
* Clear partitionExprs of OlapScanNode (the bucket keys to pass to BE).
* <p>
* When partitionExprs of OlapScanNode are passed to BE, the post operators will use them as
* local shuffle partition exprs.
* Otherwise, the operators will use the original partition exprs (group by keys or join on keys).
* <p>
* The bucket keys can satisfy the required hash property of blocking aggregation except two scenarios:
* - OlapScanNode only has one tablet after pruned.
* - It is executed on the single BE.
* As for these two scenarios, which will generate ScanNode(k1)->LocalShuffle(c1)->BlockingAgg(c1),
* partitionExprs of OlapScanNode must be cleared to make BE use group by keys not bucket keys as
* local shuffle partition exprs.
*
* @param root The root node of the fragment which need to check whether to clear bucket keys of OlapScanNode.
*/
private void clearOlapScanNodePartitions(PlanNode root) {
    // Drop the bucket keys of an OLAP scan so BE falls back to the operators'
    // own partition exprs (e.g. group-by keys) for local shuffle.
    if (root instanceof OlapScanNode) {
        OlapScanNode olapScanNode = (OlapScanNode) root;
        olapScanNode.setBucketExprs(Lists.newArrayList());
        olapScanNode.setBucketColumns(Lists.newArrayList());
        return;
    }
    // Do not cross exchange boundaries: only the local fragment is affected.
    if (root instanceof ExchangeNode) {
        return;
    }
    root.getChildren().forEach(this::clearOlapScanNodePartitions);
}
/**
 * Bundle of translated aggregate expressions produced by buildAggregateTuple:
 * group-by exprs, aggregate function calls, partition-by exprs, and the
 * intermediate-type slot refs used by multi-phase aggregation.
 */
private static class AggregateExprInfo {
    // Translated group-by expressions.
    public final ArrayList<Expr> groupExpr;
    // Translated aggregate function calls.
    public final ArrayList<FunctionCallExpr> aggregateExpr;
    // Translated partition-by expressions (used to hash-partition the local agg output).
    public final ArrayList<Expr> partitionExpr;
    // SlotRefs typed with the aggregates' intermediate types, for merge phases.
    public final ArrayList<Expr> intermediateExpr;
    public AggregateExprInfo(ArrayList<Expr> groupExpr, ArrayList<FunctionCallExpr> aggregateExpr,
                             ArrayList<Expr> partitionExpr,
                             ArrayList<Expr> intermediateExpr) {
        this.groupExpr = groupExpr;
        this.aggregateExpr = aggregateExpr;
        this.partitionExpr = partitionExpr;
        this.intermediateExpr = intermediateExpr;
    }
}
/**
 * Translates group-by, aggregate, and partition-by column refs into executable exprs,
 * registering an output slot for each in {@code outputTupleDesc}.
 *
 * @param aggregations    output column ref -> aggregate call to translate
 * @param groupBys        group-by column refs (may be null)
 * @param partitionBys    partition-by column refs (may be null)
 * @param outputTupleDesc tuple descriptor that receives the output slots
 * @param context         exec plan providing descriptor table and colRef->expr map
 * @return the translated expression bundle
 */
private AggregateExprInfo buildAggregateTuple(
        Map<ColumnRefOperator, CallOperator> aggregations,
        List<ColumnRefOperator> groupBys,
        List<ColumnRefOperator> partitionBys,
        TupleDescriptor outputTupleDesc,
        ExecPlan context) {
    ArrayList<Expr> groupingExpressions = Lists.newArrayList();
    // Special case: stage-1 exchange perf aggregations (exchange_bytes/exchange_speed)
    // skip group-by translation entirely.
    boolean forExchangePerf = aggregations.values().stream().anyMatch(aggFunc ->
            aggFunc.getFnName().equals(FunctionSet.EXCHANGE_BYTES) ||
                    aggFunc.getFnName().equals(FunctionSet.EXCHANGE_SPEED)) &&
            ConnectContext.get().getSessionVariable().getNewPlannerAggStage() == 1;
    if (!forExchangePerf) {
        for (ColumnRefOperator grouping : CollectionUtils.emptyIfNull(groupBys)) {
            Expr groupingExpr = ScalarOperatorToExpr.buildExecExpression(grouping,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            groupingExpressions.add(groupingExpr);
            // Output slot for the grouping key. Note: no colRefToExpr registration here;
            // the grouping column ref is presumably already mapped — TODO confirm.
            SlotDescriptor slotDesc =
                    context.getDescTbl().addSlotDescriptor(outputTupleDesc, new SlotId(grouping.getId()));
            slotDesc.setType(groupingExpr.getType());
            slotDesc.setIsNullable(groupingExpr.isNullable());
            slotDesc.setIsMaterialized(true);
        }
    }
    ArrayList<FunctionCallExpr> aggregateExprList = Lists.newArrayList();
    ArrayList<Expr> intermediateAggrExprs = Lists.newArrayList();
    for (Map.Entry<ColumnRefOperator, CallOperator> aggregation : aggregations.entrySet()) {
        FunctionCallExpr aggExpr = (FunctionCallExpr) ScalarOperatorToExpr.buildExecExpression(
                aggregation.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        aggregateExprList.add(aggExpr);
        // Output slot holds the aggregate's final type; register its SlotRef for later use.
        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(outputTupleDesc, new SlotId(aggregation.getKey().getId()));
        slotDesc.setType(aggregation.getValue().getType());
        slotDesc.setIsNullable(aggExpr.isNullable());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr()
                .put(aggregation.getKey(), new SlotRef(aggregation.getKey().toString(), slotDesc));
        // A parallel slot typed with the aggregate's intermediate type (falls back to the
        // return type when no distinct intermediate type exists) for merge phases.
        SlotDescriptor intermediateSlotDesc = new SlotDescriptor(slotDesc.getId(), slotDesc.getParent());
        AggregateFunction aggrFn = (AggregateFunction) aggExpr.getFn();
        Type intermediateType = aggrFn.getIntermediateType() != null ?
                aggrFn.getIntermediateType() : aggrFn.getReturnType();
        intermediateSlotDesc.setType(intermediateType);
        intermediateSlotDesc.setIsNullable(aggrFn.isNullable());
        intermediateSlotDesc.setIsMaterialized(true);
        SlotRef intermediateSlotRef = new SlotRef(aggregation.getKey().toString(), intermediateSlotDesc);
        intermediateAggrExprs.add(intermediateSlotRef);
    }
    ArrayList<Expr> partitionExpressions = Lists.newArrayList();
    for (ColumnRefOperator column : CollectionUtils.emptyIfNull(partitionBys)) {
        Expr partitionExpr = ScalarOperatorToExpr.buildExecExpression(column,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(outputTupleDesc, new SlotId(column.getId()));
        slotDesc.setType(partitionExpr.getType());
        slotDesc.setIsNullable(partitionExpr.isNullable());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr().put(column, new SlotRef(column.toString(), slotDesc));
        partitionExpressions.add(new SlotRef(slotDesc));
    }
    outputTupleDesc.computeMemLayout();
    return new AggregateExprInfo(groupingExpressions, aggregateExprList, partitionExpressions,
            intermediateAggrExprs);
}
/**
 * Builds the AggregationNode for a physical hash aggregate, selecting the AggregateInfo
 * phase (FIRST / SECOND / FIRST_MERGE / SECOND_MERGE) from the operator's type and
 * split flag. May also collapse the child exchange into a local-shuffle aggregation
 * on a single backend (see removeExchangeNodeForLocalShuffleAgg).
 */
@Override
public PlanFragment visitPhysicalHashAggregate(OptExpression optExpr, ExecPlan context) {
    PhysicalHashAggregateOperator node = (PhysicalHashAggregateOperator) optExpr.getOp();
    PlanFragment originalInputFragment = visit(optExpr.inputAt(0), context);
    // If the child exchange was removed, the agg runs with a local shuffle instead.
    PlanFragment inputFragment = removeExchangeNodeForLocalShuffleAgg(originalInputFragment, context);
    boolean withLocalShuffle = inputFragment != originalInputFragment;
    Map<ColumnRefOperator, CallOperator> aggregations = node.getAggregations();
    List<ColumnRefOperator> groupBys = node.getGroupBys();
    List<ColumnRefOperator> partitionBys = node.getPartitionByColumns();
    TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor();
    AggregateExprInfo aggExpr =
            buildAggregateTuple(aggregations, groupBys, partitionBys, outputTupleDesc, context);
    ArrayList<Expr> groupingExpressions = aggExpr.groupExpr;
    ArrayList<FunctionCallExpr> aggregateExprList = aggExpr.aggregateExpr;
    ArrayList<Expr> partitionExpressions = aggExpr.partitionExpr;
    ArrayList<Expr> intermediateAggrExprs = aggExpr.intermediateExpr;
    AggregationNode aggregationNode;
    if (node.getType().isLocal() && node.isSplit()) {
        // Local (first) phase of a split aggregation: not finalized, may pre-aggregate.
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions,
                aggregateExprList,
                outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.FIRST);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIsPreagg(node.isUseStreamingPreAgg());
        aggregationNode.setIntermediateTuple();
        if (!partitionExpressions.isEmpty()) {
            inputFragment.setOutputPartition(DataPartition.hashPartitioned(partitionExpressions));
        }
        // Colocate only applies when no local shuffle, no streaming pre-agg, and the
        // fragment reads a colocate OLAP table.
        if (!withLocalShuffle && !node.isUseStreamingPreAgg() &&
                hasColocateOlapScanChildInFragment(aggregationNode)) {
            aggregationNode.setColocate(true);
        }
    } else if (node.getType().isGlobal() || (node.getType().isLocal() && !node.isSplit())) {
        // Global (final) phase, or a one-phase (unsplit) local aggregation.
        if (node.hasSingleDistinct()) {
            // Merge every aggregate except the single distinct function itself.
            for (int i = 0; i < aggregateExprList.size(); i++) {
                if (i != node.getSingleDistinctFunctionPos()) {
                    aggregateExprList.get(i).setMergeAggFn();
                }
            }
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions,
                    aggregateExprList,
                    outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.SECOND);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
                            aggInfo);
        } else if (!node.isSplit()) {
            // One-phase aggregation: rewrite a lone distinct agg to its multi_distinct form.
            rewriteAggDistinctFirstStageFunction(aggregateExprList);
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions,
                    aggregateExprList,
                    outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.FIRST);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
                            aggInfo);
        } else {
            // Final merge of a split aggregation: every aggregate becomes its merge form.
            aggregateExprList.forEach(FunctionCallExpr::setMergeAggFn);
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions,
                    aggregateExprList,
                    outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.SECOND_MERGE);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
                            aggInfo);
        }
        // HAVING-style predicates apply on the finalized aggregation output.
        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        for (ScalarOperator predicate : predicates) {
            aggregationNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        aggregationNode.setLimit(node.getLimit());
        if (!withLocalShuffle && hasColocateOlapScanChildInFragment(aggregationNode)) {
            aggregationNode.setColocate(true);
        }
    } else if (node.getType().isDistinctGlobal()) {
        // Distinct-global phase: merge first-phase results, keep the intermediate tuple.
        aggregateExprList.forEach(FunctionCallExpr::setMergeAggFn);
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions,
                aggregateExprList,
                outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.FIRST_MERGE);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIntermediateTuple();
        if (!withLocalShuffle && hasColocateOlapScanChildInFragment(aggregationNode)) {
            aggregationNode.setColocate(true);
        }
    } else if (node.getType().isDistinctLocal()) {
        // Distinct-local phase: merge all aggregates except the distinct one.
        for (int i = 0; i < aggregateExprList.size(); i++) {
            if (i != node.getSingleDistinctFunctionPos()) {
                aggregateExprList.get(i).setMergeAggFn();
            }
        }
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions,
                aggregateExprList,
                outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.SECOND);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIsPreagg(node.isUseStreamingPreAgg());
        aggregationNode.setIntermediateTuple();
    } else {
        throw unsupportedException("Not support aggregate type : " + node.getType());
    }
    aggregationNode.setUseSortAgg(node.isUseSortAgg());
    aggregationNode.setStreamingPreaggregationMode(context.getConnectContext().
            getSessionVariable().getStreamingPreaggregationMode());
    aggregationNode.setHasNullableGenerateChild();
    aggregationNode.computeStatistics(optExpr.getStatistics());
    if (node.isOnePhaseAgg() || node.isMergedLocalAgg() || node.getType().isDistinctGlobal()) {
        // On a single-tablet plan the bucket keys cannot help; clear them so BE
        // uses the aggregation keys for any local shuffle.
        if (optExpr.getLogicalProperty().oneTabletProperty().supportOneTabletOpt) {
            clearOlapScanNodePartitions(aggregationNode);
        }
        inputFragment.setAssignScanRangesPerDriverSeq(!withLocalShuffle);
        aggregationNode.setWithLocalShuffle(withLocalShuffle);
        aggregationNode.setIdenticallyDistributed(true);
    }
    aggregationNode.getAggInfo().setIntermediateAggrExprs(intermediateAggrExprs);
    inputFragment.setPlanRoot(aggregationNode);
    return inputFragment;
}
/**
 * Returns true if this fragment (stopping at exchange boundaries) contains an
 * OLAP scan over a colocate table.
 */
public boolean hasColocateOlapScanChildInFragment(PlanNode node) {
    if (node instanceof OlapScanNode) {
        OlapScanNode olapScanNode = (OlapScanNode) node;
        ColocateTableIndex colocateIndex = GlobalStateMgr.getCurrentColocateIndex();
        if (colocateIndex.isColocateTable(olapScanNode.getOlapTable().getId())) {
            return true;
        }
    }
    // Do not look past exchange nodes: they mark the fragment boundary.
    if (node instanceof ExchangeNode) {
        return false;
    }
    // Short-circuit as soon as one child subtree qualifies.
    for (PlanNode child : node.getChildren()) {
        if (hasColocateOlapScanChildInFragment(child)) {
            return true;
        }
    }
    return false;
}
/**
 * For a one-phase aggregation containing exactly one distinct aggregate, rewrites it
 * in place: count(distinct x) -> multi_distinct_count(x) and
 * sum(distinct x) -> multi_distinct_sum(x), clearing the distinct flag.
 * Lists with zero or more than one distinct aggregate are left untouched.
 */
public void rewriteAggDistinctFirstStageFunction(List<FunctionCallExpr> aggregateExprList) {
    int singleDistinctCount = 0;
    int singleDistinctIndex = 0;
    FunctionCallExpr functionCallExpr = null;
    // Count the distinct aggregates, remembering the last one seen and its position.
    for (int i = 0; i < aggregateExprList.size(); ++i) {
        FunctionCallExpr callExpr = aggregateExprList.get(i);
        if (callExpr.isDistinct()) {
            ++singleDistinctCount;
            functionCallExpr = callExpr;
            singleDistinctIndex = i;
        }
    }
    if (singleDistinctCount == 1) {
        FunctionCallExpr replaceExpr = null;
        final String functionName = functionCallExpr.getFnName().getFunction();
        if (functionName.equalsIgnoreCase(FunctionSet.COUNT)) {
            replaceExpr = new FunctionCallExpr(FunctionSet.MULTI_DISTINCT_COUNT, functionCallExpr.getParams());
            replaceExpr.setFn(Expr.getBuiltinFunction(FunctionSet.MULTI_DISTINCT_COUNT,
                    new Type[] {functionCallExpr.getChild(0).getType()},
                    IS_NONSTRICT_SUPERTYPE_OF));
            replaceExpr.getParams().setIsDistinct(false);
        } else if (functionName.equalsIgnoreCase(FunctionSet.SUM)) {
            replaceExpr = new FunctionCallExpr(FunctionSet.MULTI_DISTINCT_SUM, functionCallExpr.getParams());
            // sum over decimal needs its own conversion to keep the decimal v3 typing.
            Function multiDistinctSum = DecimalV3FunctionAnalyzer.convertSumToMultiDistinctSum(
                    functionCallExpr.getFn(), functionCallExpr.getChild(0).getType());
            replaceExpr.setFn(multiDistinctSum);
            replaceExpr.getParams().setIsDistinct(false);
        }
        // Only count/sum distinct are expected to reach this rewrite.
        Preconditions.checkState(replaceExpr != null);
        ExpressionAnalyzer.analyzeExpressionIgnoreSlot(replaceExpr, ConnectContext.get());
        aggregateExprList.set(singleDistinctIndex, replaceExpr);
    }
}
/**
 * Builds an ExchangeNode fragment for a physical distribution operator.
 * GATHER and BROADCAST produce an UNPARTITIONED exchange; SHUFFLE hash-partitions
 * on the distribution spec's shuffle columns. The child fragment's output partition
 * and destination are wired to the new exchange.
 */
@Override
public PlanFragment visitPhysicalDistribution(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalDistributionOperator distribution = (PhysicalDistributionOperator) optExpr.getOp();
    ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
            inputFragment.getPlanRoot(), distribution.getDistributionSpec().getType());
    DataPartition dataPartition;
    if (DistributionSpec.DistributionType.GATHER.equals(distribution.getDistributionSpec().getType())) {
        // Gather funnels everything into a single instance; a gather limit may apply.
        exchangeNode.setNumInstances(1);
        dataPartition = DataPartition.UNPARTITIONED;
        GatherDistributionSpec spec = (GatherDistributionSpec) distribution.getDistributionSpec();
        if (spec.hasLimit()) {
            exchangeNode.setLimit(spec.getLimit());
        }
    } else if (DistributionSpec.DistributionType.BROADCAST
            .equals(distribution.getDistributionSpec().getType())) {
        exchangeNode.setNumInstances(inputFragment.getPlanRoot().getNumInstances());
        dataPartition = DataPartition.UNPARTITIONED;
    } else if (DistributionSpec.DistributionType.SHUFFLE.equals(distribution.getDistributionSpec().getType())) {
        exchangeNode.setNumInstances(inputFragment.getPlanRoot().getNumInstances());
        // Hash-partition on the shuffle columns translated to executable exprs.
        List<ColumnRefOperator> partitionColumns =
                getShuffleColumns((HashDistributionSpec) distribution.getDistributionSpec());
        List<Expr> distributeExpressions =
                partitionColumns.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                        .collect(Collectors.toList());
        dataPartition = DataPartition.hashPartitioned(distributeExpressions);
    } else {
        throw new StarRocksPlannerException("Unsupport exchange type : "
                + distribution.getDistributionSpec().getType(), INTERNAL_ERROR);
    }
    exchangeNode.setDataPartition(dataPartition);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), exchangeNode, dataPartition);
    // Global dicts flow through the exchange with the data.
    fragment.setQueryGlobalDicts(distribution.getGlobalDicts());
    inputFragment.setDestination(exchangeNode);
    inputFragment.setOutputPartition(dataPartition);
    context.getFragments().add(fragment);
    return fragment;
}
@Override
public PlanFragment visitPhysicalTopN(OptExpression optExpr, ExecPlan context) {
    // A split TopN is finalized with a merging exchange; an unsplit one is a
    // partial sort executed inside the child's fragment.
    PlanFragment childFragment = visit(optExpr.inputAt(0), context);
    PhysicalTopNOperator topN = (PhysicalTopNOperator) optExpr.getOp();
    Preconditions.checkState(topN.getOffset() >= 0);
    if (topN.isSplit()) {
        return buildFinalTopNFragment(context, topN.getTopNType(), topN.getLimit(), topN.getOffset(),
                childFragment, optExpr);
    }
    return buildPartialTopNFragment(optExpr, context, topN.getPartitionByColumns(),
            topN.getPartitionLimit(), topN.getOrderSpec(),
            topN.getTopNType(), topN.getLimit(), topN.getOffset(), childFragment);
}
// Builds the final stage of a split TopN: a new single-instance fragment whose
// ExchangeNode merges the pre-sorted streams produced by the input fragment's
// SortNode, applying the offset during the merge.
private PlanFragment buildFinalTopNFragment(ExecPlan context, TopNType topNType, long limit, long offset,
                                            PlanFragment inputFragment,
                                            OptExpression optExpr) {
    ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            DistributionSpec.DistributionType.GATHER);
    exchangeNode.setNumInstances(1);
    DataPartition dataPartition = DataPartition.UNPARTITIONED;
    exchangeNode.setDataPartition(dataPartition);
    // The child fragment must be rooted at the partial SortNode whose sort info
    // drives the merging exchange.
    Preconditions.checkState(inputFragment.getPlanRoot() instanceof SortNode);
    SortNode sortNode = (SortNode) inputFragment.getPlanRoot();
    sortNode.setTopNType(topNType);
    exchangeNode.setMergeInfo(sortNode.getSortInfo(), offset);
    exchangeNode.computeStatistics(optExpr.getStatistics());
    // Only ROW_NUMBER-style TopN can enforce the limit at the exchange; other
    // rank types must keep all tied rows, so the limit is cleared here.
    if (TopNType.ROW_NUMBER.equals(topNType)) {
        exchangeNode.setLimit(limit);
    } else {
        exchangeNode.unsetLimit();
    }
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), exchangeNode, dataPartition);
    inputFragment.setDestination(exchangeNode);
    inputFragment.setOutputPartition(dataPartition);
    fragment.setQueryGlobalDicts(inputFragment.getQueryGlobalDicts());
    context.getFragments().add(fragment);
    return fragment;
}
// Builds the partial (per-instance) stage of a TopN inside the input fragment:
// creates a sort tuple containing the ordering columns plus every other output
// column of the child, then places a SortNode on top of the fragment's plan root.
private PlanFragment buildPartialTopNFragment(OptExpression optExpr, ExecPlan context,
                                              List<ColumnRefOperator> partitionByColumns, long partitionLimit,
                                              OrderSpec orderSpec, TopNType topNType, long limit, long offset,
                                              PlanFragment inputFragment) {
    List<Expr> resolvedTupleExprs = Lists.newArrayList();
    List<Expr> partitionExprs = Lists.newArrayList();
    List<Expr> sortExprs = Lists.newArrayList();
    TupleDescriptor sortTuple = context.getDescTbl().createTupleDescriptor();
    // Partition-by columns (for partitioned TopN) are translated but not
    // materialized into the sort tuple.
    if (CollectionUtils.isNotEmpty(partitionByColumns)) {
        for (ColumnRefOperator partitionByColumn : partitionByColumns) {
            Expr expr = ScalarOperatorToExpr.buildExecExpression(partitionByColumn,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            partitionExprs.add(expr);
        }
    }
    // Materialize one slot per ordering column and remap the column ref to the
    // new slot so downstream expressions resolve against the sort tuple.
    for (Ordering ordering : orderSpec.getOrderDescs()) {
        Expr sortExpr = ScalarOperatorToExpr.buildExecExpression(ordering.getColumnRef(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(sortTuple, new SlotId(ordering.getColumnRef().getId()));
        slotDesc.initFromExpr(sortExpr);
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(sortExpr.isNullable());
        slotDesc.setType(sortExpr.getType());
        context.getColRefToExpr()
                .put(ordering.getColumnRef(), new SlotRef(ordering.getColumnRef().toString(), slotDesc));
        resolvedTupleExprs.add(sortExpr);
        sortExprs.add(new SlotRef(slotDesc));
    }
    ColumnRefSet columnRefSet = optExpr.inputAt(0).getLogicalProperty().getOutputColumns();
    for (int i = 0; i < columnRefSet.getColumnIds().length; ++i) {
        /*
         * Add column not be used in ordering
         */
        ColumnRefOperator columnRef = columnRefFactory.getColumnRef(columnRefSet.getColumnIds()[i]);
        if (orderSpec.getOrderDescs().stream().map(Ordering::getColumnRef)
                .noneMatch(c -> c.equals(columnRef))) {
            Expr outputExpr = ScalarOperatorToExpr.buildExecExpression(columnRef,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            SlotDescriptor slotDesc =
                    context.getDescTbl().addSlotDescriptor(sortTuple, new SlotId(columnRef.getId()));
            slotDesc.initFromExpr(outputExpr);
            slotDesc.setIsMaterialized(true);
            slotDesc.setIsNullable(outputExpr.isNullable());
            slotDesc.setType(outputExpr.getType());
            context.getColRefToExpr().put(columnRef, new SlotRef(columnRef.toString(), slotDesc));
            resolvedTupleExprs.add(outputExpr);
        }
    }
    sortTuple.computeMemLayout();
    SortInfo sortInfo = new SortInfo(partitionExprs, partitionLimit, sortExprs,
            orderSpec.getOrderDescs().stream().map(Ordering::isAscending).collect(Collectors.toList()),
            orderSpec.getOrderDescs().stream().map(Ordering::isNullsFirst).collect(Collectors.toList()));
    sortInfo.setMaterializedTupleInfo(sortTuple, resolvedTupleExprs);
    // useTopN when a limit is present; isDefaultLimit when it is not.
    SortNode sortNode = new SortNode(
            context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            sortInfo,
            limit != Operator.DEFAULT_LIMIT,
            limit == Operator.DEFAULT_LIMIT,
            0);
    sortNode.setTopNType(topNType);
    sortNode.setLimit(limit);
    sortNode.setOffset(offset);
    sortNode.resolvedTupleExprs = resolvedTupleExprs;
    sortNode.setHasNullableGenerateChild();
    sortNode.computeStatistics(optExpr.getStatistics());
    if (shouldBuildGlobalRuntimeFilter()) {
        sortNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    inputFragment.setPlanRoot(sortNode);
    return inputFragment;
}
// Marks the join as push-down-able: requires the session flag to be on and the
// join type to be inner, left-semi, or right join.
private void setJoinPushDown(JoinNode node) {
    boolean sessionAllows = ConnectContext.get().getSessionVariable().isHashJoinPushDownRightTable();
    JoinOperator op = node.getJoinOp();
    boolean typeAllows = op.isInnerJoin() || op.isLeftSemiJoin() || op.isRightJoin();
    node.setIsPushDown(sessionAllows && typeAllows);
}
// Global runtime filters are built only when a connect context exists and either
// the global-runtime-filter flag or the pipeline engine is enabled.
private boolean shouldBuildGlobalRuntimeFilter() {
    ConnectContext connectContext = ConnectContext.get();
    if (connectContext == null) {
        return false;
    }
    return connectContext.getSessionVariable().getEnableGlobalRuntimeFilter()
            || connectContext.getSessionVariable().isEnablePipelineEngine();
}
@Override
public PlanFragment visitPhysicalHashJoin(OptExpression optExpr, ExecPlan context) {
    // Translate both children first, then assemble the join on top of them.
    PlanFragment probeSide = visit(optExpr.inputAt(0), context);
    PlanFragment buildSide = visit(optExpr.inputAt(1), context);
    return visitPhysicalJoin(probeSide, buildSide, optExpr, context);
}
// Splits a predicate into its AND-ed conjuncts and translates each one into an
// executable Expr using the current column-ref mapping.
private List<Expr> extractConjuncts(ScalarOperator predicate, ExecPlan context) {
    List<Expr> result = Lists.newArrayList();
    for (ScalarOperator conjunct : Utils.extractConjuncts(predicate)) {
        result.add(ScalarOperatorToExpr.buildExecExpression(conjunct,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
    }
    return result;
}
// Marks slots nullable for outer joins: the side that can be NULL-padded has all
// of its tuples' slots set nullable, then each affected tuple's memory layout is
// recomputed. Tuples already nullable in either child stay in the set.
private void setNullableForJoin(JoinOperator joinOperator,
                                PlanFragment leftFragment, PlanFragment rightFragment, ExecPlan context) {
    Set<TupleId> nullableTupleIds = new HashSet<>();
    nullableTupleIds.addAll(leftFragment.getPlanRoot().getNullableTupleIds());
    nullableTupleIds.addAll(rightFragment.getPlanRoot().getNullableTupleIds());
    boolean leftSidePadded = joinOperator.isRightOuterJoin() || joinOperator.isFullOuterJoin();
    boolean rightSidePadded = joinOperator.isLeftOuterJoin() || joinOperator.isFullOuterJoin();
    if (leftSidePadded) {
        nullableTupleIds.addAll(leftFragment.getPlanRoot().getTupleIds());
    }
    if (rightSidePadded) {
        nullableTupleIds.addAll(rightFragment.getPlanRoot().getTupleIds());
    }
    for (TupleId tupleId : nullableTupleIds) {
        TupleDescriptor tupleDescriptor = context.getDescTbl().getTupleDesc(tupleId);
        for (SlotDescriptor slot : tupleDescriptor.getSlots()) {
            slot.setIsNullable(true);
        }
        tupleDescriptor.computeMemLayout();
    }
}
// Translates a nest-loop join: both children are built, the right fragment's plan
// tree is absorbed into the left fragment, and the NestLoopJoinNode becomes the
// left fragment's new root.
@Override
public PlanFragment visitPhysicalNestLoopJoin(OptExpression optExpr, ExecPlan context) {
    PhysicalJoinOperator node = (PhysicalJoinOperator) optExpr.getOp();
    PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);
    List<Expr> conjuncts = extractConjuncts(node.getPredicate(), context);
    List<Expr> joinOnConjuncts = extractConjuncts(node.getOnPredicate(), context);
    List<Expr> probePartitionByExprs = Lists.newArrayList();
    DistributionSpec leftDistributionSpec =
            optExpr.getRequiredProperties().get(0).getDistributionProperty().getSpec();
    DistributionSpec rightDistributionSpec =
            optExpr.getRequiredProperties().get(1).getDistributionProperty().getSpec();
    // Probe partition-by expressions are only meaningful when both sides are
    // hash-distributed.
    if (leftDistributionSpec instanceof HashDistributionSpec &&
            rightDistributionSpec instanceof HashDistributionSpec) {
        probePartitionByExprs = getShuffleExprs((HashDistributionSpec) leftDistributionSpec, context);
    }
    setNullableForJoin(node.getJoinType(), leftFragment, rightFragment, context);
    NestLoopJoinNode joinNode = new NestLoopJoinNode(context.getNextNodeId(),
            leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
            null, node.getJoinType(), Lists.newArrayList(), joinOnConjuncts);
    joinNode.setLimit(node.getLimit());
    joinNode.computeStatistics(optExpr.getStatistics());
    joinNode.addConjuncts(conjuncts);
    joinNode.setProbePartitionByExprs(probePartitionByExprs);
    // Merge the right fragment into the left: its plan root now belongs to the
    // left fragment, and the left fragment is re-appended so it stays last in
    // the fragment list.
    rightFragment.getPlanRoot().setFragment(leftFragment);
    context.getFragments().remove(rightFragment);
    context.getFragments().remove(leftFragment);
    context.getFragments().add(leftFragment);
    leftFragment.setPlanRoot(joinNode);
    leftFragment.addChildren(rightFragment.getChildren());
    // A build side that is not an exchange means the right table is read locally
    // by every instance, i.e. a replicated (non-shuffled) join.
    if (!(joinNode.getChild(1) instanceof ExchangeNode)) {
        joinNode.setReplicated(true);
    }
    if (shouldBuildGlobalRuntimeFilter()) {
        joinNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
    return leftFragment;
}
// Translates a merge join. When both children have their own inputs (sort nodes
// on top), the sorts are temporarily detached so visitPhysicalJoin builds the
// join over the sort inputs; afterwards the original children and plan roots are
// restored and re-attached under the join.
@Override
public PlanFragment visitPhysicalMergeJoin(OptExpression optExpr, ExecPlan context) {
    PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);
    PlanNode leftPlanRoot = leftFragment.getPlanRoot();
    PlanNode rightPlanRoot = rightFragment.getPlanRoot();
    OptExpression leftExpression = optExpr.inputAt(0);
    OptExpression rightExpression = optExpr.inputAt(1);
    boolean needDealSort = leftExpression.getInputs().size() > 0 && rightExpression.getInputs().size() > 0;
    if (needDealSort) {
        // Detach: point the join's children and the fragments' roots one level down.
        optExpr.setChild(0, leftExpression.inputAt(0));
        optExpr.setChild(1, rightExpression.inputAt(0));
        leftFragment.setPlanRoot(leftPlanRoot.getChild(0));
        rightFragment.setPlanRoot(rightPlanRoot.getChild(0));
    }
    PlanFragment planFragment = visitPhysicalJoin(leftFragment, rightFragment, optExpr, context);
    if (needDealSort) {
        // Restore the original tree shape and hang the saved roots back under the join.
        leftExpression.setChild(0, optExpr.inputAt(0));
        rightExpression.setChild(0, optExpr.inputAt(1));
        optExpr.setChild(0, leftExpression);
        optExpr.setChild(1, rightExpression);
        planFragment.getPlanRoot().setChild(0, leftPlanRoot);
        planFragment.getPlanRoot().setChild(1, rightPlanRoot);
    }
    return planFragment;
}
// Resolves the spec's shuffle column ids back into ColumnRefOperators via the
// column-ref factory. The spec must carry at least one shuffle column.
private List<ColumnRefOperator> getShuffleColumns(HashDistributionSpec spec) {
    List<Integer> columnIds = spec.getShuffleColumns();
    Preconditions.checkState(!columnIds.isEmpty());
    return columnIds.stream()
            .map(columnRefFactory::getColumnRef)
            .collect(Collectors.toList());
}
// Translates each shuffle column of a hash distribution into an executable Expr.
private List<Expr> getShuffleExprs(HashDistributionSpec hashDistributionSpec, ExecPlan context) {
    List<Expr> exprs = new ArrayList<>();
    for (ColumnRefOperator column : getShuffleColumns(hashDistributionSpec)) {
        exprs.add(ScalarOperatorToExpr.buildExecExpression(column,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
    }
    return exprs;
}
// Common translation path for hash and merge joins: infers the distribution mode,
// builds the join conjuncts, creates the matching JoinNode, applies projection
// output pruning, and delegates fragment assembly to buildJoinFragment.
// Fix: removed the unused locals leftChildColumns/rightChildColumns, which were
// computed from the children's logical properties but never referenced.
private PlanFragment visitPhysicalJoin(PlanFragment leftFragment, PlanFragment rightFragment,
                                       OptExpression optExpr, ExecPlan context) {
    PhysicalJoinOperator node = (PhysicalJoinOperator) optExpr.getOp();
    JoinOperator joinOperator = node.getJoinType();
    Preconditions.checkState(!joinOperator.isCrossJoin(), "should not be cross join");
    // Look through DecodeNodes so distribution-mode inference sees the real roots.
    PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot();
    PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot();
    if (leftFragmentPlanRoot instanceof DecodeNode) {
        leftFragmentPlanRoot = leftFragmentPlanRoot.getChild(0);
    }
    if (rightFragmentPlanRoot instanceof DecodeNode) {
        rightFragmentPlanRoot = rightFragmentPlanRoot.getChild(0);
    }
    // Probe partition-by expressions only apply when both sides are hash-distributed.
    List<Expr> probePartitionByExprs = Lists.newArrayList();
    DistributionSpec leftDistributionSpec =
            optExpr.getRequiredProperties().get(0).getDistributionProperty().getSpec();
    DistributionSpec rightDistributionSpec =
            optExpr.getRequiredProperties().get(1).getDistributionProperty().getSpec();
    if (leftDistributionSpec instanceof HashDistributionSpec &&
            rightDistributionSpec instanceof HashDistributionSpec) {
        probePartitionByExprs = getShuffleExprs((HashDistributionSpec) leftDistributionSpec, context);
    }
    JoinNode.DistributionMode distributionMode =
            inferDistributionMode(optExpr, leftFragmentPlanRoot, rightFragmentPlanRoot);
    JoinExprInfo joinExpr = buildJoinExpr(optExpr, context);
    List<Expr> eqJoinConjuncts = joinExpr.eqJoinConjuncts;
    List<Expr> otherJoinConjuncts = joinExpr.otherJoin;
    List<Expr> conjuncts = joinExpr.conjuncts;
    setNullableForJoin(joinOperator, leftFragment, rightFragment, context);
    JoinNode joinNode;
    if (node instanceof PhysicalHashJoinOperator) {
        joinNode = new HashJoinNode(
                context.getNextNodeId(),
                leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
                joinOperator, eqJoinConjuncts, otherJoinConjuncts);
    } else if (node instanceof PhysicalMergeJoinOperator) {
        joinNode = new MergeJoinNode(
                context.getNextNodeId(),
                leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
                joinOperator, eqJoinConjuncts, otherJoinConjuncts);
    } else {
        throw new StarRocksPlannerException("unknown join operator: " + node, INTERNAL_ERROR);
    }
    // Restrict output slots to columns actually used by the projection, excluding
    // the common sub-operator keys (which are intermediate, not output).
    if (node.getProjection() != null) {
        ColumnRefSet outputColumns = new ColumnRefSet();
        for (ScalarOperator s : node.getProjection().getColumnRefMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        for (ScalarOperator s : node.getProjection().getCommonSubOperatorMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        outputColumns.except(new ArrayList<>(node.getProjection().getCommonSubOperatorMap().keySet()));
        joinNode.setOutputSlots(outputColumns.getStream().collect(Collectors.toList()));
    }
    joinNode.setDistributionMode(distributionMode);
    joinNode.getConjuncts().addAll(conjuncts);
    joinNode.setLimit(node.getLimit());
    joinNode.computeStatistics(optExpr.getStatistics());
    joinNode.setProbePartitionByExprs(probePartitionByExprs);
    if (shouldBuildGlobalRuntimeFilter()) {
        joinNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    return buildJoinFragment(context, leftFragment, rightFragment, distributionMode, joinNode);
}
// True only for ExchangeNodes whose distribution type matches the expected one.
private boolean isExchangeWithDistributionType(PlanNode node, DistributionSpec.DistributionType expectedType) {
    return node instanceof ExchangeNode
            && Objects.equals(((ExchangeNode) node).getDistributionType(), expectedType);
}
// A colocate join requires every required child property to be a hash (shuffle)
// distribution whose source is LOCAL.
private boolean isColocateJoin(OptExpression optExpression) {
    return optExpression.getRequiredProperties().stream().allMatch(property -> {
        if (!property.getDistributionProperty().isShuffle()) {
            return false;
        }
        HashDistributionSpec spec = (HashDistributionSpec) property.getDistributionProperty().getSpec();
        return HashDistributionDesc.SourceType.LOCAL.equals(spec.getHashDistributionDesc().getSourceType());
    });
}
// A shuffle join requires every required child property to be a hash (shuffle)
// distribution sourced from a shuffle (join/enforce/agg).
public boolean isShuffleJoin(OptExpression optExpression) {
    return optExpression.getRequiredProperties().stream().allMatch(property -> {
        if (!property.getDistributionProperty().isShuffle()) {
            return false;
        }
        HashDistributionSpec spec = (HashDistributionSpec) property.getDistributionProperty().getSpec();
        switch (spec.getHashDistributionDesc().getSourceType()) {
            case SHUFFLE_JOIN:
            case SHUFFLE_ENFORCE:
            case SHUFFLE_AGG:
                return true;
            default:
                return false;
        }
    });
}
// Merges the removed fragment into the staying one for a bucket-shuffle join:
// the join is marked local-hash-bucket, the removed side's sender is switched to
// BUCKET_SHUFFLE_HASH_PARTITIONED, and the staying fragment adopts the join node
// and the removed fragment's children/dicts.
public PlanFragment computeBucketShufflePlanFragment(ExecPlan context,
                                                     PlanFragment stayFragment,
                                                     PlanFragment removeFragment, JoinNode hashJoinNode) {
    hashJoinNode.setLocalHashBucket(true);
    hashJoinNode.setPartitionExprs(removeFragment.getDataPartition().getPartitionExprs());
    removeFragment.getChild(0)
            .setOutputPartition(new DataPartition(TPartitionType.BUCKET_SHUFFLE_HASH_PARTITIONED,
                    removeFragment.getDataPartition().getPartitionExprs()));
    // Re-append stayFragment so it remains last in the fragment list.
    context.getFragments().remove(removeFragment);
    context.getFragments().remove(stayFragment);
    context.getFragments().add(stayFragment);
    stayFragment.setPlanRoot(hashJoinNode);
    stayFragment.addChildren(removeFragment.getChildren());
    stayFragment.mergeQueryGlobalDicts(removeFragment.getQueryGlobalDicts());
    return stayFragment;
}
// Merges the removed fragment into the staying one for a shuffle hash-bucket
// join: the removed side's sender becomes HASH_PARTITIONED on the same partition
// expressions, and the staying fragment adopts the join node and the removed
// fragment's children/dicts.
public PlanFragment computeShuffleHashBucketPlanFragment(ExecPlan context,
                                                         PlanFragment stayFragment,
                                                         PlanFragment removeFragment,
                                                         JoinNode hashJoinNode) {
    hashJoinNode.setPartitionExprs(removeFragment.getDataPartition().getPartitionExprs());
    DataPartition dataPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
            removeFragment.getDataPartition().getPartitionExprs());
    removeFragment.getChild(0).setOutputPartition(dataPartition);
    // Re-append stayFragment so it remains last in the fragment list.
    context.getFragments().remove(removeFragment);
    context.getFragments().remove(stayFragment);
    context.getFragments().add(stayFragment);
    stayFragment.setPlanRoot(hashJoinNode);
    stayFragment.addChildren(removeFragment.getChildren());
    stayFragment.mergeQueryGlobalDicts(removeFragment.getQueryGlobalDicts());
    return stayFragment;
}
@Override
public PlanFragment visitPhysicalAssertOneRow(OptExpression optExpression, ExecPlan context) {
    // All slots of the input's tuples are marked nullable before the assert node
    // is added on top of the input fragment.
    PlanFragment inputFragment = visit(optExpression.inputAt(0), context);
    for (TupleId id : inputFragment.getPlanRoot().getTupleIds()) {
        for (SlotDescriptor slot : context.getDescTbl().getTupleDesc(id).getSlots()) {
            slot.setIsNullable(true);
        }
    }
    PhysicalAssertOneRowOperator assertOneRow = (PhysicalAssertOneRowOperator) optExpression.getOp();
    AssertNumRowsElement element = new AssertNumRowsElement(
            assertOneRow.getCheckRows(), assertOneRow.getTips(), assertOneRow.getAssertion());
    AssertNumRowsNode node = new AssertNumRowsNode(
            context.getNextNodeId(), inputFragment.getPlanRoot(), element);
    node.computeStatistics(optExpression.getStatistics());
    inputFragment.setPlanRoot(node);
    return inputFragment;
}
// Translates a window (analytic) operator: builds an output tuple with one slot
// per analytic call, translates partition/order expressions, and places an
// AnalyticEvalNode on top of the input fragment.
@Override
public PlanFragment visitPhysicalAnalytic(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalWindowOperator node = (PhysicalWindowOperator) optExpr.getOp();
    List<Expr> analyticFnCalls = new ArrayList<>();
    TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor();
    // One output slot per analytic function; the column ref is remapped to the
    // new slot for downstream expression resolution.
    for (Map.Entry<ColumnRefOperator, CallOperator> analyticCall : node.getAnalyticCall().entrySet()) {
        Expr analyticFunction = ScalarOperatorToExpr.buildExecExpression(analyticCall.getValue(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        analyticFnCalls.add(analyticFunction);
        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(outputTupleDesc, new SlotId(analyticCall.getKey().getId()));
        slotDesc.setType(analyticFunction.getType());
        slotDesc.setIsNullable(analyticFunction.isNullable());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr()
                .put(analyticCall.getKey(), new SlotRef(analyticCall.getKey().toString(), slotDesc));
    }
    List<Expr> partitionExprs =
            node.getPartitionExpressions().stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                    .collect(Collectors.toList());
    List<OrderByElement> orderByElements = node.getOrderByElements().stream().map(e -> new OrderByElement(
            ScalarOperatorToExpr.buildExecExpression(e.getColumnRef(),
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())),
            e.isAscending(), e.isNullsFirst())).collect(Collectors.toList());
    AnalyticEvalNode analyticEvalNode = new AnalyticEvalNode(
            context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            analyticFnCalls,
            partitionExprs,
            orderByElements,
            node.getAnalyticWindow(),
            node.isUseHashBasedPartition(),
            null, outputTupleDesc, null, null,
            context.getDescTbl().createTupleDescriptor());
    analyticEvalNode.setSubstitutedPartitionExprs(partitionExprs);
    analyticEvalNode.setLimit(node.getLimit());
    analyticEvalNode.setHasNullableGenerateChild();
    analyticEvalNode.computeStatistics(optExpr.getStatistics());
    if (hasColocateOlapScanChildInFragment(analyticEvalNode)) {
        analyticEvalNode.setColocate(true);
    }
    // Predicates on the window operator become conjuncts on the eval node.
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        analyticEvalNode.getConjuncts()
                .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }
    // If the input is a sort, let it know the analytic partition expressions.
    PlanNode root = inputFragment.getPlanRoot();
    if (root instanceof SortNode) {
        SortNode sortNode = (SortNode) root;
        sortNode.setAnalyticPartitionExprs(analyticEvalNode.getPartitionExprs());
    }
    if (optExpr.getLogicalProperty().oneTabletProperty().supportOneTabletOpt) {
        clearOlapScanNodePartitions(analyticEvalNode);
    }
    inputFragment.setPlanRoot(analyticEvalNode);
    return inputFragment;
}
// Common translation for UNION / EXCEPT / INTERSECT: builds the output tuple,
// creates the matching SetOperationNode, and connects every child fragment to it
// through an exchange. UNION children stay RANDOM-partitioned; EXCEPT/INTERSECT
// children are hash-partitioned on their materialized output expressions.
private PlanFragment buildSetOperation(OptExpression optExpr, ExecPlan context, OperatorType operatorType) {
    PhysicalSetOperation setOperation = (PhysicalSetOperation) optExpr.getOp();
    TupleDescriptor setOperationTuple = context.getDescTbl().createTupleDescriptor();
    for (ColumnRefOperator columnRefOperator : setOperation.getOutputColumnRefOp()) {
        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(setOperationTuple, new SlotId(columnRefOperator.getId()));
        slotDesc.setType(columnRefOperator.getType());
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(columnRefOperator.isNullable());
        context.getColRefToExpr().put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDesc));
    }
    SetOperationNode setOperationNode;
    boolean isUnion = false;
    if (operatorType.equals(OperatorType.PHYSICAL_UNION)) {
        isUnion = true;
        setOperationNode = new UnionNode(context.getNextNodeId(), setOperationTuple.getId());
        setOperationNode.setFirstMaterializedChildIdx_(optExpr.arity());
    } else if (operatorType.equals(OperatorType.PHYSICAL_EXCEPT)) {
        setOperationNode = new ExceptNode(context.getNextNodeId(), setOperationTuple.getId());
    } else if (operatorType.equals(OperatorType.PHYSICAL_INTERSECT)) {
        setOperationNode = new IntersectNode(context.getNextNodeId(), setOperationTuple.getId());
    } else {
        throw new StarRocksPlannerException("Unsupported set operation", INTERNAL_ERROR);
    }
    // For each child, map every output slot id to the child's corresponding slot id.
    List<Map<Integer, Integer>> outputSlotIdToChildSlotIdMaps = new ArrayList<>();
    for (int childIdx = 0; childIdx < optExpr.arity(); ++childIdx) {
        Map<Integer, Integer> slotIdMap = new HashMap<>();
        List<ColumnRefOperator> childOutput = setOperation.getChildOutputColumns().get(childIdx);
        Preconditions.checkState(childOutput.size() == setOperation.getOutputColumnRefOp().size());
        for (int columnIdx = 0; columnIdx < setOperation.getOutputColumnRefOp().size(); ++columnIdx) {
            Integer resultColumnIdx = setOperation.getOutputColumnRefOp().get(columnIdx).getId();
            slotIdMap.put(resultColumnIdx, childOutput.get(columnIdx).getId());
        }
        outputSlotIdToChildSlotIdMaps.add(slotIdMap);
        Preconditions.checkState(slotIdMap.size() == setOperation.getOutputColumnRefOp().size());
    }
    setOperationNode.setOutputSlotIdToChildSlotIdMaps(outputSlotIdToChildSlotIdMaps);
    Preconditions.checkState(optExpr.getInputs().size() == setOperation.getChildOutputColumns().size());
    PlanFragment setOperationFragment =
            new PlanFragment(context.getNextFragmentId(), setOperationNode, DataPartition.RANDOM);
    List<List<Expr>> materializedResultExprLists = Lists.newArrayList();
    for (int i = 0; i < optExpr.getInputs().size(); i++) {
        List<ColumnRefOperator> childOutput = setOperation.getChildOutputColumns().get(i);
        PlanFragment fragment = visit(optExpr.getInputs().get(i), context);
        List<Expr> materializedExpressions = Lists.newArrayList();
        for (ColumnRefOperator ref : childOutput) {
            SlotDescriptor slotDescriptor = context.getDescTbl().getSlotDesc(new SlotId(ref.getId()));
            materializedExpressions.add(new SlotRef(slotDescriptor));
        }
        materializedResultExprLists.add(materializedExpressions);
        if (isUnion) {
            fragment.setOutputPartition(DataPartition.RANDOM);
        } else {
            // EXCEPT/INTERSECT need matching rows on the same instance.
            fragment.setOutputPartition(DataPartition.hashPartitioned(materializedExpressions));
        }
        ExchangeNode exchangeNode =
                new ExchangeNode(context.getNextNodeId(), fragment.getPlanRoot(), fragment.getDataPartition());
        exchangeNode.setFragment(setOperationFragment);
        fragment.setDestination(exchangeNode);
        setOperationNode.addChild(exchangeNode);
    }
    // Propagate child nullability into the output slots before layout.
    setOperationNode.setHasNullableGenerateChild();
    List<Expr> setOutputList = Lists.newArrayList();
    for (ColumnRefOperator columnRefOperator : setOperation.getOutputColumnRefOp()) {
        SlotDescriptor slotDesc = context.getDescTbl().getSlotDesc(new SlotId(columnRefOperator.getId()));
        slotDesc.setIsNullable(slotDesc.getIsNullable() | setOperationNode.isHasNullableGenerateChild());
        setOutputList.add(new SlotRef(String.valueOf(columnRefOperator.getId()), slotDesc));
    }
    setOperationTuple.computeMemLayout();
    setOperationNode.setSetOperationOutputList(setOutputList);
    setOperationNode.setMaterializedResultExprLists_(materializedResultExprLists);
    setOperationNode.setLimit(setOperation.getLimit());
    setOperationNode.computeStatistics(optExpr.getStatistics());
    context.getFragments().add(setOperationFragment);
    return setOperationFragment;
}
// UNION is handled by the shared set-operation builder.
@Override
public PlanFragment visitPhysicalUnion(OptExpression optExpr, ExecPlan context) {
    return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_UNION);
}
// EXCEPT is handled by the shared set-operation builder.
@Override
public PlanFragment visitPhysicalExcept(OptExpression optExpr, ExecPlan context) {
    return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_EXCEPT);
}
// INTERSECT is handled by the shared set-operation builder.
@Override
public PlanFragment visitPhysicalIntersect(OptExpression optExpr, ExecPlan context) {
    return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_INTERSECT);
}
// Translates a repeat (grouping sets) operator: builds the output grouping tuple,
// collects the slot-id sets for each repeat list, and places a RepeatNode with
// any predicates on top of the input fragment.
@Override
public PlanFragment visitPhysicalRepeat(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalRepeatOperator repeatOperator = (PhysicalRepeatOperator) optExpr.getOp();
    TupleDescriptor outputGroupingTuple = context.getDescTbl().createTupleDescriptor();
    for (ColumnRefOperator columnRefOperator : repeatOperator.getOutputGrouping()) {
        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(outputGroupingTuple, new SlotId(columnRefOperator.getId()));
        slotDesc.setType(columnRefOperator.getType());
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(columnRefOperator.isNullable());
        context.getColRefToExpr().put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDesc));
    }
    outputGroupingTuple.computeMemLayout();
    // One slot-id set per repeat list (grouping set).
    List<Set<Integer>> repeatSlotIdList = new ArrayList<>();
    for (List<ColumnRefOperator> repeat : repeatOperator.getRepeatColumnRef()) {
        repeatSlotIdList.add(
                repeat.stream().map(ColumnRefOperator::getId).collect(Collectors.toSet()));
    }
    RepeatNode repeatNode = new RepeatNode(
            context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            outputGroupingTuple,
            repeatSlotIdList,
            repeatOperator.getGroupingIds());
    List<ScalarOperator> predicates = Utils.extractConjuncts(repeatOperator.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        repeatNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }
    repeatNode.computeStatistics(optExpr.getStatistics());
    inputFragment.setPlanRoot(repeatNode);
    return inputFragment;
}
@Override
public PlanFragment visitPhysicalFilter(OptExpression optExpr, ExecPlan context) {
    // Wrap the child plan in a SelectNode carrying the filter's conjuncts.
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalFilterOperator filter = (PhysicalFilterOperator) optExpr.getOp();
    List<Expr> predicates = Lists.newArrayList();
    for (ScalarOperator conjunct : Utils.extractConjuncts(filter.getPredicate())) {
        predicates.add(ScalarOperatorToExpr.buildExecExpression(conjunct,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
    }
    SelectNode selectNode =
            new SelectNode(context.getNextNodeId(), inputFragment.getPlanRoot(), predicates);
    selectNode.setLimit(filter.getLimit());
    selectNode.computeStatistics(optExpr.getStatistics());
    inputFragment.setPlanRoot(selectNode);
    return inputFragment;
}
// Translates a table function (UDTF) operator: builds the UDTF output tuple and
// places a TableFunctionNode on top of the input fragment, passing the function
// parameter, outer, and result column ids.
@Override
public PlanFragment visitPhysicalTableFunction(OptExpression optExpression, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpression.inputAt(0), context);
    PhysicalTableFunctionOperator physicalTableFunction = (PhysicalTableFunctionOperator) optExpression.getOp();
    TupleDescriptor udtfOutputTuple = context.getDescTbl().createTupleDescriptor();
    for (ColumnRefOperator columnRefOperator : physicalTableFunction.getOutputColRefs()) {
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(udtfOutputTuple, new SlotId(columnRefOperator.getId()));
        slotDesc.setType(columnRefOperator.getType());
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(columnRefOperator.isNullable());
        context.getColRefToExpr().put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDesc));
    }
    udtfOutputTuple.computeMemLayout();
    TableFunctionNode tableFunctionNode = new TableFunctionNode(context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            udtfOutputTuple,
            physicalTableFunction.getFn(),
            physicalTableFunction.getFnParamColumnRefs().stream().map(ColumnRefOperator::getId)
                    .collect(Collectors.toList()),
            physicalTableFunction.getOuterColRefs().stream().map(ColumnRefOperator::getId)
                    .collect(Collectors.toList()),
            physicalTableFunction.getFnResultColRefs().stream().map(ColumnRefOperator::getId)
                    .collect(Collectors.toList())
    );
    tableFunctionNode.computeStatistics(optExpression.getStatistics());
    tableFunctionNode.setLimit(physicalTableFunction.getLimit());
    inputFragment.setPlanRoot(tableFunctionNode);
    return inputFragment;
}
// Limit adds no node of its own here; the child is translated as-is.
@Override
public PlanFragment visitPhysicalLimit(OptExpression optExpression, ExecPlan context) {
    return visit(optExpression.inputAt(0), context);
}
// Translates a CTE consume: creates a new fragment fed by an exchange from the
// CTE's multi-cast produce fragment, projects the CTE output columns, and applies
// any predicate and limit carried by the consume operator.
@Override
public PlanFragment visitPhysicalCTEConsume(OptExpression optExpression, ExecPlan context) {
    PhysicalCTEConsumeOperator consume = (PhysicalCTEConsumeOperator) optExpression.getOp();
    int cteId = consume.getCteId();
    MultiCastPlanFragment cteFragment = (MultiCastPlanFragment) context.getCteProduceFragments().get(cteId);
    ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
            cteFragment.getPlanRoot(), DistributionSpec.DistributionType.SHUFFLE);
    exchangeNode.setReceiveColumns(consume.getCteOutputColumnRefMap().values().stream()
            .map(ColumnRefOperator::getId).collect(Collectors.toList()));
    exchangeNode.setDataPartition(cteFragment.getDataPartition());
    exchangeNode.setNumInstances(cteFragment.getPlanRoot().getNumInstances());
    PlanFragment consumeFragment = new PlanFragment(context.getNextFragmentId(), exchangeNode,
            cteFragment.getDataPartition());
    // Project the produce-side columns to the consume-side column refs.
    Map<ColumnRefOperator, ScalarOperator> projectMap = Maps.newHashMap();
    projectMap.putAll(consume.getCteOutputColumnRefMap());
    consumeFragment = buildProjectNode(optExpression, new Projection(projectMap), consumeFragment, context);
    consumeFragment.setQueryGlobalDicts(cteFragment.getQueryGlobalDicts());
    consumeFragment.setLoadGlobalDicts(cteFragment.getLoadGlobalDicts());
    // A predicate on the consume becomes a SelectNode above the projection.
    if (consume.getPredicate() != null) {
        List<Expr> predicates = Utils.extractConjuncts(consume.getPredicate()).stream()
                .map(d -> ScalarOperatorToExpr.buildExecExpression(d,
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                .collect(Collectors.toList());
        SelectNode selectNode =
                new SelectNode(context.getNextNodeId(), consumeFragment.getPlanRoot(), predicates);
        selectNode.computeStatistics(optExpression.getStatistics());
        consumeFragment.setPlanRoot(selectNode);
    }
    if (consume.hasLimit()) {
        consumeFragment.getPlanRoot().setLimit(consume.getLimit());
    }
    // Register this consumer as one of the multi-cast destinations.
    cteFragment.getDestNodeList().add(exchangeNode);
    consumeFragment.addChild(cteFragment);
    context.getFragments().add(consumeFragment);
    return consumeFragment;
}
// Translates a CTE produce: wraps the child fragment in a MultiCastPlanFragment
// (replacing the child in the fragment list), sets its output expressions from
// the produce's output columns, and registers it under the CTE id for consumers.
@Override
public PlanFragment visitPhysicalCTEProduce(OptExpression optExpression, ExecPlan context) {
    PlanFragment child = visit(optExpression.inputAt(0), context);
    int cteId = ((PhysicalCTEProduceOperator) optExpression.getOp()).getCteId();
    context.getFragments().remove(child);
    MultiCastPlanFragment cteProduce = new MultiCastPlanFragment(child);
    List<Expr> outputs = Lists.newArrayList();
    optExpression.getOutputColumns().getStream()
            .forEach(i -> outputs.add(context.getColRefToExpr().get(columnRefFactory.getColumnRef(i))));
    cteProduce.setOutputExprs(outputs);
    context.getCteProduceFragments().put(cteId, cteProduce);
    context.getFragments().add(cteProduce);
    return child;
}
@Override
public PlanFragment visitPhysicalCTEAnchor(OptExpression optExpression, ExecPlan context) {
    // Translate the CTE producer first for its side effects (it registers the produce
    // fragment in the context); the anchor's own result is the translated anchor body.
    visit(optExpression.inputAt(0), context);
    PlanFragment anchorBodyFragment = visit(optExpression.inputAt(1), context);
    return anchorBodyFragment;
}
@Override
public PlanFragment visitPhysicalNoCTE(OptExpression optExpression, ExecPlan context) {
    // NoCTE is a pass-through wrapper: translate and return its single child.
    return visit(optExpression.inputAt(0), context);
}
/**
 * Holder for the three predicate groups produced when translating a join
 * (see {@code buildJoinExpr}): equality ON conjuncts, the remaining
 * non-equality ON conjuncts, and post-join filter conjuncts.
 */
static class JoinExprInfo {
    // Equality predicates from the ON clause that drive the hash join.
    public final List<Expr> eqJoinConjuncts;
    // Non-equality predicates from the ON clause.
    public final List<Expr> otherJoin;
    // Predicates from the operator's own predicate, applied after the join.
    public final List<Expr> conjuncts;
    public JoinExprInfo(List<Expr> eqJoinConjuncts, List<Expr> otherJoin, List<Expr> conjuncts) {
        this.eqJoinConjuncts = eqJoinConjuncts;
        this.otherJoin = otherJoin;
        this.conjuncts = conjuncts;
    }
}
/**
 * Split a join's predicates into the three groups a JoinNode needs: equality ON
 * conjuncts (normalized so the first operand references the left child), the
 * remaining non-equality ON conjuncts, and post-join filter conjuncts.
 */
private JoinExprInfo buildJoinExpr(OptExpression optExpr, ExecPlan context) {
    ScalarOperator predicate = optExpr.getOp().getPredicate();
    ScalarOperator onPredicate;
    if (optExpr.getOp() instanceof PhysicalJoinOperator) {
        onPredicate = ((PhysicalJoinOperator) optExpr.getOp()).getOnPredicate();
    } else if (optExpr.getOp() instanceof PhysicalStreamJoinOperator) {
        onPredicate = ((PhysicalStreamJoinOperator) optExpr.getOp()).getOnPredicate();
    } else {
        throw new IllegalStateException("not supported join " + optExpr.getOp());
    }

    List<ScalarOperator> onConjuncts = Utils.extractConjuncts(onPredicate);
    ColumnRefSet leftOutputColumns = optExpr.inputAt(0).getOutputColumns();
    ColumnRefSet rightOutputColumns = optExpr.inputAt(1).getOutputColumns();
    List<BinaryPredicateOperator> eqOnPredicates =
            JoinHelper.getEqualsPredicate(leftOutputColumns, rightOutputColumns, onConjuncts);
    Preconditions.checkState(!eqOnPredicates.isEmpty(), "must be eq-join");

    // Normalize each equality so its first child belongs to the left input.
    for (BinaryPredicateOperator eqPredicate : eqOnPredicates) {
        boolean leftSideFirst = optExpr.inputAt(0).getLogicalProperty().getOutputColumns()
                .containsAll(eqPredicate.getChild(0).getUsedColumns());
        if (!leftSideFirst) {
            eqPredicate.swap();
        }
    }

    List<Expr> eqJoinConjuncts = eqOnPredicates.stream()
            .map(p -> ScalarOperatorToExpr.buildExecExpression(p,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
            .collect(Collectors.toList());
    eqJoinConjuncts.forEach(expr -> {
        if (expr.isConstant()) {
            throw unsupportedException("Support join on constant predicate later");
        }
    });

    // Everything in the ON clause that is not an equality becomes an "other join" conjunct.
    List<ScalarOperator> nonEqOnConjuncts = Utils.extractConjuncts(onPredicate);
    nonEqOnConjuncts.removeAll(eqOnPredicates);
    List<Expr> otherJoinConjuncts = nonEqOnConjuncts.stream()
            .map(p -> ScalarOperatorToExpr.buildExecExpression(p,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
            .collect(Collectors.toList());

    // Post-join filters derived from the operator's own predicate.
    List<Expr> conjuncts = Utils.extractConjuncts(predicate).stream()
            .map(p -> ScalarOperatorToExpr.buildExecExpression(p,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
            .collect(Collectors.toList());

    return new JoinExprInfo(eqJoinConjuncts, otherJoinConjuncts, conjuncts);
}
@Override
public PlanFragment visitPhysicalStreamJoin(OptExpression optExpr, ExecPlan context) {
    PhysicalStreamJoinOperator node = (PhysicalStreamJoinOperator) optExpr.getOp();
    PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);

    // Stream join currently supports inner join only. Because of this guard, the
    // outer-join slot-nullability adjustment that used to follow was dead code and
    // has been removed (it only triggered for left/right/full outer joins).
    if (!node.getJoinType().isInnerJoin()) {
        throw new NotImplementedException("Only inner join is supported");
    }

    PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot();
    PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot();
    // NOTE(review): stream join currently always shuffles both sides; the distribution
    // mode is hard-coded rather than inferred — confirm this is intentional.
    JoinNode.DistributionMode distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;

    JoinExprInfo joinExpr = buildJoinExpr(optExpr, context);
    List<Expr> eqJoinConjuncts = joinExpr.eqJoinConjuncts;
    List<Expr> otherJoinConjuncts = joinExpr.otherJoin;
    List<Expr> conjuncts = joinExpr.conjuncts;

    JoinNode joinNode =
            new StreamJoinNode(context.getNextNodeId(), leftFragmentPlanRoot, rightFragmentPlanRoot,
                    node.getJoinType(), eqJoinConjuncts, otherJoinConjuncts);

    // Restrict the join's output slots to the columns actually referenced by the projection.
    if (node.getProjection() != null) {
        ColumnRefSet outputColumns = new ColumnRefSet();
        for (ScalarOperator s : node.getProjection().getColumnRefMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        for (ScalarOperator s : node.getProjection().getCommonSubOperatorMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        // Common sub-operator keys are intermediate results, not join outputs.
        outputColumns.except(new ArrayList<>(node.getProjection().getCommonSubOperatorMap().keySet()));
        joinNode.setOutputSlots(outputColumns.getStream().collect(Collectors.toList()));
    }

    joinNode.setDistributionMode(distributionMode);
    joinNode.getConjuncts().addAll(conjuncts);
    joinNode.setLimit(node.getLimit());
    joinNode.computeStatistics(optExpr.getStatistics());

    return buildJoinFragment(context, leftFragment, rightFragment, distributionMode, joinNode);
}
/**
 * Attach {@code joinNode} to the plan and assemble the resulting fragment according
 * to the join's distribution mode, fixing up the fragment list in {@code context}.
 */
@NotNull
private PlanFragment buildJoinFragment(ExecPlan context, PlanFragment leftFragment, PlanFragment rightFragment,
                                       JoinNode.DistributionMode distributionMode, JoinNode joinNode) {
    switch (distributionMode) {
        case BROADCAST: {
            // Fold the broadcast (right) side into the left fragment.
            setJoinPushDown(joinNode);
            rightFragment.getPlanRoot().setFragment(leftFragment);
            context.getFragments().remove(rightFragment);
            context.getFragments().remove(leftFragment);
            context.getFragments().add(leftFragment);
            leftFragment.setPlanRoot(joinNode);
            leftFragment.addChildren(rightFragment.getChildren());
            leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
            return leftFragment;
        }
        case PARTITIONED: {
            // Both sides are shuffled on the join keys; build a brand-new join fragment.
            DataPartition lhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                    leftFragment.getDataPartition().getPartitionExprs());
            DataPartition rhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                    rightFragment.getDataPartition().getPartitionExprs());
            leftFragment.getChild(0).setOutputPartition(lhsJoinPartition);
            rightFragment.getChild(0).setOutputPartition(rhsJoinPartition);
            context.getFragments().remove(leftFragment);
            context.getFragments().remove(rightFragment);
            PlanFragment joinFragment =
                    new PlanFragment(context.getNextFragmentId(), joinNode, lhsJoinPartition);
            joinFragment.addChildren(leftFragment.getChildren());
            joinFragment.addChildren(rightFragment.getChildren());
            joinFragment.mergeQueryGlobalDicts(leftFragment.getQueryGlobalDicts());
            joinFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
            context.getFragments().add(joinFragment);
            return joinFragment;
        }
        case COLOCATE:
        case REPLICATED: {
            if (distributionMode.equals(JoinNode.DistributionMode.COLOCATE)) {
                joinNode.setColocate(true, "");
            } else {
                joinNode.setReplicated(true);
            }
            setJoinPushDown(joinNode);
            joinNode.setChild(0, leftFragment.getPlanRoot());
            joinNode.setChild(1, rightFragment.getPlanRoot());
            leftFragment.setPlanRoot(joinNode);
            leftFragment.addChildren(rightFragment.getChildren());
            context.getFragments().remove(rightFragment);
            context.getFragments().remove(leftFragment);
            context.getFragments().add(leftFragment);
            leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
            return leftFragment;
        }
        case SHUFFLE_HASH_BUCKET: {
            setJoinPushDown(joinNode);
            boolean leftIsExchange = leftFragment.getPlanRoot() instanceof ExchangeNode;
            boolean rightIsExchange = rightFragment.getPlanRoot() instanceof ExchangeNode;
            if (!leftIsExchange && !rightIsExchange) {
                // Neither side shuffled: merge the right fragment into the left.
                joinNode.setChild(0, leftFragment.getPlanRoot());
                joinNode.setChild(1, rightFragment.getPlanRoot());
                leftFragment.setPlanRoot(joinNode);
                leftFragment.addChildren(rightFragment.getChildren());
                context.getFragments().remove(rightFragment);
                context.getFragments().remove(leftFragment);
                context.getFragments().add(leftFragment);
                leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
                return leftFragment;
            }
            if (leftIsExchange && !rightIsExchange) {
                // Build on the non-exchange (right) side.
                return computeShuffleHashBucketPlanFragment(context, rightFragment, leftFragment, joinNode);
            }
            return computeShuffleHashBucketPlanFragment(context, leftFragment, rightFragment, joinNode);
        }
        default: {
            // Remaining mode (local hash bucket): bucket-shuffle onto the non-exchange side.
            setJoinPushDown(joinNode);
            if (leftFragment.getPlanRoot() instanceof ExchangeNode &&
                    !(rightFragment.getPlanRoot() instanceof ExchangeNode)) {
                return computeBucketShufflePlanFragment(context, rightFragment, leftFragment, joinNode);
            }
            return computeBucketShufflePlanFragment(context, leftFragment, rightFragment, joinNode);
        }
    }
}
/**
 * Decide the join's distribution mode from the shape of both child plan roots:
 * shuffle-on-both-sides => PARTITIONED, broadcast exchange on the right => BROADCAST,
 * no exchange on either side => colocate / replicated / shuffle-hash-bucket,
 * otherwise shuffle-hash-bucket or local-hash-bucket.
 */
@NotNull
private JoinNode.DistributionMode inferDistributionMode(OptExpression optExpr, PlanNode leftFragmentPlanRoot,
                                                        PlanNode rightFragmentPlanRoot) {
    if (isExchangeWithDistributionType(leftFragmentPlanRoot, DistributionSpec.DistributionType.SHUFFLE)
            && isExchangeWithDistributionType(rightFragmentPlanRoot,
            DistributionSpec.DistributionType.SHUFFLE)) {
        return JoinNode.DistributionMode.PARTITIONED;
    }
    if (isExchangeWithDistributionType(rightFragmentPlanRoot, DistributionSpec.DistributionType.BROADCAST)) {
        return JoinNode.DistributionMode.BROADCAST;
    }
    if (!(leftFragmentPlanRoot instanceof ExchangeNode) &&
            !(rightFragmentPlanRoot instanceof ExchangeNode)) {
        if (isColocateJoin(optExpr)) {
            return HashJoinNode.DistributionMode.COLOCATE;
        }
        if (ConnectContext.get().getSessionVariable().isEnableReplicationJoin() &&
                rightFragmentPlanRoot.canDoReplicatedJoin()) {
            return JoinNode.DistributionMode.REPLICATED;
        }
        if (isShuffleJoin(optExpr)) {
            return JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;
        }
        // checkState(false, ...) always throws; the return below only satisfies the compiler.
        Preconditions.checkState(false, "Must be colocate/bucket/replicate join");
        return JoinNode.DistributionMode.COLOCATE;
    }
    if (isShuffleJoin(optExpr)) {
        return JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;
    }
    return JoinNode.DistributionMode.LOCAL_HASH_BUCKET;
}
@Override
public PlanFragment visitPhysicalStreamAgg(OptExpression optExpr, ExecPlan context) {
    PhysicalStreamAggOperator node = (PhysicalStreamAggOperator) optExpr.getOp();
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);

    // Build the aggregation tuple; the same descriptor serves as both intermediate
    // and output tuple, and the aggregation runs as a single FIRST phase.
    TupleDescriptor aggTuple = context.getDescTbl().createTupleDescriptor();
    AggregateExprInfo aggExprInfo =
            buildAggregateTuple(node.getAggregations(), node.getGroupBys(), null, aggTuple, context);
    AggregateInfo aggInfo = AggregateInfo.create(
            aggExprInfo.groupExpr, aggExprInfo.aggregateExpr, aggTuple, aggTuple,
            AggregateInfo.AggPhase.FIRST);

    StreamAggNode aggNode = new StreamAggNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
    aggNode.setHasNullableGenerateChild();
    aggNode.computeStatistics(optExpr.getStatistics());
    inputFragment.setPlanRoot(aggNode);
    return inputFragment;
}
@Override
public PlanFragment visitPhysicalStreamScan(OptExpression optExpr, ExecPlan context) {
    PhysicalStreamScanOperator node = (PhysicalStreamScanOperator) optExpr.getOp();
    OlapTable table = (OlapTable) node.getTable();
    context.getDescTbl().addReferencedTable(table);

    TupleDescriptor tupleDesc = context.getDescTbl().createTupleDescriptor();
    tupleDesc.setTable(table);

    BinlogScanNode scanNode = new BinlogScanNode(context.getNextNodeId(), tupleDesc);
    scanNode.computeStatistics(optExpr.getStatistics());
    try {
        scanNode.computeScanRanges();
    } catch (UserException e) {
        throw new StarRocksPlannerException(
                "Failed to compute scan ranges for StreamScanNode, " + e.getMessage(), INTERNAL_ERROR);
    }

    // Materialize one slot per referenced column and register it for expression translation.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        ColumnRefOperator colRef = entry.getKey();
        Column column = entry.getValue();
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(tupleDesc, new SlotId(colRef.getId()));
        slotDesc.setColumn(column);
        slotDesc.setIsNullable(column.isAllowNull());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr().put(colRef, new SlotRef(colRef.toString(), slotDesc));
    }

    // Translate scan predicates into executable conjuncts on the scan node.
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    Utils.extractConjuncts(node.getPredicate()).forEach(p ->
            scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(p, formatterContext)));

    tupleDesc.computeMemLayout();
    context.getScanNodes().add(scanNode);

    PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
} | class PhysicalPlanTranslator extends OptExpressionVisitor<PlanFragment, ExecPlan> {
private final ColumnRefFactory columnRefFactory;
private final IdGenerator<RuntimeFilterId> runtimeFilterIdIdGenerator = RuntimeFilterId.createGenerator();
private boolean canUseLocalShuffleAgg = true;
public PhysicalPlanTranslator(ColumnRefFactory columnRefFactory) {
this.columnRefFactory = columnRefFactory;
}
public PlanFragment translate(OptExpression optExpression, ExecPlan context) {
return visit(optExpression, context);
}
@Override
public PlanFragment visit(OptExpression optExpression, ExecPlan context) {
canUseLocalShuffleAgg &= optExpression.arity() <= 1;
PlanFragment fragment = optExpression.getOp().accept(this, optExpression, context);
Projection projection = (optExpression.getOp()).getProjection();
if (projection == null) {
return fragment;
} else {
return buildProjectNode(optExpression, projection, fragment, context);
}
}
private void setUnUsedOutputColumns(PhysicalOlapScanOperator node, OlapScanNode scanNode,
List<ScalarOperator> predicates, OlapTable referenceTable) {
if (!ConnectContext.get().getSessionVariable().isEnableFilterUnusedColumnsInScanStage()) {
return;
}
MaterializedIndexMeta materializedIndexMeta =
referenceTable.getIndexMetaByIndexId(node.getSelectedIndexId());
if (materializedIndexMeta.getKeysType().isAggregationFamily() && !node.isPreAggregation()) {
return;
}
List<ColumnRefOperator> outputColumns = node.getOutputColumns();
if (outputColumns.isEmpty()) {
return;
}
Set<Integer> outputColumnIds = new HashSet<Integer>();
for (ColumnRefOperator colref : outputColumns) {
outputColumnIds.add(colref.getId());
}
Set<Integer> singlePredColumnIds = new HashSet<Integer>();
Set<Integer> complexPredColumnIds = new HashSet<Integer>();
Set<String> aggOrPrimaryKeyTableValueColumnNames = new HashSet<String>();
if (materializedIndexMeta.getKeysType().isAggregationFamily() ||
materializedIndexMeta.getKeysType() == KeysType.PRIMARY_KEYS) {
aggOrPrimaryKeyTableValueColumnNames =
materializedIndexMeta.getSchema().stream()
.filter(col -> !col.isKey())
.map(Column::getName)
.collect(Collectors.toSet());
}
for (ScalarOperator predicate : predicates) {
ColumnRefSet usedColumns = predicate.getUsedColumns();
if (DecodeVisitor.isSimpleStrictPredicate(predicate)) {
for (int cid : usedColumns.getColumnIds()) {
singlePredColumnIds.add(cid);
}
} else {
for (int cid : usedColumns.getColumnIds()) {
complexPredColumnIds.add(cid);
}
}
}
Set<Integer> unUsedOutputColumnIds = new HashSet<Integer>();
Map<Integer, Integer> dictStringIdToIntIds = node.getDictStringIdToIntIds();
for (Integer cid : singlePredColumnIds) {
Integer newCid = cid;
if (dictStringIdToIntIds.containsKey(cid)) {
newCid = dictStringIdToIntIds.get(cid);
}
if (!complexPredColumnIds.contains(newCid) && !outputColumnIds.contains(newCid)) {
unUsedOutputColumnIds.add(newCid);
}
}
scanNode.setUnUsedOutputStringColumns(unUsedOutputColumnIds, aggOrPrimaryKeyTableValueColumnNames);
}
@Override
public PlanFragment visitPhysicalProject(OptExpression optExpr, ExecPlan context) {
PhysicalProjectOperator node = (PhysicalProjectOperator) optExpr.getOp();
PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
Preconditions.checkState(!node.getColumnRefMap().isEmpty());
TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
Map<SlotId, Expr> commonSubOperatorMap = Maps.newHashMap();
for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getCommonSubOperatorMap().entrySet()) {
Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(),
node.getCommonSubOperatorMap()));
commonSubOperatorMap.put(new SlotId(entry.getKey().getId()), expr);
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
slotDescriptor.setIsNullable(expr.isNullable());
slotDescriptor.setIsMaterialized(false);
slotDescriptor.setType(expr.getType());
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
}
Map<SlotId, Expr> projectMap = Maps.newHashMap();
for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getColumnRefMap().entrySet()) {
Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getColumnRefMap()));
projectMap.put(new SlotId(entry.getKey().getId()), expr);
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
slotDescriptor.setIsNullable(expr.isNullable());
slotDescriptor.setIsMaterialized(true);
slotDescriptor.setType(expr.getType());
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
}
ProjectNode projectNode =
new ProjectNode(context.getNextNodeId(),
tupleDescriptor,
inputFragment.getPlanRoot(),
projectMap,
commonSubOperatorMap);
projectNode.setHasNullableGenerateChild();
projectNode.computeStatistics(optExpr.getStatistics());
for (SlotId sid : projectMap.keySet()) {
SlotDescriptor slotDescriptor = tupleDescriptor.getSlot(sid.asInt());
slotDescriptor.setIsNullable(slotDescriptor.getIsNullable() | projectNode.isHasNullableGenerateChild());
}
tupleDescriptor.computeMemLayout();
projectNode.setLimit(inputFragment.getPlanRoot().getLimit());
inputFragment.setPlanRoot(projectNode);
return inputFragment;
}
public PlanFragment buildProjectNode(OptExpression optExpression, Projection node, PlanFragment inputFragment,
ExecPlan context) {
if (node == null) {
return inputFragment;
}
Preconditions.checkState(!node.getColumnRefMap().isEmpty());
TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
Map<SlotId, Expr> commonSubOperatorMap = Maps.newHashMap();
for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getCommonSubOperatorMap().entrySet()) {
Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(),
node.getCommonSubOperatorMap()));
commonSubOperatorMap.put(new SlotId(entry.getKey().getId()), expr);
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
slotDescriptor.setIsNullable(expr.isNullable());
slotDescriptor.setIsMaterialized(false);
slotDescriptor.setType(expr.getType());
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
}
Map<SlotId, Expr> projectMap = Maps.newHashMap();
for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getColumnRefMap().entrySet()) {
Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getColumnRefMap()));
projectMap.put(new SlotId(entry.getKey().getId()), expr);
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
slotDescriptor.setIsNullable(expr.isNullable());
slotDescriptor.setIsMaterialized(true);
slotDescriptor.setType(expr.getType());
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
}
ProjectNode projectNode =
new ProjectNode(context.getNextNodeId(),
tupleDescriptor,
inputFragment.getPlanRoot(),
projectMap,
commonSubOperatorMap);
projectNode.setHasNullableGenerateChild();
Statistics statistics = optExpression.getStatistics();
Statistics.Builder b = Statistics.builder();
b.setOutputRowCount(statistics.getOutputRowCount());
b.addColumnStatisticsFromOtherStatistic(statistics, new ColumnRefSet(node.getOutputColumns()));
projectNode.computeStatistics(b.build());
for (SlotId sid : projectMap.keySet()) {
SlotDescriptor slotDescriptor = tupleDescriptor.getSlot(sid.asInt());
slotDescriptor.setIsNullable(slotDescriptor.getIsNullable() | projectNode.isHasNullableGenerateChild());
}
tupleDescriptor.computeMemLayout();
projectNode.setLimit(inputFragment.getPlanRoot().getLimit());
inputFragment.setPlanRoot(projectNode);
return inputFragment;
}
@Override
public PlanFragment visitPhysicalDecode(OptExpression optExpression, ExecPlan context) {
PhysicalDecodeOperator node = (PhysicalDecodeOperator) optExpression.getOp();
PlanFragment inputFragment = visit(optExpression.inputAt(0), context);
TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
for (TupleId tupleId : inputFragment.getPlanRoot().getTupleIds()) {
TupleDescriptor childTuple = context.getDescTbl().getTupleDesc(tupleId);
ArrayList<SlotDescriptor> slots = childTuple.getSlots();
for (SlotDescriptor slot : slots) {
int slotId = slot.getId().asInt();
boolean isNullable = slot.getIsNullable();
if (node.getDictToStrings().containsKey(slotId)) {
Integer stringSlotId = node.getDictToStrings().get(slotId);
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(stringSlotId));
slotDescriptor.setIsNullable(isNullable);
slotDescriptor.setIsMaterialized(true);
slotDescriptor.setType(Type.VARCHAR);
context.getColRefToExpr().put(new ColumnRefOperator(stringSlotId, Type.VARCHAR,
"<dict-code>", slotDescriptor.getIsNullable()),
new SlotRef(stringSlotId.toString(), slotDescriptor));
} else {
SlotDescriptor slotDescriptor = new SlotDescriptor(slot.getId(), tupleDescriptor, slot);
tupleDescriptor.addSlot(slotDescriptor);
}
}
}
Map<SlotId, Expr> projectMap = Maps.newHashMap();
for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getStringFunctions().entrySet()) {
Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(),
new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(),
node.getStringFunctions()));
projectMap.put(new SlotId(entry.getKey().getId()), expr);
Preconditions.checkState(context.getColRefToExpr().containsKey(entry.getKey()));
}
tupleDescriptor.computeMemLayout();
DecodeNode decodeNode = new DecodeNode(context.getNextNodeId(),
tupleDescriptor,
inputFragment.getPlanRoot(),
node.getDictToStrings(), projectMap);
decodeNode.computeStatistics(optExpression.getStatistics());
decodeNode.setLimit(node.getLimit());
inputFragment.setPlanRoot(decodeNode);
return inputFragment;
}
@Override
public PlanFragment visitPhysicalOlapScan(OptExpression optExpr, ExecPlan context) {
PhysicalOlapScanOperator node = (PhysicalOlapScanOperator) optExpr.getOp();
OlapTable referenceTable = (OlapTable) node.getTable();
context.getDescTbl().addReferencedTable(referenceTable);
TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
tupleDescriptor.setTable(referenceTable);
OlapScanNode scanNode = new OlapScanNode(context.getNextNodeId(), tupleDescriptor, "OlapScanNode");
scanNode.setLimit(node.getLimit());
scanNode.computeStatistics(optExpr.getStatistics());
try {
scanNode.updateScanInfo(node.getSelectedPartitionId(),
node.getSelectedTabletId(),
node.getSelectedIndexId());
long selectedIndexId = node.getSelectedIndexId();
long totalTabletsNum = 0;
long localBeId = -1;
if (Config.enable_local_replica_selection) {
localBeId = GlobalStateMgr.getCurrentSystemInfo()
.getBackendIdByHost(FrontendOptions.getLocalHostAddress());
}
List<Long> selectedNonEmptyPartitionIds = node.getSelectedPartitionId().stream().filter(p -> {
List<Long> selectTabletIds = scanNode.getPartitionToScanTabletMap().get(p);
return selectTabletIds != null && !selectTabletIds.isEmpty();
}).collect(Collectors.toList());
scanNode.setSelectedPartitionIds(selectedNonEmptyPartitionIds);
for (Long partitionId : scanNode.getSelectedPartitionIds()) {
List<Long> selectTabletIds = scanNode.getPartitionToScanTabletMap().get(partitionId);
Preconditions.checkState(selectTabletIds != null && !selectTabletIds.isEmpty());
final Partition partition = referenceTable.getPartition(partitionId);
final MaterializedIndex selectedTable = partition.getIndex(selectedIndexId);
List<Long> allTabletIds = selectedTable.getTabletIdsInOrder();
Map<Long, Integer> tabletId2BucketSeq = Maps.newHashMap();
for (int i = 0; i < allTabletIds.size(); i++) {
tabletId2BucketSeq.put(allTabletIds.get(i), i);
}
totalTabletsNum += selectedTable.getTablets().size();
scanNode.setTabletId2BucketSeq(tabletId2BucketSeq);
List<Tablet> tablets =
selectTabletIds.stream().map(selectedTable::getTablet).collect(Collectors.toList());
scanNode.addScanRangeLocations(partition, selectedTable, tablets, localBeId);
}
scanNode.setTotalTabletsNum(totalTabletsNum);
} catch (UserException e) {
throw new StarRocksPlannerException(
"Build Exec OlapScanNode fail, scan info is invalid," + e.getMessage(),
INTERNAL_ERROR);
}
for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
slotDescriptor.setColumn(entry.getValue());
slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
slotDescriptor.setIsMaterialized(true);
if (slotDescriptor.getOriginType().isComplexType()) {
slotDescriptor.setOriginType(entry.getKey().getType());
slotDescriptor.setType(entry.getKey().getType());
}
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
}
for (ColumnRefOperator entry : node.getGlobalDictStringColumns()) {
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getId()));
slotDescriptor.setIsNullable(entry.isNullable());
slotDescriptor.setType(entry.getType());
slotDescriptor.setIsMaterialized(false);
context.getColRefToExpr().put(entry, new SlotRef(entry.toString(), slotDescriptor));
}
List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
ScalarOperatorToExpr.FormatterContext formatterContext =
new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
for (ScalarOperator predicate : predicates) {
scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
}
for (ScalarOperator predicate : node.getPrunedPartitionPredicates()) {
scanNode.getPrunedPartitionPredicates()
.add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
}
tupleDescriptor.computeMemLayout();
setUnUsedOutputColumns(node, scanNode, predicates, referenceTable);
scanNode.setIsSortedByKeyPerTablet(node.needSortedByKeyPerTablet());
scanNode.setIsPreAggregation(node.isPreAggregation(), node.getTurnOffReason());
scanNode.setDictStringIdToIntIds(node.getDictStringIdToIntIds());
scanNode.updateAppliedDictStringColumns(node.getGlobalDicts().stream().
map(entry -> entry.first).collect(Collectors.toSet()));
List<ColumnRefOperator> bucketColumns = getShuffleColumns(node.getDistributionSpec());
boolean useAllBucketColumns =
bucketColumns.stream().allMatch(c -> node.getColRefToColumnMetaMap().containsKey(c));
if (useAllBucketColumns) {
List<Expr> bucketExprs = bucketColumns.stream()
.map(e -> ScalarOperatorToExpr.buildExecExpression(e,
new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
.collect(Collectors.toList());
scanNode.setBucketExprs(bucketExprs);
scanNode.setBucketColumns(bucketColumns);
}
context.getScanNodes().add(scanNode);
PlanFragment fragment =
new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM);
fragment.setQueryGlobalDicts(node.getGlobalDicts());
context.getFragments().add(fragment);
return fragment;
}
@Override
public PlanFragment visitPhysicalMetaScan(OptExpression optExpression, ExecPlan context) {
PhysicalMetaScanOperator scan = (PhysicalMetaScanOperator) optExpression.getOp();
context.getDescTbl().addReferencedTable(scan.getTable());
TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
tupleDescriptor.setTable(scan.getTable());
MetaScanNode scanNode =
new MetaScanNode(context.getNextNodeId(),
tupleDescriptor, (OlapTable) scan.getTable(), scan.getAggColumnIdToNames());
scanNode.computeRangeLocations();
scanNode.computeStatistics(optExpression.getStatistics());
for (Map.Entry<ColumnRefOperator, Column> entry : scan.getColRefToColumnMetaMap().entrySet()) {
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
slotDescriptor.setColumn(entry.getValue());
slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
slotDescriptor.setIsMaterialized(true);
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor));
}
tupleDescriptor.computeMemLayout();
context.getScanNodes().add(scanNode);
PlanFragment fragment =
new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM);
context.getFragments().add(fragment);
return fragment;
}
private void prepareContextSlots(PhysicalScanOperator node, ExecPlan context, TupleDescriptor tupleDescriptor) {
for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
SlotDescriptor slotDescriptor =
context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
slotDescriptor.setColumn(entry.getValue());
slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
slotDescriptor.setIsMaterialized(true);
if (slotDescriptor.getOriginType().isComplexType()) {
slotDescriptor.setOriginType(entry.getKey().getType());
slotDescriptor.setType(entry.getKey().getType());
}
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
}
}
private void prepareCommonExpr(HDFSScanNodePredicates scanNodePredicates,
ScanOperatorPredicates predicates, ExecPlan context) {
List<ScalarOperator> noEvalPartitionConjuncts = predicates.getNoEvalPartitionConjuncts();
List<ScalarOperator> nonPartitionConjuncts = predicates.getNonPartitionConjuncts();
List<ScalarOperator> partitionConjuncts = predicates.getPartitionConjuncts();
ScalarOperatorToExpr.FormatterContext formatterContext =
new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
for (ScalarOperator partitionConjunct : partitionConjuncts) {
scanNodePredicates.getPartitionConjuncts().
add(ScalarOperatorToExpr.buildExecExpression(partitionConjunct, formatterContext));
}
for (ScalarOperator noEvalPartitionConjunct : noEvalPartitionConjuncts) {
scanNodePredicates.getNoEvalPartitionConjuncts().
add(ScalarOperatorToExpr.buildExecExpression(noEvalPartitionConjunct, formatterContext));
}
for (ScalarOperator nonPartitionConjunct : nonPartitionConjuncts) {
scanNodePredicates.getNonPartitionConjuncts().
add(ScalarOperatorToExpr.buildExecExpression(nonPartitionConjunct, formatterContext));
}
}
/**
 * Builds a dedicated tuple holding one slot per column referenced by a min/max
 * conjunct, then translates those conjuncts so they point into that tuple.
 */
private void prepareMinMaxExpr(HDFSScanNodePredicates scanNodePredicates,
                               ScanOperatorPredicates predicates, ExecPlan context) {
    List<ScalarOperator> minMaxConjuncts = predicates.getMinMaxConjuncts();
    TupleDescriptor minMaxTuple = context.getDescTbl().createTupleDescriptor();

    // Populate 'minMaxTuple' with slots for the statistics values.
    for (ScalarOperator conjunct : minMaxConjuncts) {
        for (ColumnRefOperator columnRef : Utils.extractColumnRef(conjunct)) {
            SlotDescriptor slot = context.getDescTbl()
                    .addSlotDescriptor(minMaxTuple, new SlotId(columnRef.getId()));
            Column column = predicates.getMinMaxColumnRefMap().get(columnRef);
            slot.setColumn(column);
            slot.setIsNullable(column.isAllowNull());
            slot.setIsMaterialized(true);
            context.getColRefToExpr().put(columnRef, new SlotRef(columnRef.toString(), slot));
        }
    }
    minMaxTuple.computeMemLayout();
    scanNodePredicates.setMinMaxTuple(minMaxTuple);

    // Translate the conjuncts against the freshly registered slots.
    ScalarOperatorToExpr.FormatterContext formatter =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator conjunct : minMaxConjuncts) {
        scanNodePredicates.getMinMaxConjuncts()
                .add(ScalarOperatorToExpr.buildExecExpression(conjunct, formatter));
    }
}
/**
 * Lowers a Hudi scan operator into a {@code HudiScanNode} wrapped in a randomly
 * partitioned fragment. Registers slots, sets up scan ranges and translates
 * partition / min-max predicates.
 */
@Override
public PlanFragment visitPhysicalHudiScan(OptExpression optExpression, ExecPlan context) {
    PhysicalHudiScanOperator node = (PhysicalHudiScanOperator) optExpression.getOp();
    ScanOperatorPredicates predicates = node.getScanOperatorPredicates();

    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);

    // One slot per referenced column.
    prepareContextSlots(node, context, tupleDescriptor);

    HudiScanNode hudiScanNode =
            new HudiScanNode(context.getNextNodeId(), tupleDescriptor, "HudiScanNode");
    hudiScanNode.computeStatistics(optExpression.getStatistics());
    try {
        HDFSScanNodePredicates scanNodePredicates = hudiScanNode.getScanNodePredicates();
        scanNodePredicates.setSelectedPartitionIds(predicates.getSelectedPartitionIds());
        scanNodePredicates.setIdToPartitionKey(predicates.getIdToPartitionKey());

        hudiScanNode.setupScanRangeLocations(context.getDescTbl());

        prepareCommonExpr(scanNodePredicates, predicates, context);
        prepareMinMaxExpr(scanNodePredicates, predicates, context);
    } catch (Exception e) {
        // Log the throwable as a parameter so the full stack trace is kept in one
        // entry (previously logged twice: once via string concatenation, once bare).
        LOG.warn("Hudi scan node get scan range locations failed : ", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }

    hudiScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(hudiScanNode);

    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), hudiScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Lowers a Hive scan operator into an {@code HdfsScanNode} wrapped in a randomly
 * partitioned fragment. Registers slots, sets up scan ranges and translates
 * partition / min-max predicates.
 */
@Override
public PlanFragment visitPhysicalHiveScan(OptExpression optExpression, ExecPlan context) {
    PhysicalHiveScanOperator node = (PhysicalHiveScanOperator) optExpression.getOp();
    ScanOperatorPredicates predicates = node.getScanOperatorPredicates();

    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);

    // One slot per referenced column.
    prepareContextSlots(node, context, tupleDescriptor);

    HdfsScanNode hdfsScanNode =
            new HdfsScanNode(context.getNextNodeId(), tupleDescriptor, "HdfsScanNode");
    hdfsScanNode.computeStatistics(optExpression.getStatistics());
    try {
        HDFSScanNodePredicates scanNodePredicates = hdfsScanNode.getScanNodePredicates();
        scanNodePredicates.setSelectedPartitionIds(predicates.getSelectedPartitionIds());
        scanNodePredicates.setIdToPartitionKey(predicates.getIdToPartitionKey());

        hdfsScanNode.setupScanRangeLocations(context.getDescTbl());

        prepareCommonExpr(scanNodePredicates, predicates, context);
        prepareMinMaxExpr(scanNodePredicates, predicates, context);
    } catch (Exception e) {
        // Pass the exception as the throwable argument so the stack trace is
        // preserved ("…" + e only captured e.toString()).
        LOG.warn("Hdfs scan node get scan range locations failed : ", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }

    hdfsScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(hdfsScanNode);

    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), hdfsScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Lowers a file-table scan operator into a {@code FileTableScanNode} wrapped in a
 * randomly partitioned fragment.
 */
@Override
public PlanFragment visitPhysicalFileScan(OptExpression optExpression, ExecPlan context) {
    PhysicalFileScanOperator node = (PhysicalFileScanOperator) optExpression.getOp();
    ScanOperatorPredicates predicates = node.getScanOperatorPredicates();

    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);

    // One slot per referenced column.
    prepareContextSlots(node, context, tupleDescriptor);

    FileTableScanNode fileTableScanNode =
            new FileTableScanNode(context.getNextNodeId(), tupleDescriptor, "FileTableScanNode");
    fileTableScanNode.computeStatistics(optExpression.getStatistics());
    try {
        HDFSScanNodePredicates scanNodePredicates = fileTableScanNode.getScanNodePredicates();

        fileTableScanNode.setupScanRangeLocations();

        prepareCommonExpr(scanNodePredicates, predicates, context);
        prepareMinMaxExpr(scanNodePredicates, predicates, context);
    } catch (Exception e) {
        // Corrected copy-pasted message: this is the file-table path, not HDFS.
        LOG.warn("File table scan node get scan range locations failed : ", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }

    fileTableScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(fileTableScanNode);

    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), fileTableScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Lowers a Delta Lake scan operator into a {@code DeltaLakeScanNode} wrapped in a
 * randomly partitioned fragment.
 */
@Override
public PlanFragment visitPhysicalDeltaLakeScan(OptExpression optExpression, ExecPlan context) {
    PhysicalDeltaLakeScanOperator node = (PhysicalDeltaLakeScanOperator) optExpression.getOp();
    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);

    // One slot per referenced column. NOTE(review): unlike prepareContextSlots(),
    // this path does no complex-type adjustment — presumably intentional; confirm.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }

    DeltaLakeScanNode deltaLakeScanNode =
            new DeltaLakeScanNode(context.getNextNodeId(), tupleDescriptor, "DeltaLakeScanNode");
    deltaLakeScanNode.computeStatistics(optExpression.getStatistics());
    try {
        // Translate the conjuncts of the scan predicate.
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        for (ScalarOperator predicate : predicates) {
            deltaLakeScanNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        deltaLakeScanNode.setupScanRangeLocations(context.getDescTbl());
        HDFSScanNodePredicates scanNodePredicates = deltaLakeScanNode.getScanNodePredicates();
        prepareMinMaxExpr(scanNodePredicates, node.getScanOperatorPredicates(), context);
    } catch (AnalysisException e) {
        // Pass the exception as the throwable argument so the stack trace is
        // preserved ("…" + e only captured e.toString()).
        LOG.warn("Delta lake scan node get scan range locations failed : ", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }

    deltaLakeScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(deltaLakeScanNode);

    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), deltaLakeScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Lowers an Iceberg scan operator into an {@code IcebergScanNode} wrapped in a
 * randomly partitioned fragment.
 */
@Override
public PlanFragment visitPhysicalIcebergScan(OptExpression optExpression, ExecPlan context) {
    PhysicalIcebergScanOperator node = (PhysicalIcebergScanOperator) optExpression.getOp();
    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);

    // One slot per referenced column.
    prepareContextSlots(node, context, tupleDescriptor);

    IcebergScanNode icebergScanNode =
            new IcebergScanNode(context.getNextNodeId(), tupleDescriptor, "IcebergScanNode");
    icebergScanNode.computeStatistics(optExpression.getStatistics());
    try {
        // Translate the conjuncts of the scan predicate.
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        for (ScalarOperator predicate : predicates) {
            icebergScanNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        icebergScanNode.preProcessIcebergPredicate(node.getPredicate());
        icebergScanNode.setupScanRangeLocations();
        icebergScanNode.appendEqualityColumns(node, columnRefFactory, context);

        HDFSScanNodePredicates scanNodePredicates = icebergScanNode.getScanNodePredicates();
        prepareMinMaxExpr(scanNodePredicates, node.getScanOperatorPredicates(), context);
    } catch (UserException e) {
        // Pass the exception as the throwable argument so the stack trace is
        // preserved ("…" + e only captured e.toString()).
        LOG.warn("Iceberg scan node get scan range locations failed : ", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }

    icebergScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(icebergScanNode);

    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), icebergScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Lowers a MySQL external-table scan into a {@code MysqlScanNode} wrapped in an
 * unpartitioned fragment (the whole scan runs on one node).
 */
// Fix: the original carried a duplicate @Override annotation, which does not compile.
@Override
public PlanFragment visitPhysicalMysqlScan(OptExpression optExpression, ExecPlan context) {
    PhysicalMysqlScanOperator node = (PhysicalMysqlScanOperator) optExpression.getOp();

    context.getDescTbl().addReferencedTable(node.getTable());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(node.getTable());

    // One slot per referenced column; the SlotRef uses the real column name
    // because it is forwarded to the remote MySQL query.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor));
    }

    tupleDescriptor.computeMemLayout();

    MysqlScanNode scanNode = new MysqlScanNode(context.getNextNodeId(), tupleDescriptor,
            (MysqlTable) node.getTable());

    if (node.getTemporalClause() != null) {
        scanNode.setTemporalClause(node.getTemporalClause());
    }

    // Translate the conjuncts; implicit casts are allowed since the remote
    // engine evaluates them.
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    formatterContext.setImplicitCast(true);
    for (ScalarOperator predicate : predicates) {
        scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }

    scanNode.setLimit(node.getLimit());
    scanNode.computeColumnsAndFilters();
    scanNode.computeStatistics(optExpression.getStatistics());

    context.getScanNodes().add(scanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Lowers an Elasticsearch scan into an {@code EsScanNode} wrapped in a randomly
 * partitioned fragment. Assigns ES shards to backends before building the fragment.
 */
@Override
public PlanFragment visitPhysicalEsScan(OptExpression optExpression, ExecPlan context) {
    PhysicalEsScanOperator node = (PhysicalEsScanOperator) optExpression.getOp();

    context.getDescTbl().addReferencedTable(node.getTable());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(node.getTable());

    // One slot per referenced column; record column-ref -> SlotRef for the
    // predicate translation below.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }
    tupleDescriptor.computeMemLayout();

    EsScanNode scanNode = new EsScanNode(context.getNextNodeId(), tupleDescriptor, "EsScanNode");
    // Translate the conjuncts of the scan predicate.
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }
    scanNode.setLimit(node.getLimit());
    scanNode.computeStatistics(optExpression.getStatistics());
    try {
        // Map ES shards onto live backends; surfaces as a planner error on failure.
        scanNode.assignBackends();
    } catch (UserException e) {
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }
    scanNode.setShardScanRanges(scanNode.computeShardLocations(node.getSelectedIndex()));

    context.getScanNodes().add(scanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Lowers a JDBC external-table scan into a {@code JDBCScanNode} wrapped in an
 * unpartitioned fragment (the whole scan runs on one node).
 */
@Override
public PlanFragment visitPhysicalJDBCScan(OptExpression optExpression, ExecPlan context) {
    PhysicalJDBCScanOperator node = (PhysicalJDBCScanOperator) optExpression.getOp();

    context.getDescTbl().addReferencedTable(node.getTable());
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(node.getTable());

    // One slot per referenced column; the SlotRef uses the real column name
    // because it is forwarded to the remote JDBC query.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor));
    }

    tupleDescriptor.computeMemLayout();

    JDBCScanNode scanNode = new JDBCScanNode(context.getNextNodeId(), tupleDescriptor,
            (JDBCTable) node.getTable());

    // Translate the conjuncts; implicit casts are allowed since the remote
    // engine evaluates them.
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    formatterContext.setImplicitCast(true);
    for (ScalarOperator predicate : predicates) {
        scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }

    scanNode.setLimit(node.getLimit());
    scanNode.computeColumnsAndFilters();
    scanNode.computeStatistics(optExpression.getStatistics());

    context.getScanNodes().add(scanNode);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Lowers a VALUES operator: an empty row set becomes an {@code EmptySetNode},
 * otherwise the constant rows are materialized through a {@code UnionNode}.
 */
@Override
public PlanFragment visitPhysicalValues(OptExpression optExpr, ExecPlan context) {
    PhysicalValuesOperator valuesOperator = (PhysicalValuesOperator) optExpr.getOp();

    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    // VALUES has no backing table, so slots carry only type and nullability.
    for (ColumnRefOperator columnRefOperator : valuesOperator.getColumnRefSet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(columnRefOperator.getId()));
        slotDescriptor.setIsNullable(columnRefOperator.isNullable());
        slotDescriptor.setIsMaterialized(true);
        slotDescriptor.setType(columnRefOperator.getType());
        context.getColRefToExpr()
                .put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDescriptor));
    }
    tupleDescriptor.computeMemLayout();

    if (valuesOperator.getRows().isEmpty()) {
        // No rows (e.g. WHERE FALSE): emit an empty result set.
        EmptySetNode emptyNode = new EmptySetNode(context.getNextNodeId(),
                Lists.newArrayList(tupleDescriptor.getId()));
        emptyNode.computeStatistics(optExpr.getStatistics());
        PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), emptyNode,
                DataPartition.UNPARTITIONED);
        context.getFragments().add(fragment);
        return fragment;
    } else {
        UnionNode unionNode = new UnionNode(context.getNextNodeId(), tupleDescriptor.getId());
        unionNode.setLimit(valuesOperator.getLimit());

        // Translate each constant row into a list of executable expressions.
        List<List<Expr>> consts = new ArrayList<>();
        for (List<ScalarOperator> row : valuesOperator.getRows()) {
            List<Expr> exprRow = new ArrayList<>();
            for (ScalarOperator field : row) {
                exprRow.add(ScalarOperatorToExpr.buildExecExpression(
                        field, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
            }
            consts.add(exprRow);
        }

        unionNode.setMaterializedConstExprLists_(consts);
        unionNode.computeStatistics(optExpr.getStatistics());
        /*
         * TODO(lhy):
         * It doesn't make sense for vectorized execution engines, but it will appear in explain.
         * we can delete this when refactoring explain in the future,
         */
        consts.forEach(unionNode::addConstExprList);

        PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), unionNode,
                DataPartition.UNPARTITIONED);
        context.getFragments().add(fragment);
        return fragment;
    }
}
/**
 * Returns true iff no {@code ExchangeNode} appears anywhere in the subtree
 * rooted at {@code root}.
 */
public static boolean hasNoExchangeNodes(PlanNode root) {
    if (root instanceof ExchangeNode) {
        return false;
    }
    // All children must themselves be exchange-free.
    return root.getChildren().stream().allMatch(child -> hasNoExchangeNodes(child));
}
/**
 * Returns true iff every node in the plan tree rooted at {@code root} is an
 * instance of one of the given node types.
 *
 * @param root              The plan tree root.
 * @param requiredNodeTypes The allowed node types.
 * @return true if all the nodes belong to the node types, otherwise false.
 */
private boolean onlyContainNodeTypes(PlanNode root, List<Class<? extends PlanNode>> requiredNodeTypes) {
    if (requiredNodeTypes.stream().noneMatch(type -> type.isInstance(root))) {
        return false;
    }
    return root.getChildren().stream()
            .allMatch(child -> onlyContainNodeTypes(child, requiredNodeTypes));
}
/**
* Remove ExchangeNode between AggNode and ScanNode for the single backend.
* <p>
* This is used to generate "ScanNode->LocalShuffle->OnePhaseLocalAgg" for the single backend,
* which contains two steps:
* 1. Ignore the network cost for ExchangeNode when estimating cost model.
* 2. Remove ExchangeNode between AggNode and ScanNode when building fragments.
* <p>
* Specifically, transfer
* (AggNode->ExchangeNode)->([ProjectNode->]ScanNode)
* - *inputFragment sourceFragment
* to
* (AggNode->[ProjectNode->]ScanNode)
* - *sourceFragment
* That is, when matching this fragment pattern, remove inputFragment and return sourceFragment.
*
* @param inputFragment The input fragment to match the above pattern.
* @param context The context of building fragment, which contains all the fragments.
 * @return SourceFragment if it matches the pattern, otherwise the original inputFragment.
*/
private PlanFragment removeExchangeNodeForLocalShuffleAgg(PlanFragment inputFragment, ExecPlan context) {
    if (ConnectContext.get() == null) {
        return inputFragment;
    }
    if (!canUseLocalShuffleAgg) {
        return inputFragment;
    }
    // The optimization only applies with local-shuffle agg + pipeline engine
    // enabled on a single-backend cluster.
    SessionVariable sessionVariable = ConnectContext.get().getSessionVariable();
    boolean enableLocalShuffleAgg = sessionVariable.isEnableLocalShuffleAgg()
            && sessionVariable.isEnablePipelineEngine()
            && GlobalStateMgr.getCurrentSystemInfo().isSingleBackendAndComputeNode();
    if (!enableLocalShuffleAgg) {
        return inputFragment;
    }

    // Match the pattern: inputFragment is rooted at an ExchangeNode ...
    if (!(inputFragment.getPlanRoot() instanceof ExchangeNode)) {
        return inputFragment;
    }
    PlanNode sourceFragmentRoot = inputFragment.getPlanRoot().getChild(0);
    // ... and its source side consists only of scan and project nodes.
    if (!onlyContainNodeTypes(sourceFragmentRoot, ImmutableList.of(ScanNode.class, ProjectNode.class))) {
        return inputFragment;
    }

    PlanFragment sourceFragment = sourceFragmentRoot.getFragment();
    if (sourceFragment instanceof MultiCastPlanFragment) {
        // A multi-cast fragment feeds several consumers; removing the exchange is unsafe.
        return inputFragment;
    }

    // Drop the now-redundant inputFragment from the global fragment list
    // (searched from the back: recently built fragments are at the end).
    ArrayList<PlanFragment> fragments = context.getFragments();
    for (int i = fragments.size() - 1; i >= 0; --i) {
        if (fragments.get(i).equals(inputFragment)) {
            fragments.remove(i);
            break;
        }
    }

    // Clear bucket keys so BE local-shuffles on the group-by keys instead
    // (see clearOlapScanNodePartitions), and detach the source fragment's output.
    clearOlapScanNodePartitions(sourceFragment.getPlanRoot());
    sourceFragment.clearDestination();
    sourceFragment.clearOutputPartition();
    return sourceFragment;
}
/**
 * Clears partitionExprs of every OlapScanNode in this fragment (the bucket keys
 * passed to BE).
 * <p>
 * When an OlapScanNode carries partitionExprs, downstream operators use them as
 * local shuffle partition exprs; otherwise they fall back to the original
 * partition exprs (group-by keys or join-on keys).
 * <p>
 * Bucket keys satisfy the required hash property of blocking aggregation except
 * when the scan has a single tablet after pruning or runs on a single BE. In
 * those cases — which produce ScanNode(k1)->LocalShuffle(c1)->BlockingAgg(c1) —
 * the bucket keys must be cleared so BE shuffles on the group-by keys instead.
 *
 * @param root The root of the fragment whose OlapScanNodes should be cleared.
 */
private void clearOlapScanNodePartitions(PlanNode root) {
    if (root instanceof ExchangeNode) {
        // Stay within the current fragment.
        return;
    }
    if (root instanceof OlapScanNode) {
        OlapScanNode scanNode = (OlapScanNode) root;
        scanNode.setBucketExprs(Lists.newArrayList());
        scanNode.setBucketColumns(Lists.newArrayList());
        return;
    }
    root.getChildren().forEach(this::clearOlapScanNodePartitions);
}
/**
 * Immutable bundle of the translated expressions needed to build an
 * AggregationNode: group-by exprs, aggregate calls, partition exprs and the
 * intermediate (merge-phase) slot refs. Produced by buildAggregateTuple().
 */
private static class AggregateExprInfo {
    // Group-by expressions.
    public final ArrayList<Expr> groupExpr;
    // Aggregate function call expressions.
    public final ArrayList<FunctionCallExpr> aggregateExpr;
    // Exprs used to hash-partition the input fragment's output.
    public final ArrayList<Expr> partitionExpr;
    // SlotRefs over the intermediate (pre-merge) aggregate state.
    public final ArrayList<Expr> intermediateExpr;

    public AggregateExprInfo(ArrayList<Expr> groupExpr, ArrayList<FunctionCallExpr> aggregateExpr,
                             ArrayList<Expr> partitionExpr,
                             ArrayList<Expr> intermediateExpr) {
        this.groupExpr = groupExpr;
        this.aggregateExpr = aggregateExpr;
        this.partitionExpr = partitionExpr;
        this.intermediateExpr = intermediateExpr;
    }
}
/**
 * Translates group-by / aggregate / partition-by column refs into executable
 * expressions, registering an output slot for each in {@code outputTupleDesc},
 * and computes the tuple's memory layout.
 *
 * @return the translated expression bundle for building an AggregationNode.
 */
private AggregateExprInfo buildAggregateTuple(
        Map<ColumnRefOperator, CallOperator> aggregations,
        List<ColumnRefOperator> groupBys,
        List<ColumnRefOperator> partitionBys,
        TupleDescriptor outputTupleDesc,
        ExecPlan context) {
    ArrayList<Expr> groupingExpressions = Lists.newArrayList();
    // Group-by translation is skipped for exchange perf statistic functions
    // (exchange_bytes/exchange_speed) at agg stage 1.
    boolean forExchangePerf = aggregations.values().stream().anyMatch(aggFunc ->
            aggFunc.getFnName().equals(FunctionSet.EXCHANGE_BYTES) ||
                    aggFunc.getFnName().equals(FunctionSet.EXCHANGE_SPEED)) &&
            ConnectContext.get().getSessionVariable().getNewPlannerAggStage() == 1;
    if (!forExchangePerf) {
        for (ColumnRefOperator grouping : CollectionUtils.emptyIfNull(groupBys)) {
            Expr groupingExpr = ScalarOperatorToExpr.buildExecExpression(grouping,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            groupingExpressions.add(groupingExpr);

            SlotDescriptor slotDesc =
                    context.getDescTbl().addSlotDescriptor(outputTupleDesc, new SlotId(grouping.getId()));
            slotDesc.setType(groupingExpr.getType());
            slotDesc.setIsNullable(groupingExpr.isNullable());
            slotDesc.setIsMaterialized(true);
        }
    }

    // One output slot per aggregate; also build a parallel SlotRef over the
    // intermediate type for the merge phase.
    ArrayList<FunctionCallExpr> aggregateExprList = Lists.newArrayList();
    ArrayList<Expr> intermediateAggrExprs = Lists.newArrayList();
    for (Map.Entry<ColumnRefOperator, CallOperator> aggregation : aggregations.entrySet()) {
        FunctionCallExpr aggExpr = (FunctionCallExpr) ScalarOperatorToExpr.buildExecExpression(
                aggregation.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        aggregateExprList.add(aggExpr);

        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(outputTupleDesc, new SlotId(aggregation.getKey().getId()));
        slotDesc.setType(aggregation.getValue().getType());
        slotDesc.setIsNullable(aggExpr.isNullable());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr()
                .put(aggregation.getKey(), new SlotRef(aggregation.getKey().toString(), slotDesc));

        // Intermediate slot shares the id/parent of the output slot but uses the
        // aggregate's intermediate type when it differs from the return type.
        SlotDescriptor intermediateSlotDesc = new SlotDescriptor(slotDesc.getId(), slotDesc.getParent());
        AggregateFunction aggrFn = (AggregateFunction) aggExpr.getFn();
        Type intermediateType = aggrFn.getIntermediateType() != null ?
                aggrFn.getIntermediateType() : aggrFn.getReturnType();
        intermediateSlotDesc.setType(intermediateType);
        intermediateSlotDesc.setIsNullable(aggrFn.isNullable());
        intermediateSlotDesc.setIsMaterialized(true);
        SlotRef intermediateSlotRef = new SlotRef(aggregation.getKey().toString(), intermediateSlotDesc);
        intermediateAggrExprs.add(intermediateSlotRef);
    }

    // Partition-by exprs determine the hash partitioning of the input fragment.
    ArrayList<Expr> partitionExpressions = Lists.newArrayList();
    for (ColumnRefOperator column : CollectionUtils.emptyIfNull(partitionBys)) {
        Expr partitionExpr = ScalarOperatorToExpr.buildExecExpression(column,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));

        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(outputTupleDesc, new SlotId(column.getId()));
        slotDesc.setType(partitionExpr.getType());
        slotDesc.setIsNullable(partitionExpr.isNullable());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr().put(column, new SlotRef(column.toString(), slotDesc));

        partitionExpressions.add(new SlotRef(slotDesc));
    }

    outputTupleDesc.computeMemLayout();

    return new AggregateExprInfo(groupingExpressions, aggregateExprList, partitionExpressions,
            intermediateAggrExprs);
}
/**
 * Lowers a hash aggregation operator into an {@code AggregationNode}, choosing the
 * aggregate phase (FIRST / SECOND / FIRST_MERGE / SECOND_MERGE) from the operator's
 * type and split flags, and placing the node on top of the input fragment.
 */
@Override
public PlanFragment visitPhysicalHashAggregate(OptExpression optExpr, ExecPlan context) {
    PhysicalHashAggregateOperator node = (PhysicalHashAggregateOperator) optExpr.getOp();
    PlanFragment originalInputFragment = visit(optExpr.inputAt(0), context);

    // On a single backend the exchange below the agg may have been elided;
    // if so, the aggregation runs with a local shuffle instead.
    PlanFragment inputFragment = removeExchangeNodeForLocalShuffleAgg(originalInputFragment, context);
    boolean withLocalShuffle = inputFragment != originalInputFragment;

    Map<ColumnRefOperator, CallOperator> aggregations = node.getAggregations();
    List<ColumnRefOperator> groupBys = node.getGroupBys();
    List<ColumnRefOperator> partitionBys = node.getPartitionByColumns();

    TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor();
    AggregateExprInfo aggExpr =
            buildAggregateTuple(aggregations, groupBys, partitionBys, outputTupleDesc, context);
    ArrayList<Expr> groupingExpressions = aggExpr.groupExpr;
    ArrayList<FunctionCallExpr> aggregateExprList = aggExpr.aggregateExpr;
    ArrayList<Expr> partitionExpressions = aggExpr.partitionExpr;
    ArrayList<Expr> intermediateAggrExprs = aggExpr.intermediateExpr;

    AggregationNode aggregationNode;
    if (node.getType().isLocal() && node.isSplit()) {
        // Phase 1 of a split aggregation: pre-aggregate, emit intermediate state.
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions,
                aggregateExprList,
                outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.FIRST);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIsPreagg(node.isUseStreamingPreAgg());
        aggregationNode.setIntermediateTuple();
        if (!partitionExpressions.isEmpty()) {
            inputFragment.setOutputPartition(DataPartition.hashPartitioned(partitionExpressions));
        }

        // Colocate execution is only considered without local shuffle and
        // without streaming pre-aggregation.
        if (!withLocalShuffle && !node.isUseStreamingPreAgg() &&
                hasColocateOlapScanChildInFragment(aggregationNode)) {
            aggregationNode.setColocate(true);
        }
    } else if (node.getType().isGlobal() || (node.getType().isLocal() && !node.isSplit())) {
        // Final (global) aggregation, or an unsplit one-phase local aggregation.
        if (node.hasSingleDistinct()) {
            // Merge every aggregate except the single distinct function itself.
            for (int i = 0; i < aggregateExprList.size(); i++) {
                if (i != node.getSingleDistinctFunctionPos()) {
                    aggregateExprList.get(i).setMergeAggFn();
                }
            }
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions,
                    aggregateExprList,
                    outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.SECOND);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
                            aggInfo);
        } else if (!node.isSplit()) {
            // One-phase aggregation: distinct functions become multi_distinct_*.
            rewriteAggDistinctFirstStageFunction(aggregateExprList);
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions,
                    aggregateExprList,
                    outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.FIRST);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
                            aggInfo);
        } else {
            // Second phase of a split aggregation: merge intermediate states.
            aggregateExprList.forEach(FunctionCallExpr::setMergeAggFn);
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions,
                    aggregateExprList,
                    outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.SECOND_MERGE);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
                            aggInfo);
        }

        // HAVING-style predicates are evaluated on the final aggregation node.
        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());

        for (ScalarOperator predicate : predicates) {
            aggregationNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        aggregationNode.setLimit(node.getLimit());

        if (!withLocalShuffle && hasColocateOlapScanChildInFragment(aggregationNode)) {
            aggregationNode.setColocate(true);
        }
    } else if (node.getType().isDistinctGlobal()) {
        // First merge phase of a multi-phase distinct aggregation.
        aggregateExprList.forEach(FunctionCallExpr::setMergeAggFn);
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions,
                aggregateExprList,
                outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.FIRST_MERGE);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIntermediateTuple();

        if (!withLocalShuffle && hasColocateOlapScanChildInFragment(aggregationNode)) {
            aggregationNode.setColocate(true);
        }
    } else if (node.getType().isDistinctLocal()) {
        // Second phase of a multi-phase distinct aggregation (still local).
        for (int i = 0; i < aggregateExprList.size(); i++) {
            if (i != node.getSingleDistinctFunctionPos()) {
                aggregateExprList.get(i).setMergeAggFn();
            }
        }
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions,
                aggregateExprList,
                outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.SECOND);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIsPreagg(node.isUseStreamingPreAgg());
        aggregationNode.setIntermediateTuple();
    } else {
        throw unsupportedException("Not support aggregate type : " + node.getType());
    }

    aggregationNode.setUseSortAgg(node.isUseSortAgg());
    aggregationNode.setStreamingPreaggregationMode(context.getConnectContext().
            getSessionVariable().getStreamingPreaggregationMode());
    aggregationNode.setHasNullableGenerateChild();
    aggregationNode.computeStatistics(optExpr.getStatistics());

    if (node.isOnePhaseAgg() || node.isMergedLocalAgg() || node.getType().isDistinctGlobal()) {
        // Clear bucket keys when one tablet suffices, so BE shuffles on group-by keys.
        if (optExpr.getLogicalProperty().oneTabletProperty().supportOneTabletOpt) {
            clearOlapScanNodePartitions(aggregationNode);
        }
        inputFragment.setAssignScanRangesPerDriverSeq(!withLocalShuffle);
        aggregationNode.setWithLocalShuffle(withLocalShuffle);
        aggregationNode.setIdenticallyDistributed(true);
    }

    aggregationNode.getAggInfo().setIntermediateAggrExprs(intermediateAggrExprs);
    inputFragment.setPlanRoot(aggregationNode);
    return inputFragment;
}
/**
 * Returns true iff this fragment (recursion stops at ExchangeNode boundaries)
 * contains an OlapScanNode over a colocate table.
 */
public boolean hasColocateOlapScanChildInFragment(PlanNode node) {
    if (node instanceof OlapScanNode) {
        ColocateTableIndex colocateIndex = GlobalStateMgr.getCurrentColocateIndex();
        OlapScanNode scanNode = (OlapScanNode) node;
        if (colocateIndex.isColocateTable(scanNode.getOlapTable().getId())) {
            return true;
        }
    }
    // Do not look across fragment boundaries.
    if (node instanceof ExchangeNode) {
        return false;
    }
    // Short-circuit on the first match instead of accumulating over all children.
    for (PlanNode child : node.getChildren()) {
        if (hasColocateOlapScanChildInFragment(child)) {
            return true;
        }
    }
    return false;
}
/**
 * For a one-phase aggregation with exactly one distinct function, rewrites
 * count(distinct x) -> multi_distinct_count(x) and
 * sum(distinct x) -> multi_distinct_sum(x) in place.
 * Only COUNT/SUM distinct are expected here; anything else trips the check below.
 */
public void rewriteAggDistinctFirstStageFunction(List<FunctionCallExpr> aggregateExprList) {
    // Locate the distinct aggregate (and count how many there are).
    int singleDistinctCount = 0;
    int singleDistinctIndex = 0;
    FunctionCallExpr functionCallExpr = null;
    for (int i = 0; i < aggregateExprList.size(); ++i) {
        FunctionCallExpr callExpr = aggregateExprList.get(i);
        if (callExpr.isDistinct()) {
            ++singleDistinctCount;
            functionCallExpr = callExpr;
            singleDistinctIndex = i;
        }
    }
    // Rewrite only when there is exactly one distinct function.
    if (singleDistinctCount == 1) {
        FunctionCallExpr replaceExpr = null;
        final String functionName = functionCallExpr.getFnName().getFunction();
        if (functionName.equalsIgnoreCase(FunctionSet.COUNT)) {
            // count(distinct x) -> multi_distinct_count(x)
            replaceExpr = new FunctionCallExpr(FunctionSet.MULTI_DISTINCT_COUNT, functionCallExpr.getParams());
            replaceExpr.setFn(Expr.getBuiltinFunction(FunctionSet.MULTI_DISTINCT_COUNT,
                    new Type[] {functionCallExpr.getChild(0).getType()},
                    IS_NONSTRICT_SUPERTYPE_OF));
            replaceExpr.getParams().setIsDistinct(false);
        } else if (functionName.equalsIgnoreCase(FunctionSet.SUM)) {
            // sum(distinct x) -> multi_distinct_sum(x), decimal-aware resolution.
            replaceExpr = new FunctionCallExpr(FunctionSet.MULTI_DISTINCT_SUM, functionCallExpr.getParams());
            Function multiDistinctSum = DecimalV3FunctionAnalyzer.convertSumToMultiDistinctSum(
                    functionCallExpr.getFn(), functionCallExpr.getChild(0).getType());
            replaceExpr.setFn(multiDistinctSum);
            replaceExpr.getParams().setIsDistinct(false);
        }
        Preconditions.checkState(replaceExpr != null);
        ExpressionAnalyzer.analyzeExpressionIgnoreSlot(replaceExpr, ConnectContext.get());

        aggregateExprList.set(singleDistinctIndex, replaceExpr);
    }
}
/**
 * Plans a physical distribution (exchange): creates an ExchangeNode above the child
 * fragment and a new fragment whose data partition matches the requested distribution
 * type (GATHER, BROADCAST, or SHUFFLE).
 */
@Override
public PlanFragment visitPhysicalDistribution(OptExpression optExpr, ExecPlan context) {
    PlanFragment childFragment = visit(optExpr.inputAt(0), context);
    PhysicalDistributionOperator distribution = (PhysicalDistributionOperator) optExpr.getOp();
    DistributionSpec spec = distribution.getDistributionSpec();
    ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
            childFragment.getPlanRoot(), spec.getType());
    DataPartition dataPartition;
    switch (spec.getType()) {
        case GATHER: {
            // Gather funnels all data into a single instance.
            exchangeNode.setNumInstances(1);
            dataPartition = DataPartition.UNPARTITIONED;
            GatherDistributionSpec gatherSpec = (GatherDistributionSpec) spec;
            if (gatherSpec.hasLimit()) {
                exchangeNode.setLimit(gatherSpec.getLimit());
            }
            break;
        }
        case BROADCAST:
            exchangeNode.setNumInstances(childFragment.getPlanRoot().getNumInstances());
            dataPartition = DataPartition.UNPARTITIONED;
            break;
        case SHUFFLE: {
            exchangeNode.setNumInstances(childFragment.getPlanRoot().getNumInstances());
            // Hash-partition on the spec's shuffle columns, translated to exec exprs.
            List<ColumnRefOperator> partitionColumns = getShuffleColumns((HashDistributionSpec) spec);
            List<Expr> distributeExpressions = partitionColumns.stream()
                    .map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                    .collect(Collectors.toList());
            dataPartition = DataPartition.hashPartitioned(distributeExpressions);
            break;
        }
        default:
            throw new StarRocksPlannerException("Unsupport exchange type : "
                    + spec.getType(), INTERNAL_ERROR);
    }
    exchangeNode.setDataPartition(dataPartition);
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), exchangeNode, dataPartition);
    fragment.setQueryGlobalDicts(distribution.getGlobalDicts());
    childFragment.setDestination(exchangeNode);
    childFragment.setOutputPartition(dataPartition);
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Plans a TopN operator. An unsplit TopN becomes a single partial sort fragment;
 * a split TopN gets the final (merge) fragment built on top of the already-planned
 * partial sort.
 */
@Override
public PlanFragment visitPhysicalTopN(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalTopNOperator topN = (PhysicalTopNOperator) optExpr.getOp();
    Preconditions.checkState(topN.getOffset() >= 0);
    if (topN.isSplit()) {
        return buildFinalTopNFragment(context, topN.getTopNType(), topN.getLimit(), topN.getOffset(),
                inputFragment, optExpr);
    }
    return buildPartialTopNFragment(optExpr, context, topN.getPartitionByColumns(),
            topN.getPartitionLimit(), topN.getOrderSpec(),
            topN.getTopNType(), topN.getLimit(), topN.getOffset(), inputFragment);
}
/**
 * Builds the final (merge) phase of a split TopN: a GATHER exchange above the
 * partial SortNode so one instance merge-sorts all partial results, wrapped in a
 * new unpartitioned fragment.
 *
 * @param topNType kind of TopN (e.g. ROW_NUMBER)
 * @param limit    row limit (applied on the exchange only for ROW_NUMBER)
 * @param offset   rows to skip when merging
 */
private PlanFragment buildFinalTopNFragment(ExecPlan context, TopNType topNType, long limit, long offset,
                                            PlanFragment inputFragment,
                                            OptExpression optExpr) {
    ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            DistributionSpec.DistributionType.GATHER);
    // Gather: a single receiving instance performs the merge.
    exchangeNode.setNumInstances(1);
    DataPartition dataPartition = DataPartition.UNPARTITIONED;
    exchangeNode.setDataPartition(dataPartition);
    // The partial phase must have produced a SortNode; reuse its sort info for merging.
    Preconditions.checkState(inputFragment.getPlanRoot() instanceof SortNode);
    SortNode sortNode = (SortNode) inputFragment.getPlanRoot();
    sortNode.setTopNType(topNType);
    exchangeNode.setMergeInfo(sortNode.getSortInfo(), offset);
    exchangeNode.computeStatistics(optExpr.getStatistics());
    // Only plain ROW_NUMBER-style TopN can cap rows at the exchange; other TopN
    // kinds clear the limit here.
    if (TopNType.ROW_NUMBER.equals(topNType)) {
        exchangeNode.setLimit(limit);
    } else {
        exchangeNode.unsetLimit();
    }
    PlanFragment fragment =
            new PlanFragment(context.getNextFragmentId(), exchangeNode, dataPartition);
    inputFragment.setDestination(exchangeNode);
    inputFragment.setOutputPartition(dataPartition);
    fragment.setQueryGlobalDicts(inputFragment.getQueryGlobalDicts());
    context.getFragments().add(fragment);
    return fragment;
}
/**
 * Builds the partial (local) phase of a TopN: constructs the materialized sort tuple
 * (ordering columns first, then every remaining output column), the SortInfo, and a
 * SortNode placed on top of the input fragment.
 *
 * @param partitionByColumns columns of a partitioned TopN (may be empty)
 * @param partitionLimit     per-partition row limit for partitioned TopN
 * @param limit              overall limit; Operator.DEFAULT_LIMIT means "no limit"
 */
private PlanFragment buildPartialTopNFragment(OptExpression optExpr, ExecPlan context,
                                              List<ColumnRefOperator> partitionByColumns, long partitionLimit,
                                              OrderSpec orderSpec, TopNType topNType, long limit, long offset,
                                              PlanFragment inputFragment) {
    List<Expr> resolvedTupleExprs = Lists.newArrayList();
    List<Expr> partitionExprs = Lists.newArrayList();
    List<Expr> sortExprs = Lists.newArrayList();
    TupleDescriptor sortTuple = context.getDescTbl().createTupleDescriptor();
    // Translate partition-by columns (if any) into executable expressions.
    if (CollectionUtils.isNotEmpty(partitionByColumns)) {
        for (ColumnRefOperator partitionByColumn : partitionByColumns) {
            Expr expr = ScalarOperatorToExpr.buildExecExpression(partitionByColumn,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            partitionExprs.add(expr);
        }
    }
    // One slot per ordering column; re-point the column ref to the new sort tuple so
    // later references resolve against the materialized sort output.
    for (Ordering ordering : orderSpec.getOrderDescs()) {
        Expr sortExpr = ScalarOperatorToExpr.buildExecExpression(ordering.getColumnRef(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(sortTuple, new SlotId(ordering.getColumnRef().getId()));
        slotDesc.initFromExpr(sortExpr);
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(sortExpr.isNullable());
        slotDesc.setType(sortExpr.getType());
        context.getColRefToExpr()
                .put(ordering.getColumnRef(), new SlotRef(ordering.getColumnRef().toString(), slotDesc));
        resolvedTupleExprs.add(sortExpr);
        sortExprs.add(new SlotRef(slotDesc));
    }
    ColumnRefSet columnRefSet = optExpr.inputAt(0).getLogicalProperty().getOutputColumns();
    for (int i = 0; i < columnRefSet.getColumnIds().length; ++i) {
        /*
         * Add column not be used in ordering
         */
        ColumnRefOperator columnRef = columnRefFactory.getColumnRef(columnRefSet.getColumnIds()[i]);
        if (orderSpec.getOrderDescs().stream().map(Ordering::getColumnRef)
                .noneMatch(c -> c.equals(columnRef))) {
            Expr outputExpr = ScalarOperatorToExpr.buildExecExpression(columnRef,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            SlotDescriptor slotDesc =
                    context.getDescTbl().addSlotDescriptor(sortTuple, new SlotId(columnRef.getId()));
            slotDesc.initFromExpr(outputExpr);
            slotDesc.setIsMaterialized(true);
            slotDesc.setIsNullable(outputExpr.isNullable());
            slotDesc.setType(outputExpr.getType());
            context.getColRefToExpr().put(columnRef, new SlotRef(columnRef.toString(), slotDesc));
            resolvedTupleExprs.add(outputExpr);
        }
    }
    sortTuple.computeMemLayout();
    SortInfo sortInfo = new SortInfo(partitionExprs, partitionLimit, sortExprs,
            orderSpec.getOrderDescs().stream().map(Ordering::isAscending).collect(Collectors.toList()),
            orderSpec.getOrderDescs().stream().map(Ordering::isNullsFirst).collect(Collectors.toList()));
    sortInfo.setMaterializedTupleInfo(sortTuple, resolvedTupleExprs);
    // useTopN when a limit is present; isDefaultLimit when there is none.
    SortNode sortNode = new SortNode(
            context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            sortInfo,
            limit != Operator.DEFAULT_LIMIT,
            limit == Operator.DEFAULT_LIMIT,
            0);
    sortNode.setTopNType(topNType);
    sortNode.setLimit(limit);
    sortNode.setOffset(offset);
    sortNode.resolvedTupleExprs = resolvedTupleExprs;
    sortNode.setHasNullableGenerateChild();
    sortNode.computeStatistics(optExpr.getStatistics());
    if (shouldBuildGlobalRuntimeFilter()) {
        sortNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    inputFragment.setPlanRoot(sortNode);
    return inputFragment;
}
/**
 * Marks the join for right-table push-down when the session enables it and the join
 * type supports it (inner, left-semi, or right join).
 */
private void setJoinPushDown(JoinNode node) {
    JoinOperator joinOp = node.getJoinOp();
    boolean pushDown = ConnectContext.get().getSessionVariable().isHashJoinPushDownRightTable()
            && (joinOp.isInnerJoin() || joinOp.isLeftSemiJoin() || joinOp.isRightJoin());
    node.setIsPushDown(pushDown);
}
/**
 * Global runtime filters are built when a connect context exists and either the
 * global-runtime-filter flag or the pipeline engine is enabled in the session.
 */
private boolean shouldBuildGlobalRuntimeFilter() {
    ConnectContext connectContext = ConnectContext.get();
    if (connectContext == null) {
        return false;
    }
    return connectContext.getSessionVariable().getEnableGlobalRuntimeFilter()
            || connectContext.getSessionVariable().isEnablePipelineEngine();
}
/**
 * Plans a hash join: both children are planned first, then the shared join-building
 * routine assembles the join node and fragments.
 */
@Override
public PlanFragment visitPhysicalHashJoin(OptExpression optExpr, ExecPlan context) {
    PlanFragment probeFragment = visit(optExpr.inputAt(0), context);
    PlanFragment buildFragment = visit(optExpr.inputAt(1), context);
    return visitPhysicalJoin(probeFragment, buildFragment, optExpr, context);
}
/**
 * Splits a predicate into its conjuncts and translates each one into an executable
 * expression. Returns an empty list for a null predicate.
 */
private List<Expr> extractConjuncts(ScalarOperator predicate, ExecPlan context) {
    List<Expr> conjuncts = Lists.newArrayList();
    for (ScalarOperator conjunct : Utils.extractConjuncts(predicate)) {
        conjuncts.add(ScalarOperatorToExpr.buildExecExpression(conjunct,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
    }
    return conjuncts;
}
/**
 * Marks slots nullable for a join: any tuple already nullable on either side stays
 * nullable, and the non-preserved side of an outer join (right side of LEFT OUTER,
 * left side of RIGHT OUTER, both sides of FULL OUTER) has all its tuples added.
 * Recomputes the memory layout of every affected tuple.
 */
private void setNullableForJoin(JoinOperator joinOperator,
                                PlanFragment leftFragment, PlanFragment rightFragment, ExecPlan context) {
    Set<TupleId> nullableTupleIds = new HashSet<>(leftFragment.getPlanRoot().getNullableTupleIds());
    nullableTupleIds.addAll(rightFragment.getPlanRoot().getNullableTupleIds());
    // The non-preserved side(s) of an outer join produce NULL-extended rows.
    boolean leftNullable = joinOperator.isRightOuterJoin() || joinOperator.isFullOuterJoin();
    boolean rightNullable = joinOperator.isLeftOuterJoin() || joinOperator.isFullOuterJoin();
    if (leftNullable) {
        nullableTupleIds.addAll(leftFragment.getPlanRoot().getTupleIds());
    }
    if (rightNullable) {
        nullableTupleIds.addAll(rightFragment.getPlanRoot().getTupleIds());
    }
    for (TupleId tupleId : nullableTupleIds) {
        TupleDescriptor tupleDescriptor = context.getDescTbl().getTupleDesc(tupleId);
        tupleDescriptor.getSlots().forEach(slot -> slot.setIsNullable(true));
        tupleDescriptor.computeMemLayout();
    }
}
/**
 * Plans a nested-loop join: plans both children, splits the remaining filter predicate
 * and the ON predicate into conjunct lists, builds a NestLoopJoinNode, and merges the
 * right fragment into the left one (the join executes in the left fragment).
 */
@Override
public PlanFragment visitPhysicalNestLoopJoin(OptExpression optExpr, ExecPlan context) {
    PhysicalJoinOperator node = (PhysicalJoinOperator) optExpr.getOp();
    PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);
    List<Expr> conjuncts = extractConjuncts(node.getPredicate(), context);
    List<Expr> joinOnConjuncts = extractConjuncts(node.getOnPredicate(), context);
    // When both required child distributions are hash distributions, record the
    // probe-side shuffle expressions on the join node.
    List<Expr> probePartitionByExprs = Lists.newArrayList();
    DistributionSpec leftDistributionSpec =
            optExpr.getRequiredProperties().get(0).getDistributionProperty().getSpec();
    DistributionSpec rightDistributionSpec =
            optExpr.getRequiredProperties().get(1).getDistributionProperty().getSpec();
    if (leftDistributionSpec instanceof HashDistributionSpec &&
            rightDistributionSpec instanceof HashDistributionSpec) {
        probePartitionByExprs = getShuffleExprs((HashDistributionSpec) leftDistributionSpec, context);
    }
    // Outer joins make the non-preserved side's slots nullable.
    setNullableForJoin(node.getJoinType(), leftFragment, rightFragment, context);
    NestLoopJoinNode joinNode = new NestLoopJoinNode(context.getNextNodeId(),
            leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
            null, node.getJoinType(), Lists.newArrayList(), joinOnConjuncts);
    joinNode.setLimit(node.getLimit());
    joinNode.computeStatistics(optExpr.getStatistics());
    joinNode.addConjuncts(conjuncts);
    joinNode.setProbePartitionByExprs(probePartitionByExprs);
    // Merge the right fragment into the left: re-home the right plan root, drop the
    // right fragment, and re-append the left fragment so it stays last in the list.
    rightFragment.getPlanRoot().setFragment(leftFragment);
    context.getFragments().remove(rightFragment);
    context.getFragments().remove(leftFragment);
    context.getFragments().add(leftFragment);
    leftFragment.setPlanRoot(joinNode);
    leftFragment.addChildren(rightFragment.getChildren());
    // NOTE(review): a build side not fed by an exchange is marked replicated --
    // presumably its data is already local to every instance; confirm.
    if (!(joinNode.getChild(1) instanceof ExchangeNode)) {
        joinNode.setReplicated(true);
    }
    if (shouldBuildGlobalRuntimeFilter()) {
        joinNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
    return leftFragment;
}
/**
 * Plans a merge join. When both child expressions have inputs, the planned child
 * roots are temporarily detached so the shared join-building routine operates on the
 * nodes below them; afterwards the original expressions and plan roots are restored
 * around the generated join node.
 */
@Override
public PlanFragment visitPhysicalMergeJoin(OptExpression optExpr, ExecPlan context) {
    PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);
    // Save the current roots so they can be re-attached after the join is built.
    PlanNode leftPlanRoot = leftFragment.getPlanRoot();
    PlanNode rightPlanRoot = rightFragment.getPlanRoot();
    OptExpression leftExpression = optExpr.inputAt(0);
    OptExpression rightExpression = optExpr.inputAt(1);
    // NOTE(review): presumably this detects sort nodes produced for the merge join
    // whose inputs should feed the join directly -- confirm against the optimizer's
    // merge-join plan shape.
    boolean needDealSort = leftExpression.getInputs().size() > 0 && rightExpression.getInputs().size() > 0;
    if (needDealSort) {
        // Detach: let the join be built over the children below the current roots.
        optExpr.setChild(0, leftExpression.inputAt(0));
        optExpr.setChild(1, rightExpression.inputAt(0));
        leftFragment.setPlanRoot(leftPlanRoot.getChild(0));
        rightFragment.setPlanRoot(rightPlanRoot.getChild(0));
    }
    PlanFragment planFragment = visitPhysicalJoin(leftFragment, rightFragment, optExpr, context);
    if (needDealSort) {
        // Restore the expression tree and put the saved roots back under the join.
        leftExpression.setChild(0, optExpr.inputAt(0));
        rightExpression.setChild(0, optExpr.inputAt(1));
        optExpr.setChild(0, leftExpression);
        optExpr.setChild(1, rightExpression);
        planFragment.getPlanRoot().setChild(0, leftPlanRoot);
        planFragment.getPlanRoot().setChild(1, rightPlanRoot);
    }
    return planFragment;
}
/**
 * Resolves the shuffle column ids of a hash distribution spec into column ref
 * operators. The spec must carry at least one shuffle column.
 */
private List<ColumnRefOperator> getShuffleColumns(HashDistributionSpec spec) {
    List<Integer> columnIds = spec.getShuffleColumns();
    Preconditions.checkState(!columnIds.isEmpty());
    return columnIds.stream()
            .map(columnRefFactory::getColumnRef)
            .collect(Collectors.toList());
}
/**
 * Translates the shuffle columns of a hash distribution spec into executable
 * expressions.
 */
private List<Expr> getShuffleExprs(HashDistributionSpec hashDistributionSpec, ExecPlan context) {
    List<Expr> shuffleExprs = Lists.newArrayList();
    for (ColumnRefOperator shuffleColumn : getShuffleColumns(hashDistributionSpec)) {
        shuffleExprs.add(ScalarOperatorToExpr.buildExecExpression(shuffleColumn,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
    }
    return shuffleExprs;
}
/**
 * Shared planning routine for hash and merge joins: infers the distribution mode,
 * splits the predicates into equi-join / other-join / filter conjuncts, builds the
 * concrete join node, prunes its output slots from the projection, and delegates
 * fragment assembly to buildJoinFragment.
 */
private PlanFragment visitPhysicalJoin(PlanFragment leftFragment, PlanFragment rightFragment,
                                       OptExpression optExpr, ExecPlan context) {
    PhysicalJoinOperator node = (PhysicalJoinOperator) optExpr.getOp();
    ColumnRefSet leftChildColumns = optExpr.inputAt(0).getLogicalProperty().getOutputColumns();
    ColumnRefSet rightChildColumns = optExpr.inputAt(1).getLogicalProperty().getOutputColumns();
    JoinOperator joinOperator = node.getJoinType();
    Preconditions.checkState(!joinOperator.isCrossJoin(), "should not be cross join");
    // Look through DecodeNodes so distribution inference sees the real child roots.
    PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot();
    PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot();
    if (leftFragmentPlanRoot instanceof DecodeNode) {
        leftFragmentPlanRoot = leftFragmentPlanRoot.getChild(0);
    }
    if (rightFragmentPlanRoot instanceof DecodeNode) {
        rightFragmentPlanRoot = rightFragmentPlanRoot.getChild(0);
    }
    // When both required child distributions are hash distributions, record the
    // probe-side shuffle expressions on the join node.
    List<Expr> probePartitionByExprs = Lists.newArrayList();
    DistributionSpec leftDistributionSpec =
            optExpr.getRequiredProperties().get(0).getDistributionProperty().getSpec();
    DistributionSpec rightDistributionSpec =
            optExpr.getRequiredProperties().get(1).getDistributionProperty().getSpec();
    if (leftDistributionSpec instanceof HashDistributionSpec &&
            rightDistributionSpec instanceof HashDistributionSpec) {
        probePartitionByExprs = getShuffleExprs((HashDistributionSpec) leftDistributionSpec, context);
    }
    JoinNode.DistributionMode distributionMode =
            inferDistributionMode(optExpr, leftFragmentPlanRoot, rightFragmentPlanRoot);
    // Split predicates: equi-join conjuncts, other join conjuncts, and filter conjuncts.
    JoinExprInfo joinExpr = buildJoinExpr(optExpr, context);
    List<Expr> eqJoinConjuncts = joinExpr.eqJoinConjuncts;
    List<Expr> otherJoinConjuncts = joinExpr.otherJoin;
    List<Expr> conjuncts = joinExpr.conjuncts;
    // Outer joins make the non-preserved side's slots nullable.
    setNullableForJoin(joinOperator, leftFragment, rightFragment, context);
    JoinNode joinNode;
    if (node instanceof PhysicalHashJoinOperator) {
        joinNode = new HashJoinNode(
                context.getNextNodeId(),
                leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
                joinOperator, eqJoinConjuncts, otherJoinConjuncts);
    } else if (node instanceof PhysicalMergeJoinOperator) {
        joinNode = new MergeJoinNode(
                context.getNextNodeId(),
                leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
                joinOperator, eqJoinConjuncts, otherJoinConjuncts);
    } else {
        throw new StarRocksPlannerException("unknown join operator: " + node, INTERNAL_ERROR);
    }
    // Restrict the join's output slots to the columns the projection actually uses
    // (union of used columns, minus the common sub-operator keys themselves).
    if (node.getProjection() != null) {
        ColumnRefSet outputColumns = new ColumnRefSet();
        for (ScalarOperator s : node.getProjection().getColumnRefMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        for (ScalarOperator s : node.getProjection().getCommonSubOperatorMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        outputColumns.except(new ArrayList<>(node.getProjection().getCommonSubOperatorMap().keySet()));
        joinNode.setOutputSlots(outputColumns.getStream().collect(Collectors.toList()));
    }
    joinNode.setDistributionMode(distributionMode);
    joinNode.getConjuncts().addAll(conjuncts);
    joinNode.setLimit(node.getLimit());
    joinNode.computeStatistics(optExpr.getStatistics());
    joinNode.setProbePartitionByExprs(probePartitionByExprs);
    if (shouldBuildGlobalRuntimeFilter()) {
        joinNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    return buildJoinFragment(context, leftFragment, rightFragment, distributionMode, joinNode);
}
/**
 * Returns true when the node is an ExchangeNode whose distribution type equals
 * the expected type (null-safe comparison).
 */
private boolean isExchangeWithDistributionType(PlanNode node, DistributionSpec.DistributionType expectedType) {
    return node instanceof ExchangeNode
            && Objects.equals(((ExchangeNode) node).getDistributionType(), expectedType);
}
/**
 * A join is colocate when every required child property is a hash shuffle whose
 * source type is LOCAL (bucket-local distribution).
 */
private boolean isColocateJoin(OptExpression optExpression) {
    return optExpression.getRequiredProperties().stream().allMatch(property -> {
        if (!property.getDistributionProperty().isShuffle()) {
            return false;
        }
        HashDistributionSpec spec =
                (HashDistributionSpec) property.getDistributionProperty().getSpec();
        return spec.getHashDistributionDesc().getSourceType() == HashDistributionDesc.SourceType.LOCAL;
    });
}
/**
 * A join is a shuffle join when every required child property is a hash shuffle whose
 * source type is one of SHUFFLE_JOIN, SHUFFLE_ENFORCE, or SHUFFLE_AGG.
 */
public boolean isShuffleJoin(OptExpression optExpression) {
    return optExpression.getRequiredProperties().stream().allMatch(property -> {
        if (!property.getDistributionProperty().isShuffle()) {
            return false;
        }
        HashDistributionSpec spec =
                (HashDistributionSpec) property.getDistributionProperty().getSpec();
        HashDistributionDesc.SourceType sourceType = spec.getHashDistributionDesc().getSourceType();
        return sourceType == HashDistributionDesc.SourceType.SHUFFLE_JOIN
                || sourceType == HashDistributionDesc.SourceType.SHUFFLE_ENFORCE
                || sourceType == HashDistributionDesc.SourceType.SHUFFLE_AGG;
    });
}
/**
 * Merges the shuffle-side fragment of a bucket-shuffle join into the surviving
 * fragment: the removed fragment's child is repartitioned with
 * BUCKET_SHUFFLE_HASH_PARTITIONED output, and its children are re-parented onto the
 * staying fragment, which takes the join node as its new root.
 */
public PlanFragment computeBucketShufflePlanFragment(ExecPlan context,
                                                     PlanFragment stayFragment,
                                                     PlanFragment removeFragment, JoinNode hashJoinNode) {
    hashJoinNode.setLocalHashBucket(true);
    hashJoinNode.setPartitionExprs(removeFragment.getDataPartition().getPartitionExprs());
    removeFragment.getChild(0)
            .setOutputPartition(new DataPartition(TPartitionType.BUCKET_SHUFFLE_HASH_PARTITIONED,
                    removeFragment.getDataPartition().getPartitionExprs()));
    // Drop the merged fragment; re-append the surviving one so it stays last.
    context.getFragments().remove(removeFragment);
    context.getFragments().remove(stayFragment);
    context.getFragments().add(stayFragment);
    stayFragment.setPlanRoot(hashJoinNode);
    stayFragment.addChildren(removeFragment.getChildren());
    stayFragment.mergeQueryGlobalDicts(removeFragment.getQueryGlobalDicts());
    return stayFragment;
}
/**
 * Merges the shuffle-side fragment of a shuffle hash join into the surviving
 * fragment: the removed fragment's child is repartitioned with HASH_PARTITIONED
 * output, and its children are re-parented onto the staying fragment, which takes
 * the join node as its new root.
 */
public PlanFragment computeShuffleHashBucketPlanFragment(ExecPlan context,
                                                         PlanFragment stayFragment,
                                                         PlanFragment removeFragment,
                                                         JoinNode hashJoinNode) {
    hashJoinNode.setPartitionExprs(removeFragment.getDataPartition().getPartitionExprs());
    DataPartition dataPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
            removeFragment.getDataPartition().getPartitionExprs());
    removeFragment.getChild(0).setOutputPartition(dataPartition);
    // Drop the merged fragment; re-append the surviving one so it stays last.
    context.getFragments().remove(removeFragment);
    context.getFragments().remove(stayFragment);
    context.getFragments().add(stayFragment);
    stayFragment.setPlanRoot(hashJoinNode);
    stayFragment.addChildren(removeFragment.getChildren());
    stayFragment.mergeQueryGlobalDicts(removeFragment.getQueryGlobalDicts());
    return stayFragment;
}
/**
 * Plans an assert-one-row operator: marks every slot of the child's tuples nullable,
 * then places an AssertNumRowsNode that enforces the operator's row-count assertion.
 */
@Override
public PlanFragment visitPhysicalAssertOneRow(OptExpression optExpression, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpression.inputAt(0), context);
    // All child tuple slots must accept NULLs before the assertion node is added.
    for (TupleId tupleId : inputFragment.getPlanRoot().getTupleIds()) {
        context.getDescTbl().getTupleDesc(tupleId).getSlots().forEach(slot -> slot.setIsNullable(true));
    }
    PhysicalAssertOneRowOperator assertOp = (PhysicalAssertOneRowOperator) optExpression.getOp();
    AssertNumRowsElement assertion = new AssertNumRowsElement(
            assertOp.getCheckRows(), assertOp.getTips(), assertOp.getAssertion());
    AssertNumRowsNode assertNode = new AssertNumRowsNode(
            context.getNextNodeId(), inputFragment.getPlanRoot(), assertion);
    assertNode.computeStatistics(optExpression.getStatistics());
    inputFragment.setPlanRoot(assertNode);
    return inputFragment;
}
/**
 * Plans a window (analytic) operator: builds the output tuple with one slot per
 * analytic call, translates the partition/order-by expressions, creates the
 * AnalyticEvalNode, and applies colocation, predicates, and one-tablet
 * optimizations.
 */
@Override
public PlanFragment visitPhysicalAnalytic(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalWindowOperator node = (PhysicalWindowOperator) optExpr.getOp();
    List<Expr> analyticFnCalls = new ArrayList<>();
    // One output slot per analytic function call; later references resolve to it.
    TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor();
    for (Map.Entry<ColumnRefOperator, CallOperator> analyticCall : node.getAnalyticCall().entrySet()) {
        Expr analyticFunction = ScalarOperatorToExpr.buildExecExpression(analyticCall.getValue(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        analyticFnCalls.add(analyticFunction);
        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(outputTupleDesc, new SlotId(analyticCall.getKey().getId()));
        slotDesc.setType(analyticFunction.getType());
        slotDesc.setIsNullable(analyticFunction.isNullable());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr()
                .put(analyticCall.getKey(), new SlotRef(analyticCall.getKey().toString(), slotDesc));
    }
    // Translate PARTITION BY and ORDER BY into executable expressions.
    List<Expr> partitionExprs =
            node.getPartitionExpressions().stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                    .collect(Collectors.toList());
    List<OrderByElement> orderByElements = node.getOrderByElements().stream().map(e -> new OrderByElement(
            ScalarOperatorToExpr.buildExecExpression(e.getColumnRef(),
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())),
            e.isAscending(), e.isNullsFirst())).collect(Collectors.toList());
    AnalyticEvalNode analyticEvalNode = new AnalyticEvalNode(
            context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            analyticFnCalls,
            partitionExprs,
            orderByElements,
            node.getAnalyticWindow(),
            node.isUseHashBasedPartition(),
            null, outputTupleDesc, null, null,
            context.getDescTbl().createTupleDescriptor());
    analyticEvalNode.setSubstitutedPartitionExprs(partitionExprs);
    analyticEvalNode.setLimit(node.getLimit());
    analyticEvalNode.setHasNullableGenerateChild();
    analyticEvalNode.computeStatistics(optExpr.getStatistics());
    // Colocate execution when the fragment contains a colocate OLAP scan.
    if (hasColocateOlapScanChildInFragment(analyticEvalNode)) {
        analyticEvalNode.setColocate(true);
    }
    // Attach any remaining filter predicate as conjuncts on the analytic node.
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        analyticEvalNode.getConjuncts()
                .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }
    // A child SortNode is informed of the analytic partition expressions.
    PlanNode root = inputFragment.getPlanRoot();
    if (root instanceof SortNode) {
        SortNode sortNode = (SortNode) root;
        sortNode.setAnalyticPartitionExprs(analyticEvalNode.getPartitionExprs());
    }
    if (optExpr.getLogicalProperty().oneTabletProperty().supportOneTabletOpt) {
        clearOlapScanNodePartitions(analyticEvalNode);
    }
    inputFragment.setPlanRoot(analyticEvalNode);
    return inputFragment;
}
/**
 * Builds the plan fragment for a set operation (UNION / EXCEPT / INTERSECT):
 * creates the result tuple with one slot per output column ref, the concrete
 * SetOperationNode, the output-slot-to-child-slot mappings, and connects every
 * planned child through an exchange (RANDOM for union, hash-partitioned on the
 * materialized child output for except/intersect).
 *
 * @throws StarRocksPlannerException if {@code operatorType} is not a set operation
 */
private PlanFragment buildSetOperation(OptExpression optExpr, ExecPlan context, OperatorType operatorType) {
    PhysicalSetOperation setOperation = (PhysicalSetOperation) optExpr.getOp();
    // Result tuple: one slot per output column ref of the set operation.
    TupleDescriptor setOperationTuple = context.getDescTbl().createTupleDescriptor();
    for (ColumnRefOperator columnRefOperator : setOperation.getOutputColumnRefOp()) {
        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(setOperationTuple, new SlotId(columnRefOperator.getId()));
        slotDesc.setType(columnRefOperator.getType());
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(columnRefOperator.isNullable());
        context.getColRefToExpr().put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDesc));
    }
    SetOperationNode setOperationNode;
    boolean isUnion = false;
    if (operatorType.equals(OperatorType.PHYSICAL_UNION)) {
        isUnion = true;
        setOperationNode = new UnionNode(context.getNextNodeId(), setOperationTuple.getId());
        setOperationNode.setFirstMaterializedChildIdx_(optExpr.arity());
    } else if (operatorType.equals(OperatorType.PHYSICAL_EXCEPT)) {
        setOperationNode = new ExceptNode(context.getNextNodeId(), setOperationTuple.getId());
    } else if (operatorType.equals(OperatorType.PHYSICAL_INTERSECT)) {
        setOperationNode = new IntersectNode(context.getNextNodeId(), setOperationTuple.getId());
    } else {
        throw new StarRocksPlannerException("Unsupported set operation", INTERNAL_ERROR);
    }
    // For each child, map every output slot id to the matching child slot id.
    List<Map<Integer, Integer>> outputSlotIdToChildSlotIdMaps = new ArrayList<>();
    for (int childIdx = 0; childIdx < optExpr.arity(); ++childIdx) {
        Map<Integer, Integer> slotIdMap = new HashMap<>();
        List<ColumnRefOperator> childOutput = setOperation.getChildOutputColumns().get(childIdx);
        Preconditions.checkState(childOutput.size() == setOperation.getOutputColumnRefOp().size());
        for (int columnIdx = 0; columnIdx < setOperation.getOutputColumnRefOp().size(); ++columnIdx) {
            Integer resultColumnIdx = setOperation.getOutputColumnRefOp().get(columnIdx).getId();
            slotIdMap.put(resultColumnIdx, childOutput.get(columnIdx).getId());
        }
        outputSlotIdToChildSlotIdMaps.add(slotIdMap);
        Preconditions.checkState(slotIdMap.size() == setOperation.getOutputColumnRefOp().size());
    }
    setOperationNode.setOutputSlotIdToChildSlotIdMaps(outputSlotIdToChildSlotIdMaps);
    Preconditions.checkState(optExpr.getInputs().size() == setOperation.getChildOutputColumns().size());
    PlanFragment setOperationFragment =
            new PlanFragment(context.getNextFragmentId(), setOperationNode, DataPartition.RANDOM);
    // Plan each child and connect it to the set-operation fragment via an exchange.
    List<List<Expr>> materializedResultExprLists = Lists.newArrayList();
    for (int i = 0; i < optExpr.getInputs().size(); i++) {
        List<ColumnRefOperator> childOutput = setOperation.getChildOutputColumns().get(i);
        PlanFragment fragment = visit(optExpr.getInputs().get(i), context);
        List<Expr> materializedExpressions = Lists.newArrayList();
        for (ColumnRefOperator ref : childOutput) {
            SlotDescriptor slotDescriptor = context.getDescTbl().getSlotDesc(new SlotId(ref.getId()));
            materializedExpressions.add(new SlotRef(slotDescriptor));
        }
        materializedResultExprLists.add(materializedExpressions);
        // Union children can be sent anywhere; except/intersect children must be
        // hash-partitioned so matching rows meet in the same instance.
        if (isUnion) {
            fragment.setOutputPartition(DataPartition.RANDOM);
        } else {
            fragment.setOutputPartition(DataPartition.hashPartitioned(materializedExpressions));
        }
        ExchangeNode exchangeNode =
                new ExchangeNode(context.getNextNodeId(), fragment.getPlanRoot(), fragment.getDataPartition());
        exchangeNode.setFragment(setOperationFragment);
        fragment.setDestination(exchangeNode);
        setOperationNode.addChild(exchangeNode);
    }
    setOperationNode.setHasNullableGenerateChild();
    // Hoisted loop invariant (was re-evaluated per output column).
    boolean hasNullableGenerateChild = setOperationNode.isHasNullableGenerateChild();
    List<Expr> setOutputList = Lists.newArrayList();
    for (ColumnRefOperator columnRefOperator : setOperation.getOutputColumnRefOp()) {
        SlotDescriptor slotDesc = context.getDescTbl().getSlotDesc(new SlotId(columnRefOperator.getId()));
        // Logical || replaces the original non-short-circuit bitwise | on booleans
        // (same result, idiomatic form).
        slotDesc.setIsNullable(slotDesc.getIsNullable() || hasNullableGenerateChild);
        setOutputList.add(new SlotRef(String.valueOf(columnRefOperator.getId()), slotDesc));
    }
    setOperationTuple.computeMemLayout();
    setOperationNode.setSetOperationOutputList(setOutputList);
    setOperationNode.setMaterializedResultExprLists_(materializedResultExprLists);
    setOperationNode.setLimit(setOperation.getLimit());
    setOperationNode.computeStatistics(optExpr.getStatistics());
    context.getFragments().add(setOperationFragment);
    return setOperationFragment;
}
/** Plans a UNION via the shared set-operation builder. */
@Override
public PlanFragment visitPhysicalUnion(OptExpression optExpr, ExecPlan context) {
    return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_UNION);
}
/** Plans an EXCEPT via the shared set-operation builder. */
@Override
public PlanFragment visitPhysicalExcept(OptExpression optExpr, ExecPlan context) {
    return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_EXCEPT);
}
/** Plans an INTERSECT via the shared set-operation builder. */
@Override
public PlanFragment visitPhysicalIntersect(OptExpression optExpr, ExecPlan context) {
    return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_INTERSECT);
}
/**
 * Plans a repeat operator (GROUPING SETS / ROLLUP / CUBE expansion): builds the
 * output grouping tuple, the per-repeat slot-id sets, and a RepeatNode carrying the
 * grouping ids and any remaining filter conjuncts.
 */
@Override
public PlanFragment visitPhysicalRepeat(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalRepeatOperator repeatOperator = (PhysicalRepeatOperator) optExpr.getOp();
    // One slot per output grouping column; later references resolve to this tuple.
    TupleDescriptor outputGroupingTuple = context.getDescTbl().createTupleDescriptor();
    for (ColumnRefOperator columnRefOperator : repeatOperator.getOutputGrouping()) {
        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(outputGroupingTuple, new SlotId(columnRefOperator.getId()));
        slotDesc.setType(columnRefOperator.getType());
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(columnRefOperator.isNullable());
        context.getColRefToExpr().put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDesc));
    }
    outputGroupingTuple.computeMemLayout();
    // For each repeat (grouping set), the set of slot ids it keeps.
    List<Set<Integer>> repeatSlotIdList = new ArrayList<>();
    for (List<ColumnRefOperator> repeat : repeatOperator.getRepeatColumnRef()) {
        repeatSlotIdList.add(
                repeat.stream().map(ColumnRefOperator::getId).collect(Collectors.toSet()));
    }
    RepeatNode repeatNode = new RepeatNode(
            context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            outputGroupingTuple,
            repeatSlotIdList,
            repeatOperator.getGroupingIds());
    // Attach any remaining filter predicate as conjuncts on the repeat node.
    List<ScalarOperator> predicates = Utils.extractConjuncts(repeatOperator.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        repeatNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }
    repeatNode.computeStatistics(optExpr.getStatistics());
    inputFragment.setPlanRoot(repeatNode);
    return inputFragment;
}
/**
 * Plans a filter operator: translates each conjunct of the filter predicate into an
 * executable expression and wraps the child plan in a SelectNode.
 */
@Override
public PlanFragment visitPhysicalFilter(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalFilterOperator filter = (PhysicalFilterOperator) optExpr.getOp();
    List<Expr> predicates = Lists.newArrayList();
    for (ScalarOperator conjunct : Utils.extractConjuncts(filter.getPredicate())) {
        predicates.add(ScalarOperatorToExpr.buildExecExpression(conjunct,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
    }
    SelectNode selectNode =
            new SelectNode(context.getNextNodeId(), inputFragment.getPlanRoot(), predicates);
    selectNode.setLimit(filter.getLimit());
    selectNode.computeStatistics(optExpr.getStatistics());
    inputFragment.setPlanRoot(selectNode);
    return inputFragment;
}
/**
 * Plans a table function (UDTF) operator: builds the output tuple with one slot per
 * output column and places a TableFunctionNode carrying the function plus the slot
 * ids of its parameters, the outer columns, and the function results.
 */
@Override
public PlanFragment visitPhysicalTableFunction(OptExpression optExpression, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpression.inputAt(0), context);
    PhysicalTableFunctionOperator tableFunction = (PhysicalTableFunctionOperator) optExpression.getOp();
    // One output slot per table-function output column.
    TupleDescriptor outputTuple = context.getDescTbl().createTupleDescriptor();
    for (ColumnRefOperator outputColumn : tableFunction.getOutputColRefs()) {
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(outputTuple, new SlotId(outputColumn.getId()));
        slotDesc.setType(outputColumn.getType());
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(outputColumn.isNullable());
        context.getColRefToExpr().put(outputColumn, new SlotRef(outputColumn.toString(), slotDesc));
    }
    outputTuple.computeMemLayout();
    // Slot ids of the function parameters, outer (pass-through) columns, and results.
    List<Integer> paramSlotIds = tableFunction.getFnParamColumnRefs().stream()
            .map(ColumnRefOperator::getId).collect(Collectors.toList());
    List<Integer> outerSlotIds = tableFunction.getOuterColRefs().stream()
            .map(ColumnRefOperator::getId).collect(Collectors.toList());
    List<Integer> fnResultSlotIds = tableFunction.getFnResultColRefs().stream()
            .map(ColumnRefOperator::getId).collect(Collectors.toList());
    TableFunctionNode tableFunctionNode = new TableFunctionNode(context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            outputTuple,
            tableFunction.getFn(),
            paramSlotIds,
            outerSlotIds,
            fnResultSlotIds
    );
    tableFunctionNode.computeStatistics(optExpression.getStatistics());
    tableFunctionNode.setLimit(tableFunction.getLimit());
    inputFragment.setPlanRoot(tableFunctionNode);
    return inputFragment;
}
@Override
public PlanFragment visitPhysicalLimit(OptExpression optExpression, ExecPlan context) {
    // No LimitNode is generated here; presumably the limit has already been merged
    // into the child's plan node — TODO(review): confirm against the optimizer rules.
    return visit(optExpression.inputAt(0), context);
}
@Override
public PlanFragment visitPhysicalCTEConsume(OptExpression optExpression, ExecPlan context) {
    // Builds the consumer side of a CTE: an exchange node that receives rows multicast
    // from the CTE producer fragment registered under the same CTE id.
    PhysicalCTEConsumeOperator consume = (PhysicalCTEConsumeOperator) optExpression.getOp();
    int cteId = consume.getCteId();

    // The producer fragment must already be planned (the CTE anchor visits input 0 first).
    MultiCastPlanFragment cteFragment = (MultiCastPlanFragment) context.getCteProduceFragments().get(cteId);
    ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
            cteFragment.getPlanRoot(), DistributionSpec.DistributionType.SHUFFLE);

    exchangeNode.setReceiveColumns(consume.getCteOutputColumnRefMap().values().stream()
            .map(ColumnRefOperator::getId).collect(Collectors.toList()));
    exchangeNode.setDataPartition(cteFragment.getDataPartition());
    exchangeNode.setNumInstances(cteFragment.getPlanRoot().getNumInstances());

    PlanFragment consumeFragment = new PlanFragment(context.getNextFragmentId(), exchangeNode,
            cteFragment.getDataPartition());

    // Re-map the producer's output columns to this consumer's own column refs via a projection.
    Map<ColumnRefOperator, ScalarOperator> projectMap = Maps.newHashMap();
    projectMap.putAll(consume.getCteOutputColumnRefMap());
    consumeFragment = buildProjectNode(optExpression, new Projection(projectMap), consumeFragment, context);
    consumeFragment.setQueryGlobalDicts(cteFragment.getQueryGlobalDicts());
    consumeFragment.setLoadGlobalDicts(cteFragment.getLoadGlobalDicts());

    // Consumer-local predicate is applied on top of the projection.
    if (consume.getPredicate() != null) {
        List<Expr> predicates = Utils.extractConjuncts(consume.getPredicate()).stream()
                .map(d -> ScalarOperatorToExpr.buildExecExpression(d,
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                .collect(Collectors.toList());
        SelectNode selectNode =
                new SelectNode(context.getNextNodeId(), consumeFragment.getPlanRoot(), predicates);
        selectNode.computeStatistics(optExpression.getStatistics());
        consumeFragment.setPlanRoot(selectNode);
    }

    if (consume.hasLimit()) {
        consumeFragment.getPlanRoot().setLimit(consume.getLimit());
    }

    // Register this exchange as one more destination of the multicast producer.
    cteFragment.getDestNodeList().add(exchangeNode);
    consumeFragment.addChild(cteFragment);
    context.getFragments().add(consumeFragment);
    return consumeFragment;
}
@Override
public PlanFragment visitPhysicalCTEProduce(OptExpression optExpression, ExecPlan context) {
    // Plan the producer side of a CTE and wrap its fragment in a multi-cast fragment
    // so that several consumers can read the same result.
    PlanFragment childFragment = visit(optExpression.inputAt(0), context);
    int cteId = ((PhysicalCTEProduceOperator) optExpression.getOp()).getCteId();

    // The child fragment is superseded in the fragment list by its multi-cast wrapper.
    context.getFragments().remove(childFragment);
    MultiCastPlanFragment produceFragment = new MultiCastPlanFragment(childFragment);

    // Output expressions follow the operator's output column order.
    List<Expr> outputExprs = Lists.newArrayList();
    optExpression.getOutputColumns().getStream()
            .map(columnRefFactory::getColumnRef)
            .map(context.getColRefToExpr()::get)
            .forEach(outputExprs::add);
    produceFragment.setOutputExprs(outputExprs);

    context.getCteProduceFragments().put(cteId, produceFragment);
    context.getFragments().add(produceFragment);
    return childFragment;
}
@Override
public PlanFragment visitPhysicalCTEAnchor(OptExpression optExpression, ExecPlan context) {
    // Input 0 is the CTE producer: plan it first so its fragment is registered before
    // any consumer under input 1 looks it up. The anchor returns the consumer-side plan.
    visit(optExpression.inputAt(0), context);
    return visit(optExpression.inputAt(1), context);
}
@Override
public PlanFragment visitPhysicalNoCTE(OptExpression optExpression, ExecPlan context) {
    // Pass-through: a NoCTE node contributes nothing to the physical plan itself.
    return visit(optExpression.inputAt(0), context);
}
// Immutable holder for the three predicate groups produced when lowering a join:
// equi-join conjuncts, remaining (non-equality) ON conjuncts, and post-join filters.
static class JoinExprInfo {
    // Equality conjuncts usable for hash probing; left operand references the left child.
    public final List<Expr> eqJoinConjuncts;
    // ON-clause conjuncts that are not equality predicates.
    public final List<Expr> otherJoin;
    // Filters from the operator's own predicate, applied after the join.
    public final List<Expr> conjuncts;

    public JoinExprInfo(List<Expr> eqJoinConjuncts, List<Expr> otherJoin, List<Expr> conjuncts) {
        this.eqJoinConjuncts = eqJoinConjuncts;
        this.otherJoin = otherJoin;
        this.conjuncts = conjuncts;
    }
}
// Lowers a join operator's predicates into executable expressions, split into
// equi-join conjuncts, other ON conjuncts, and post-join filter conjuncts.
// Requires at least one equality predicate in the ON clause.
private JoinExprInfo buildJoinExpr(OptExpression optExpr, ExecPlan context) {
    ScalarOperator predicate = optExpr.getOp().getPredicate();
    ScalarOperator onPredicate;
    if (optExpr.getOp() instanceof PhysicalJoinOperator) {
        onPredicate = ((PhysicalJoinOperator) optExpr.getOp()).getOnPredicate();
    } else if (optExpr.getOp() instanceof PhysicalStreamJoinOperator) {
        onPredicate = ((PhysicalStreamJoinOperator) optExpr.getOp()).getOnPredicate();
    } else {
        throw new IllegalStateException("not supported join " + optExpr.getOp());
    }

    List<ScalarOperator> onPredicates = Utils.extractConjuncts(onPredicate);
    ColumnRefSet leftChildColumns = optExpr.inputAt(0).getOutputColumns();
    ColumnRefSet rightChildColumns = optExpr.inputAt(1).getOutputColumns();

    List<BinaryPredicateOperator> eqOnPredicates = JoinHelper.getEqualsPredicate(
            leftChildColumns, rightChildColumns, onPredicates);
    Preconditions.checkState(!eqOnPredicates.isEmpty(), "must be eq-join");

    // Normalize each equality so its left operand only references the left child.
    // NOTE: swap() mutates the predicate objects in place; the removeAll below relies
    // on these same instances still being present in the re-extracted conjunct list.
    for (BinaryPredicateOperator s : eqOnPredicates) {
        if (!optExpr.inputAt(0).getLogicalProperty().getOutputColumns()
                .containsAll(s.getChild(0).getUsedColumns())) {
            s.swap();
        }
    }

    List<Expr> eqJoinConjuncts =
            eqOnPredicates.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                    .collect(Collectors.toList());

    for (Expr expr : eqJoinConjuncts) {
        if (expr.isConstant()) {
            throw unsupportedException("Support join on constant predicate later");
        }
    }

    // Whatever remains of the ON clause after removing the equalities becomes "other" conjuncts.
    List<ScalarOperator> otherJoin = Utils.extractConjuncts(onPredicate);
    otherJoin.removeAll(eqOnPredicates);
    List<Expr> otherJoinConjuncts = otherJoin.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
            .collect(Collectors.toList());

    // The operator's own predicate is applied after the join.
    List<ScalarOperator> predicates = Utils.extractConjuncts(predicate);
    List<Expr> conjuncts = predicates.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
            .collect(Collectors.toList());

    return new JoinExprInfo(eqJoinConjuncts, otherJoinConjuncts, conjuncts);
}
@Override
public PlanFragment visitPhysicalStreamJoin(OptExpression optExpr, ExecPlan context) {
    // Lowers a streaming join. Only inner joins are supported, so the outer-join
    // "mark build-side slots nullable" handling that hash join performs is not needed
    // here and has been removed along with two unused child-column locals.
    PhysicalStreamJoinOperator node = (PhysicalStreamJoinOperator) optExpr.getOp();
    PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);

    if (!node.getJoinType().isInnerJoin()) {
        throw new NotImplementedException("Only inner join is supported");
    }

    PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot();
    PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot();
    // Stream join currently always uses shuffle-hash-bucket distribution.
    JoinNode.DistributionMode distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;

    JoinExprInfo joinExpr = buildJoinExpr(optExpr, context);
    List<Expr> eqJoinConjuncts = joinExpr.eqJoinConjuncts;
    List<Expr> otherJoinConjuncts = joinExpr.otherJoin;
    List<Expr> conjuncts = joinExpr.conjuncts;

    JoinNode joinNode =
            new StreamJoinNode(context.getNextNodeId(), leftFragmentPlanRoot, rightFragmentPlanRoot,
                    node.getJoinType(), eqJoinConjuncts, otherJoinConjuncts);

    if (node.getProjection() != null) {
        // Restrict the join's output slots to columns actually referenced by the projection.
        ColumnRefSet outputColumns = new ColumnRefSet();
        for (ScalarOperator s : node.getProjection().getColumnRefMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        for (ScalarOperator s : node.getProjection().getCommonSubOperatorMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        // Common sub-expression keys are internal only; they are not join output slots.
        outputColumns.except(new ArrayList<>(node.getProjection().getCommonSubOperatorMap().keySet()));
        joinNode.setOutputSlots(outputColumns.getStream().collect(Collectors.toList()));
    }

    joinNode.setDistributionMode(distributionMode);
    joinNode.getConjuncts().addAll(conjuncts);
    joinNode.setLimit(node.getLimit());
    joinNode.computeStatistics(optExpr.getStatistics());

    return buildJoinFragment(context, leftFragment, rightFragment, distributionMode, joinNode);
}
// Stitches the join node into plan fragments according to the chosen distribution
// mode and returns the fragment that now has the join as its plan root. Fragments
// absorbed into another fragment are removed from the context's fragment list.
@NotNull
private PlanFragment buildJoinFragment(ExecPlan context, PlanFragment leftFragment, PlanFragment rightFragment,
                                       JoinNode.DistributionMode distributionMode, JoinNode joinNode) {
    if (distributionMode.equals(JoinNode.DistributionMode.BROADCAST)) {
        // Broadcast: fold the right (build) side into the left fragment.
        setJoinPushDown(joinNode);
        rightFragment.getPlanRoot().setFragment(leftFragment);
        context.getFragments().remove(rightFragment);
        // Remove then re-append leftFragment, which moves it to the end of the list.
        context.getFragments().remove(leftFragment);
        context.getFragments().add(leftFragment);
        leftFragment.setPlanRoot(joinNode);
        leftFragment.addChildren(rightFragment.getChildren());
        leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
        return leftFragment;
    } else if (distributionMode.equals(JoinNode.DistributionMode.PARTITIONED)) {
        // Shuffle join: both children repartition by their join keys into a fresh fragment.
        DataPartition lhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                leftFragment.getDataPartition().getPartitionExprs());
        DataPartition rhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                rightFragment.getDataPartition().getPartitionExprs());
        leftFragment.getChild(0).setOutputPartition(lhsJoinPartition);
        rightFragment.getChild(0).setOutputPartition(rhsJoinPartition);
        context.getFragments().remove(leftFragment);
        context.getFragments().remove(rightFragment);
        PlanFragment joinFragment = new PlanFragment(context.getNextFragmentId(),
                joinNode, lhsJoinPartition);
        joinFragment.addChildren(leftFragment.getChildren());
        joinFragment.addChildren(rightFragment.getChildren());
        joinFragment.mergeQueryGlobalDicts(leftFragment.getQueryGlobalDicts());
        joinFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
        context.getFragments().add(joinFragment);
        return joinFragment;
    } else if (distributionMode.equals(JoinNode.DistributionMode.COLOCATE) ||
            distributionMode.equals(JoinNode.DistributionMode.REPLICATED)) {
        // Colocate/replicated: no network exchange; the join executes in the left fragment.
        if (distributionMode.equals(JoinNode.DistributionMode.COLOCATE)) {
            joinNode.setColocate(true, "");
        } else {
            joinNode.setReplicated(true);
        }
        setJoinPushDown(joinNode);
        joinNode.setChild(0, leftFragment.getPlanRoot());
        joinNode.setChild(1, rightFragment.getPlanRoot());
        leftFragment.setPlanRoot(joinNode);
        leftFragment.addChildren(rightFragment.getChildren());
        context.getFragments().remove(rightFragment);
        context.getFragments().remove(leftFragment);
        context.getFragments().add(leftFragment);
        leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
        return leftFragment;
    } else if (distributionMode.equals(JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET)) {
        setJoinPushDown(joinNode);
        if (!(leftFragment.getPlanRoot() instanceof ExchangeNode) &&
                !(rightFragment.getPlanRoot() instanceof ExchangeNode)) {
            // Neither side is behind an exchange: merge the right side into the left fragment.
            joinNode.setChild(0, leftFragment.getPlanRoot());
            joinNode.setChild(1, rightFragment.getPlanRoot());
            leftFragment.setPlanRoot(joinNode);
            leftFragment.addChildren(rightFragment.getChildren());
            context.getFragments().remove(rightFragment);
            context.getFragments().remove(leftFragment);
            context.getFragments().add(leftFragment);
            leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
            return leftFragment;
        } else if (leftFragment.getPlanRoot() instanceof ExchangeNode &&
                !(rightFragment.getPlanRoot() instanceof ExchangeNode)) {
            // The non-exchange side acts as the anchor fragment.
            return computeShuffleHashBucketPlanFragment(context, rightFragment,
                    leftFragment, joinNode);
        } else {
            return computeShuffleHashBucketPlanFragment(context, leftFragment,
                    rightFragment, joinNode);
        }
    } else {
        // Remaining mode (LOCAL_HASH_BUCKET): bucket-shuffle one side to match the other.
        setJoinPushDown(joinNode);
        if (leftFragment.getPlanRoot() instanceof ExchangeNode &&
                !(rightFragment.getPlanRoot() instanceof ExchangeNode)) {
            leftFragment = computeBucketShufflePlanFragment(context, rightFragment,
                    leftFragment, joinNode);
        } else {
            leftFragment = computeBucketShufflePlanFragment(context, leftFragment,
                    rightFragment, joinNode);
        }
        return leftFragment;
    }
}
// Decides how the two join inputs are distributed, based on the shape of the
// already-planned child plan roots (exchange type) and optimizer properties.
@NotNull
private JoinNode.DistributionMode inferDistributionMode(OptExpression optExpr, PlanNode leftFragmentPlanRoot,
                                                        PlanNode rightFragmentPlanRoot) {
    JoinNode.DistributionMode distributionMode;
    if (isExchangeWithDistributionType(leftFragmentPlanRoot, DistributionSpec.DistributionType.SHUFFLE) &&
            isExchangeWithDistributionType(rightFragmentPlanRoot,
                    DistributionSpec.DistributionType.SHUFFLE)) {
        // Both sides shuffled by join keys: classic partitioned hash join.
        distributionMode = JoinNode.DistributionMode.PARTITIONED;
    } else if (isExchangeWithDistributionType(rightFragmentPlanRoot,
            DistributionSpec.DistributionType.BROADCAST)) {
        // Right (build) side is broadcast to every left instance.
        distributionMode = JoinNode.DistributionMode.BROADCAST;
    } else if (!(leftFragmentPlanRoot instanceof ExchangeNode) &&
            !(rightFragmentPlanRoot instanceof ExchangeNode)) {
        // Neither side has an exchange: colocate, replicated, or bucket-shuffle join.
        if (isColocateJoin(optExpr)) {
            distributionMode = HashJoinNode.DistributionMode.COLOCATE;
        } else if (ConnectContext.get().getSessionVariable().isEnableReplicationJoin() &&
                rightFragmentPlanRoot.canDoReplicatedJoin()) {
            distributionMode = JoinNode.DistributionMode.REPLICATED;
        } else if (isShuffleJoin(optExpr)) {
            distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;
        } else {
            // Should be unreachable; the assignment below only satisfies the compiler.
            Preconditions.checkState(false, "Must be colocate/bucket/replicate join");
            distributionMode = JoinNode.DistributionMode.COLOCATE;
        }
    } else if (isShuffleJoin(optExpr)) {
        distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;
    } else {
        // One side stays local; the other is shuffled to match its bucketing.
        distributionMode = JoinNode.DistributionMode.LOCAL_HASH_BUCKET;
    }
    return distributionMode;
}
@Override
public PlanFragment visitPhysicalStreamAgg(OptExpression optExpr, ExecPlan context) {
    // Lowers a streaming aggregation into a single-phase StreamAggNode.
    PhysicalStreamAggOperator aggOp = (PhysicalStreamAggOperator) optExpr.getOp();
    PlanFragment childFragment = visit(optExpr.inputAt(0), context);

    // Streaming agg uses one tuple layout for both its intermediate and output rows.
    TupleDescriptor aggTuple = context.getDescTbl().createTupleDescriptor();
    AggregateExprInfo aggExprInfo =
            buildAggregateTuple(aggOp.getAggregations(), aggOp.getGroupBys(), null, aggTuple, context);
    AggregateInfo aggInfo = AggregateInfo.create(
            aggExprInfo.groupExpr, aggExprInfo.aggregateExpr, aggTuple, aggTuple,
            AggregateInfo.AggPhase.FIRST);

    StreamAggNode streamAggNode =
            new StreamAggNode(context.getNextNodeId(), childFragment.getPlanRoot(), aggInfo);
    streamAggNode.setHasNullableGenerateChild();
    streamAggNode.computeStatistics(optExpr.getStatistics());
    childFragment.setPlanRoot(streamAggNode);
    return childFragment;
}
@Override
public PlanFragment visitPhysicalStreamScan(OptExpression optExpr, ExecPlan context) {
    // Plans an incremental (binlog) scan over an OLAP table for stream processing.
    PhysicalStreamScanOperator node = (PhysicalStreamScanOperator) optExpr.getOp();
    OlapTable scanTable = (OlapTable) node.getTable();
    context.getDescTbl().addReferencedTable(scanTable);

    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(scanTable);

    BinlogScanNode binlogScanNode = new BinlogScanNode(context.getNextNodeId(), tupleDescriptor);
    binlogScanNode.computeStatistics(optExpr.getStatistics());
    try {
        binlogScanNode.computeScanRanges();
    } catch (UserException e) {
        // Surface scan-range resolution failures as planner errors.
        throw new StarRocksPlannerException(
                "Failed to compute scan ranges for StreamScanNode, " + e.getMessage(), INTERNAL_ERROR);
    }

    // One slot per referenced column; register each slot for later expression lowering.
    for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
        SlotDescriptor slotDescriptor =
                context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
        slotDescriptor.setColumn(entry.getValue());
        slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
        slotDescriptor.setIsMaterialized(true);
        context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor));
    }

    // Push the operator's predicate into the scan node as conjuncts.
    List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
    ScalarOperatorToExpr.FormatterContext formatterContext =
            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
    for (ScalarOperator predicate : predicates) {
        binlogScanNode.getConjuncts()
                .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
    }

    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(binlogScanNode);

    PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), binlogScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}
} |
`try (queryAnalysisSpan.makeCurrent()) {` maybe not reasonable in Java8, a compile error will be raised. | public void executeByLegacy(TUniqueId queryId) throws Exception {
context.setStartTime();
plannerProfile.setQueryBeginTime();
context.setStmtId(STMT_ID_GENERATOR.incrementAndGet());
context.setQueryId(queryId);
if (parsedStmt instanceof QueryStmt) {
context.getState().setIsQuery(true);
}
if (parsedStmt instanceof UnifiedLoadStmt) {
final UnifiedLoadStmt unifiedLoadStmt = (UnifiedLoadStmt) parsedStmt;
unifiedLoadStmt.init();
final StatementBase proxyStmt = unifiedLoadStmt.getProxyStmt();
parsedStmt = proxyStmt;
if (!(proxyStmt instanceof LoadStmt)) {
Preconditions.checkState(
parsedStmt instanceof InsertStmt && ((InsertStmt) parsedStmt).isExternalLoad(),
new IllegalStateException("enable_unified_load=true, should be external insert stmt"));
}
}
try {
if (context.isTxnModel() && !(parsedStmt instanceof InsertStmt)
&& !(parsedStmt instanceof TransactionStmt)) {
throw new TException("This is in a transaction, only insert, commit, rollback is acceptable.");
}
analyzeVariablesInStmt();
if (!context.isTxnModel()) {
Span queryAnalysisSpan =
context.getTracer().spanBuilder("query analysis").setParent(Context.current()).startSpan();
try (Scope ignored = queryAnalysisSpan.makeCurrent()) {
analyze(context.getSessionVariable().toThrift());
} catch (Exception e) {
queryAnalysisSpan.recordException(e);
throw e;
} finally {
queryAnalysisSpan.end();
}
if (isForwardToMaster()) {
if (isProxy) {
throw new UserException("The statement has been forwarded to master FE("
+ Env.getCurrentEnv().getSelfNode().getIp() + ") and failed to execute"
+ " because Master FE is not ready. You may need to check FE's status");
}
forwardToMaster();
if (masterOpExecutor != null && masterOpExecutor.getQueryId() != null) {
context.setQueryId(masterOpExecutor.getQueryId());
}
return;
} else {
LOG.debug("no need to transfer to Master. stmt: {}", context.getStmtId());
}
} else {
analyzer = new Analyzer(context.getEnv(), context);
parsedStmt.analyze(analyzer);
}
if (prepareStmt instanceof PrepareStmt && !isExecuteStmt) {
handlePrepareStmt();
return;
}
if (parsedStmt instanceof QueryStmt) {
if (!parsedStmt.isExplain()) {
if (checkBlockRules()) {
return;
}
}
handleQueryWithRetry(queryId);
} else if (parsedStmt instanceof SetStmt) {
handleSetStmt();
} else if (parsedStmt instanceof EnterStmt) {
handleEnterStmt();
} else if (parsedStmt instanceof SwitchStmt) {
handleSwitchStmt();
} else if (parsedStmt instanceof UseStmt) {
handleUseStmt();
} else if (parsedStmt instanceof TransactionStmt) {
handleTransactionStmt();
} else if (parsedStmt instanceof CreateTableAsSelectStmt) {
handleCtasStmt();
} else if (parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
if (insertStmt.isExternalLoad()) {
handleExternalInsertStmt();
} else {
try {
handleInsertStmt();
if (!insertStmt.getQueryStmt().isExplain()) {
queryType = "Load";
}
} catch (Throwable t) {
LOG.warn("handle insert stmt fail: {}", t.getMessage());
throw t;
}
}
} else if (parsedStmt instanceof LoadStmt) {
handleLoadStmt();
} else if (parsedStmt instanceof UpdateStmt) {
handleUpdateStmt();
} else if (parsedStmt instanceof DdlStmt) {
if (parsedStmt instanceof DeleteStmt && ((DeleteStmt) parsedStmt).getFromClause() != null) {
handleDeleteStmt();
} else {
handleDdlStmt();
}
} else if (parsedStmt instanceof ShowStmt) {
handleShow();
} else if (parsedStmt instanceof KillStmt) {
handleKill();
} else if (parsedStmt instanceof ExportStmt) {
handleExportStmt();
} else if (parsedStmt instanceof UnlockTablesStmt) {
handleUnlockTablesStmt();
} else if (parsedStmt instanceof LockTablesStmt) {
handleLockTablesStmt();
} else if (parsedStmt instanceof UnsupportedStmt) {
handleUnsupportedStmt();
} else {
context.getState().setError(ErrorCode.ERR_NOT_SUPPORTED_YET, "Do not support this query.");
}
} catch (IOException e) {
LOG.warn("execute IOException. {}", context.getQueryIdentifier(), e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, e.getMessage());
throw e;
} catch (UserException e) {
LOG.warn("execute Exception. {}", context.getQueryIdentifier(), e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
} catch (Exception e) {
LOG.warn("execute Exception. {}", context.getQueryIdentifier(), e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR,
e.getClass().getSimpleName() + ", msg: " + Util.getRootCauseMessage(e));
if (parsedStmt instanceof KillStmt) {
context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
}
} finally {
if (!context.isTxnModel() && parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
if (!insertStmt.isExternalLoad() && insertStmt.isTransactionBegin()
&& context.getState().getStateType() == MysqlStateType.ERR) {
try {
String errMsg = Strings.emptyToNull(context.getState().getErrorMessage());
Env.getCurrentGlobalTransactionMgr().abortTransaction(
insertStmt.getDbObj().getId(), insertStmt.getTransactionId(),
(errMsg == null ? "unknown reason" : errMsg));
} catch (Exception abortTxnException) {
LOG.warn("errors when abort txn. {}", context.getQueryIdentifier(), abortTxnException);
}
}
}
}
} | try (Scope ignored = queryAnalysisSpan.makeCurrent()) { | public void executeByLegacy(TUniqueId queryId) throws Exception {
context.setStartTime();
profile.getSummaryProfile().setQueryBeginTime();
context.setStmtId(STMT_ID_GENERATOR.incrementAndGet());
context.setQueryId(queryId);
if (parsedStmt instanceof QueryStmt) {
context.getState().setIsQuery(true);
}
if (parsedStmt instanceof UnifiedLoadStmt) {
final UnifiedLoadStmt unifiedLoadStmt = (UnifiedLoadStmt) parsedStmt;
unifiedLoadStmt.init();
final StatementBase proxyStmt = unifiedLoadStmt.getProxyStmt();
parsedStmt = proxyStmt;
if (!(proxyStmt instanceof LoadStmt)) {
Preconditions.checkState(
parsedStmt instanceof InsertStmt && ((InsertStmt) parsedStmt).needLoadManager(),
new IllegalStateException("enable_unified_load=true, should be external insert stmt"));
}
}
try {
if (context.isTxnModel() && !(parsedStmt instanceof InsertStmt)
&& !(parsedStmt instanceof TransactionStmt)) {
throw new TException("This is in a transaction, only insert, commit, rollback is acceptable.");
}
analyzeVariablesInStmt();
if (!context.isTxnModel()) {
Span queryAnalysisSpan =
context.getTracer().spanBuilder("query analysis").setParent(Context.current()).startSpan();
try (Scope ignored = queryAnalysisSpan.makeCurrent()) {
analyze(context.getSessionVariable().toThrift());
} catch (Exception e) {
queryAnalysisSpan.recordException(e);
throw e;
} finally {
queryAnalysisSpan.end();
}
if (isForwardToMaster()) {
if (isProxy) {
throw new UserException("The statement has been forwarded to master FE("
+ Env.getCurrentEnv().getSelfNode().getIp() + ") and failed to execute"
+ " because Master FE is not ready. You may need to check FE's status");
}
forwardToMaster();
if (masterOpExecutor != null && masterOpExecutor.getQueryId() != null) {
context.setQueryId(masterOpExecutor.getQueryId());
}
return;
} else {
LOG.debug("no need to transfer to Master. stmt: {}", context.getStmtId());
}
} else {
analyzer = new Analyzer(context.getEnv(), context);
parsedStmt.analyze(analyzer);
}
if (prepareStmt instanceof PrepareStmt && !isExecuteStmt) {
handlePrepareStmt();
return;
}
if (parsedStmt instanceof QueryStmt) {
if (!parsedStmt.isExplain()) {
if (checkBlockRules()) {
return;
}
}
handleQueryWithRetry(queryId);
} else if (parsedStmt instanceof SetStmt) {
handleSetStmt();
} else if (parsedStmt instanceof SwitchStmt) {
handleSwitchStmt();
} else if (parsedStmt instanceof UseStmt) {
handleUseStmt();
} else if (parsedStmt instanceof TransactionStmt) {
handleTransactionStmt();
} else if (parsedStmt instanceof CreateTableAsSelectStmt) {
handleCtasStmt();
} else if (parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
if (insertStmt.needLoadManager()) {
handleExternalInsertStmt();
} else {
try {
if (!insertStmt.getQueryStmt().isExplain()) {
profileType = ProfileType.LOAD;
}
handleInsertStmt();
} catch (Throwable t) {
LOG.warn("handle insert stmt fail: {}", t.getMessage());
throw t;
}
}
} else if (parsedStmt instanceof LoadStmt) {
handleLoadStmt();
} else if (parsedStmt instanceof UpdateStmt) {
handleUpdateStmt();
} else if (parsedStmt instanceof DdlStmt) {
if (parsedStmt instanceof DeleteStmt && ((DeleteStmt) parsedStmt).getFromClause() != null) {
handleDeleteStmt();
} else {
handleDdlStmt();
}
} else if (parsedStmt instanceof ShowStmt) {
handleShow();
} else if (parsedStmt instanceof KillStmt) {
handleKill();
} else if (parsedStmt instanceof ExportStmt) {
handleExportStmt();
} else if (parsedStmt instanceof UnlockTablesStmt) {
handleUnlockTablesStmt();
} else if (parsedStmt instanceof LockTablesStmt) {
handleLockTablesStmt();
} else if (parsedStmt instanceof UnsupportedStmt) {
handleUnsupportedStmt();
} else {
context.getState().setError(ErrorCode.ERR_NOT_SUPPORTED_YET, "Do not support this query.");
}
} catch (IOException e) {
LOG.warn("execute IOException. {}", context.getQueryIdentifier(), e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, e.getMessage());
throw e;
} catch (UserException e) {
LOG.warn("execute Exception. {}", context.getQueryIdentifier(), e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
} catch (Exception e) {
LOG.warn("execute Exception. {}", context.getQueryIdentifier(), e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR,
e.getClass().getSimpleName() + ", msg: " + Util.getRootCauseMessage(e));
if (parsedStmt instanceof KillStmt) {
context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
}
} finally {
if (!context.isTxnModel() && parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
if (!insertStmt.needLoadManager() && insertStmt.isTransactionBegin()
&& context.getState().getStateType() == MysqlStateType.ERR) {
try {
String errMsg = Strings.emptyToNull(context.getState().getErrorMessage());
Env.getCurrentGlobalTransactionMgr().abortTransaction(
insertStmt.getDbObj().getId(), insertStmt.getTransactionId(),
(errMsg == null ? "unknown reason" : errMsg));
} catch (Exception abortTxnException) {
LOG.warn("errors when abort txn. {}", context.getQueryIdentifier(), abortTxnException);
}
}
}
}
} | class StmtExecutor implements ProfileWriter {
private static final Logger LOG = LogManager.getLogger(StmtExecutor.class);
private static final AtomicLong STMT_ID_GENERATOR = new AtomicLong(0);
private static final int MAX_DATA_TO_SEND_FOR_TXN = 100;
private static final String NULL_VALUE_FOR_LOAD = "\\N";
private final Object writeProfileLock = new Object();
private ConnectContext context;
private final StatementContext statementContext;
private MysqlSerializer serializer;
private OriginStatement originStmt;
private StatementBase parsedStmt;
private Analyzer analyzer;
private RuntimeProfile profile;
private RuntimeProfile summaryProfile;
private RuntimeProfile plannerRuntimeProfile;
private volatile boolean isFinishedProfile = false;
private String queryType = "Query";
private volatile Coordinator coord = null;
private MasterOpExecutor masterOpExecutor = null;
private RedirectStatus redirectStatus = null;
private Planner planner;
private boolean isProxy;
private ShowResultSet proxyResultSet = null;
private Data.PQueryStatistics.Builder statisticsForAuditLog;
private boolean isCached;
private QueryPlannerProfile plannerProfile = new QueryPlannerProfile();
private String stmtName;
private PrepareStmt prepareStmt = null;
private String mysqlLoadId;
private boolean isExecuteStmt = false;
private static final CommonResultSetMetaData DRY_RUN_QUERY_METADATA = new CommonResultSetMetaData(
Lists.newArrayList(new Column("ReturnedRows", PrimitiveType.STRING)));
// Constructs an executor from raw SQL text. NOTE(review): isProxy is stored as-is
// for later dispatch/error-handling decisions; no analysis happens here.
public StmtExecutor(ConnectContext context, OriginStatement originStmt, boolean isProxy) {
    this.context = context;
    this.originStmt = originStmt;
    this.serializer = context.getMysqlChannel().getSerializer();
    this.isProxy = isProxy;
    this.statementContext = new StatementContext(context, originStmt);
    this.context.setStatementContext(statementContext);
}
// Convenience constructor for plain SQL text: offset 0, non-proxy execution.
public StmtExecutor(ConnectContext context, String stmt) {
    this(context, new OriginStatement(stmt, 0), false);
    this.stmtName = stmt;
}
// Constructs an executor from an already-parsed statement. Statements wrapped in a
// LogicalPlanAdapter reuse and re-bind its existing StatementContext; all others
// get a fresh StatementContext.
public StmtExecutor(ConnectContext ctx, StatementBase parsedStmt) {
    this.context = ctx;
    this.parsedStmt = parsedStmt;
    this.originStmt = parsedStmt.getOrigStmt();
    this.serializer = context.getMysqlChannel().getSerializer();
    this.isProxy = false;
    if (parsedStmt instanceof LogicalPlanAdapter) {
        this.statementContext = ((LogicalPlanAdapter) parsedStmt).getStatementContext();
        this.statementContext.setConnectContext(ctx);
        this.statementContext.setOriginStatement(originStmt);
        this.statementContext.setParsedStatement(parsedStmt);
    } else {
        this.statementContext = new StatementContext(ctx, originStmt);
        this.statementContext.setParsedStatement(parsedStmt);
    }
    this.context.setStatementContext(statementContext);
}
// Serializes one row of literal insert values into a PDataRow for a transactional
// insert. Returns null for an empty column list. Only literal (or cast-of-literal)
// expressions are accepted; NULL is encoded with the load sentinel NULL_VALUE_FOR_LOAD.
private static InternalService.PDataRow getRowStringValue(List<Expr> cols) throws UserException {
    if (cols.isEmpty()) {
        return null;
    }
    InternalService.PDataRow.Builder row = InternalService.PDataRow.newBuilder();
    for (Expr expr : cols) {
        if (!expr.isLiteralOrCastExpr()) {
            throw new UserException(
                    "do not support non-literal expr in transactional insert operation: " + expr.toSql());
        }
        if (expr instanceof NullLiteral) {
            row.addColBuilder().setValue(NULL_VALUE_FOR_LOAD);
        } else if (expr instanceof ArrayLiteral) {
            // Arrays use their dedicated string form instead of the scalar one.
            row.addColBuilder().setValue(expr.getStringValueForArray());
        } else {
            row.addColBuilder().setValue(expr.getStringValue());
        }
    }
    return row.build();
}
// Creates (first call) or refreshes (later calls) the runtime-profile tree:
// Query -> { Summary -> Execution Summary, Execution Profile }. waiteBeReport
// controls whether end time/state appear as final or as in-progress values.
private void initProfile(QueryPlannerProfile plannerProfile, boolean waiteBeReport) {
    RuntimeProfile queryProfile;
    if (coord == null) {
        // No coordinator yet (e.g. the query failed before scheduling): use a placeholder.
        queryProfile = new RuntimeProfile("Execution Profile " + DebugUtil.printId(context.queryId()));
    } else {
        queryProfile = coord.getQueryProfile();
    }
    if (profile == null) {
        // First invocation: build the full profile skeleton.
        profile = new RuntimeProfile("Query");
        summaryProfile = new RuntimeProfile("Summary");
        profile.addChild(summaryProfile);
        summaryProfile.addInfoString(ProfileManager.START_TIME, TimeUtils.longToTimeString(context.getStartTime()));
        updateSummaryProfile(waiteBeReport);
        for (Map.Entry<String, String> entry : getSummaryInfo().entrySet()) {
            summaryProfile.addInfoString(entry.getKey(), entry.getValue());
        }
        summaryProfile.addInfoString(ProfileManager.TRACE_ID, context.getSessionVariable().getTraceId());
        plannerRuntimeProfile = new RuntimeProfile("Execution Summary");
        summaryProfile.addChild(plannerRuntimeProfile);
        profile.addChild(queryProfile);
    } else {
        // Subsequent invocations only refresh the mutable summary rows.
        updateSummaryProfile(waiteBeReport);
    }
    plannerProfile.initRuntimeProfile(plannerRuntimeProfile);
    queryProfile.getCounterTotalTime().setValue(TimeUtils.getEstimatedTime(plannerProfile.getQueryBeginTime()));
    endProfile(waiteBeReport);
}
/**
 * Asks the coordinator to finalize its profile, but only when profiling is
 * enabled for this session and a coordinator exists.
 */
private void endProfile(boolean waitProfileDone) {
    boolean profileEnabled = context != null && context.getSessionVariable().enableProfile();
    if (profileEnabled && coord != null) {
        coord.endProfile(waitProfileDone);
    }
}
/**
 * Refreshes the time/state entries of the summary profile.
 * While BE reports are still pending the end time is "N/A" and an OK state
 * is reported as "RUNNING".
 */
private void updateSummaryProfile(boolean waiteBeReport) {
    Preconditions.checkNotNull(summaryProfile);
    long now = System.currentTimeMillis();
    // END_TIME is only meaningful once we have stopped waiting for BE reports.
    summaryProfile.addInfoString(ProfileManager.END_TIME,
            waiteBeReport ? TimeUtils.longToTimeString(now) : "N/A");
    summaryProfile.addInfoString(ProfileManager.TOTAL_TIME,
            DebugUtil.getPrettyStringMs(now - context.getStartTime()));
    boolean stillRunning = !waiteBeReport && context.getState().getStateType().equals(MysqlStateType.OK);
    summaryProfile.addInfoString(ProfileManager.QUERY_STATE,
            stillRunning ? "RUNNING" : context.getState().toString());
}
/**
 * Collects the key/value pairs shown in the profile "Summary" section
 * (job id, query id/type, version, user, db, SQL text, cache hit, and
 * fragment-instance distribution).
 */
private Map<String, String> getSummaryInfo() {
    Map<String, String> infos = Maps.newLinkedHashMap();
    infos.put(ProfileManager.JOB_ID, "N/A");
    infos.put(ProfileManager.QUERY_ID, DebugUtil.printId(context.queryId()));
    infos.put(ProfileManager.QUERY_TYPE, queryType);
    infos.put(ProfileManager.DORIS_VERSION, Version.DORIS_BUILD_VERSION);
    infos.put(ProfileManager.USER, context.getQualifiedUser());
    infos.put(ProfileManager.DEFAULT_DB, context.getDatabase());
    infos.put(ProfileManager.SQL_STATEMENT, originStmt.originStmt);
    infos.put(ProfileManager.IS_CACHED, isCached ? "Yes" : "No");
    // Before the coordinator exists, report an empty instance distribution.
    Map<String, Integer> instancesPerBe =
            coord == null ? Maps.newTreeMap() : coord.getBeToInstancesNum();
    int totalInstances = instancesPerBe.values().stream().mapToInt(Integer::intValue).sum();
    infos.put(ProfileManager.TOTAL_INSTANCES_NUM, String.valueOf(totalInstances));
    infos.put(ProfileManager.INSTANCES_NUM_PER_BE, instancesPerBe.toString());
    infos.put(ProfileManager.PARALLEL_FRAGMENT_EXEC_INSTANCE,
            String.valueOf(context.sessionVariable.parallelExecInstanceNum));
    return infos;
}
/**
 * Copies the summary key/value pairs onto the current OpenTelemetry span,
 * if one is actively recording; otherwise does nothing.
 */
public void addProfileToSpan() {
    Span span = Span.fromContext(Context.current());
    if (!span.isRecording()) {
        return;
    }
    getSummaryInfo().forEach(span::setAttribute);
}
/** Returns the planner built for this statement. */
public Planner planner() {
    return planner;
}
/**
 * Decides whether this statement must be forwarded to the master FE.
 * Master never forwards; a non-master FE forwards queries while its local
 * metadata is unreadable, and otherwise follows the statement's redirect
 * status.
 */
public boolean isForwardToMaster() {
    // The master FE never forwards to itself.
    if (Env.getCurrentEnv().isMaster()) {
        return false;
    }
    // We are a non-master FE here, so the original's extra !isMaster() check
    // was always true and has been dropped. Forward queries when local
    // metadata is not readable yet.
    if (isQuery() && !Env.getCurrentEnv().canRead()) {
        return true;
    }
    // Fall back to the statement's own redirect policy (null means "no").
    return redirectStatus != null && redirectStatus.isForwardToMaster();
}
/**
 * Returns the raw result packet proxied from the master FE, or null when the
 * statement was not forwarded.
 */
public ByteBuffer getOutputPacket() {
    return masterOpExecutor == null ? null : masterOpExecutor.getOutputPacket();
}
/** Returns the result set received on behalf of a proxied statement. */
public ShowResultSet getProxyResultSet() {
    return proxyResultSet;
}
/**
 * Returns the SHOW result proxied from the master FE, or null when the
 * statement was not forwarded.
 */
public ShowResultSet getShowResultSet() {
    return masterOpExecutor == null ? null : masterOpExecutor.getProxyResultSet();
}
/**
 * Returns the MySQL state reported by the master FE for a forwarded
 * statement, or UNKNOWN when nothing was forwarded.
 */
public String getProxyStatus() {
    return masterOpExecutor == null
            ? MysqlStateType.UNKNOWN.name()
            : masterOpExecutor.getProxyStatus();
}
/** Returns true when the parsed statement is an INSERT. */
public boolean isInsertStmt() {
    // instanceof is false for null, so no separate null check is needed.
    return parsedStmt instanceof InsertStmt;
}
/**
 * Used for audit in ConnectProcessor.
 * <p>
 * TODO: Three StatementBase interfaces are called when doing audit:
 * toDigest and needAuditEncryption when parsedStmt is not a query,
 * and isValuesOrConstantSelect when parsedStmt is an InsertStmt.
 * toDigest: computes the statement fingerprint used for blocking some queries.
 * needAuditEncryption: when it returns true, the statement is logged via
 * toSql() instead of the original string.
 * isValuesOrConstantSelect: when it returns true, the original string is
 * truncated at 1024 characters.
 *
 * @return the parsed and analyzed statement for the stale planner, or an
 *         unresolved LogicalPlan wrapped in a LogicalPlanAdapter for Nereids.
 */
public StatementBase getParsedStmt() {
    return parsedStmt;
}
/**
 * Executes the statement under a freshly generated random query id.
 */
public void execute() throws Exception {
    UUID uuid = UUID.randomUUID();
    execute(new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()));
}
/**
 * Executes the statement under the given query id.
 * Tries the Nereids planner first when applicable and, if Nereids fails and
 * fallback is enabled, re-runs via the legacy planner. Session variables set
 * via statement hints are reverted afterwards in all cases.
 */
public void execute(TUniqueId queryId) throws Exception {
SessionVariable sessionVariable = context.getSessionVariable();
Span executeSpan = context.getTracer().spanBuilder("execute").setParent(Context.current()).startSpan();
try (Scope scope = executeSpan.makeCurrent()) {
// Use Nereids when the statement was parsed by it, or when nothing has been
// parsed yet and the session enables the Nereids planner.
if (parsedStmt instanceof LogicalPlanAdapter
|| (parsedStmt == null && sessionVariable.isEnableNereidsPlanner())) {
try {
executeByNereids(queryId);
} catch (NereidsException e) {
LOG.warn("nereids cannot process statement\n" + originStmt.originStmt
+ "\n because of " + e.getMessage(), e);
if (!context.getSessionVariable().enableFallbackToOriginalPlanner) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw e.getException();
}
LOG.info("fall back to legacy planner");
// Clear Nereids state so the legacy path re-parses from scratch.
parsedStmt = null;
context.getState().setNereids(false);
executeByLegacy(queryId);
}
} else {
executeByLegacy(queryId);
}
} finally {
executeSpan.end();
try {
// Revert statement-scoped ("single set var") session variables.
VariableMgr.revertSessionValue(sessionVariable);
sessionVariable.setIsSingleSetVar(false);
sessionVariable.clearSessionOriginValue();
} catch (DdlException e) {
LOG.warn("failed to revert Session value. {}", context.getQueryIdentifier(), e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
}
}
}
/**
 * Enforces SQL block rules: first matches the statement text itself, then
 * checks per-rule scan limitations (partitions/tablets/cardinality) on every
 * OLAP scan node in the plan. Violations throw; reaching the end means the
 * query is not blocked.
 *
 * @return always false (blocking is signalled via exception)
 */
private boolean checkBlockRules() throws AnalysisException {
    Env.getCurrentEnv().getSqlBlockRuleMgr().matchSql(
            originStmt.originStmt, context.getSqlHash(), context.getQualifiedUser());
    for (ScanNode node : planner.getScanNodes()) {
        if (!(node instanceof OlapScanNode)) {
            continue;
        }
        OlapScanNode olapNode = (OlapScanNode) node;
        Env.getCurrentEnv().getSqlBlockRuleMgr().checkLimitations(
                olapNode.getSelectedPartitionNum().longValue(),
                olapNode.getSelectedTabletsNum(),
                olapNode.getCardinality(),
                context.getQualifiedUser());
    }
    return false;
}
/**
 * Executes the statement through the Nereids planner.
 * Commands (DDL etc.) are run directly, possibly after forwarding to the
 * master FE; queries are planned and executed with retry support.
 * All failures are wrapped in NereidsException so the caller can decide
 * whether to fall back to the legacy planner.
 */
private void executeByNereids(TUniqueId queryId) throws Exception {
LOG.info("Nereids start to execute query:\n {}", originStmt.originStmt);
context.setQueryId(queryId);
context.setStartTime();
plannerProfile.setQueryBeginTime();
context.setStmtId(STMT_ID_GENERATOR.incrementAndGet());
parseByNereids();
Preconditions.checkState(parsedStmt instanceof LogicalPlanAdapter,
"Nereids only process LogicalPlanAdapter, but parsedStmt is " + parsedStmt.getClass().getName());
context.getState().setNereids(true);
LogicalPlan logicalPlan = ((LogicalPlanAdapter) parsedStmt).getLogicalPlan();
if (logicalPlan instanceof Command) {
// Commands that must run on master are forwarded first.
if (logicalPlan instanceof Forward) {
redirectStatus = ((Forward) logicalPlan).toRedirectStatus();
if (isForwardToMaster()) {
// A proxied statement must not be forwarded a second time.
if (isProxy) {
throw new NereidsException(new UserException("The statement has been forwarded to master FE("
+ Env.getCurrentEnv().getSelfNode().getIp() + ") and failed to execute"
+ " because Master FE is not ready. You may need to check FE's status"));
}
forwardToMaster();
if (masterOpExecutor != null && masterOpExecutor.getQueryId() != null) {
context.setQueryId(masterOpExecutor.getQueryId());
}
return;
}
}
try {
((Command) logicalPlan).run(context, this);
} catch (QueryStateException e) {
LOG.warn("", e);
context.setState(e.getQueryState());
throw new NereidsException(e);
} catch (UserException e) {
LOG.warn("DDL statement({}) process failed.", originStmt.originStmt, e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
throw new NereidsException("DDL statement(" + originStmt.originStmt + ") process failed", e);
} catch (Exception e) {
LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
throw new NereidsException("DDL statement(" + originStmt.originStmt + ") process failed.", e);
}
} else {
// Query path: plan with Nereids, then execute with retry.
context.getState().setIsQuery(true);
if (context.getSessionVariable().enableProfile) {
ConnectContext.get().setStatsErrorEstimator(new StatsErrorEstimator());
}
planner = new NereidsPlanner(statementContext);
try {
planner.plan(parsedStmt, context.getSessionVariable().toThrift());
} catch (Exception e) {
LOG.warn("Nereids plan query failed:\n{}", originStmt.originStmt);
throw new NereidsException(new AnalysisException("Unexpected exception: " + e.getMessage(), e));
}
if (checkBlockRules()) {
return;
}
plannerProfile.setQueryPlanFinishTime();
handleQueryWithRetry(queryId);
}
}
/**
 * Parses the original statement text with the Nereids parser and selects the
 * statement at originStmt.idx. No-op when a parsed statement already exists.
 *
 * @throws NereidsException when the parser produced fewer statements than the
 *         requested index requires
 */
private void parseByNereids() {
    // Already parsed (e.g. supplied by the caller); nothing to do.
    if (parsedStmt != null) {
        return;
    }
    List<StatementBase> statements = new NereidsParser().parseSQL(originStmt.originStmt);
    // originStmt.idx is 0-based, so at least idx + 1 statements are required.
    // (The original message said "at least idx", an off-by-one in the report.)
    if (statements.size() <= originStmt.idx) {
        throw new NereidsException(
                new AnalysisException("Nereids parse failed. Parser get " + statements.size() + " statements,"
                        + " but we need at least " + (originStmt.idx + 1) + " statements."));
    }
    parsedStmt = statements.get(originStmt.idx);
}
/**
 * Runs the query, retrying up to Config.max_query_retry_time times on RPC
 * failures, but only while no result data has been sent to the client yet.
 * Each attempt ends by finalizing the profile and unregistering the query.
 */
private void handleQueryWithRetry(TUniqueId queryId) throws Exception {
int retryTime = Config.max_query_retry_time;
for (int i = 0; i < retryTime; i++) {
try {
if (i > 0) {
// Each retry runs under a fresh query id to avoid collisions on BE.
UUID uuid = UUID.randomUUID();
TUniqueId newQueryId = new TUniqueId(uuid.getMostSignificantBits(),
uuid.getLeastSignificantBits());
AuditLog.getQueryAudit().log("Query {} {} times with new query id: {}",
DebugUtil.printId(queryId), i, DebugUtil.printId(newQueryId));
context.setQueryId(newQueryId);
}
handleQueryStmt();
break;
} catch (RpcException e) {
if (i == retryTime - 1) {
throw e;
}
// Retrying is only safe while nothing has been written to the client.
if (!context.getMysqlChannel().isSend()) {
LOG.warn("retry {} times. stmt: {}", (i + 1), parsedStmt.getOrigStmt().originStmt);
} else {
throw e;
}
} finally {
endProfile(true);
QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
}
}
}
/**
 * Applies hint-provided session variables from the currently parsed statement.
 *
 * @throws DdlException if a variable cannot be set
 */
private void analyzeVariablesInStmt() throws DdlException {
    // Delegate to the overload that takes an explicit statement.
    analyzeVariablesInStmt(parsedStmt);
}
/**
 * Applies optimizer-hint variables (e.g. SELECT /*+ SET_VAR(...) *&#47;) from a
 * SELECT statement to the session, in statement-scoped "single set var" mode.
 * Non-SELECT statements and statements without hints are ignored.
 */
private void analyzeVariablesInStmt(StatementBase statement) throws DdlException {
    if (!(statement instanceof SelectStmt)) {
        return;
    }
    Map<String, String> optHints = ((SelectStmt) statement).getSelectList().getOptHints();
    if (optHints == null) {
        return;
    }
    SessionVariable sessionVariable = context.getSessionVariable();
    // Hint variables apply to this statement only; they are reverted later.
    sessionVariable.setIsSingleSetVar(true);
    for (Map.Entry<String, String> hint : optHints.entrySet()) {
        VariableMgr.setVar(sessionVariable, new SetVar(hint.getKey(), new StringLiteral(hint.getValue())));
    }
}
/**
 * Returns true when the parsed statement is a query: either a legacy
 * QueryStmt, or a Nereids plan that is not a command (DDL etc.).
 */
private boolean isQuery() {
    if (parsedStmt instanceof QueryStmt) {
        return true;
    }
    return parsedStmt instanceof LogicalPlanAdapter
            && !(((LogicalPlanAdapter) parsedStmt).getLogicalPlan() instanceof Command);
}
/**
 * Forwards the statement to the master FE for execution.
 * For SET statements, re-applies the variables locally afterwards so this
 * non-master FE's session stays in sync with the master.
 */
private void forwardToMaster() throws Exception {
    masterOpExecutor = new MasterOpExecutor(originStmt, context, redirectStatus, isQuery());
    LOG.debug("need to transfer to Master. stmt: {}", context.getStmtId());
    masterOpExecutor.execute();
    if (!(parsedStmt instanceof SetStmt)) {
        return;
    }
    SetStmt setStmt = (SetStmt) parsedStmt;
    setStmt.modifySetVarsForExecute();
    for (SetVar var : setStmt.getSetVars()) {
        VariableMgr.setVarForNonMasterFE(context.getSessionVariable(), var);
    }
}
@Override
public void writeProfile(boolean isLastWriteProfile) {
    // Profiling disabled for this session: nothing to write.
    if (!context.getSessionVariable().enableProfile()) {
        return;
    }
    synchronized (writeProfileLock) {
        // Once the final profile has been pushed, further writes are no-ops.
        if (isFinishedProfile) {
            return;
        }
        initProfile(plannerProfile, isLastWriteProfile);
        profile.computeTimeInChildProfile();
        ProfileManager.getInstance().pushProfile(profile);
        // Mark the profile as final only on the last write.
        isFinishedProfile = isLastWriteProfile;
    }
}
/**
 * Parses (legacy parser) and analyzes the statement, then generates the query
 * plan. Handles EXECUTE of prepared statements, PREPARE, SHOW-to-SELECT
 * conversion, and — for queries/inserts/CTAS — acquires table read locks and
 * retries analysis once if materialized-view rewrite fails.
 *
 * @param tQueryOptions options passed through to planning/constant folding
 * @throws UserException on analysis or planning failure
 */
public void analyze(TQueryOptions tQueryOptions) throws UserException {
if (LOG.isDebugEnabled()) {
LOG.debug("begin to analyze stmt: {}, forwarded stmt id: {}",
context.getStmtId(), context.getForwardedStmtId());
}
parseByLegacy();
boolean preparedStmtReanalyzed = false;
PrepareStmtContext preparedStmtCtx = null;
// EXECUTE: reuse the planner/analyzer cached at PREPARE time when possible.
if (parsedStmt instanceof ExecuteStmt) {
ExecuteStmt execStmt = (ExecuteStmt) parsedStmt;
preparedStmtCtx = context.getPreparedStmt(execStmt.getName());
if (preparedStmtCtx == null) {
throw new UserException("Could not execute, since `" + execStmt.getName() + "` not exist");
}
preparedStmtCtx.stmt.asignValues(execStmt.getArgs());
parsedStmt = preparedStmtCtx.stmt.getInnerStmt();
planner = preparedStmtCtx.planner;
analyzer = preparedStmtCtx.analyzer;
prepareStmt = preparedStmtCtx.stmt;
Preconditions.checkState(parsedStmt.isAnalyzed());
LOG.debug("already prepared stmt: {}", preparedStmtCtx.stmtString);
isExecuteStmt = true;
if (!preparedStmtCtx.stmt.needReAnalyze()) {
return;
}
// Cached plan is stale; fall through and re-analyze below.
preparedStmtReanalyzed = true;
preparedStmtCtx.stmt.analyze(analyzer);
}
if (isForwardToMaster()) {
return;
}
analyzer = new Analyzer(context.getEnv(), context);
// PREPARE: wrap/unwrap the inner statement before analysis.
if (parsedStmt instanceof PrepareStmt || context.getCommand() == MysqlCommand.COM_STMT_PREPARE) {
if (context.getCommand() == MysqlCommand.COM_STMT_PREPARE) {
prepareStmt = new PrepareStmt(parsedStmt,
String.valueOf(context.getEnv().getNextStmtId()), true /*binary protocol*/);
} else {
prepareStmt = (PrepareStmt) parsedStmt;
}
prepareStmt.setContext(context);
prepareStmt.analyze(analyzer);
parsedStmt = prepareStmt.getInnerStmt();
}
// Some SHOW statements are implemented as SELECTs over system tables.
if (parsedStmt instanceof ShowStmt) {
SelectStmt selectStmt = ((ShowStmt) parsedStmt).toSelectStmt(analyzer);
if (selectStmt != null) {
setParsedStmt(selectStmt);
}
}
if (parsedStmt instanceof QueryStmt
|| (parsedStmt instanceof InsertStmt && !((InsertStmt) parsedStmt).isExternalLoad())
|| parsedStmt instanceof CreateTableAsSelectStmt) {
if (Config.enable_resource_group && context.sessionVariable.enablePipelineEngine()) {
analyzer.setResourceGroups(analyzer.getEnv().getResourceGroupMgr()
.getResourceGroup(context.sessionVariable.resourceGroup));
}
// Collect all referenced tables so they can be read-locked during analysis.
Map<Long, TableIf> tableMap = Maps.newTreeMap();
QueryStmt queryStmt;
Set<String> parentViewNameSet = Sets.newHashSet();
if (parsedStmt instanceof QueryStmt) {
queryStmt = (QueryStmt) parsedStmt;
queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
} else if (parsedStmt instanceof CreateTableAsSelectStmt) {
CreateTableAsSelectStmt parsedStmt = (CreateTableAsSelectStmt) this.parsedStmt;
queryStmt = parsedStmt.getQueryStmt();
queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
} else if (parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
insertStmt.getTables(analyzer, tableMap, parentViewNameSet);
}
List<TableIf> tables = Lists.newArrayList(tableMap.values());
int analyzeTimes = 2;
for (int i = 1; i <= analyzeTimes; i++) {
MetaLockUtils.readLockTables(tables);
try {
analyzeAndGenerateQueryPlan(tQueryOptions);
break;
} catch (MVSelectFailedException e) {
/*
* If there is MVSelectFailedException after the first planner,
* there will be error mv rewritten in query.
* So, the query should be reanalyzed without mv rewritten and planner again.
* Attention: Only error rewritten tuple is forbidden to mv rewrite in the second time.
*/
if (i == analyzeTimes) {
throw e;
} else {
resetAnalyzerAndStmt();
}
} catch (UserException e) {
throw e;
} catch (Exception e) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw new AnalysisException("Unexpected exception: " + e.getMessage());
} finally {
MetaLockUtils.readUnlockTables(tables);
}
}
} else {
// Non-query statements: plain analysis, no table locking or retry.
try {
parsedStmt.analyze(analyzer);
} catch (UserException e) {
throw e;
} catch (Exception e) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw new AnalysisException("Unexpected exception: " + e.getMessage());
}
}
if (preparedStmtReanalyzed) {
// Refresh the prepared-statement cache with the re-analyzed state.
LOG.debug("update planner and analyzer after prepared statement reanalyzed");
preparedStmtCtx.planner = planner;
preparedStmtCtx.analyzer = analyzer;
Preconditions.checkNotNull(preparedStmtCtx.stmt);
preparedStmtCtx.analyzer.setPrepareStmt(preparedStmtCtx.stmt);
}
}
/**
 * Parses the original statement text with the legacy (cup/jflex) parser if it
 * has not been parsed yet, applies hint variables, and records the
 * statement's redirect status.
 *
 * @throws AnalysisException on any parse failure (with parser error text when available)
 * @throws DdlException if a hint variable cannot be applied
 */
private void parseByLegacy() throws AnalysisException, DdlException {
if (parsedStmt == null) {
SqlScanner input = new SqlScanner(new StringReader(originStmt.originStmt),
context.getSessionVariable().getSqlMode());
SqlParser parser = new SqlParser(input);
try {
StatementBase parsedStmt = setParsedStmt(SqlParserUtils.getStmt(parser, originStmt.idx));
parsedStmt.setOrigStmt(originStmt);
parsedStmt.setUserInfo(context.getCurrentUserIdentity());
} catch (Error e) {
// The generated parser can throw Error on bad input; translate it.
LOG.info("error happened when parsing stmt {}, id: {}", originStmt, context.getStmtId(), e);
throw new AnalysisException("sql parsing error, please check your sql");
} catch (AnalysisException e) {
// Prefer the parser's own error message when it produced one.
String syntaxError = parser.getErrorMsg(originStmt.originStmt);
LOG.info("analysis exception happened when parsing stmt {}, id: {}, error: {}",
originStmt, context.getStmtId(), syntaxError, e);
if (syntaxError == null) {
throw e;
} else {
throw new AnalysisException(syntaxError, e);
}
} catch (Exception e) {
LOG.info("unexpected exception happened when parsing stmt {}, id: {}, error: {}",
originStmt, context.getStmtId(), parser.getErrorMsg(originStmt.originStmt), e);
throw new AnalysisException("Unexpected exception: " + e.getMessage());
}
analyzeVariablesInStmt();
}
redirectStatus = parsedStmt.getRedirectStatus();
}
/**
 * Analyzes the parsed statement, applies expression/subquery/policy rewrites
 * (re-analyzing once if any rewrite changed the statement), and generates the
 * plan with the legacy OriginalPlanner.
 *
 * @param tQueryOptions options for constant folding and planning
 * @throws UserException on analysis or planning failure
 */
private void analyzeAndGenerateQueryPlan(TQueryOptions tQueryOptions) throws UserException {
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
QueryStmt queryStmt = null;
if (parsedStmt instanceof QueryStmt) {
queryStmt = (QueryStmt) parsedStmt;
}
if (parsedStmt instanceof InsertStmt) {
queryStmt = (QueryStmt) ((InsertStmt) parsedStmt).getQueryStmt();
}
// An empty (but non-null) ORDER BY list carries no ordering; drop it.
if (queryStmt.getOrderByElements() != null && queryStmt.getOrderByElements().isEmpty()) {
queryStmt.removeOrderByElements();
}
}
parsedStmt.analyze(analyzer);
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
ExprRewriter rewriter = analyzer.getExprRewriter();
rewriter.reset();
if (context.getSessionVariable().isEnableFoldConstantByBe()) {
parsedStmt.foldConstant(rewriter, tQueryOptions);
}
// Preserve explain options: reset() below would lose them.
ExplainOptions explainOptions = parsedStmt.getExplainOptions();
boolean reAnalyze = false;
parsedStmt.rewriteExprs(rewriter);
reAnalyze = rewriter.changed();
if (analyzer.containSubquery()) {
parsedStmt = setParsedStmt(StmtRewriter.rewrite(analyzer, parsedStmt));
reAnalyze = true;
}
// Row-policy rewrites may also change the statement.
if (parsedStmt instanceof SelectStmt) {
if (StmtRewriter.rewriteByPolicy(parsedStmt, analyzer)) {
reAnalyze = true;
}
}
if (parsedStmt instanceof SetOperationStmt) {
List<SetOperationStmt.SetOperand> operands = ((SetOperationStmt) parsedStmt).getOperands();
for (SetOperationStmt.SetOperand operand : operands) {
if (StmtRewriter.rewriteByPolicy(operand.getQueryStmt(), analyzer)) {
reAnalyze = true;
}
}
}
if (parsedStmt instanceof InsertStmt) {
QueryStmt queryStmt = ((InsertStmt) parsedStmt).getQueryStmt();
if (queryStmt != null && StmtRewriter.rewriteByPolicy(queryStmt, analyzer)) {
reAnalyze = true;
}
}
if (reAnalyze) {
// Remember the original output types/labels so the rewritten statement
// still presents the same result schema to the client.
List<Type> origResultTypes = Lists.newArrayList();
for (Expr e : parsedStmt.getResultExprs()) {
origResultTypes.add(e.getType());
}
List<String> origColLabels =
Lists.newArrayList(parsedStmt.getColLabels());
analyzer = new Analyzer(context.getEnv(), context);
if (prepareStmt != null) {
prepareStmt.reset();
prepareStmt.analyze(analyzer);
}
parsedStmt.reset();
parsedStmt.analyze(analyzer);
parsedStmt.castResultExprs(origResultTypes);
parsedStmt.setColLabels(origColLabels);
if (LOG.isTraceEnabled()) {
LOG.trace("rewrittenStmt: " + parsedStmt.toSql());
}
if (explainOptions != null) {
parsedStmt.setIsExplain(explainOptions);
}
}
}
plannerProfile.setQueryAnalysisFinishTime();
planner = new OriginalPlanner(analyzer);
// Only queries and inserts need a distributed plan.
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
planner.plan(parsedStmt, tQueryOptions);
}
plannerProfile.setQueryPlanFinishTime();
}
/**
 * Discards analysis state so the statement can be analyzed from scratch,
 * restoring select lists that rewrites (e.g. mv rewrite) may have modified.
 */
private void resetAnalyzerAndStmt() {
    analyzer = new Analyzer(context.getEnv(), context);
    parsedStmt.reset();
    if (parsedStmt instanceof QueryStmt) {
        ((QueryStmt) parsedStmt).resetSelectList();
    } else if (parsedStmt instanceof InsertStmt) {
        ((InsertStmt) parsedStmt).getQueryStmt().resetSelectList();
    }
}
/**
 * Cancels execution of this statement: the BE fragments via the coordinator,
 * any in-flight MySQL LOAD job, and any synchronous ANALYZE task.
 */
public void cancel() {
// Read the field once into a local: it may be replaced concurrently.
Coordinator coordRef = coord;
if (coordRef != null) {
coordRef.cancel();
}
if (mysqlLoadId != null) {
Env.getCurrentEnv().getLoadManager().getMysqlLoadManager().cancelMySqlLoad(mysqlLoadId);
}
if (parsedStmt instanceof AnalyzeStmt) {
Env.getCurrentEnv().getAnalysisManager().cancelSyncTask(context);
}
}
/**
 * Handles KILL [CONNECTION|QUERY] id: kills the target connection or its
 * running query. Killing another user's connection requires the same
 * qualified user or ADMIN privilege.
 *
 * @throws DdlException when the target does not exist or access is denied
 */
private void handleKill() throws DdlException {
    KillStmt killStmt = (KillStmt) parsedStmt;
    int targetId = killStmt.getConnectionId();
    ConnectContext target = context.getConnectScheduler().getContext(targetId);
    if (target == null) {
        ErrorReport.reportDdlException(ErrorCode.ERR_NO_SUCH_THREAD, targetId);
    }
    if (context == target) {
        // Killing our own connection: just mark it killed.
        context.setKilled();
    } else {
        boolean sameUser = target.getQualifiedUser().equals(ConnectContext.get().getQualifiedUser());
        if (!sameUser && !Env.getCurrentEnv().getAccessManager().checkGlobalPriv(ConnectContext.get(),
                PrivPredicate.ADMIN)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_KILL_DENIED_ERROR, targetId);
        }
        target.kill(killStmt.isConnectionKill());
    }
    context.getState().setOk();
}
/**
 * Executes a SET statement; on failure records an error state instead of
 * propagating the exception.
 */
private void handleSetStmt() {
    try {
        new SetExecutor(context, (SetStmt) parsedStmt).execute();
    } catch (DdlException e) {
        LOG.warn("", e);
        context.getState().setError(ErrorCode.ERR_LOCAL_VARIABLE, e.getMessage());
        return;
    }
    context.getState().setOk();
}
/**
 * Streams cached result rows to the client, sending the column metadata
 * packet first if it has not been sent yet. When isEos is set, also records
 * audit statistics and marks the MySQL state EOF.
 *
 * @param isSendFields whether the field metadata was already sent
 * @param isEos whether this is the final batch of the result
 * @return whether field metadata has been sent after this call
 */
private boolean sendCachedValues(MysqlChannel channel, List<InternalService.PCacheValue> cacheValues,
SelectStmt selectStmt, boolean isSendFields, boolean isEos)
throws Exception {
RowBatch batch = null;
boolean isSend = isSendFields;
for (InternalService.PCacheValue value : cacheValues) {
TResultBatch resultBatch = new TResultBatch();
for (ByteString one : value.getRowsList()) {
resultBatch.addToRows(ByteBuffer.wrap(one.toByteArray()));
}
resultBatch.setPacketSeq(1);
resultBatch.setIsCompressed(false);
batch = new RowBatch();
batch.setBatch(resultBatch);
batch.setEos(true);
// Field metadata must precede the first row packet.
if (!isSend) {
sendFields(selectStmt.getColLabels(), exprToType(selectStmt.getResultExprs()));
isSend = true;
}
for (ByteBuffer row : batch.getBatch().getRows()) {
channel.sendOnePacket(row);
}
context.updateReturnRows(batch.getBatch().getRows().size());
}
if (isEos) {
if (batch != null) {
statisticsForAuditLog = batch.getQueryStatistics() == null
? null : batch.getQueryStatistics().toBuilder();
}
// Even an empty result must send field metadata before EOF.
if (!isSend) {
sendFields(selectStmt.getColLabels(), exprToType(selectStmt.getResultExprs()));
isSend = true;
}
context.getState().setEof();
}
return isSend;
}
/**
 * Handles a SelectStmt via the result cache.
 * On a full cache hit, replies entirely from cache. On a partial (partition
 * mode) hit, sends the cached part, rewrites the statement to cover only the
 * missing range, re-plans it, and falls through to normal execution.
 */
private void handleCacheStmt(CacheAnalyzer cacheAnalyzer, MysqlChannel channel, SelectStmt selectStmt)
throws Exception {
InternalService.PFetchCacheResult cacheResult = cacheAnalyzer.getCacheData();
CacheMode mode = cacheAnalyzer.getCacheMode();
SelectStmt newSelectStmt = selectStmt;
boolean isSendFields = false;
if (cacheResult != null) {
isCached = true;
// Full hit: everything comes from cache, nothing to execute.
if (cacheAnalyzer.getHitRange() == Cache.HitRange.Full) {
sendCachedValues(channel, cacheResult.getValuesList(), newSelectStmt, isSendFields, true);
return;
}
if (mode == CacheMode.Partition) {
// Left hit: the cached prefix is sent now; the rest is computed below.
if (cacheAnalyzer.getHitRange() == Cache.HitRange.Left) {
isSendFields = sendCachedValues(channel, cacheResult.getValuesList(),
newSelectStmt, isSendFields, false);
}
// Re-plan the rewritten statement that covers only the cache miss range.
newSelectStmt = cacheAnalyzer.getRewriteStmt();
newSelectStmt.reset();
analyzer = new Analyzer(context.getEnv(), context);
newSelectStmt.analyze(analyzer);
if (parsedStmt instanceof LogicalPlanAdapter) {
planner = new NereidsPlanner(statementContext);
} else {
planner = new OriginalPlanner(analyzer);
}
planner.plan(newSelectStmt, context.getSessionVariable().toThrift());
}
}
sendResult(false, isSendFields, newSelectStmt, channel, cacheAnalyzer, cacheResult);
}
/**
 * Answers a constant SELECT (no table refs, every item a literal) directly in
 * FE without dispatching to BE.
 *
 * @return true when the result was sent from FE; false when any select item
 *         is not a literal and normal execution is required
 */
private boolean handleSelectRequestInFe(SelectStmt parsedSelectStmt) throws IOException {
    List<SelectListItem> items = parsedSelectStmt.getSelectList().getItems();
    List<Column> columns = new ArrayList<>(items.size());
    ResultSetMetaData metadata = new CommonResultSet.CommonResultSetMetaData(columns);
    List<String> labels = parsedSelectStmt.getColLabels();
    List<String> row = new ArrayList<>();
    for (int idx = 0; idx < items.size(); idx++) {
        Expr expr = items.get(idx).getExpr();
        // Only pure literals can be answered without BE execution.
        if (!(expr instanceof LiteralExpr)) {
            return false;
        }
        columns.add(new Column(labels.get(idx), expr.getType()));
        // Render each literal kind with its type-appropriate formatter.
        String cell;
        if (expr instanceof NullLiteral) {
            cell = null;
        } else if (expr instanceof FloatLiteral) {
            cell = LiteralUtils.getStringValue((FloatLiteral) expr);
        } else if (expr instanceof DecimalLiteral) {
            cell = ((DecimalLiteral) expr).getValue().toPlainString();
        } else if (expr instanceof ArrayLiteral) {
            cell = LiteralUtils.getStringValue((ArrayLiteral) expr);
        } else {
            cell = expr.getStringValue();
        }
        row.add(cell);
    }
    sendResultSet(new CommonResultSet(metadata, Collections.singletonList(row)));
    return true;
}
/**
 * Entry point for executing a query statement: registers query details,
 * short-circuits EXPLAIN, FE-only constant selects, cached selects, and
 * LIMIT 0, then streams results via sendResult.
 */
private void handleQueryStmt() throws Exception {
// Drop any packet built by the planner in a previous attempt.
context.getMysqlChannel().reset();
Queriable queryStmt = (Queriable) parsedStmt;
QueryDetail queryDetail = new QueryDetail(context.getStartTime(),
DebugUtil.printId(context.queryId()),
context.getStartTime(), -1, -1,
QueryDetail.QueryMemState.RUNNING,
context.getDatabase(),
originStmt.originStmt);
context.setQueryDetail(queryDetail);
QueryDetailQueue.addOrUpdateQueryDetail(queryDetail);
// EXPLAIN: reply with the plan text, no execution.
if (queryStmt.isExplain()) {
String explainString = planner.getExplainString(queryStmt.getExplainOptions());
handleExplainStmt(explainString);
return;
}
// Constant select with no table refs can be answered directly in FE.
if (parsedStmt instanceof SelectStmt && ((SelectStmt) parsedStmt).getTableRefs().isEmpty()) {
SelectStmt parsedSelectStmt = (SelectStmt) parsedStmt;
if (handleSelectRequestInFe(parsedSelectStmt)) {
return;
}
}
MysqlChannel channel = context.getMysqlChannel();
boolean isOutfileQuery = queryStmt.hasOutFileClause();
CacheAnalyzer cacheAnalyzer = new CacheAnalyzer(context, parsedStmt, planner);
// Cached path only applies to plain (non-OUTFILE) SELECTs.
if (cacheAnalyzer.enableCache() && !isOutfileQuery && queryStmt instanceof SelectStmt) {
handleCacheStmt(cacheAnalyzer, channel, (SelectStmt) queryStmt);
return;
}
// LIMIT 0: send only the field metadata and EOF.
if (parsedStmt instanceof SelectStmt) {
SelectStmt parsedSelectStmt = (SelectStmt) parsedStmt;
if (parsedSelectStmt.getLimit() == 0) {
LOG.info("ignore handle limit 0 ,sql:{}", parsedSelectStmt.toSql());
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
context.getState().setEof();
return;
}
}
sendResult(isOutfileQuery, false, queryStmt, channel, null, null);
}
/**
 * Schedules the plan on BE via a new Coordinator and streams result batches
 * to the client, interleaving cache capture/update, dry-run handling, and
 * profile bookkeeping. On any failure the BE fragments are cancelled.
 *
 * @param isOutfileQuery whether results go to OUTFILE (different field metadata)
 * @param isSendFields whether field metadata was already sent (partial cache hit)
 * @param cacheAnalyzer non-null when this execution participates in result caching
 * @param cacheResult cached rows to append for a right-range partition hit
 */
private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable queryStmt, MysqlChannel channel,
CacheAnalyzer cacheAnalyzer, InternalService.PFetchCacheResult cacheResult) throws Exception {
RowBatch batch;
coord = new Coordinator(context, analyzer, planner, context.getStatsErrorEstimator());
QeProcessorImpl.INSTANCE.registerQuery(context.queryId(),
new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord));
coord.setProfileWriter(this);
Span queryScheduleSpan =
context.getTracer().spanBuilder("query schedule").setParent(Context.current()).startSpan();
try (Scope scope = queryScheduleSpan.makeCurrent()) {
coord.exec();
} catch (Exception e) {
queryScheduleSpan.recordException(e);
throw e;
} finally {
queryScheduleSpan.end();
}
plannerProfile.setQueryScheduleFinishTime();
writeProfile(false);
Span fetchResultSpan = context.getTracer().spanBuilder("fetch result").setParent(Context.current()).startSpan();
try (Scope scope = fetchResultSpan.makeCurrent()) {
while (true) {
// Time spent waiting on BE is accounted separately from writing.
plannerProfile.setTempStartTime();
batch = coord.getNext();
plannerProfile.freshFetchResultConsumeTime();
if (batch.getBatch() != null) {
// Mirror rows into the cache before forwarding them to the client.
if (cacheAnalyzer != null) {
cacheAnalyzer.copyRowBatch(batch);
}
plannerProfile.setTempStartTime();
// Field metadata must precede the first row packet.
if (!isSendFields) {
if (!isOutfileQuery) {
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
} else {
sendFields(OutFileClause.RESULT_COL_NAMES, OutFileClause.RESULT_COL_TYPES);
}
isSendFields = true;
}
for (ByteBuffer row : batch.getBatch().getRows()) {
channel.sendOnePacket(row);
}
plannerProfile.freshWriteResultConsumeTime();
context.updateReturnRows(batch.getBatch().getRows().size());
context.setResultAttachedInfo(batch.getBatch().getAttachedInfos());
}
if (batch.isEos()) {
break;
}
}
if (cacheAnalyzer != null) {
// A right-range partition hit appends its cached rows after BE results.
if (cacheResult != null && cacheAnalyzer.getHitRange() == Cache.HitRange.Right) {
isSendFields =
sendCachedValues(channel, cacheResult.getValuesList(), (SelectStmt) queryStmt, isSendFields,
false);
}
cacheAnalyzer.updateCache();
}
// Empty result: metadata (or dry-run row count) must still be sent.
if (!isSendFields) {
if (!isOutfileQuery) {
if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().dryRunQuery) {
List<String> data = Lists.newArrayList(batch.getQueryStatistics() == null ? "0"
: batch.getQueryStatistics().getReturnedRows() + "");
ResultSet resultSet = new CommonResultSet(DRY_RUN_QUERY_METADATA,
Collections.singletonList(data));
sendResultSet(resultSet);
return;
} else {
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
}
} else {
sendFields(OutFileClause.RESULT_COL_NAMES, OutFileClause.RESULT_COL_TYPES);
}
}
statisticsForAuditLog = batch.getQueryStatistics() == null ? null : batch.getQueryStatistics().toBuilder();
context.getState().setEof();
plannerProfile.setQueryFetchResultFinishTime();
} catch (Exception e) {
LOG.warn("cancel fragment query_id:{} cause {}", DebugUtil.printId(context.queryId()), e.getMessage());
coord.cancel(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
fetchResultSpan.recordException(e);
throw e;
} finally {
fetchResultSpan.end();
}
}
/**
 * Fetches the waiting-transaction status: the master answers from its own
 * transaction manager, while a non-master FE asks the master over RPC.
 */
private TWaitingTxnStatusResult getWaitingTxnStatus(TWaitingTxnStatusRequest request) throws Exception {
    if (Env.getCurrentEnv().isMaster()) {
        return Env.getCurrentGlobalTransactionMgr().getWaitingTxnStatus(request);
    }
    return new MasterTxnExecutor(context).getWaitingTxnStatus(request);
}
/**
 * Handles BEGIN / COMMIT / ROLLBACK for the insert-transaction model.
 * COMMIT flushes buffered rows, commits, and waits until the transaction is
 * VISIBLE; ROLLBACK aborts it. A commit/rollback of an empty transaction just
 * clears the transaction entry. Both paths clear the entry in finally.
 */
private void handleTransactionStmt() throws Exception {
context.getMysqlChannel().reset();
context.getState().setOk(0, 0, "");
// COMMIT/ROLLBACK of a transaction with zero rows: nothing to do on BE.
if (context.getTxnEntry() != null && context.getTxnEntry().getRowsInTransaction() == 0
&& (parsedStmt instanceof TransactionCommitStmt || parsedStmt instanceof TransactionRollbackStmt)) {
context.setTxnEntry(null);
} else if (parsedStmt instanceof TransactionBeginStmt) {
if (context.isTxnModel()) {
LOG.info("A transaction has already begin");
return;
}
TTxnParams txnParams = new TTxnParams();
txnParams.setNeedTxn(true).setEnablePipelineTxnLoad(Config.enable_pipeline_load)
.setThriftRpcTimeoutMs(5000).setTxnId(-1).setDb("").setTbl("");
// Strict mode tolerates no filtered rows; otherwise all may be filtered.
if (context.getSessionVariable().getEnableInsertStrict()) {
txnParams.setMaxFilterRatio(0);
} else {
txnParams.setMaxFilterRatio(1.0);
}
if (context.getTxnEntry() == null) {
context.setTxnEntry(new TransactionEntry());
}
context.getTxnEntry().setTxnConf(txnParams);
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(TransactionStatus.PREPARE.name());
sb.append("', 'txnId':'").append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} else if (parsedStmt instanceof TransactionCommitStmt) {
if (!context.isTxnModel()) {
LOG.info("No transaction to commit");
return;
}
TTxnParams txnConf = context.getTxnEntry().getTxnConf();
try {
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(context.getTxnEntry());
// Flush any rows still buffered in FE before committing.
if (context.getTxnEntry().getDataToSend().size() > 0) {
executor.sendData();
}
executor.commitTransaction();
// Poll the (master's) transaction status until it becomes visible.
TWaitingTxnStatusRequest request = new TWaitingTxnStatusRequest();
request.setDbId(txnConf.getDbId()).setTxnId(txnConf.getTxnId());
request.setLabelIsSet(false);
request.setTxnIdIsSet(true);
TWaitingTxnStatusResult statusResult = getWaitingTxnStatus(request);
TransactionStatus txnStatus = TransactionStatus.valueOf(statusResult.getTxnStatusId());
if (txnStatus == TransactionStatus.COMMITTED) {
throw new AnalysisException("transaction commit successfully, BUT data will be visible later.");
} else if (txnStatus != TransactionStatus.VISIBLE) {
String errMsg = "commit failed, rollback.";
if (statusResult.getStatus().isSetErrorMsgs()
&& statusResult.getStatus().getErrorMsgs().size() > 0) {
errMsg = String.join(". ", statusResult.getStatus().getErrorMsgs());
}
throw new AnalysisException(errMsg);
}
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(txnStatus.name()).append("', 'txnId':'")
.append(context.getTxnEntry().getTxnConf().getTxnId()).append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} catch (Exception e) {
LOG.warn("Txn commit failed", e);
throw new AnalysisException(e.getMessage());
} finally {
context.setTxnEntry(null);
}
} else if (parsedStmt instanceof TransactionRollbackStmt) {
if (!context.isTxnModel()) {
LOG.info("No transaction to rollback");
return;
}
try {
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(context.getTxnEntry());
executor.abortTransaction();
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(TransactionStatus.ABORTED.name()).append("', 'txnId':'")
.append(context.getTxnEntry().getTxnConf().getTxnId()).append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} catch (Exception e) {
throw new AnalysisException(e.getMessage());
} finally {
context.setTxnEntry(null);
}
} else {
throw new TException("parsedStmt type is not TransactionStmt");
}
}
/**
 * Executes an INSERT statement inside an explicit transaction (txn model).
 * Only VALUES-style inserts into the single table bound to the transaction are
 * supported; rows are buffered on the {@code TransactionEntry} and flushed to the
 * backend in batches of {@code MAX_DATA_TO_SEND_FOR_TXN}.
 *
 * @param insertStmt the INSERT to execute; its db/table must match the open txn
 * @return the number of rows accepted into the transaction
 * @throws TException if the target table differs from the txn's table, the
 *         statement is not a VALUES insert, or a row's arity mismatches the schema
 */
private int executeForTxn(InsertStmt insertStmt)
        throws UserException, TException, InterruptedException, ExecutionException, TimeoutException {
    // Lazily begin the transaction on the first INSERT after BEGIN.
    if (context.isTxnIniting()) {
        beginTxn(insertStmt.getDbName(),
                insertStmt.getTbl());
    }
    // An open transaction is pinned to exactly one (db, table) pair.
    if (!context.getTxnEntry().getTxnConf().getDb().equals(insertStmt.getDbName())
            || !context.getTxnEntry().getTxnConf().getTbl().equals(insertStmt.getTbl())) {
        throw new TException("Only one table can be inserted in one transaction.");
    }
    QueryStmt queryStmt = insertStmt.getQueryStmt();
    if (!(queryStmt instanceof SelectStmt)) {
        throw new TException("queryStmt is not SelectStmt, insert command error");
    }
    TransactionEntry txnEntry = context.getTxnEntry();
    SelectStmt selectStmt = (SelectStmt) queryStmt;
    int effectRows = 0;
    if (selectStmt.getValueList() != null) {
        Table tbl = txnEntry.getTable();
        // Validate ALL rows before sending any, so a malformed row cannot
        // leave a partially transmitted batch behind.
        int schemaSize = tbl.getBaseSchema(false).size();
        for (List<Expr> row : selectStmt.getValueList().getRows()) {
            if (schemaSize != row.size()) {
                throw new TException("Column count doesn't match value count");
            }
        }
        for (List<Expr> row : selectStmt.getValueList().getRows()) {
            ++effectRows;
            InternalService.PDataRow data = StmtExecutor.getRowStringValue(row);
            if (data == null) {
                // Empty expression list serializes to null; nothing to send.
                continue;
            }
            List<InternalService.PDataRow> dataToSend = txnEntry.getDataToSend();
            dataToSend.add(data);
            // Flush the buffered rows once the batch threshold is reached.
            if (dataToSend.size() >= MAX_DATA_TO_SEND_FOR_TXN) {
                InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(txnEntry);
                executor.sendData();
            }
        }
    }
    txnEntry.setRowsInTransaction(txnEntry.getRowsInTransaction() + effectRows);
    return effectRows;
}
/**
 * Opens a streaming-insert transaction on the given table and primes the
 * backend stream-load channel for it.
 * <p>
 * On a master FE the transaction is begun directly through the global
 * transaction manager; on a non-master FE the begin request is forwarded to
 * the master via {@code MasterTxnExecutor}. In both cases the resulting txnId
 * and auth token are recorded on the session's {@code TTxnParams}, and a
 * stream-load PUT request is issued to start the backend transaction.
 *
 * @param dbName  target database name
 * @param tblName target table name
 */
private void beginTxn(String dbName, String tblName) throws UserException, TException,
        InterruptedException, ExecutionException, TimeoutException {
    TransactionEntry txnEntry = context.getTxnEntry();
    TTxnParams txnConf = txnEntry.getTxnConf();
    SessionVariable sessionVariable = context.getSessionVariable();
    long timeoutSecond = context.getExecTimeout();
    TransactionState.LoadJobSourceType sourceType = TransactionState.LoadJobSourceType.INSERT_STREAMING;
    Database dbObj = Env.getCurrentInternalCatalog()
            .getDbOrException(dbName, s -> new TException("database is invalid for dbName: " + s));
    Table tblObj = dbObj.getTableOrException(tblName, s -> new TException("table is invalid: " + s));
    // Bind the transaction to this (db, table) pair; executeForTxn enforces it.
    txnConf.setDbId(dbObj.getId()).setTbl(tblName).setDb(dbName);
    txnEntry.setTable(tblObj);
    txnEntry.setDb(dbObj);
    String label = txnEntry.getLabel();
    if (Env.getCurrentEnv().isMaster()) {
        // Master FE: begin the transaction locally.
        long txnId = Env.getCurrentGlobalTransactionMgr().beginTransaction(
                txnConf.getDbId(), Lists.newArrayList(tblObj.getId()),
                label, new TransactionState.TxnCoordinator(
                        TransactionState.TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),
                sourceType, timeoutSecond);
        txnConf.setTxnId(txnId);
        String token = Env.getCurrentEnv().getLoadManager().getTokenManager().acquireToken();
        txnConf.setToken(token);
    } else {
        // Non-master FE: forward the begin request to the master.
        String token = Env.getCurrentEnv().getLoadManager().getTokenManager().acquireToken();
        MasterTxnExecutor masterTxnExecutor = new MasterTxnExecutor(context);
        TLoadTxnBeginRequest request = new TLoadTxnBeginRequest();
        request.setDb(txnConf.getDb()).setTbl(txnConf.getTbl()).setToken(token)
                .setCluster(dbObj.getClusterName()).setLabel(label).setUser("").setUserIp("").setPasswd("");
        TLoadTxnBeginResult result = masterTxnExecutor.beginTxn(request);
        txnConf.setTxnId(result.getTxnId());
        txnConf.setToken(token);
    }
    // Start the stream-load channel on the backend for this transaction.
    TStreamLoadPutRequest request = new TStreamLoadPutRequest();
    long maxExecMemByte = sessionVariable.getMaxExecMemByte();
    String timeZone = sessionVariable.getTimeZone();
    int sendBatchParallelism = sessionVariable.getSendBatchParallelism();
    request.setTxnId(txnConf.getTxnId()).setDb(txnConf.getDb())
            .setTbl(txnConf.getTbl())
            .setFileType(TFileType.FILE_STREAM).setFormatType(TFileFormatType.FORMAT_CSV_PLAIN)
            .setMergeType(TMergeType.APPEND).setThriftRpcTimeoutMs(5000).setLoadId(context.queryId())
            .setExecMemLimit(maxExecMemByte).setTimeout((int) timeoutSecond)
            .setTimezone(timeZone).setSendBatchParallelism(sendBatchParallelism);
    InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(txnEntry);
    executor.beginTransaction(request);
}
/**
 * Executes an INSERT statement.
 * <p>
 * Three paths:
 * <ul>
 *   <li>EXPLAIN INSERT — emits the plan and returns;</li>
 *   <li>txn model (inside BEGIN) — delegates to {@link #executeForTxn};</li>
 *   <li>normal mode — runs the load through a {@code Coordinator}, then
 *       commits (or aborts on failure) the transaction and records the job.</li>
 * </ul>
 * Finally reports a JSON-ish summary ({label, status, txnId, ...}) to the client.
 */
private void handleInsertStmt() throws Exception {
    if (context.getMysqlChannel() != null) {
        context.getMysqlChannel().reset();
    }
    InsertStmt insertStmt = (InsertStmt) parsedStmt;
    if (insertStmt.getQueryStmt().hasOutFileClause()) {
        throw new DdlException("Not support OUTFILE clause in INSERT statement");
    }
    // EXPLAIN INSERT: print the plan, do not execute the load.
    if (insertStmt.getQueryStmt().isExplain()) {
        ExplainOptions explainOptions = insertStmt.getQueryStmt().getExplainOptions();
        insertStmt.setIsExplain(explainOptions);
        String explainString = planner.getExplainString(explainOptions);
        handleExplainStmt(explainString);
        return;
    }
    analyzeVariablesInStmt(insertStmt.getQueryStmt());
    long createTime = System.currentTimeMillis();
    Throwable throwable = null;
    long txnId = -1;
    String label = "";
    long loadedRows = 0;
    int filteredRows = 0;
    TransactionStatus txnStatus = TransactionStatus.ABORTED;
    String errMsg = "";
    TableType tblType = insertStmt.getTargetTable().getType();
    if (context.isTxnModel()) {
        // Txn model accepts only VALUES inserts (no table refs in the SELECT).
        if (insertStmt.getQueryStmt() instanceof SelectStmt) {
            if (((SelectStmt) insertStmt.getQueryStmt()).getTableRefs().size() > 0) {
                throw new TException("Insert into ** select is not supported in a transaction");
            }
        }
        txnStatus = TransactionStatus.PREPARE;
        loadedRows = executeForTxn(insertStmt);
        label = context.getTxnEntry().getLabel();
        txnId = context.getTxnEntry().getTxnConf().getTxnId();
    } else {
        label = insertStmt.getLabel();
        LOG.info("Do insert [{}] with query id: {}", label, DebugUtil.printId(context.queryId()));
        try {
            coord = new Coordinator(context, analyzer, planner, context.getStatsErrorEstimator());
            coord.setLoadZeroTolerance(context.getSessionVariable().getEnableInsertStrict());
            coord.setQueryType(TQueryType.LOAD);
            QeProcessorImpl.INSTANCE.registerQuery(context.queryId(), coord);
            coord.exec();
            int execTimeout = context.getExecTimeout();
            LOG.debug("Insert execution timeout:{}", execTimeout);
            boolean notTimeout = coord.join(execTimeout);
            if (!coord.isDone()) {
                coord.cancel();
                if (notTimeout) {
                    // join returned before the timeout but execution is not
                    // done: some backend went away, not a timeout.
                    errMsg = coord.getExecStatus().getErrorMsg();
                    ErrorReport.reportDdlException("There exists unhealthy backend. "
                            + errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT);
                } else {
                    ErrorReport.reportDdlException(ErrorCode.ERR_EXECUTE_TIMEOUT);
                }
            }
            if (!coord.getExecStatus().ok()) {
                errMsg = coord.getExecStatus().getErrorMsg();
                LOG.warn("insert failed: {}", errMsg);
                ErrorReport.reportDdlException(errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT);
            }
            LOG.debug("delta files is {}", coord.getDeltaUrls());
            if (coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL) != null) {
                loadedRows = Long.parseLong(coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL));
            }
            if (coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL) != null) {
                filteredRows = Integer.parseInt(coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL));
            }
            // Strict mode: any filtered row fails the whole INSERT.
            if (context.getSessionVariable().getEnableInsertStrict()) {
                if (filteredRows > 0) {
                    context.getState().setError(ErrorCode.ERR_FAILED_WHEN_INSERT,
                            "Insert has filtered data in strict mode, tracking_url=" + coord.getTrackingUrl());
                    return;
                }
            }
            // Non-OLAP targets have no FE-side transaction to commit.
            if (tblType != TableType.OLAP && tblType != TableType.MATERIALIZED_VIEW) {
                context.getState().setOk(loadedRows, filteredRows, null);
                return;
            }
            // Commit; COMMITTED means committed but not yet visible to queries.
            if (Env.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(
                    insertStmt.getDbObj(), Lists.newArrayList(insertStmt.getTargetTable()),
                    insertStmt.getTransactionId(),
                    TabletCommitInfo.fromThrift(coord.getCommitInfos()),
                    context.getSessionVariable().getInsertVisibleTimeoutMs())) {
                txnStatus = TransactionStatus.VISIBLE;
            } else {
                txnStatus = TransactionStatus.COMMITTED;
            }
        } catch (Throwable t) {
            // Any failure after the txn was begun: abort it (best effort).
            LOG.warn("handle insert stmt fail: {}", label, t);
            try {
                Env.getCurrentGlobalTransactionMgr().abortTransaction(
                        insertStmt.getDbObj().getId(), insertStmt.getTransactionId(),
                        t.getMessage() == null ? "unknown reason" : t.getMessage());
            } catch (Exception abortTxnException) {
                LOG.warn("errors when abort txn", abortTxnException);
            }
            if (!Config.using_old_load_usage_pattern) {
                // NOTE(review): if the Coordinator constructor itself threw,
                // coord is still null here and getTrackingUrl() would NPE — verify.
                StringBuilder sb = new StringBuilder(t.getMessage());
                if (!Strings.isNullOrEmpty(coord.getTrackingUrl())) {
                    sb.append(". url: " + coord.getTrackingUrl());
                }
                context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, sb.toString());
                return;
            }
            /*
             * If config 'using_old_load_usage_pattern' is true.
             * Doris will return a label to user, and user can use this label to check load job's status,
             * which exactly like the old insert stmt usage pattern.
             */
            throwable = t;
        } finally {
            endProfile(true);
            QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
        }
        txnId = insertStmt.getTransactionId();
        try {
            context.getEnv().getLoadManager()
                    .recordFinishedLoadJob(label, txnId, insertStmt.getDbName(),
                            insertStmt.getTargetTable().getId(),
                            EtlJobType.INSERT, createTime, throwable == null ? "" : throwable.getMessage(),
                            coord.getTrackingUrl(), insertStmt.getUserInfo());
        } catch (MetaNotFoundException e) {
            LOG.warn("Record info of insert load with error {}", e.getMessage(), e);
            errMsg = "Record info of insert load with error " + e.getMessage();
        }
    }
    // Build the {label, status, txnId, ...} summary returned to the client.
    StringBuilder sb = new StringBuilder();
    sb.append("{'label':'").append(label).append("', 'status':'").append(txnStatus.name());
    sb.append("', 'txnId':'").append(txnId).append("'");
    // NOTE(review): the leading "'" below doubles the quote after txnId
    // (produces ...''', 'rows':...) — confirm the intended output format.
    if (tblType == TableType.MATERIALIZED_VIEW) {
        sb.append("', 'rows':'").append(loadedRows).append("'");
    }
    if (!Strings.isNullOrEmpty(errMsg)) {
        sb.append(", 'err':'").append(errMsg).append("'");
    }
    sb.append("}");
    context.getState().setOk(loadedRows, filteredRows, sb.toString());
    context.setOrUpdateInsertResult(txnId, label, insertStmt.getDbName(), insertStmt.getTbl(),
            txnStatus, loadedRows, filteredRows);
    context.updateReturnRows((int) loadedRows);
}
/**
 * Hands an INSERT over to the load manager adapter as an external load job.
 * Reports OK on submission; maps failures onto the client error state.
 */
private void handleExternalInsertStmt() {
    try {
        InsertStmt stmt = (InsertStmt) parsedStmt;
        if (stmt.getLoadType() == LoadType.UNKNOWN) {
            throw new DdlException("Unknown load job type");
        }
        LoadManagerAdapter adapter = context.getEnv().getLoadManagerAdapter();
        adapter.startLoadFromInsertStmt(stmt);
        context.getState().setOk();
    } catch (UserException e) {
        LOG.debug("DDL statement({}) process failed.", originStmt.originStmt, e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    } catch (Exception e) {
        LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
/**
 * Fallback for statements that are parsed but not supported: reset the MySQL
 * channel and answer OK so the client connection remains usable.
 */
private void handleUnsupportedStmt() {
    context.getMysqlChannel().reset();
    context.getState().setOk();
}
/**
 * Handles SWITCH &lt;catalog&gt;: changes the session's current catalog.
 * On failure the error is reflected in the client state instead of thrown.
 */
private void handleSwitchStmt() throws AnalysisException {
    SwitchStmt stmt = (SwitchStmt) parsedStmt;
    try {
        context.getEnv().changeCatalog(context, stmt.getCatalogName());
        context.getState().setOk();
    } catch (DdlException e) {
        LOG.warn("", e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    }
}
/**
 * Registers a prepared statement on the session. For the binary protocol
 * (COM_STMT_PREPARE) a prepare-OK packet is sent back to the client.
 */
private void handlePrepareStmt() throws Exception {
    LOG.debug("add prepared statement {}, isBinaryProtocol {}",
            prepareStmt.getName(), prepareStmt.isBinaryProtocol());
    PrepareStmtContext prepareContext =
            new PrepareStmtContext(prepareStmt, context, planner, analyzer, prepareStmt.getName());
    context.addPreparedStmt(prepareStmt.getName(), prepareContext);
    if (prepareStmt.isBinaryProtocol()) {
        sendStmtPrepareOK();
    }
    context.getState().setOk();
}
/**
 * Handles USE [catalog.]db: requires a selected cluster, optionally switches
 * catalog, then changes the current database. DdlExceptions become client
 * error state rather than propagating.
 */
private void handleUseStmt() throws AnalysisException {
    UseStmt stmt = (UseStmt) parsedStmt;
    try {
        if (Strings.isNullOrEmpty(stmt.getClusterName())) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_CLUSTER_NO_SELECT_CLUSTER);
        }
        String catalogName = stmt.getCatalogName();
        if (catalogName != null) {
            context.getEnv().changeCatalog(context, catalogName);
        }
        context.getEnv().changeDb(context, stmt.getDatabase());
        context.getState().setOk();
    } catch (DdlException e) {
        LOG.warn("", e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    }
}
/**
 * Sends a result-set header to the client: a column-count packet, one field
 * packet per column, then an EOF packet.
 */
private void sendMetaData(ResultSetMetaData metaData) throws IOException {
    // Column count.
    serializer.reset();
    serializer.writeVInt(metaData.getColumnCount());
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    // One field packet per column.
    for (Column column : metaData.getColumns()) {
        serializer.reset();
        serializer.writeField(column.getName(), column.getType());
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    // Terminating EOF.
    serializer.reset();
    new MysqlEofPacket(context.getState()).writeTo(serializer);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}
/**
 * Sends the COM_STMT_PREPARE OK response: status byte, statement id (parsed
 * from the prepared statement's name), zero result columns, and the parameter
 * count — followed by field packets for the placeholders, if any.
 */
private void sendStmtPrepareOK() throws IOException {
    serializer.reset();
    serializer.writeInt1(0);
    serializer.writeInt4(Integer.valueOf(prepareStmt.getName()));
    // Zero result columns are advertised at prepare time.
    serializer.writeInt2(0);
    int paramCount = prepareStmt.getColLabelsOfPlaceHolders().size();
    serializer.writeInt2(paramCount);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    if (paramCount > 0) {
        sendFields(prepareStmt.getColLabelsOfPlaceHolders(),
                exprToType(prepareStmt.getSlotRefOfPlaceHolders()));
    }
    context.getState().setOk();
}
/**
 * Sends field packets for the given column names/types, terminated by EOF.
 * When executing a prepared statement, serialized field packets are cached on
 * the prepared statement and reused on subsequent executions.
 */
private void sendFields(List<String> colNames, List<Type> types) throws IOException {
    serializer.reset();
    serializer.writeVInt(colNames.size());
    LOG.debug("sendFields {}", colNames);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    boolean useFieldCache = prepareStmt != null && isExecuteStmt;
    for (int idx = 0; idx < colNames.size(); ++idx) {
        serializer.reset();
        if (useFieldCache) {
            String colName = colNames.get(idx);
            byte[] cached = prepareStmt.getSerializedField(colName);
            if (cached == null) {
                // First execution: serialize once and remember the bytes.
                serializer.writeField(colName, types.get(idx));
                cached = serializer.toArray();
                prepareStmt.setSerializedField(colName, cached);
            }
            context.getMysqlChannel().sendOnePacket(ByteBuffer.wrap(cached));
        } else {
            serializer.writeField(colNames.get(idx), types.get(idx));
            context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
        }
    }
    serializer.reset();
    new MysqlEofPacket(context.getState()).writeTo(serializer);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}
/**
 * Streams a materialized result set to the client: metadata first, then one
 * packet per row (NULL-encoding cells equal to {@code FeConstants.null_string}),
 * and finally an EOF state.
 */
public void sendResultSet(ResultSet resultSet) throws IOException {
    context.updateReturnRows(resultSet.getResultRows().size());
    sendMetaData(resultSet.getMetaData());
    for (List<String> row : resultSet.getResultRows()) {
        serializer.reset();
        for (String cell : row) {
            boolean isNull = cell == null || cell.equals(FeConstants.null_string);
            if (isNull) {
                serializer.writeNull();
            } else {
                serializer.writeLenEncodedString(cell);
            }
        }
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    context.getState().setEof();
}
/**
 * Executes a SHOW statement. A null result set means there is nothing to send;
 * in proxy mode the result is stashed for the forwarding FE instead of being
 * written to the client channel.
 */
private void handleShow() throws IOException, AnalysisException, DdlException {
    ShowResultSet resultSet = new ShowExecutor(context, (ShowStmt) parsedStmt).execute();
    if (resultSet == null) {
        return;
    }
    if (isProxy) {
        proxyResultSet = resultSet;
    } else {
        sendResultSet(resultSet);
    }
}
// Intentional no-op: UNLOCK TABLES is accepted for MySQL compatibility
// but has no effect here.
private void handleUnlockTablesStmt() {
}
// Intentional no-op: LOCK TABLES is accepted for MySQL compatibility
// but has no effect here.
private void handleLockTablesStmt() {
}
/**
 * Sends an EXPLAIN result to the client as a single-column result set, one
 * row per line of the plan text.
 *
 * @param result the rendered explain string (newline separated)
 */
public void handleExplainStmt(String result) throws IOException {
    ShowResultSetMetaData metaData = ShowResultSetMetaData.builder()
            .addColumn(new Column("Explain String", ScalarType.createVarchar(20)))
            .build();
    sendMetaData(metaData);
    for (String line : result.split("\n")) {
        serializer.reset();
        serializer.writeLenEncodedString(line);
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    context.getState().setEof();
}
/**
 * Handles a LOAD statement. Hadoop loads are rejected; client-local-file
 * loads go through the MySQL load manager (requires client capability);
 * everything else is submitted as a regular load job.
 */
private void handleLoadStmt() {
    try {
        LoadStmt loadStmt = (LoadStmt) parsedStmt;
        EtlJobType jobType = loadStmt.getEtlJobType();
        if (jobType == EtlJobType.UNKNOWN) {
            throw new DdlException("Unknown load job type");
        }
        if (jobType == EtlJobType.HADOOP) {
            throw new DdlException("Load job by hadoop cluster is disabled."
                    + " Try using broker load. See 'help broker load;'");
        }
        LoadManager loadManager = context.getEnv().getLoadManager();
        if (jobType != EtlJobType.LOCAL_FILE) {
            loadManager.createLoadJobFromStmt(loadStmt);
            context.getState().setOk();
            return;
        }
        // Client-local file load path.
        if (!context.getCapability().supportClientLocalFile()) {
            context.getState().setError(ErrorCode.ERR_NOT_ALLOWED_COMMAND, "This client is not support"
                    + " to load client local file.");
            return;
        }
        String loadId = UUID.randomUUID().toString();
        mysqlLoadId = loadId;
        LoadJobRowResult submitResult = loadManager.getMysqlLoadManager()
                .executeMySqlLoadJobFromStmt(context, loadStmt, loadId);
        context.getState().setOk(submitResult.getRecords(), submitResult.getWarnings(),
                submitResult.toString());
    } catch (UserException e) {
        LOG.debug("DDL statement({}) process failed.", originStmt.originStmt, e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    } catch (Exception e) {
        LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
/**
 * Handles UPDATE by rewriting it to its backing INSERT statement and
 * re-dispatching through {@link #execute()}.
 * <p>
 * Fix: a thrown exception was previously only logged, leaving the client
 * state untouched (no error reported). Now the error is surfaced to the
 * client via the query state, consistent with the other statement handlers.
 */
private void handleUpdateStmt() {
    try {
        UpdateStmt updateStmt = (UpdateStmt) parsedStmt;
        parsedStmt = updateStmt.getInsertStmt();
        execute();
        if (MysqlStateType.ERR.equals(context.getState().getStateType())) {
            LOG.warn("update data error, stmt={}", updateStmt.toSql());
        }
    } catch (Exception e) {
        LOG.warn("update data error, stmt={}", parsedStmt.toSql(), e);
        // Report the failure to the client instead of silently swallowing it.
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
/**
 * Handles DELETE by rewriting it to its backing INSERT statement and
 * re-dispatching through {@link #execute()}.
 * <p>
 * Fix: a thrown exception was previously only logged, leaving the client
 * state untouched (no error reported). Now the error is surfaced to the
 * client via the query state, consistent with the other statement handlers.
 */
private void handleDeleteStmt() {
    try {
        DeleteStmt deleteStmt = (DeleteStmt) parsedStmt;
        parsedStmt = deleteStmt.getInsertStmt();
        execute();
        if (MysqlStateType.ERR.equals(context.getState().getStateType())) {
            LOG.warn("delete data error, stmt={}", deleteStmt.toSql());
        }
    } catch (Exception e) {
        LOG.warn("delete data error, stmt={}", parsedStmt.toSql(), e);
        // Report the failure to the client instead of silently swallowing it.
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
/**
 * Dispatches a DDL statement to {@code DdlExecutor}. {@code AnalyzeStmt} is
 * excluded from the blanket OK so its own result state is not overwritten.
 * Failures are mapped onto the client error state rather than rethrown.
 */
private void handleDdlStmt() {
    try {
        DdlExecutor.execute(context.getEnv(), (DdlStmt) parsedStmt);
        if (!(parsedStmt instanceof AnalyzeStmt)) {
            context.getState().setOk();
        }
    } catch (QueryStateException e) {
        // The exception carries the desired client state; adopt it as-is.
        LOG.warn("", e);
        context.setState(e.getQueryState());
    } catch (UserException e) {
        LOG.warn("DDL statement({}) process failed.", originStmt.originStmt, e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    } catch (Exception e) {
        LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
/**
 * Handles ENTER &lt;cluster&gt;: switches the session cluster and clears the
 * current database. DdlExceptions are reported through the client state.
 */
private void handleEnterStmt() {
    final EnterStmt enterStmt = (EnterStmt) parsedStmt;
    try {
        context.getEnv().changeCluster(context, enterStmt.getClusterName());
        context.setDatabase("");
        context.getState().setOk();
    } catch (DdlException e) {
        LOG.warn("", e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    }
}
/**
 * Handles EXPORT by submitting an export job to the export manager.
 */
private void handleExportStmt() throws Exception {
    ExportStmt exportStmt = (ExportStmt) parsedStmt;
    context.getEnv().getExportMgr().addExportJob(exportStmt);
}
/**
 * Handles CREATE TABLE AS SELECT in two phases: first the CREATE TABLE DDL,
 * then the backing INSERT. If the INSERT fails (error state or exception),
 * the freshly created table is rolled back via {@link #handleCtasRollback}.
 */
private void handleCtasStmt() {
    CreateTableAsSelectStmt ctasStmt = (CreateTableAsSelectStmt) this.parsedStmt;
    // Phase 1: create the target table.
    try {
        DdlExecutor.execute(context.getEnv(), ctasStmt);
        context.getState().setOk();
    } catch (Exception e) {
        LOG.warn("CTAS create table error, stmt={}", originStmt.originStmt, e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
    if (!MysqlStateType.OK.equals(context.getState().getStateType())) {
        return;
    }
    // Phase 2: populate it with the INSERT ... SELECT.
    try {
        parsedStmt = ctasStmt.getInsertStmt();
        parsedStmt.setUserInfo(context.getCurrentUserIdentity());
        execute();
        if (MysqlStateType.ERR.equals(context.getState().getStateType())) {
            LOG.warn("CTAS insert data error, stmt={}", ctasStmt.toSql());
            handleCtasRollback(ctasStmt.getCreateTableStmt().getDbTbl());
        }
    } catch (Exception e) {
        LOG.warn("CTAS insert data error, stmt={}", ctasStmt.toSql(), e);
        handleCtasRollback(ctasStmt.getCreateTableStmt().getDbTbl());
    }
}
/**
 * Best-effort cleanup after a failed CTAS: drops the created table, but only
 * when the session variable {@code drop_table_if_ctas_failed} is enabled.
 *
 * @param table the table created by the failed CTAS
 */
private void handleCtasRollback(TableName table) {
    if (!context.getSessionVariable().isDropTableIfCtasFailed()) {
        return;
    }
    DropTableStmt dropTableStmt = new DropTableStmt(true, table, true);
    try {
        DdlExecutor.execute(context.getEnv(), dropTableStmt);
    } catch (Exception ex) {
        LOG.warn("CTAS drop table error, stmt={}", parsedStmt.toSql(), ex);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + ex.getMessage());
    }
}
/**
 * Returns the query statistics for the audit log, substituting zero for any
 * counter that was never reported during execution.
 */
public Data.PQueryStatistics getQueryStatisticsForAuditLog() {
    if (statisticsForAuditLog == null) {
        statisticsForAuditLog = Data.PQueryStatistics.newBuilder();
    }
    Data.PQueryStatistics.Builder builder = statisticsForAuditLog;
    // Backfill zero defaults so the audit record always has every field.
    if (!builder.hasScanBytes()) {
        builder.setScanBytes(0L);
    }
    if (!builder.hasScanRows()) {
        builder.setScanRows(0L);
    }
    if (!builder.hasReturnedRows()) {
        builder.setReturnedRows(0L);
    }
    if (!builder.hasCpuMs()) {
        builder.setCpuMs(0L);
    }
    return builder.build();
}
/**
 * Maps each expression to its resolved type, preserving order.
 * Idiom fix: use a method reference instead of the equivalent lambda.
 *
 * @param exprs analyzed expressions
 * @return the expressions' types, in the same order
 */
private List<Type> exprToType(List<Expr> exprs) {
    return exprs.stream().map(Expr::getType).collect(Collectors.toList());
}
/**
 * Replaces the statement this executor is processing, keeping the statement
 * context in sync, and returns the new statement for chaining.
 */
public StatementBase setParsedStmt(StatementBase stmt) {
    this.parsedStmt = stmt;
    this.statementContext.setParsedStatement(stmt);
    return stmt;
}
/**
 * Runs the statement as an internal (FE-initiated) query and collects all
 * result rows in memory.
 * <p>
 * Planning prefers the Nereids planner when enabled, falling back to the
 * legacy analyzer/planner on any Nereids failure. Planning or scheduling
 * errors are logged and produce the rows gathered so far (possibly empty)
 * rather than an exception; the query is always unregistered on exit.
 *
 * @return the fetched rows; empty on planning/execution failure
 */
public List<ResultRow> executeInternalQuery() {
    try {
        List<ResultRow> resultRows = new ArrayList<>();
        try {
            if (ConnectContext.get() != null
                    && ConnectContext.get().getSessionVariable().isEnableNereidsPlanner()) {
                try {
                    // Preferred path: Nereids planner.
                    parseByNereids();
                    Preconditions.checkState(parsedStmt instanceof LogicalPlanAdapter,
                            "Nereids only process LogicalPlanAdapter,"
                                    + " but parsedStmt is " + parsedStmt.getClass().getName());
                    context.getState().setNereids(true);
                    context.getState().setIsQuery(true);
                    planner = new NereidsPlanner(statementContext);
                    planner.plan(parsedStmt, context.getSessionVariable().toThrift());
                } catch (Exception e) {
                    // Any Nereids failure falls back to the legacy planner.
                    LOG.warn("fall back to legacy planner, because: {}", e.getMessage(), e);
                    parsedStmt = null;
                    context.getState().setNereids(false);
                    analyzer = new Analyzer(context.getEnv(), context);
                    analyze(context.getSessionVariable().toThrift());
                }
            } else {
                analyzer = new Analyzer(context.getEnv(), context);
                analyze(context.getSessionVariable().toThrift());
            }
        } catch (Exception e) {
            // Planning failed entirely: return the (empty) row list.
            LOG.warn("Internal SQL execution failed, SQL: {}", originStmt, e);
            return resultRows;
        }
        planner.getFragments();
        RowBatch batch;
        coord = new Coordinator(context, analyzer, planner, context.getStatsErrorEstimator());
        try {
            QeProcessorImpl.INSTANCE.registerQuery(context.queryId(),
                    new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord));
        } catch (UserException e) {
            LOG.warn(e.getMessage(), e);
        }
        coord.setProfileWriter(this);
        // Schedule the fragments under a tracing span.
        Span queryScheduleSpan = context.getTracer()
                .spanBuilder("internal SQL schedule").setParent(Context.current()).startSpan();
        try (Scope scope = queryScheduleSpan.makeCurrent()) {
            coord.exec();
        } catch (Exception e) {
            queryScheduleSpan.recordException(e);
            LOG.warn("Unexpected exception when SQL running", e);
        } finally {
            queryScheduleSpan.end();
        }
        // Drain all batches until EOS, accumulating rows.
        Span fetchResultSpan = context.getTracer().spanBuilder("fetch internal SQL result")
                .setParent(Context.current()).startSpan();
        try (Scope scope = fetchResultSpan.makeCurrent()) {
            while (true) {
                batch = coord.getNext();
                if (batch == null || batch.isEos()) {
                    return resultRows;
                } else {
                    resultRows.addAll(convertResultBatchToResultRows(batch.getBatch()));
                }
            }
        } catch (Exception e) {
            LOG.warn("Unexpected exception when SQL running", e);
            fetchResultSpan.recordException(e);
            return resultRows;
        } finally {
            fetchResultSpan.end();
        }
    } finally {
        // Always release the query registration, whichever path returned.
        QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
    }
}
/**
 * Decodes one Thrift result batch into {@code ResultRow}s, using the parsed
 * statement's column labels and result-expression types as the row schema.
 */
private List<ResultRow> convertResultBatchToResultRows(TResultBatch batch) {
    List<String> columnLabels = parsedStmt.getColLabels();
    List<PrimitiveType> columnTypes = parsedStmt.getResultExprs().stream()
            .map(expr -> expr.getType().getPrimitiveType())
            .collect(Collectors.toList());
    List<ResultRow> resultRows = new ArrayList<>();
    for (ByteBuffer buffer : batch.getRows()) {
        InternalQueryBuffer queryBuffer = new InternalQueryBuffer(buffer.slice());
        List<String> values = Lists.newArrayList();
        // One length-prefixed string per column, in schema order.
        for (int i = 0; i < columnLabels.size(); i++) {
            values.add(queryBuffer.readStringWithLength());
        }
        resultRows.add(new ResultRow(columnLabels, columnTypes, values));
    }
    return resultRows;
}
/** Returns the planner-phase profile for this execution. */
public QueryPlannerProfile getPlannerProfile() {
    return plannerProfile;
}
} | class StmtExecutor {
private static final Logger LOG = LogManager.getLogger(StmtExecutor.class);
private static final AtomicLong STMT_ID_GENERATOR = new AtomicLong(0);
private static final int MAX_DATA_TO_SEND_FOR_TXN = 100;
private static final String NULL_VALUE_FOR_LOAD = "\\N";
private final Object writeProfileLock = new Object();
private ConnectContext context;
private final StatementContext statementContext;
private MysqlSerializer serializer;
private OriginStatement originStmt;
private StatementBase parsedStmt;
private Analyzer analyzer;
private ProfileType profileType = ProfileType.QUERY;
private volatile Coordinator coord = null;
private MasterOpExecutor masterOpExecutor = null;
private RedirectStatus redirectStatus = null;
private Planner planner;
private boolean isProxy;
private ShowResultSet proxyResultSet = null;
private Data.PQueryStatistics.Builder statisticsForAuditLog;
private boolean isCached;
private String stmtName;
private PrepareStmt prepareStmt = null;
private String mysqlLoadId;
private boolean isExecuteStmt = false;
private final Profile profile;
private static final CommonResultSetMetaData DRY_RUN_QUERY_METADATA = new CommonResultSetMetaData(
Lists.newArrayList(new Column("ReturnedRows", PrimitiveType.STRING)));
public StmtExecutor(ConnectContext context, OriginStatement originStmt, boolean isProxy) {
this.context = context;
this.originStmt = originStmt;
this.serializer = context.getMysqlChannel().getSerializer();
this.isProxy = isProxy;
this.statementContext = new StatementContext(context, originStmt);
this.context.setStatementContext(statementContext);
this.profile = new Profile("Query", this.context.getSessionVariable().enableProfile);
}
public StmtExecutor(ConnectContext context, String stmt) {
this(context, new OriginStatement(stmt, 0), false);
this.stmtName = stmt;
}
public StmtExecutor(ConnectContext ctx, StatementBase parsedStmt) {
this.context = ctx;
this.parsedStmt = parsedStmt;
this.originStmt = parsedStmt.getOrigStmt();
this.serializer = context.getMysqlChannel().getSerializer();
this.isProxy = false;
if (parsedStmt instanceof LogicalPlanAdapter) {
this.statementContext = ((LogicalPlanAdapter) parsedStmt).getStatementContext();
this.statementContext.setConnectContext(ctx);
this.statementContext.setOriginStatement(originStmt);
this.statementContext.setParsedStatement(parsedStmt);
} else {
this.statementContext = new StatementContext(ctx, originStmt);
this.statementContext.setParsedStatement(parsedStmt);
}
this.context.setStatementContext(statementContext);
this.profile = new Profile("Query", context.getSessionVariable().enableProfile());
}
private static InternalService.PDataRow getRowStringValue(List<Expr> cols) throws UserException {
if (cols.isEmpty()) {
return null;
}
InternalService.PDataRow.Builder row = InternalService.PDataRow.newBuilder();
for (Expr expr : cols) {
if (!expr.isLiteralOrCastExpr()) {
throw new UserException(
"do not support non-literal expr in transactional insert operation: " + expr.toSql());
}
if (expr instanceof NullLiteral) {
row.addColBuilder().setValue(NULL_VALUE_FOR_LOAD);
} else if (expr instanceof ArrayLiteral) {
row.addColBuilder().setValue(expr.getStringValueForArray());
} else {
row.addColBuilder().setValue(expr.getStringValue());
}
}
return row.build();
}
private Map<String, String> getSummaryInfo(boolean isFinished) {
long currentTimestamp = System.currentTimeMillis();
SummaryBuilder builder = new SummaryBuilder();
builder.profileId(DebugUtil.printId(context.queryId()));
builder.taskType(profileType.name());
builder.startTime(TimeUtils.longToTimeString(context.getStartTime()));
if (isFinished) {
builder.endTime(TimeUtils.longToTimeString(currentTimestamp));
builder.totalTime(DebugUtil.getPrettyStringMs(currentTimestamp - context.getStartTime()));
}
builder.taskState(!isFinished && context.getState().getStateType().equals(MysqlStateType.OK) ? "RUNNING"
: context.getState().toString());
builder.user(context.getQualifiedUser());
builder.defaultDb(context.getDatabase());
builder.sqlStatement(originStmt.originStmt);
builder.isCached(isCached ? "Yes" : "No");
Map<String, Integer> beToInstancesNum = coord == null ? Maps.newTreeMap() : coord.getBeToInstancesNum();
builder.totalInstancesNum(String.valueOf(beToInstancesNum.values().stream().reduce(0, Integer::sum)));
builder.instancesNumPerBe(
beToInstancesNum.entrySet().stream().map(entry -> entry.getKey() + ":" + entry.getValue())
.collect(Collectors.joining(",")));
builder.parallelFragmentExecInstance(String.valueOf(context.sessionVariable.parallelExecInstanceNum));
builder.traceId(context.getSessionVariable().getTraceId());
return builder.build();
}
public void addProfileToSpan() {
Span span = Span.fromContext(Context.current());
if (!span.isRecording()) {
return;
}
for (Map.Entry<String, String> entry : getSummaryInfo(true).entrySet()) {
span.setAttribute(entry.getKey(), entry.getValue());
}
}
public Planner planner() {
return planner;
}
public boolean isForwardToMaster() {
if (Env.getCurrentEnv().isMaster()) {
return false;
}
if (isQuery() && !Env.getCurrentEnv().isMaster()
&& !Env.getCurrentEnv().canRead()) {
return true;
}
if (redirectStatus == null) {
return false;
} else {
return redirectStatus.isForwardToMaster();
}
}
public ByteBuffer getOutputPacket() {
if (masterOpExecutor == null) {
return null;
} else {
return masterOpExecutor.getOutputPacket();
}
}
public ShowResultSet getProxyResultSet() {
return proxyResultSet;
}
public ShowResultSet getShowResultSet() {
if (masterOpExecutor == null) {
return null;
} else {
return masterOpExecutor.getProxyResultSet();
}
}
public String getProxyStatus() {
if (masterOpExecutor == null) {
return MysqlStateType.UNKNOWN.name();
}
return masterOpExecutor.getProxyStatus();
}
public boolean isInsertStmt() {
return parsedStmt != null && parsedStmt instanceof InsertStmt;
}
/**
* Used for audit in ConnectProcessor.
* <p>
* TODO: There are three interface in StatementBase be called when doing audit:
* toDigest needAuditEncryption when parsedStmt is not a query
* and isValuesOrConstantSelect when parsedStmt is instance of InsertStmt.
* toDigest: is used to compute Statement fingerprint for blocking some queries
* needAuditEncryption: when this interface return true,
* log statement use toSql function instead of log original string
* isValuesOrConstantSelect: when this interface return true, original string is truncated at 1024
*
* @return parsed and analyzed statement for Stale planner.
* an unresolved LogicalPlan wrapped with a LogicalPlanAdapter for Nereids.
*/
public StatementBase getParsedStmt() {
return parsedStmt;
}
public void execute() throws Exception {
UUID uuid = UUID.randomUUID();
TUniqueId queryId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());
execute(queryId);
}
public void execute(TUniqueId queryId) throws Exception {
SessionVariable sessionVariable = context.getSessionVariable();
Span executeSpan = context.getTracer().spanBuilder("execute").setParent(Context.current()).startSpan();
try (Scope scope = executeSpan.makeCurrent()) {
if (parsedStmt instanceof LogicalPlanAdapter
|| (parsedStmt == null && sessionVariable.isEnableNereidsPlanner())) {
try {
executeByNereids(queryId);
} catch (NereidsException e) {
LOG.warn("nereids cannot process statement\n" + originStmt.originStmt
+ "\n because of " + e.getMessage(), e);
if (!context.getSessionVariable().enableFallbackToOriginalPlanner) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw e.getException();
}
LOG.info("fall back to legacy planner");
parsedStmt = null;
context.getState().setNereids(false);
executeByLegacy(queryId);
}
} else {
executeByLegacy(queryId);
}
} finally {
executeSpan.end();
try {
VariableMgr.revertSessionValue(sessionVariable);
sessionVariable.setIsSingleSetVar(false);
sessionVariable.clearSessionOriginValue();
} catch (DdlException e) {
LOG.warn("failed to revert Session value. {}", context.getQueryIdentifier(), e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
}
}
}
/**
 * Applies SQL block rules to the current statement and its OLAP scans.
 * The rule manager signals a violation by throwing AnalysisException;
 * this method itself always returns false when no rule fires.
 */
private boolean checkBlockRules() throws AnalysisException {
    Env.getCurrentEnv().getSqlBlockRuleMgr().matchSql(
            originStmt.originStmt, context.getSqlHash(), context.getQualifiedUser());
    // Enforce per-scan limitations (partitions / tablets / cardinality) on OLAP scans only.
    for (ScanNode node : planner.getScanNodes()) {
        if (!(node instanceof OlapScanNode)) {
            continue;
        }
        OlapScanNode olapNode = (OlapScanNode) node;
        Env.getCurrentEnv().getSqlBlockRuleMgr().checkLimitations(
                olapNode.getSelectedPartitionNum().longValue(),
                olapNode.getSelectedTabletsNum(),
                olapNode.getCardinality(),
                context.getQualifiedUser());
    }
    return false;
}
/**
 * Executes the statement through the Nereids planner.
 * Commands (DDL-like plans) are run directly (possibly forwarded to the
 * master FE first); everything else is planned and dispatched as a query.
 * Failures are wrapped in NereidsException so the caller can decide whether
 * to fall back to the legacy planner.
 */
private void executeByNereids(TUniqueId queryId) throws Exception {
    LOG.info("Nereids start to execute query:\n {}", originStmt.originStmt);
    context.setQueryId(queryId);
    context.setStartTime();
    profile.getSummaryProfile().setQueryBeginTime();
    context.setStmtId(STMT_ID_GENERATOR.incrementAndGet());
    parseByNereids();
    Preconditions.checkState(parsedStmt instanceof LogicalPlanAdapter,
            "Nereids only process LogicalPlanAdapter, but parsedStmt is " + parsedStmt.getClass().getName());
    context.getState().setNereids(true);
    LogicalPlan logicalPlan = ((LogicalPlanAdapter) parsedStmt).getLogicalPlan();
    if (logicalPlan instanceof Command) {
        // Some commands must run on the master FE; forward when required.
        if (logicalPlan instanceof Forward) {
            redirectStatus = ((Forward) logicalPlan).toRedirectStatus();
            if (isForwardToMaster()) {
                if (isProxy) {
                    // Already forwarded once; a second hop means the master is not ready.
                    throw new NereidsException(new UserException("The statement has been forwarded to master FE("
                            + Env.getCurrentEnv().getSelfNode().getIp() + ") and failed to execute"
                            + " because Master FE is not ready. You may need to check FE's status"));
                }
                forwardToMaster();
                if (masterOpExecutor != null && masterOpExecutor.getQueryId() != null) {
                    context.setQueryId(masterOpExecutor.getQueryId());
                }
                return;
            }
        }
        try {
            ((Command) logicalPlan).run(context, this);
        } catch (QueryStateException e) {
            LOG.warn("", e);
            context.setState(e.getQueryState());
            throw new NereidsException(e);
        } catch (UserException e) {
            // Return user-visible error directly.
            LOG.warn("DDL statement({}) process failed.", originStmt.originStmt, e);
            context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
            throw new NereidsException("DDL statement(" + originStmt.originStmt + ") process failed", e);
        } catch (Exception e) {
            // Maybe the compatibility problem with the legacy planner; wrap as unknown error.
            LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
            context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
            throw new NereidsException("DDL statement(" + originStmt.originStmt + ") process failed.", e);
        }
    } else {
        context.getState().setIsQuery(true);
        if (context.getSessionVariable().enableProfile) {
            ConnectContext.get().setStatsErrorEstimator(new StatsErrorEstimator());
        }
        planner = new NereidsPlanner(statementContext);
        try {
            planner.plan(parsedStmt, context.getSessionVariable().toThrift());
        } catch (Exception e) {
            LOG.warn("Nereids plan query failed:\n{}", originStmt.originStmt);
            throw new NereidsException(new AnalysisException("Unexpected exception: " + e.getMessage(), e));
        }
        // Block-rule violations throw; the boolean return is effectively unused here.
        if (checkBlockRules()) {
            return;
        }
        profile.getSummaryProfile().setQueryPlanFinishTime();
        handleQueryWithRetry(queryId);
    }
}
/**
 * Parses the original SQL text with the Nereids parser, unless a statement
 * is already present (e.g. supplied by the caller or a previous attempt).
 * Selects the statement at {@code originStmt.idx} from the parsed batch.
 *
 * @throws NereidsException if the batch does not contain enough statements
 */
private void parseByNereids() {
    if (parsedStmt != null) {
        return;
    }
    List<StatementBase> statements = new NereidsParser().parseSQL(originStmt.originStmt);
    if (statements.size() <= originStmt.idx) {
        // To index statement idx we need at least idx + 1 parsed statements
        // (the original message understated the requirement by one).
        throw new NereidsException(
                new AnalysisException("Nereids parse failed. Parser get " + statements.size() + " statements,"
                        + " but we need at least " + (originStmt.idx + 1) + " statements."));
    }
    parsedStmt = statements.get(originStmt.idx);
}
/**
 * Runs the planned query, retrying up to Config.max_query_retry_time times
 * on RpcException — but only while nothing has been sent to the client yet,
 * since a partially-sent result cannot be safely replayed.
 * Each retry gets a fresh query id; the profile is updated and the query is
 * unregistered after every attempt.
 */
private void handleQueryWithRetry(TUniqueId queryId) throws Exception {
    int retryTime = Config.max_query_retry_time;
    for (int i = 0; i < retryTime; i++) {
        try {
            if (i > 0) {
                // Retry attempt: allocate a new query id so the attempts are distinguishable.
                UUID uuid = UUID.randomUUID();
                TUniqueId newQueryId = new TUniqueId(uuid.getMostSignificantBits(),
                        uuid.getLeastSignificantBits());
                AuditLog.getQueryAudit().log("Query {} {} times with new query id: {}",
                        DebugUtil.printId(queryId), i, DebugUtil.printId(newQueryId));
                context.setQueryId(newQueryId);
            }
            handleQueryStmt();
            break;
        } catch (RpcException e) {
            if (i == retryTime - 1) {
                throw e;
            }
            // Only retry if no result bytes reached the client yet.
            if (!context.getMysqlChannel().isSend()) {
                LOG.warn("retry {} times. stmt: {}", (i + 1), parsedStmt.getOrigStmt().originStmt);
            } else {
                throw e;
            }
        } finally {
            updateProfile(true);
            QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
        }
    }
}
/**
 * get variables in stmt.
 *
 * @throws DdlException
 */
private void analyzeVariablesInStmt() throws DdlException {
    // Delegates to the overload using the currently parsed statement.
    analyzeVariablesInStmt(parsedStmt);
}
/**
 * Applies SET_VAR-style optimizer hints found on a SELECT statement as
 * single-statement session-variable overrides.
 * Non-SELECT statements and SELECTs without hints are left untouched.
 *
 * @param statement the statement to inspect (may be any StatementBase subtype)
 * @throws DdlException if a hint names an unknown or read-only variable
 */
private void analyzeVariablesInStmt(StatementBase statement) throws DdlException {
    SessionVariable sessionVariable = context.getSessionVariable();
    if (statement instanceof SelectStmt) {
        SelectStmt selectStmt = (SelectStmt) statement;
        Map<String, String> optHints = selectStmt.getSelectList().getOptHints();
        if (optHints != null) {
            // Mark overrides as single-statement so execute() can revert them afterwards.
            sessionVariable.setIsSingleSetVar(true);
            // Iterate entries directly instead of keySet() + get(key): one lookup per hint.
            for (Map.Entry<String, String> hint : optHints.entrySet()) {
                VariableMgr.setVar(sessionVariable,
                        new SetVar(hint.getKey(), new StringLiteral(hint.getValue())));
            }
        }
    }
}
/**
 * Returns true when the statement will produce a result set: either a legacy
 * QueryStmt, or a Nereids logical plan that is not a Command.
 */
private boolean isQuery() {
    if (parsedStmt instanceof QueryStmt) {
        return true;
    }
    if (parsedStmt instanceof LogicalPlanAdapter) {
        return !(((LogicalPlanAdapter) parsedStmt).getLogicalPlan() instanceof Command);
    }
    return false;
}
/**
 * Forwards the statement to the master FE for execution. For SET statements,
 * the variable assignments are afterwards replayed on this (non-master) FE so
 * the local session stays in sync.
 */
private void forwardToMaster() throws Exception {
    masterOpExecutor = new MasterOpExecutor(originStmt, context, redirectStatus, isQuery());
    LOG.debug("need to transfer to Master. stmt: {}", context.getStmtId());
    masterOpExecutor.execute();
    if (!(parsedStmt instanceof SetStmt)) {
        return;
    }
    SetStmt setStmt = (SetStmt) parsedStmt;
    setStmt.modifySetVarsForExecute();
    for (SetVar variable : setStmt.getSetVars()) {
        VariableMgr.setVarForNonMasterFE(context.getSessionVariable(), variable);
    }
}
/**
 * Refreshes the execution profile, if profiling is enabled for this session.
 *
 * @param isFinished whether the statement has finished executing
 */
public void updateProfile(boolean isFinished) {
    if (context.getSessionVariable().enableProfile()) {
        profile.update(context.startTime, getSummaryInfo(isFinished), isFinished);
    }
}
/**
 * Parses (legacy parser) and analyzes the statement, then generates the query
 * plan for query/insert/CTAS statements.
 * Handles several special statement shapes first: EXECUTE of a prepared
 * statement (reusing its cached analyzer/planner), forwarding to the master
 * FE, PREPARE statements, and SHOW statements rewritten to SELECT.
 * For plannable statements the involved tables are read-locked during
 * analysis, and the whole analysis is retried once without mv rewriting when
 * an MVSelectFailedException occurs.
 */
public void analyze(TQueryOptions tQueryOptions) throws UserException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("begin to analyze stmt: {}, forwarded stmt id: {}", context.getStmtId(),
                context.getForwardedStmtId());
    }
    parseByLegacy();
    boolean preparedStmtReanalyzed = false;
    PrepareStmtContext preparedStmtCtx = null;
    if (parsedStmt instanceof ExecuteStmt) {
        // EXECUTE: bind arguments into the previously prepared statement and
        // reuse its cached planner/analyzer unless it needs re-analysis.
        ExecuteStmt execStmt = (ExecuteStmt) parsedStmt;
        preparedStmtCtx = context.getPreparedStmt(execStmt.getName());
        if (preparedStmtCtx == null) {
            throw new UserException("Could not execute, since `" + execStmt.getName() + "` not exist");
        }
        preparedStmtCtx.stmt.asignValues(execStmt.getArgs());
        parsedStmt = preparedStmtCtx.stmt.getInnerStmt();
        planner = preparedStmtCtx.planner;
        analyzer = preparedStmtCtx.analyzer;
        prepareStmt = preparedStmtCtx.stmt;
        Preconditions.checkState(parsedStmt.isAnalyzed());
        LOG.debug("already prepared stmt: {}", preparedStmtCtx.stmtString);
        isExecuteStmt = true;
        if (!preparedStmtCtx.stmt.needReAnalyze()) {
            // Fast path: cached plan is still valid.
            return;
        }
        preparedStmtReanalyzed = true;
        preparedStmtCtx.stmt.analyze(analyzer);
    }
    if (isForwardToMaster()) {
        // Master FE will do the real analysis; nothing further to do locally.
        return;
    }
    analyzer = new Analyzer(context.getEnv(), context);
    if (parsedStmt instanceof PrepareStmt || context.getCommand() == MysqlCommand.COM_STMT_PREPARE) {
        // Either a textual PREPARE or the binary-protocol COM_STMT_PREPARE command.
        if (context.getCommand() == MysqlCommand.COM_STMT_PREPARE) {
            prepareStmt = new PrepareStmt(parsedStmt,
                    String.valueOf(context.getEnv().getNextStmtId()), true /*binary protocol*/);
        } else {
            prepareStmt = (PrepareStmt) parsedStmt;
        }
        prepareStmt.setContext(context);
        prepareStmt.analyze(analyzer);
        // Continue analysis on the wrapped statement.
        parsedStmt = prepareStmt.getInnerStmt();
    }
    if (parsedStmt instanceof ShowStmt) {
        // Some SHOW statements can be rewritten into SELECTs over system tables.
        SelectStmt selectStmt = ((ShowStmt) parsedStmt).toSelectStmt(analyzer);
        if (selectStmt != null) {
            setParsedStmt(selectStmt);
        }
    }
    if (parsedStmt instanceof QueryStmt
            || (parsedStmt instanceof InsertStmt && !((InsertStmt) parsedStmt).needLoadManager())
            || parsedStmt instanceof CreateTableAsSelectStmt) {
        if (Config.enable_resource_group && context.sessionVariable.enablePipelineEngine()) {
            analyzer.setResourceGroups(analyzer.getEnv().getResourceGroupMgr()
                    .getResourceGroup(context.sessionVariable.resourceGroup));
        }
        // Collect every table referenced (recursively through views) so they can
        // be read-locked for the duration of analysis and planning.
        Map<Long, TableIf> tableMap = Maps.newTreeMap();
        QueryStmt queryStmt;
        Set<String> parentViewNameSet = Sets.newHashSet();
        if (parsedStmt instanceof QueryStmt) {
            queryStmt = (QueryStmt) parsedStmt;
            queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
        } else if (parsedStmt instanceof CreateTableAsSelectStmt) {
            // NOTE: local variable intentionally shadows the field here (original code).
            CreateTableAsSelectStmt parsedStmt = (CreateTableAsSelectStmt) this.parsedStmt;
            queryStmt = parsedStmt.getQueryStmt();
            queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
        } else if (parsedStmt instanceof InsertStmt) {
            InsertStmt insertStmt = (InsertStmt) parsedStmt;
            insertStmt.getTables(analyzer, tableMap, parentViewNameSet);
        }
        List<TableIf> tables = Lists.newArrayList(tableMap.values());
        int analyzeTimes = 2;
        for (int i = 1; i <= analyzeTimes; i++) {
            MetaLockUtils.readLockTables(tables);
            try {
                analyzeAndGenerateQueryPlan(tQueryOptions);
                break;
            } catch (MVSelectFailedException e) {
                /*
                 * If there is MVSelectFailedException after the first planner,
                 * there will be error mv rewritten in query.
                 * So, the query should be reanalyzed without mv rewritten and planner again.
                 * Attention: Only error rewritten tuple is forbidden to mv rewrite in the second time.
                 */
                if (i == analyzeTimes) {
                    throw e;
                } else {
                    resetAnalyzerAndStmt();
                }
            } catch (UserException e) {
                throw e;
            } catch (Exception e) {
                LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
                throw new AnalysisException("Unexpected exception: " + e.getMessage());
            } finally {
                MetaLockUtils.readUnlockTables(tables);
            }
        }
    } else {
        // Non-plannable statements: analyze only, no planner.
        try {
            parsedStmt.analyze(analyzer);
        } catch (UserException e) {
            throw e;
        } catch (Exception e) {
            LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
            throw new AnalysisException("Unexpected exception: " + e.getMessage());
        }
    }
    if (preparedStmtReanalyzed) {
        // Refresh the cached prepared-statement context with the new plan.
        LOG.debug("update planner and analyzer after prepared statement reanalyzed");
        preparedStmtCtx.planner = planner;
        preparedStmtCtx.analyzer = analyzer;
        Preconditions.checkNotNull(preparedStmtCtx.stmt);
        preparedStmtCtx.analyzer.setPrepareStmt(preparedStmtCtx.stmt);
    }
}
/**
 * Parses the original SQL text with the legacy (cup/jflex) parser, if no
 * statement has been parsed yet, and records the statement's redirect status.
 * Parse failures are mapped to AnalysisException, preferring the parser's
 * detailed syntax error message when available.
 */
private void parseByLegacy() throws AnalysisException, DdlException {
    if (parsedStmt == null) {
        SqlScanner input = new SqlScanner(new StringReader(originStmt.originStmt),
                context.getSessionVariable().getSqlMode());
        SqlParser parser = new SqlParser(input);
        try {
            // NOTE(review): this local deliberately shadows the parsedStmt field;
            // setParsedStmt(...) presumably also assigns the field — confirm.
            StatementBase parsedStmt = setParsedStmt(SqlParserUtils.getStmt(parser, originStmt.idx));
            parsedStmt.setOrigStmt(originStmt);
            parsedStmt.setUserInfo(context.getCurrentUserIdentity());
        } catch (Error e) {
            // The generated parser can throw Error on bad input; translate it.
            LOG.info("error happened when parsing stmt {}, id: {}", originStmt, context.getStmtId(), e);
            throw new AnalysisException("sql parsing error, please check your sql");
        } catch (AnalysisException e) {
            String syntaxError = parser.getErrorMsg(originStmt.originStmt);
            LOG.info("analysis exception happened when parsing stmt {}, id: {}, error: {}",
                    originStmt, context.getStmtId(), syntaxError, e);
            if (syntaxError == null) {
                throw e;
            } else {
                // Prefer the parser's positioned syntax message over the generic one.
                throw new AnalysisException(syntaxError, e);
            }
        } catch (Exception e) {
            LOG.info("unexpected exception happened when parsing stmt {}, id: {}, error: {}",
                    originStmt, context.getStmtId(), parser.getErrorMsg(originStmt.originStmt), e);
            throw new AnalysisException("Unexpected exception: " + e.getMessage());
        }
        // Apply any SET_VAR hints found on the freshly parsed statement.
        analyzeVariablesInStmt();
    }
    redirectStatus = parsedStmt.getRedirectStatus();
}
/**
 * Analyzes the parsed statement and, for query/insert statements, rewrites
 * expressions (constant folding, subquery rewriting, row policies) and
 * produces the physical plan via the legacy OriginalPlanner.
 * When any rewrite changed the statement, the statement is reset and
 * re-analyzed so the rewritten tree is consistent; original result types and
 * column labels are restored afterwards.
 */
private void analyzeAndGenerateQueryPlan(TQueryOptions tQueryOptions) throws UserException {
    if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
        QueryStmt queryStmt = null;
        if (parsedStmt instanceof QueryStmt) {
            queryStmt = (QueryStmt) parsedStmt;
        }
        if (parsedStmt instanceof InsertStmt) {
            queryStmt = (QueryStmt) ((InsertStmt) parsedStmt).getQueryStmt();
        }
        // Drop a present-but-empty ORDER BY clause before analysis.
        if (queryStmt.getOrderByElements() != null && queryStmt.getOrderByElements().isEmpty()) {
            queryStmt.removeOrderByElements();
        }
    }
    parsedStmt.analyze(analyzer);
    if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
        ExprRewriter rewriter = analyzer.getExprRewriter();
        rewriter.reset();
        if (context.getSessionVariable().isEnableFoldConstantByBe()) {
            // fold constant expr
            parsedStmt.foldConstant(rewriter, tQueryOptions);
        }
        // Preserve explain options across a possible reset()/re-analyze cycle.
        ExplainOptions explainOptions = parsedStmt.getExplainOptions();
        boolean reAnalyze = false;
        parsedStmt.rewriteExprs(rewriter);
        reAnalyze = rewriter.changed();
        if (analyzer.containSubquery()) {
            parsedStmt = setParsedStmt(StmtRewriter.rewrite(analyzer, parsedStmt));
            reAnalyze = true;
        }
        if (parsedStmt instanceof SelectStmt) {
            if (StmtRewriter.rewriteByPolicy(parsedStmt, analyzer)) {
                reAnalyze = true;
            }
        }
        if (parsedStmt instanceof SetOperationStmt) {
            // Row policies must be applied to every operand of a set operation.
            List<SetOperationStmt.SetOperand> operands = ((SetOperationStmt) parsedStmt).getOperands();
            for (SetOperationStmt.SetOperand operand : operands) {
                if (StmtRewriter.rewriteByPolicy(operand.getQueryStmt(), analyzer)) {
                    reAnalyze = true;
                }
            }
        }
        if (parsedStmt instanceof InsertStmt) {
            QueryStmt queryStmt = ((InsertStmt) parsedStmt).getQueryStmt();
            if (queryStmt != null && StmtRewriter.rewriteByPolicy(queryStmt, analyzer)) {
                reAnalyze = true;
            }
        }
        if (reAnalyze) {
            // Remember the original result types and column labels: rewrites may
            // change them, but the client must see the original shape.
            List<Type> origResultTypes = Lists.newArrayList();
            for (Expr e : parsedStmt.getResultExprs()) {
                origResultTypes.add(e.getType());
            }
            List<String> origColLabels =
                    Lists.newArrayList(parsedStmt.getColLabels());
            // Re-analyze the rewritten statement with a fresh analyzer.
            analyzer = new Analyzer(context.getEnv(), context);
            if (prepareStmt != null) {
                // Re-bind prepared-statement placeholders against the new analyzer.
                prepareStmt.reset();
                prepareStmt.analyze(analyzer);
            }
            parsedStmt.reset();
            parsedStmt.analyze(analyzer);
            // Restore the client-visible result shape.
            parsedStmt.castResultExprs(origResultTypes);
            parsedStmt.setColLabels(origColLabels);
            if (LOG.isTraceEnabled()) {
                LOG.trace("rewrittenStmt: " + parsedStmt.toSql());
            }
            if (explainOptions != null) {
                parsedStmt.setIsExplain(explainOptions);
            }
        }
    }
    profile.getSummaryProfile().setQueryAnalysisFinishTime();
    planner = new OriginalPlanner(analyzer);
    if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
        planner.plan(parsedStmt, tQueryOptions);
    }
    profile.getSummaryProfile().setQueryPlanFinishTime();
}
/**
 * Discards analysis state: creates a fresh Analyzer and resets the parsed
 * statement (including the select list of a query or of an insert's source
 * query) so analysis can be run again from scratch.
 */
private void resetAnalyzerAndStmt() {
    analyzer = new Analyzer(context.getEnv(), context);
    parsedStmt.reset();
    if (parsedStmt instanceof QueryStmt) {
        QueryStmt query = (QueryStmt) parsedStmt;
        query.resetSelectList();
    }
    if (parsedStmt instanceof InsertStmt) {
        InsertStmt insert = (InsertStmt) parsedStmt;
        insert.getQueryStmt().resetSelectList();
    }
}
/**
 * Cancels the running statement: the coordinator (if any), any in-flight
 * MySQL LOAD identified by mysqlLoadId, and a synchronous ANALYZE task.
 */
public void cancel() {
    // Snapshot the field: coord may be replaced concurrently by another thread.
    Coordinator current = coord;
    if (current != null) {
        current.cancel();
    }
    if (mysqlLoadId != null) {
        Env.getCurrentEnv().getLoadManager().getMysqlLoadManager().cancelMySqlLoad(mysqlLoadId);
    }
    if (parsedStmt instanceof AnalyzeStmt) {
        Env.getCurrentEnv().getAnalysisManager().cancelSyncTask(context);
    }
}
/**
 * Handles KILL [QUERY] <connection_id>.
 * Killing one's own connection just marks it killed; killing another user's
 * connection requires either the same qualified user or ADMIN privilege.
 */
private void handleKill() throws DdlException {
    KillStmt killStmt = (KillStmt) parsedStmt;
    int id = killStmt.getConnectionId();
    ConnectContext killCtx = context.getConnectScheduler().getContext(id);
    if (killCtx == null) {
        // reportDdlException throws, so execution does not continue past here.
        ErrorReport.reportDdlException(ErrorCode.ERR_NO_SUCH_THREAD, id);
    }
    if (context == killCtx) {
        // Suicide: mark ourselves killed.
        context.setKilled();
    } else {
        // Check auth: only same user or ADMIN may kill someone else's connection.
        if (!killCtx.getQualifiedUser().equals(ConnectContext.get().getQualifiedUser())
                && !Env.getCurrentEnv().getAccessManager().checkGlobalPriv(ConnectContext.get(),
                PrivPredicate.ADMIN)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_KILL_DENIED_ERROR, id);
        }
        killCtx.kill(killStmt.isConnectionKill());
    }
    context.getState().setOk();
}
/**
 * Executes a SET statement via SetExecutor; a DdlException is reported as a
 * local-variable error on the connection state instead of being propagated.
 */
private void handleSetStmt() {
    SetStmt setStmt = (SetStmt) parsedStmt;
    try {
        new SetExecutor(context, setStmt).execute();
    } catch (DdlException e) {
        LOG.warn("", e);
        context.getState().setError(ErrorCode.ERR_LOCAL_VARIABLE, e.getMessage());
        return;
    }
    context.getState().setOk();
}
/**
 * Streams cached result rows to the client, sending the column metadata
 * first if it has not been sent yet.
 *
 * @param isSendFields whether the field metadata was already sent
 * @param isEos        whether this batch terminates the result set; if so the
 *                     EOF state is set and audit statistics are captured
 * @return updated isSendFields flag (true once metadata has gone out)
 */
private boolean sendCachedValues(MysqlChannel channel, List<InternalService.PCacheValue> cacheValues,
        SelectStmt selectStmt, boolean isSendFields, boolean isEos)
        throws Exception {
    RowBatch batch = null;
    boolean isSend = isSendFields;
    for (InternalService.PCacheValue value : cacheValues) {
        // Repackage each cached value as a result batch.
        TResultBatch resultBatch = new TResultBatch();
        for (ByteString one : value.getRowsList()) {
            resultBatch.addToRows(ByteBuffer.wrap(one.toByteArray()));
        }
        resultBatch.setPacketSeq(1);
        resultBatch.setIsCompressed(false);
        batch = new RowBatch();
        batch.setBatch(resultBatch);
        batch.setEos(true);
        if (!isSend) {
            // Column metadata must precede the first row packet.
            sendFields(selectStmt.getColLabels(), exprToType(selectStmt.getResultExprs()));
            isSend = true;
        }
        for (ByteBuffer row : batch.getBatch().getRows()) {
            channel.sendOnePacket(row);
        }
        context.updateReturnRows(batch.getBatch().getRows().size());
    }
    if (isEos) {
        if (batch != null) {
            statisticsForAuditLog = batch.getQueryStatistics() == null
                    ? null : batch.getQueryStatistics().toBuilder();
        }
        if (!isSend) {
            // Empty cached result: still send the metadata before EOF.
            sendFields(selectStmt.getColLabels(), exprToType(selectStmt.getResultExprs()));
            isSend = true;
        }
        context.getState().setEof();
    }
    return isSend;
}
/**
 * Handle the SelectStmt via Cache.
 * On a full cache hit the cached rows are returned directly; on a partial
 * (partition-mode) hit the left range is served from cache and the statement
 * is rewritten, re-analyzed and re-planned to fetch only the missing range.
 * The (possibly rewritten) statement is then executed via sendResult.
 */
private void handleCacheStmt(CacheAnalyzer cacheAnalyzer, MysqlChannel channel, SelectStmt selectStmt)
        throws Exception {
    InternalService.PFetchCacheResult cacheResult = cacheAnalyzer.getCacheData();
    CacheMode mode = cacheAnalyzer.getCacheMode();
    SelectStmt newSelectStmt = selectStmt;
    boolean isSendFields = false;
    if (cacheResult != null) {
        isCached = true;
        if (cacheAnalyzer.getHitRange() == Cache.HitRange.Full) {
            // Full hit: everything comes from cache, nothing to plan.
            sendCachedValues(channel, cacheResult.getValuesList(), newSelectStmt, isSendFields, true);
            return;
        }
        if (mode == CacheMode.Partition) {
            if (cacheAnalyzer.getHitRange() == Cache.HitRange.Left) {
                // Serve the cached (left) part first; keep the channel's field state.
                isSendFields = sendCachedValues(channel, cacheResult.getValuesList(),
                        newSelectStmt, isSendFields, false);
            }
            // Rewrite to query only the uncached range, then re-analyze and re-plan.
            newSelectStmt = cacheAnalyzer.getRewriteStmt();
            newSelectStmt.reset();
            analyzer = new Analyzer(context.getEnv(), context);
            newSelectStmt.analyze(analyzer);
            if (parsedStmt instanceof LogicalPlanAdapter) {
                planner = new NereidsPlanner(statementContext);
            } else {
                planner = new OriginalPlanner(analyzer);
            }
            planner.plan(newSelectStmt, context.getSessionVariable().toThrift());
        }
    }
    sendResult(false, isSendFields, newSelectStmt, channel, cacheAnalyzer, cacheResult);
}
/**
 * Tries to answer a table-less SELECT of pure literals entirely on the FE,
 * without touching any BE.
 *
 * @return true if every select item was a literal and the result was sent;
 *         false if any non-literal item was found (caller must plan normally)
 */
private boolean handleSelectRequestInFe(SelectStmt parsedSelectStmt) throws IOException {
    List<SelectListItem> selectItemList = parsedSelectStmt.getSelectList().getItems();
    List<Column> columns = new ArrayList<>(selectItemList.size());
    ResultSetMetaData metadata = new CommonResultSet.CommonResultSetMetaData(columns);
    List<String> columnLabels = parsedSelectStmt.getColLabels();
    List<String> data = new ArrayList<>();
    for (int i = 0; i < selectItemList.size(); i++) {
        SelectListItem item = selectItemList.get(i);
        Expr expr = item.getExpr();
        String columnName = columnLabels.get(i);
        if (expr instanceof LiteralExpr) {
            columns.add(new Column(columnName, expr.getType()));
            // Format each literal kind the way the BE would have rendered it.
            if (expr instanceof NullLiteral) {
                data.add(null);
            } else if (expr instanceof FloatLiteral) {
                data.add(LiteralUtils.getStringValue((FloatLiteral) expr));
            } else if (expr instanceof DecimalLiteral) {
                data.add(((DecimalLiteral) expr).getValue().toPlainString());
            } else if (expr instanceof ArrayLiteral) {
                data.add(LiteralUtils.getStringValue((ArrayLiteral) expr));
            } else {
                data.add(expr.getStringValue());
            }
        } else {
            // Any non-literal forces the regular planning path.
            return false;
        }
    }
    // The whole result is a single row of literal strings.
    ResultSet resultSet = new CommonResultSet(metadata, Collections.singletonList(data));
    sendResultSet(resultSet);
    return true;
}
/**
 * Executes a query statement and streams its result to the client.
 * Short-circuits, in order: EXPLAIN output, literal-only SELECTs answered on
 * the FE, cache-served SELECTs, and LIMIT 0 queries (metadata + EOF only).
 * Everything else goes through sendResult.
 */
private void handleQueryStmt() throws Exception {
    // Every time set no send flag and clear the data buffer.
    context.getMysqlChannel().reset();
    Queriable queryStmt = (Queriable) parsedStmt;
    QueryDetail queryDetail = new QueryDetail(context.getStartTime(),
            DebugUtil.printId(context.queryId()),
            context.getStartTime(), -1, -1,
            QueryDetail.QueryMemState.RUNNING,
            context.getDatabase(),
            originStmt.originStmt);
    context.setQueryDetail(queryDetail);
    QueryDetailQueue.addOrUpdateQueryDetail(queryDetail);
    if (queryStmt.isExplain()) {
        String explainString = planner.getExplainString(queryStmt.getExplainOptions());
        handleExplainStmt(explainString);
        return;
    }
    // SELECT without FROM: try to answer it on the FE without a BE round trip.
    if (parsedStmt instanceof SelectStmt && ((SelectStmt) parsedStmt).getTableRefs().isEmpty()) {
        SelectStmt parsedSelectStmt = (SelectStmt) parsedStmt;
        if (handleSelectRequestInFe(parsedSelectStmt)) {
            return;
        }
    }
    MysqlChannel channel = context.getMysqlChannel();
    boolean isOutfileQuery = queryStmt.hasOutFileClause();
    // OUTFILE queries bypass the result cache.
    CacheAnalyzer cacheAnalyzer = new CacheAnalyzer(context, parsedStmt, planner);
    if (cacheAnalyzer.enableCache() && !isOutfileQuery && queryStmt instanceof SelectStmt) {
        handleCacheStmt(cacheAnalyzer, channel, (SelectStmt) queryStmt);
        return;
    }
    // LIMIT 0: send only the column metadata followed by EOF.
    if (parsedStmt instanceof SelectStmt) {
        SelectStmt parsedSelectStmt = (SelectStmt) parsedStmt;
        if (parsedSelectStmt.getLimit() == 0) {
            LOG.info("ignore handle limit 0 ,sql:{}", parsedSelectStmt.toSql());
            sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
            context.getState().setEof();
            return;
        }
    }
    sendResult(isOutfileQuery, false, queryStmt, channel, null, null);
}
/**
 * Schedules the planned query on the coordinator, fetches result batches and
 * streams them to the MySQL channel.
 * Also: copies rows into the result cache when a CacheAnalyzer is supplied,
 * appends right-range cached rows after execution on a partial cache hit,
 * supports dry-run mode (returns only the row count), and cancels the
 * coordinator on any fetch error. Schedule and fetch phases are traced with
 * separate spans and recorded in the summary profile.
 */
private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable queryStmt, MysqlChannel channel,
        CacheAnalyzer cacheAnalyzer, InternalService.PFetchCacheResult cacheResult) throws Exception {
    RowBatch batch;
    coord = new Coordinator(context, analyzer, planner, context.getStatsErrorEstimator());
    QeProcessorImpl.INSTANCE.registerQuery(context.queryId(),
            new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord));
    profile.addExecutionProfile(coord.getExecutionProfile());
    Span queryScheduleSpan =
            context.getTracer().spanBuilder("query schedule").setParent(Context.current()).startSpan();
    try (Scope scope = queryScheduleSpan.makeCurrent()) {
        coord.exec();
    } catch (Exception e) {
        queryScheduleSpan.recordException(e);
        throw e;
    } finally {
        queryScheduleSpan.end();
    }
    profile.getSummaryProfile().setQueryScheduleFinishTime();
    updateProfile(false);
    Span fetchResultSpan = context.getTracer().spanBuilder("fetch result").setParent(Context.current()).startSpan();
    try (Scope scope = fetchResultSpan.makeCurrent()) {
        while (true) {
            // Pull the next batch; timing is accumulated into the summary profile.
            profile.getSummaryProfile().setTempStartTime();
            batch = coord.getNext();
            profile.getSummaryProfile().freshFetchResultConsumeTime();
            if (batch.getBatch() != null) {
                if (cacheAnalyzer != null) {
                    // Mirror rows into the cache while streaming them out.
                    cacheAnalyzer.copyRowBatch(batch);
                }
                profile.getSummaryProfile().setTempStartTime();
                if (!isSendFields) {
                    // Metadata precedes the first row; OUTFILE uses fixed columns.
                    if (!isOutfileQuery) {
                        sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
                    } else {
                        sendFields(OutFileClause.RESULT_COL_NAMES, OutFileClause.RESULT_COL_TYPES);
                    }
                    isSendFields = true;
                }
                for (ByteBuffer row : batch.getBatch().getRows()) {
                    channel.sendOnePacket(row);
                }
                profile.getSummaryProfile().freshWriteResultConsumeTime();
                context.updateReturnRows(batch.getBatch().getRows().size());
                context.setResultAttachedInfo(batch.getBatch().getAttachedInfos());
            }
            if (batch.isEos()) {
                break;
            }
        }
        if (cacheAnalyzer != null) {
            // Partial hit on the right range: append cached rows after the fresh ones.
            if (cacheResult != null && cacheAnalyzer.getHitRange() == Cache.HitRange.Right) {
                isSendFields =
                        sendCachedValues(channel, cacheResult.getValuesList(), (SelectStmt) queryStmt, isSendFields,
                                false);
            }
            cacheAnalyzer.updateCache();
        }
        if (!isSendFields) {
            // Empty result set: still emit metadata (or the dry-run row count).
            if (!isOutfileQuery) {
                if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().dryRunQuery) {
                    List<String> data = Lists.newArrayList(batch.getQueryStatistics() == null ? "0"
                            : batch.getQueryStatistics().getReturnedRows() + "");
                    ResultSet resultSet = new CommonResultSet(DRY_RUN_QUERY_METADATA,
                            Collections.singletonList(data));
                    sendResultSet(resultSet);
                    return;
                } else {
                    sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
                }
            } else {
                sendFields(OutFileClause.RESULT_COL_NAMES, OutFileClause.RESULT_COL_TYPES);
            }
        }
        statisticsForAuditLog = batch.getQueryStatistics() == null ? null : batch.getQueryStatistics().toBuilder();
        context.getState().setEof();
        profile.getSummaryProfile().setQueryFetchResultFinishTime();
    } catch (Exception e) {
        // Cancel the fragments so BE resources are released promptly.
        LOG.warn("cancel fragment query_id:{} cause {}", DebugUtil.printId(context.queryId()), e.getMessage());
        coord.cancel(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
        fetchResultSpan.recordException(e);
        throw e;
    } finally {
        fetchResultSpan.end();
    }
}
/**
 * Fetches the waiting-transaction status: directly from the global
 * transaction manager on the master FE, otherwise via an RPC to the master.
 */
private TWaitingTxnStatusResult getWaitingTxnStatus(TWaitingTxnStatusRequest request) throws Exception {
    if (Env.getCurrentEnv().isMaster()) {
        return Env.getCurrentGlobalTransactionMgr().getWaitingTxnStatus(request);
    }
    MasterTxnExecutor masterTxnExecutor = new MasterTxnExecutor(context);
    return masterTxnExecutor.getWaitingTxnStatus(request);
}
/**
 * Handles BEGIN / COMMIT / ROLLBACK of an interactive insert transaction.
 * A commit/rollback on an empty transaction simply drops the txn entry.
 * BEGIN initializes TTxnParams (txn id is assigned lazily on the first
 * insert); COMMIT flushes pending rows, commits, and waits for the txn to
 * become VISIBLE; ROLLBACK aborts. The txn entry is cleared in all
 * terminating paths.
 */
private void handleTransactionStmt() throws Exception {
    // Every time set no send flag and clear the data buffer.
    context.getMysqlChannel().reset();
    context.getState().setOk(0, 0, "");
    if (context.getTxnEntry() != null && context.getTxnEntry().getRowsInTransaction() == 0
            && (parsedStmt instanceof TransactionCommitStmt || parsedStmt instanceof TransactionRollbackStmt)) {
        // Nothing was inserted: commit/rollback is a no-op, just drop the entry.
        context.setTxnEntry(null);
    } else if (parsedStmt instanceof TransactionBeginStmt) {
        if (context.isTxnModel()) {
            LOG.info("A transaction has already begin");
            return;
        }
        TTxnParams txnParams = new TTxnParams();
        txnParams.setNeedTxn(true).setEnablePipelineTxnLoad(Config.enable_pipeline_load)
                .setThriftRpcTimeoutMs(5000).setTxnId(-1).setDb("").setTbl("");
        // Strict mode tolerates no filtered rows; otherwise any ratio is accepted.
        if (context.getSessionVariable().getEnableInsertStrict()) {
            txnParams.setMaxFilterRatio(0);
        } else {
            txnParams.setMaxFilterRatio(1.0);
        }
        if (context.getTxnEntry() == null) {
            context.setTxnEntry(new TransactionEntry());
        }
        context.getTxnEntry().setTxnConf(txnParams);
        StringBuilder sb = new StringBuilder();
        sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
                .append(TransactionStatus.PREPARE.name());
        sb.append("', 'txnId':'").append("'").append("}");
        context.getState().setOk(0, 0, sb.toString());
    } else if (parsedStmt instanceof TransactionCommitStmt) {
        if (!context.isTxnModel()) {
            LOG.info("No transaction to commit");
            return;
        }
        TTxnParams txnConf = context.getTxnEntry().getTxnConf();
        try {
            InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(context.getTxnEntry());
            // Flush any rows still buffered locally before committing.
            if (context.getTxnEntry().getDataToSend().size() > 0) {
                executor.sendData();
            }
            executor.commitTransaction();
            // Wait for the transaction to become visible before reporting success.
            TWaitingTxnStatusRequest request = new TWaitingTxnStatusRequest();
            request.setDbId(txnConf.getDbId()).setTxnId(txnConf.getTxnId());
            request.setLabelIsSet(false);
            request.setTxnIdIsSet(true);
            TWaitingTxnStatusResult statusResult = getWaitingTxnStatus(request);
            TransactionStatus txnStatus = TransactionStatus.valueOf(statusResult.getTxnStatusId());
            if (txnStatus == TransactionStatus.COMMITTED) {
                // Committed but not yet visible: surfaced as an error to warn the user.
                throw new AnalysisException("transaction commit successfully, BUT data will be visible later.");
            } else if (txnStatus != TransactionStatus.VISIBLE) {
                String errMsg = "commit failed, rollback.";
                if (statusResult.getStatus().isSetErrorMsgs()
                        && statusResult.getStatus().getErrorMsgs().size() > 0) {
                    errMsg = String.join(". ", statusResult.getStatus().getErrorMsgs());
                }
                throw new AnalysisException(errMsg);
            }
            StringBuilder sb = new StringBuilder();
            sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
                    .append(txnStatus.name()).append("', 'txnId':'")
                    .append(context.getTxnEntry().getTxnConf().getTxnId()).append("'").append("}");
            context.getState().setOk(0, 0, sb.toString());
        } catch (Exception e) {
            LOG.warn("Txn commit failed", e);
            throw new AnalysisException(e.getMessage());
        } finally {
            context.setTxnEntry(null);
        }
    } else if (parsedStmt instanceof TransactionRollbackStmt) {
        if (!context.isTxnModel()) {
            LOG.info("No transaction to rollback");
            return;
        }
        try {
            InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(context.getTxnEntry());
            executor.abortTransaction();
            StringBuilder sb = new StringBuilder();
            sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
                    .append(TransactionStatus.ABORTED.name()).append("', 'txnId':'")
                    .append(context.getTxnEntry().getTxnConf().getTxnId()).append("'").append("}");
            context.getState().setOk(0, 0, sb.toString());
        } catch (Exception e) {
            throw new AnalysisException(e.getMessage());
        } finally {
            context.setTxnEntry(null);
        }
    } else {
        throw new TException("parsedStmt type is not TransactionStmt");
    }
}
/**
 * Executes an INSERT ... VALUES inside an interactive transaction.
 * Begins the backend transaction lazily on the first insert, enforces that a
 * single transaction touches only one table, validates every row's column
 * count up front, and buffers rows — flushing to the BE whenever the buffer
 * reaches MAX_DATA_TO_SEND_FOR_TXN.
 *
 * @return the number of rows appended in this call
 */
private int executeForTxn(InsertStmt insertStmt)
        throws UserException, TException, InterruptedException, ExecutionException, TimeoutException {
    if (context.isTxnIniting()) { // first time, begin txn
        beginTxn(insertStmt.getDbName(),
                insertStmt.getTbl());
    }
    // One transaction may only insert into a single table.
    if (!context.getTxnEntry().getTxnConf().getDb().equals(insertStmt.getDbName())
            || !context.getTxnEntry().getTxnConf().getTbl().equals(insertStmt.getTbl())) {
        throw new TException("Only one table can be inserted in one transaction.");
    }
    QueryStmt queryStmt = insertStmt.getQueryStmt();
    if (!(queryStmt instanceof SelectStmt)) {
        throw new TException("queryStmt is not SelectStmt, insert command error");
    }
    TransactionEntry txnEntry = context.getTxnEntry();
    SelectStmt selectStmt = (SelectStmt) queryStmt;
    int effectRows = 0;
    if (selectStmt.getValueList() != null) {
        Table tbl = txnEntry.getTable();
        int schemaSize = tbl.getBaseSchema(false).size();
        // Validate all rows before sending anything, so a bad row aborts cleanly.
        for (List<Expr> row : selectStmt.getValueList().getRows()) {
            // the value columns are columns which are visible to user, so here we use
            // getBaseSchema(), not getFullSchema()
            if (schemaSize != row.size()) {
                throw new TException("Column count doesn't match value count");
            }
        }
        for (List<Expr> row : selectStmt.getValueList().getRows()) {
            ++effectRows;
            InternalService.PDataRow data = StmtExecutor.getRowStringValue(row);
            if (data == null) {
                continue;
            }
            List<InternalService.PDataRow> dataToSend = txnEntry.getDataToSend();
            dataToSend.add(data);
            if (dataToSend.size() >= MAX_DATA_TO_SEND_FOR_TXN) {
                // If too many rows, flush them through the stream-load executor.
                InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(txnEntry);
                executor.sendData();
            }
        }
    }
    txnEntry.setRowsInTransaction(txnEntry.getRowsInTransaction() + effectRows);
    return effectRows;
}
/**
 * Begins a backend transaction for interactive insert:
 * resolves db/table, allocates a transaction id (locally on the master FE,
 * via RPC otherwise), acquires a load token, and opens the stream-load
 * channel on the BE with the session's memory/timeout/timezone settings.
 */
private void beginTxn(String dbName, String tblName) throws UserException, TException,
        InterruptedException, ExecutionException, TimeoutException {
    TransactionEntry txnEntry = context.getTxnEntry();
    TTxnParams txnConf = txnEntry.getTxnConf();
    SessionVariable sessionVariable = context.getSessionVariable();
    long timeoutSecond = context.getExecTimeout();
    TransactionState.LoadJobSourceType sourceType = TransactionState.LoadJobSourceType.INSERT_STREAMING;
    Database dbObj = Env.getCurrentInternalCatalog()
            .getDbOrException(dbName, s -> new TException("database is invalid for dbName: " + s));
    Table tblObj = dbObj.getTableOrException(tblName, s -> new TException("table is invalid: " + s));
    txnConf.setDbId(dbObj.getId()).setTbl(tblName).setDb(dbName);
    txnEntry.setTable(tblObj);
    txnEntry.setDb(dbObj);
    String label = txnEntry.getLabel();
    if (Env.getCurrentEnv().isMaster()) {
        // Master FE can begin the transaction directly.
        long txnId = Env.getCurrentGlobalTransactionMgr().beginTransaction(
                txnConf.getDbId(), Lists.newArrayList(tblObj.getId()),
                label, new TransactionState.TxnCoordinator(
                        TransactionState.TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),
                sourceType, timeoutSecond);
        txnConf.setTxnId(txnId);
        String token = Env.getCurrentEnv().getLoadManager().getTokenManager().acquireToken();
        txnConf.setToken(token);
    } else {
        // Non-master FE must ask the master to begin the transaction.
        String token = Env.getCurrentEnv().getLoadManager().getTokenManager().acquireToken();
        MasterTxnExecutor masterTxnExecutor = new MasterTxnExecutor(context);
        TLoadTxnBeginRequest request = new TLoadTxnBeginRequest();
        request.setDb(txnConf.getDb()).setTbl(txnConf.getTbl()).setToken(token)
                .setCluster(dbObj.getClusterName()).setLabel(label).setUser("").setUserIp("").setPasswd("");
        TLoadTxnBeginResult result = masterTxnExecutor.beginTxn(request);
        txnConf.setTxnId(result.getTxnId());
        txnConf.setToken(token);
    }
    // Open the stream-load channel on the BE with session-derived settings.
    TStreamLoadPutRequest request = new TStreamLoadPutRequest();
    long maxExecMemByte = sessionVariable.getMaxExecMemByte();
    String timeZone = sessionVariable.getTimeZone();
    int sendBatchParallelism = sessionVariable.getSendBatchParallelism();
    request.setTxnId(txnConf.getTxnId()).setDb(txnConf.getDb())
            .setTbl(txnConf.getTbl())
            .setFileType(TFileType.FILE_STREAM).setFormatType(TFileFormatType.FORMAT_CSV_PLAIN)
            .setMergeType(TMergeType.APPEND).setThriftRpcTimeoutMs(5000).setLoadId(context.queryId())
            .setExecMemLimit(maxExecMemByte).setTimeout((int) timeoutSecond)
            .setTimezone(timeZone).setSendBatchParallelism(sendBatchParallelism);
    InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(txnEntry);
    executor.beginTransaction(request);
}
/**
 * Executes an INSERT statement and reports the outcome to the client.
 *
 * <p>Flow: EXPLAIN INSERT short-circuits to the plan text; inside an explicit
 * transaction only INSERT ... VALUES is allowed; otherwise a Coordinator runs
 * the load, the transaction is committed (VISIBLE/COMMITTED) or aborted, and a
 * result line of the form {'label':'...', 'status':'...', 'txnId':'...'} is
 * returned to the client.
 *
 * <p>Fixes relative to the previous version:
 * <ul>
 *   <li>the MATERIALIZED_VIEW 'rows' field used the separator "', 'rows':'",
 *       which produced a doubled quote after the already-terminated txnId
 *       field; it now uses the same ", 'rows':'" form as the 'err' field;</li>
 *   <li>{@code coord} is null-guarded in the failure paths, since the
 *       Coordinator constructor itself can throw before it is assigned.</li>
 * </ul>
 */
private void handleInsertStmt() throws Exception {
    if (context.getMysqlChannel() != null) {
        context.getMysqlChannel().reset();
    }
    InsertStmt insertStmt = (InsertStmt) parsedStmt;
    if (insertStmt.getQueryStmt().hasOutFileClause()) {
        throw new DdlException("Not support OUTFILE clause in INSERT statement");
    }
    if (insertStmt.getQueryStmt().isExplain()) {
        // EXPLAIN INSERT: return the plan text instead of loading anything.
        ExplainOptions explainOptions = insertStmt.getQueryStmt().getExplainOptions();
        insertStmt.setIsExplain(explainOptions);
        String explainString = planner.getExplainString(explainOptions);
        handleExplainStmt(explainString);
        return;
    }
    analyzeVariablesInStmt(insertStmt.getQueryStmt());
    long createTime = System.currentTimeMillis();
    Throwable throwable = null;
    long txnId = -1;
    String label = "";
    long loadedRows = 0;
    int filteredRows = 0;
    TransactionStatus txnStatus = TransactionStatus.ABORTED;
    String errMsg = "";
    TableType tblType = insertStmt.getTargetTable().getType();
    if (context.isTxnModel()) {
        // Explicit transaction model: only INSERT ... VALUES is supported.
        if (insertStmt.getQueryStmt() instanceof SelectStmt) {
            if (((SelectStmt) insertStmt.getQueryStmt()).getTableRefs().size() > 0) {
                throw new TException("Insert into ** select is not supported in a transaction");
            }
        }
        txnStatus = TransactionStatus.PREPARE;
        loadedRows = executeForTxn(insertStmt);
        label = context.getTxnEntry().getLabel();
        txnId = context.getTxnEntry().getTxnConf().getTxnId();
    } else {
        label = insertStmt.getLabel();
        LOG.info("Do insert [{}] with query id: {}", label, DebugUtil.printId(context.queryId()));
        try {
            coord = new Coordinator(context, analyzer, planner, context.getStatsErrorEstimator());
            coord.setLoadZeroTolerance(context.getSessionVariable().getEnableInsertStrict());
            coord.setQueryType(TQueryType.LOAD);
            profile.addExecutionProfile(coord.getExecutionProfile());
            QeProcessorImpl.INSTANCE.registerQuery(context.queryId(), coord);
            coord.exec();
            int execTimeout = context.getExecTimeout();
            LOG.debug("Insert execution timeout:{}", execTimeout);
            boolean notTimeout = coord.join(execTimeout);
            if (!coord.isDone()) {
                coord.cancel();
                if (notTimeout) {
                    // join returned before the timeout, yet the coordinator is
                    // not done: some backend stopped reporting.
                    errMsg = coord.getExecStatus().getErrorMsg();
                    ErrorReport.reportDdlException("There exists unhealthy backend. "
                            + errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT);
                } else {
                    ErrorReport.reportDdlException(ErrorCode.ERR_EXECUTE_TIMEOUT);
                }
            }
            if (!coord.getExecStatus().ok()) {
                errMsg = coord.getExecStatus().getErrorMsg();
                LOG.warn("insert failed: {}", errMsg);
                ErrorReport.reportDdlException(errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT);
            }
            LOG.debug("delta files is {}", coord.getDeltaUrls());
            if (coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL) != null) {
                loadedRows = Long.parseLong(coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL));
            }
            if (coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL) != null) {
                filteredRows = Integer.parseInt(coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL));
            }
            // In strict mode any filtered row fails the whole INSERT.
            if (context.getSessionVariable().getEnableInsertStrict()) {
                if (filteredRows > 0) {
                    context.getState().setError(ErrorCode.ERR_FAILED_WHEN_INSERT,
                            "Insert has filtered data in strict mode, tracking_url=" + coord.getTrackingUrl());
                    return;
                }
            }
            // Non-OLAP targets (e.g. external tables) have no transaction to commit.
            if (tblType != TableType.OLAP && tblType != TableType.MATERIALIZED_VIEW) {
                context.getState().setOk(loadedRows, filteredRows, null);
                return;
            }
            if (Env.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(
                    insertStmt.getDbObj(), Lists.newArrayList(insertStmt.getTargetTable()),
                    insertStmt.getTransactionId(),
                    TabletCommitInfo.fromThrift(coord.getCommitInfos()),
                    context.getSessionVariable().getInsertVisibleTimeoutMs())) {
                txnStatus = TransactionStatus.VISIBLE;
            } else {
                // Committed, but not published on all replicas within the timeout.
                txnStatus = TransactionStatus.COMMITTED;
            }
        } catch (Throwable t) {
            LOG.warn("handle insert stmt fail: {}", label, t);
            try {
                Env.getCurrentGlobalTransactionMgr().abortTransaction(
                        insertStmt.getDbObj().getId(), insertStmt.getTransactionId(),
                        t.getMessage() == null ? "unknown reason" : t.getMessage());
            } catch (Exception abortTxnException) {
                // Abort failures are logged only; they must not mask the root cause.
                LOG.warn("errors when abort txn", abortTxnException);
            }
            if (!Config.using_old_load_usage_pattern) {
                StringBuilder sb = new StringBuilder(t.getMessage());
                // coord may still be null if the Coordinator constructor threw.
                if (coord != null && !Strings.isNullOrEmpty(coord.getTrackingUrl())) {
                    sb.append(". url: " + coord.getTrackingUrl());
                }
                context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, sb.toString());
                return;
            }
            /*
             * If config 'using_old_load_usage_pattern' is true.
             * Doris will return a label to user, and user can use this label to check load job's status,
             * which exactly like the old insert stmt usage pattern.
             */
            throwable = t;
        } finally {
            updateProfile(true);
            QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
        }
        txnId = insertStmt.getTransactionId();
        try {
            context.getEnv().getLoadManager()
                    .recordFinishedLoadJob(label, txnId, insertStmt.getDbName(),
                            insertStmt.getTargetTable().getId(),
                            EtlJobType.INSERT, createTime, throwable == null ? "" : throwable.getMessage(),
                            // null-guard: coord is unset if its constructor threw above
                            coord == null ? "" : coord.getTrackingUrl(), insertStmt.getUserInfo());
        } catch (MetaNotFoundException e) {
            LOG.warn("Record info of insert load with error {}", e.getMessage(), e);
            errMsg = "Record info of insert load with error " + e.getMessage();
        }
    }
    // Build the client-visible result line, e.g. {'label':'x', 'status':'VISIBLE', 'txnId':'1'}.
    StringBuilder sb = new StringBuilder();
    sb.append("{'label':'").append(label).append("', 'status':'").append(txnStatus.name());
    sb.append("', 'txnId':'").append(txnId).append("'");
    if (tblType == TableType.MATERIALIZED_VIEW) {
        // Separator matches the 'err' field below; the previous "', 'rows':'"
        // form emitted a doubled quote after the closed txnId field.
        sb.append(", 'rows':'").append(loadedRows).append("'");
    }
    if (!Strings.isNullOrEmpty(errMsg)) {
        sb.append(", 'err':'").append(errMsg).append("'");
    }
    sb.append("}");
    context.getState().setOk(loadedRows, filteredRows, sb.toString());
    // Also record the result on the connection context for txn-model observers.
    context.setOrUpdateInsertResult(txnId, label, insertStmt.getDbName(), insertStmt.getTbl(),
            txnStatus, loadedRows, filteredRows);
    // Set insert result in connection context, used for `show last insert`.
    context.updateReturnRows((int) loadedRows);
}
/**
 * Hands an INSERT that targets an external source off to the load manager
 * adapter as an asynchronous load job; failures are mapped onto the MySQL
 * error state rather than propagated.
 */
private void handleExternalInsertStmt() {
    try {
        InsertStmt stmt = (InsertStmt) parsedStmt;
        // Reject statements whose load type could not be resolved during analysis.
        if (stmt.getLoadType() == LoadType.UNKNOWN) {
            throw new DdlException("Unknown load job type");
        }
        context.getEnv().getLoadManagerAdapter().startLoadFromInsertStmt(stmt);
        context.getState().setOk();
    } catch (UserException e) {
        // User-level failure: keep the original MySQL error code.
        LOG.debug("DDL statement({}) process failed.", originStmt.originStmt, e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    } catch (Exception e) {
        // Unexpected failure: surface as a generic unknown error.
        LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
// Handler for statements Doris accepts but does not implement: reset the
// channel and report OK so MySQL clients proceed without an error.
private void handleUnsupportedStmt() {
    context.getMysqlChannel().reset();
    context.getState().setOk();
}
/**
 * SWITCH &lt;catalog&gt;: changes the session's current catalog.
 * A DdlException is reported through the MySQL error state; success sets OK.
 */
private void handleSwitchStmt() throws AnalysisException {
    SwitchStmt stmt = (SwitchStmt) parsedStmt;
    try {
        context.getEnv().changeCatalog(context, stmt.getCatalogName());
        context.getState().setOk();
    } catch (DdlException e) {
        LOG.warn("", e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    }
}
/**
 * Registers a prepared statement in the session under its name.
 * For the MySQL binary protocol a COM_STMT_PREPARE_OK response is sent back;
 * text-protocol PREPARE only sets the OK state.
 */
private void handlePrepareStmt() throws Exception {
    // register prepareStmt
    LOG.debug("add prepared statement {}, isBinaryProtocol {}",
            prepareStmt.getName(), prepareStmt.isBinaryProtocol());
    context.addPreparedStmt(prepareStmt.getName(),
            new PrepareStmtContext(prepareStmt,
                    context, planner, analyzer, prepareStmt.getName()));
    if (prepareStmt.isBinaryProtocol()) {
        sendStmtPrepareOK();
    }
    context.getState().setOk();
}
/**
 * USE [catalog.]db: optionally switches catalog, then switches the current
 * database. A missing cluster name is an analysis error; DdlExceptions are
 * mapped to the MySQL error state.
 */
private void handleUseStmt() throws AnalysisException {
    UseStmt stmt = (UseStmt) parsedStmt;
    try {
        if (Strings.isNullOrEmpty(stmt.getClusterName())) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_CLUSTER_NO_SELECT_CLUSTER);
        }
        // Catalog change is optional; db change is mandatory.
        if (stmt.getCatalogName() != null) {
            context.getEnv().changeCatalog(context, stmt.getCatalogName());
        }
        context.getEnv().changeDb(context, stmt.getDatabase());
        context.getState().setOk();
    } catch (DdlException e) {
        LOG.warn("", e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    }
}
/**
 * Writes a result-set header to the MySQL channel: a column-count packet,
 * one column-definition packet per column, then a terminating EOF packet.
 */
private void sendMetaData(ResultSetMetaData metaData) throws IOException {
    // Column count packet.
    serializer.reset();
    serializer.writeVInt(metaData.getColumnCount());
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    // One column-definition packet per column.
    for (Column column : metaData.getColumns()) {
        serializer.reset();
        serializer.writeField(column.getName(), column.getType());
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    // Terminating EOF packet carrying the current state flags.
    serializer.reset();
    MysqlEofPacket eof = new MysqlEofPacket(context.getState());
    eof.writeTo(serializer);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}
/**
 * Sends the COM_STMT_PREPARE response for a binary-protocol prepared
 * statement: status byte, statement id, column count, parameter count,
 * then (if any) one field packet per placeholder.
 * NOTE(review): assumes the statement name is numeric — Integer.valueOf
 * throws NumberFormatException otherwise; confirm against how names are assigned.
 */
private void sendStmtPrepareOK() throws IOException {
    serializer.reset();
    // Status byte: 0 = OK.
    serializer.writeInt1(0);
    // Statement id: the numeric statement name.
    serializer.writeInt4(Integer.valueOf(prepareStmt.getName()));
    // Result-set column count (none described at prepare time here).
    int numColumns = 0;
    serializer.writeInt2(numColumns);
    // Placeholder (parameter) count.
    int numParams = prepareStmt.getColLabelsOfPlaceHolders().size();
    serializer.writeInt2(numParams);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    if (numParams > 0) {
        // Follow with a field-definition packet per placeholder.
        sendFields(prepareStmt.getColLabelsOfPlaceHolders(),
                exprToType(prepareStmt.getSlotRefOfPlaceHolders()));
    }
    context.getState().setOk();
}
/**
 * Sends MySQL column-definition packets for the given names/types, followed
 * by an EOF packet.
 * For binary-protocol EXECUTE of a prepared statement, the serialized field
 * packets are cached on the PrepareStmt so repeated executions skip
 * re-serialization of identical column metadata.
 */
private void sendFields(List<String> colNames, List<Type> types) throws IOException {
    // Column count packet first.
    serializer.reset();
    serializer.writeVInt(colNames.size());
    LOG.debug("sendFields {}", colNames);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    for (int i = 0; i < colNames.size(); ++i) {
        serializer.reset();
        if (prepareStmt != null && isExecuteStmt) {
            // Reuse (or build and cache) the serialized packet for this column.
            byte[] serializedField = prepareStmt.getSerializedField(colNames.get(i));
            if (serializedField == null) {
                serializer.writeField(colNames.get(i), types.get(i));
                serializedField = serializer.toArray();
                prepareStmt.setSerializedField(colNames.get(i), serializedField);
            }
            context.getMysqlChannel().sendOnePacket(ByteBuffer.wrap(serializedField));
        } else {
            serializer.writeField(colNames.get(i), types.get(i));
            context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
        }
    }
    // Terminating EOF packet.
    serializer.reset();
    MysqlEofPacket eofPacket = new MysqlEofPacket(context.getState());
    eofPacket.writeTo(serializer);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}
/**
 * Streams a full result set to the client: metadata first, then one packet
 * per data row (nulls and the FE null sentinel are written as MySQL NULL),
 * finishing with EOF state.
 */
public void sendResultSet(ResultSet resultSet) throws IOException {
    context.updateReturnRows(resultSet.getResultRows().size());
    // Column metadata precedes the data rows.
    sendMetaData(resultSet.getMetaData());
    for (List<String> row : resultSet.getResultRows()) {
        serializer.reset();
        for (String cell : row) {
            if (cell == null || cell.equals(FeConstants.null_string)) {
                serializer.writeNull();
            } else {
                serializer.writeLenEncodedString(cell);
            }
        }
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    context.getState().setEof();
}
/**
 * Executes a SHOW statement. A null result means there is nothing to send
 * locally; on a proxied connection the result is stashed for the forwarding
 * caller instead of being written to the channel.
 */
private void handleShow() throws IOException, AnalysisException, DdlException {
    ShowResultSet resultSet = new ShowExecutor(context, (ShowStmt) parsedStmt).execute();
    if (resultSet == null) {
        return;
    }
    if (isProxy) {
        proxyResultSet = resultSet;
        return;
    }
    sendResultSet(resultSet);
}
// Deliberate no-op: UNLOCK TABLES is accepted (presumably for MySQL-client
// compatibility — confirm intent) but has no effect here.
private void handleUnlockTablesStmt() {
}
// Deliberate no-op: LOCK TABLES is accepted (presumably for MySQL-client
// compatibility — confirm intent) but has no effect here.
private void handleLockTablesStmt() {
}
/**
 * Sends EXPLAIN output as a single-column result set named "Explain String";
 * each line of the plan text becomes one row.
 */
public void handleExplainStmt(String result) throws IOException {
    ShowResultSetMetaData metaData = ShowResultSetMetaData.builder()
            .addColumn(new Column("Explain String", ScalarType.createVarchar(20)))
            .build();
    sendMetaData(metaData);
    // One packet per plan line.
    for (String line : result.split("\n")) {
        serializer.reset();
        serializer.writeLenEncodedString(line);
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    context.getState().setEof();
}
/**
 * Dispatches a LOAD statement by its ETL job type: UNKNOWN and HADOOP are
 * rejected, LOCAL_FILE runs synchronously through the MySQL load manager
 * (requires client local-file capability), everything else becomes an async
 * load job. Failures are mapped onto the MySQL error state.
 */
private void handleLoadStmt() {
    try {
        LoadStmt loadStmt = (LoadStmt) parsedStmt;
        EtlJobType etlType = loadStmt.getEtlJobType();
        if (etlType == EtlJobType.UNKNOWN) {
            throw new DdlException("Unknown load job type");
        }
        if (etlType == EtlJobType.HADOOP) {
            throw new DdlException("Load job by hadoop cluster is disabled."
                    + " Try using broker load. See 'help broker load;'");
        }
        LoadManager loadManager = context.getEnv().getLoadManager();
        if (etlType != EtlJobType.LOCAL_FILE) {
            // Async path: register the job and return immediately.
            loadManager.createLoadJobFromStmt(loadStmt);
            context.getState().setOk();
        } else {
            // LOAD DATA LOCAL requires the client to allow local-file upload.
            if (!context.getCapability().supportClientLocalFile()) {
                context.getState().setError(ErrorCode.ERR_NOT_ALLOWED_COMMAND, "This client is not support"
                        + " to load client local file.");
                return;
            }
            String loadId = UUID.randomUUID().toString();
            mysqlLoadId = loadId;
            LoadJobRowResult submitResult = loadManager.getMysqlLoadManager()
                    .executeMySqlLoadJobFromStmt(context, loadStmt, loadId);
            context.getState().setOk(submitResult.getRecords(), submitResult.getWarnings(),
                    submitResult.toString());
        }
    } catch (UserException e) {
        LOG.debug("DDL statement({}) process failed.", originStmt.originStmt, e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    } catch (Exception e) {
        LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
/**
 * UPDATE is executed by swapping in the INSERT statement produced during
 * analysis and re-entering execute(). The query state is set by that nested
 * execute(); exceptions here are logged and swallowed.
 */
private void handleUpdateStmt() {
    try {
        UpdateStmt updateStmt = (UpdateStmt) parsedStmt;
        parsedStmt = updateStmt.getInsertStmt();
        execute();
        if (MysqlStateType.ERR.equals(context.getState().getStateType())) {
            LOG.warn("update data error, stmt={}", updateStmt.toSql());
        }
    } catch (Exception e) {
        LOG.warn("update data error, stmt={}", parsedStmt.toSql(), e);
    }
}
/**
 * DELETE is executed by swapping in the INSERT statement produced during
 * analysis and re-entering execute(). The query state is set by that nested
 * execute(); exceptions here are logged and swallowed.
 */
private void handleDeleteStmt() {
    try {
        DeleteStmt deleteStmt = (DeleteStmt) parsedStmt;
        parsedStmt = deleteStmt.getInsertStmt();
        execute();
        if (MysqlStateType.ERR.equals(context.getState().getStateType())) {
            LOG.warn("delete data error, stmt={}", deleteStmt.toSql());
        }
    } catch (Exception e) {
        LOG.warn("delete data error, stmt={}", parsedStmt.toSql(), e);
    }
}
/**
 * Executes a DDL statement through DdlExecutor and maps failures onto the
 * MySQL error state. ANALYZE statements manage their own query state, so OK
 * is not set for them here.
 */
private void handleDdlStmt() {
    try {
        DdlExecutor.execute(context.getEnv(), (DdlStmt) parsedStmt);
        if (!(parsedStmt instanceof AnalyzeStmt)) {
            context.getState().setOk();
        }
    } catch (QueryStateException e) {
        // The statement produced its own query state; adopt it verbatim.
        LOG.warn("", e);
        context.setState(e.getQueryState());
    } catch (UserException e) {
        LOG.warn("DDL statement({}) process failed.", originStmt.originStmt, e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    } catch (Exception e) {
        // Maybe our bug
        LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
// EXPORT: registers an asynchronous export job with the export manager;
// completion is tracked by the job itself, not by this handler.
private void handleExportStmt() throws Exception {
    ExportStmt exportStmt = (ExportStmt) parsedStmt;
    context.getEnv().getExportMgr().addExportJob(exportStmt);
}
/**
 * CREATE TABLE AS SELECT: phase 1 creates the table, phase 2 re-enters
 * execute() with the generated INSERT. If the insert fails (error state or
 * exception) the freshly created table is rolled back.
 */
private void handleCtasStmt() {
    CreateTableAsSelectStmt ctasStmt = (CreateTableAsSelectStmt) this.parsedStmt;
    // Phase 1: create the target table.
    try {
        DdlExecutor.execute(context.getEnv(), ctasStmt);
        context.getState().setOk();
    } catch (Exception e) {
        LOG.warn("CTAS create table error, stmt={}", originStmt.originStmt, e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
    if (!MysqlStateType.OK.equals(context.getState().getStateType())) {
        // Creation failed; nothing to insert into or roll back.
        return;
    }
    // Phase 2: populate the table via the generated INSERT.
    try {
        parsedStmt = ctasStmt.getInsertStmt();
        parsedStmt.setUserInfo(context.getCurrentUserIdentity());
        execute();
        if (MysqlStateType.ERR.equals(context.getState().getStateType())) {
            LOG.warn("CTAS insert data error, stmt={}", ctasStmt.toSql());
            handleCtasRollback(ctasStmt.getCreateTableStmt().getDbTbl());
        }
    } catch (Exception e) {
        LOG.warn("CTAS insert data error, stmt={}", ctasStmt.toSql(), e);
        handleCtasRollback(ctasStmt.getCreateTableStmt().getDbTbl());
    }
}
/**
 * Drops the table created by a failed CTAS, but only when the session opts
 * in via drop_table_if_ctas_failed. Drop failures are reported through the
 * MySQL error state.
 */
private void handleCtasRollback(TableName table) {
    if (!context.getSessionVariable().isDropTableIfCtasFailed()) {
        return;
    }
    // Boolean flags mirror the original call — presumably ifExists/force;
    // confirm against DropTableStmt's constructor.
    DropTableStmt dropTableStmt = new DropTableStmt(true, table, true);
    try {
        DdlExecutor.execute(context.getEnv(), dropTableStmt);
    } catch (Exception ex) {
        LOG.warn("CTAS drop table error, stmt={}", parsedStmt.toSql(), ex);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + ex.getMessage());
    }
}
/**
 * Returns query statistics for the audit log, zero-filling every counter the
 * execution layer never reported so the audit record always has all fields.
 */
public Data.PQueryStatistics getQueryStatisticsForAuditLog() {
    if (statisticsForAuditLog == null) {
        statisticsForAuditLog = Data.PQueryStatistics.newBuilder();
    }
    if (!statisticsForAuditLog.hasScanBytes()) {
        statisticsForAuditLog.setScanBytes(0L);
    }
    if (!statisticsForAuditLog.hasScanRows()) {
        statisticsForAuditLog.setScanRows(0L);
    }
    if (!statisticsForAuditLog.hasReturnedRows()) {
        statisticsForAuditLog.setReturnedRows(0L);
    }
    if (!statisticsForAuditLog.hasCpuMs()) {
        statisticsForAuditLog.setCpuMs(0L);
    }
    return statisticsForAuditLog.build();
}
/** Maps each expression to its resolved type, preserving order. */
private List<Type> exprToType(List<Expr> exprs) {
    return exprs.stream().map(Expr::getType).collect(Collectors.toList());
}
/**
 * Replaces the statement this executor works on and keeps the statement
 * context in sync. Returns the statement for call chaining.
 */
public StatementBase setParsedStmt(StatementBase parsedStmt) {
    this.parsedStmt = parsedStmt;
    this.statementContext.setParsedStatement(parsedStmt);
    return parsedStmt;
}
/**
 * Runs this statement as an internal (FE-initiated) query and collects every
 * result row in memory. Errors are logged and swallowed: callers receive
 * whatever rows were fetched before the failure, possibly an empty list.
 *
 * <p>Planning first tries the Nereids planner when it is enabled, falling
 * back to the legacy analyzer/planner on any failure. The query is always
 * unregistered from the QeProcessor on exit.
 */
public List<ResultRow> executeInternalQuery() {
    try {
        List<ResultRow> resultRows = new ArrayList<>();
        try {
            if (ConnectContext.get() != null
                    && ConnectContext.get().getSessionVariable().isEnableNereidsPlanner()) {
                try {
                    // Preferred path: Nereids planner.
                    parseByNereids();
                    Preconditions.checkState(parsedStmt instanceof LogicalPlanAdapter,
                            "Nereids only process LogicalPlanAdapter,"
                                    + " but parsedStmt is " + parsedStmt.getClass().getName());
                    context.getState().setNereids(true);
                    context.getState().setIsQuery(true);
                    planner = new NereidsPlanner(statementContext);
                    planner.plan(parsedStmt, context.getSessionVariable().toThrift());
                } catch (Exception e) {
                    // Any Nereids failure falls back to the legacy planner.
                    LOG.warn("fall back to legacy planner, because: {}", e.getMessage(), e);
                    parsedStmt = null;
                    context.getState().setNereids(false);
                    analyzer = new Analyzer(context.getEnv(), context);
                    analyze(context.getSessionVariable().toThrift());
                }
            } else {
                // Nereids disabled: legacy planner directly.
                analyzer = new Analyzer(context.getEnv(), context);
                analyze(context.getSessionVariable().toThrift());
            }
        } catch (Exception e) {
            // Planning failed entirely: return the (empty) row list.
            LOG.warn("Internal SQL execution failed, SQL: {}", originStmt, e);
            return resultRows;
        }
        planner.getFragments();
        RowBatch batch;
        coord = new Coordinator(context, analyzer, planner, context.getStatsErrorEstimator());
        profile.addExecutionProfile(coord.getExecutionProfile());
        try {
            QeProcessorImpl.INSTANCE.registerQuery(context.queryId(),
                    new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord));
        } catch (UserException e) {
            // Registration failure is logged but does not stop execution.
            LOG.warn(e.getMessage(), e);
        }
        // Trace the scheduling phase.
        Span queryScheduleSpan = context.getTracer()
                .spanBuilder("internal SQL schedule").setParent(Context.current()).startSpan();
        try (Scope scope = queryScheduleSpan.makeCurrent()) {
            coord.exec();
        } catch (Exception e) {
            queryScheduleSpan.recordException(e);
            LOG.warn("Unexpected exception when SQL running", e);
        } finally {
            queryScheduleSpan.end();
        }
        // Trace the fetch phase; drain batches until EOS.
        Span fetchResultSpan = context.getTracer().spanBuilder("fetch internal SQL result")
                .setParent(Context.current()).startSpan();
        try (Scope scope = fetchResultSpan.makeCurrent()) {
            while (true) {
                batch = coord.getNext();
                if (batch == null || batch.isEos()) {
                    return resultRows;
                } else {
                    resultRows.addAll(convertResultBatchToResultRows(batch.getBatch()));
                }
            }
        } catch (Exception e) {
            LOG.warn("Unexpected exception when SQL running", e);
            fetchResultSpan.recordException(e);
            return resultRows;
        } finally {
            fetchResultSpan.end();
        }
    } finally {
        // Always drop the query registration, whatever path exited above.
        QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
    }
}
/**
 * Decodes a Thrift result batch into ResultRow objects, using the statement's
 * column labels and primitive result types. Every cell is read as a
 * length-prefixed string from the row buffer.
 */
private List<ResultRow> convertResultBatchToResultRows(TResultBatch batch) {
    List<String> columns = parsedStmt.getColLabels();
    List<PrimitiveType> types = parsedStmt.getResultExprs().stream()
            .map(e -> e.getType().getPrimitiveType())
            .collect(Collectors.toList());
    List<ByteBuffer> rows = batch.getRows();
    List<ResultRow> resultRows = new ArrayList<>(rows.size());
    for (ByteBuffer buffer : rows) {
        // slice() so reads never disturb the original buffer position.
        InternalQueryBuffer reader = new InternalQueryBuffer(buffer.slice());
        List<String> values = Lists.newArrayList();
        for (int col = 0; col < columns.size(); col++) {
            values.add(reader.readStringWithLength());
        }
        resultRows.add(new ResultRow(columns, types, values));
    }
    return resultRows;
}
/** Returns the summary section of this query's profile. */
public SummaryProfile getSummaryProfile() {
    return profile.getSummaryProfile();
}
} |
`getCurrentInternalCatalog` only returns `InternalCatalog`, no matter what your current catalog is. But `getCurrentCatalog` will return the real current catalog (set by the `switch` cmd) | private void analyzeTargetTable(Analyzer analyzer) throws AnalysisException {
if (targetTable == null) {
DatabaseIf db = analyzer.getEnv().getCurrentCatalog().getDbOrAnalysisException(tblName.getDb());
if (db instanceof Database) {
targetTable = (Table) db.getTableOrAnalysisException(tblName.getTbl());
} else if (db instanceof JdbcExternalDatabase) {
JdbcExternalTable jdbcTable = (JdbcExternalTable) db.getTableOrAnalysisException(tblName.getTbl());
targetTable = jdbcTable.getJdbcTable();
} else {
throw new AnalysisException("Un support insert target table");
}
}
if (targetTable instanceof OlapTable) {
OlapTable olapTable = (OlapTable) targetTable;
if (targetPartitionNames != null) {
targetPartitionIds = Lists.newArrayList();
if (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);
}
for (String partName : targetPartitionNames.getPartitionNames()) {
Partition part = olapTable.getPartition(partName, targetPartitionNames.isTemp());
if (part == null) {
ErrorReport.reportAnalysisException(
ErrorCode.ERR_UNKNOWN_PARTITION, partName, targetTable.getName());
}
targetPartitionIds.add(part.getId());
}
}
DescriptorTable descTable = analyzer.getDescTbl();
olapTuple = descTable.createTupleDescriptor();
for (Column col : olapTable.getFullSchema()) {
SlotDescriptor slotDesc = descTable.addSlotDescriptor(olapTuple);
slotDesc.setIsMaterialized(true);
slotDesc.setType(col.getType());
slotDesc.setColumn(col);
slotDesc.setIsNullable(col.isAllowNull());
}
indexIdToSchemaHash = olapTable.getIndexIdToSchemaHash();
} else if (targetTable instanceof MysqlTable || targetTable instanceof OdbcTable
|| targetTable instanceof JdbcTable) {
if (targetPartitionNames != null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);
}
} else if (targetTable instanceof BrokerTable) {
if (targetPartitionNames != null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);
}
BrokerTable brokerTable = (BrokerTable) targetTable;
if (!brokerTable.isWritable()) {
throw new AnalysisException("table " + brokerTable.getName()
+ "is not writable. path should be an dir");
}
} else {
ErrorReport.reportAnalysisException(
ErrorCode.ERR_NON_INSERTABLE_TABLE, targetTable.getName(), targetTable.getType());
}
} | DatabaseIf db = analyzer.getEnv().getCurrentCatalog().getDbOrAnalysisException(tblName.getDb()); | private void analyzeTargetTable(Analyzer analyzer) throws AnalysisException {
if (targetTable == null) {
DatabaseIf db = analyzer.getEnv().getCatalogMgr()
.getCatalog(tblName.getCtl()).getDbOrAnalysisException(tblName.getDb());
if (db instanceof Database) {
targetTable = (Table) db.getTableOrAnalysisException(tblName.getTbl());
} else if (db instanceof JdbcExternalDatabase) {
JdbcExternalTable jdbcTable = (JdbcExternalTable) db.getTableOrAnalysisException(tblName.getTbl());
targetTable = jdbcTable.getJdbcTable();
} else {
throw new AnalysisException("Not support insert target table.");
}
}
if (targetTable instanceof OlapTable) {
OlapTable olapTable = (OlapTable) targetTable;
if (targetPartitionNames != null) {
targetPartitionIds = Lists.newArrayList();
if (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);
}
for (String partName : targetPartitionNames.getPartitionNames()) {
Partition part = olapTable.getPartition(partName, targetPartitionNames.isTemp());
if (part == null) {
ErrorReport.reportAnalysisException(
ErrorCode.ERR_UNKNOWN_PARTITION, partName, targetTable.getName());
}
targetPartitionIds.add(part.getId());
}
}
DescriptorTable descTable = analyzer.getDescTbl();
olapTuple = descTable.createTupleDescriptor();
for (Column col : olapTable.getFullSchema()) {
SlotDescriptor slotDesc = descTable.addSlotDescriptor(olapTuple);
slotDesc.setIsMaterialized(true);
slotDesc.setType(col.getType());
slotDesc.setColumn(col);
slotDesc.setIsNullable(col.isAllowNull());
}
indexIdToSchemaHash = olapTable.getIndexIdToSchemaHash();
} else if (targetTable instanceof MysqlTable || targetTable instanceof OdbcTable
|| targetTable instanceof JdbcTable) {
if (targetPartitionNames != null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);
}
} else if (targetTable instanceof BrokerTable) {
if (targetPartitionNames != null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);
}
BrokerTable brokerTable = (BrokerTable) targetTable;
if (!brokerTable.isWritable()) {
throw new AnalysisException("table " + brokerTable.getName()
+ "is not writable. path should be an dir");
}
} else {
ErrorReport.reportAnalysisException(
ErrorCode.ERR_NON_INSERTABLE_TABLE, targetTable.getName(), targetTable.getType());
}
} | class InsertStmt extends DdlStmt {
private static final Logger LOG = LogManager.getLogger(InsertStmt.class);
public static final String SHUFFLE_HINT = "SHUFFLE";
public static final String NOSHUFFLE_HINT = "NOSHUFFLE";
public static final String STREAMING = "STREAMING";
private final TableName tblName;
private final PartitionNames targetPartitionNames;
private List<Long> targetPartitionIds;
private final List<String> targetColumnNames;
private QueryStmt queryStmt;
private final List<String> planHints;
private Boolean isRepartition;
private boolean isStreaming = false;
private String label = null;
private Map<Long, Integer> indexIdToSchemaHash = null;
private ArrayList<Expr> resultExprs = Lists.newArrayList();
private Map<String, Expr> exprByName = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
private Table targetTable;
private DatabaseIf db;
private long transactionId;
private TupleDescriptor olapTuple;
private DataSink dataSink;
private DataPartition dataPartition;
private List<Column> targetColumns = Lists.newArrayList();
/*
* InsertStmt may be analyzed twice, but transaction must be only begun once.
* So use a boolean to check if transaction already begun.
*/
private boolean isTransactionBegin = false;
private boolean isValuesOrConstantSelect = false;
public boolean isValuesOrConstantSelect() {
return isValuesOrConstantSelect;
}
public InsertStmt(InsertTarget target, String label, List<String> cols, InsertSource source, List<String> hints) {
this.tblName = target.getTblName();
this.targetPartitionNames = target.getPartitionNames();
this.label = label;
this.queryStmt = source.getQueryStmt();
this.planHints = hints;
this.targetColumnNames = cols;
this.isValuesOrConstantSelect = (queryStmt instanceof SelectStmt
&& ((SelectStmt) queryStmt).getTableRefs().isEmpty());
}
public InsertStmt(TableName name, QueryStmt queryStmt) {
this.tblName = name;
this.targetPartitionNames = null;
this.targetColumnNames = null;
this.queryStmt = queryStmt;
this.planHints = null;
}
public TupleDescriptor getOlapTuple() {
return olapTuple;
}
public Table getTargetTable() {
return targetTable;
}
public void setTargetTable(Table targetTable) {
this.targetTable = targetTable;
}
public Map<Long, Integer> getIndexIdToSchemaHash() {
return this.indexIdToSchemaHash;
}
public long getTransactionId() {
return this.transactionId;
}
public Boolean isRepartition() {
return isRepartition;
}
public String getDb() {
return tblName.getDb();
}
public String getTbl() {
return tblName.getTbl();
}
public void getTables(Analyzer analyzer, Map<Long, TableIf> tableMap, Set<String> parentViewNameSet)
throws AnalysisException {
queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
tblName.analyze(analyzer);
if (analyzer.getEnv().getCurrentCatalog() instanceof ExternalCatalog
&& !(analyzer.getEnv().getCurrentCatalog() instanceof JdbcExternalCatalog)) {
Util.prohibitExternalCatalog(tblName.getCtl(), this.getClass().getSimpleName());
}
String dbName = tblName.getDb();
String tableName = tblName.getTbl();
DatabaseIf db = analyzer.getEnv().getCurrentCatalog().getDbOrAnalysisException(dbName);
TableIf table = db.getTableOrAnalysisException(tblName.getTbl());
if (!Env.getCurrentEnv().getAuth()
.checkTblPriv(ConnectContext.get(), dbName, tableName, PrivPredicate.LOAD)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD",
ConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(),
dbName + ": " + tableName);
}
tableMap.put(table.getId(), table);
}
public QueryStmt getQueryStmt() {
return queryStmt;
}
public void setQueryStmt(QueryStmt queryStmt) {
this.queryStmt = queryStmt;
}
@Override
public void foldConstant(ExprRewriter rewriter) throws AnalysisException {
Preconditions.checkState(isAnalyzed());
queryStmt.foldConstant(rewriter);
}
@Override
public void rewriteExprs(ExprRewriter rewriter) throws AnalysisException {
Preconditions.checkState(isAnalyzed());
queryStmt.rewriteExprs(rewriter);
}
@Override
public boolean isExplain() {
return queryStmt.isExplain();
}
public boolean isStreaming() {
return isStreaming;
}
public String getLabel() {
return label;
}
public DataSink getDataSink() {
return dataSink;
}
public DatabaseIf getDbObj() {
return db;
}
public boolean isTransactionBegin() {
return isTransactionBegin;
}
@Override
public void analyze(Analyzer analyzer) throws UserException {
super.analyze(analyzer);
if (targetTable == null) {
tblName.analyze(analyzer);
if (analyzer.getEnv().getCurrentCatalog() instanceof ExternalCatalog
&& !(analyzer.getEnv().getCurrentCatalog() instanceof JdbcExternalCatalog)) {
Util.prohibitExternalCatalog(tblName.getCtl(), this.getClass().getSimpleName());
}
}
if (!Env.getCurrentEnv().getAuth().checkTblPriv(ConnectContext.get(), tblName.getDb(),
tblName.getTbl(), PrivPredicate.LOAD)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(), tblName.getDb() + ": " + tblName.getTbl());
}
if (targetPartitionNames != null) {
targetPartitionNames.analyze(analyzer);
}
analyzeTargetTable(analyzer);
analyzeSubquery(analyzer);
analyzePlanHints(analyzer);
if (analyzer.getContext().isTxnModel()) {
return;
}
createDataSink();
db = analyzer.getEnv().getCurrentCatalog().getDbOrAnalysisException(tblName.getDb());
long timeoutSecond = ConnectContext.get().getSessionVariable().getQueryTimeoutS();
if (Strings.isNullOrEmpty(label)) {
label = "insert_" + DebugUtil.printId(analyzer.getContext().queryId()).replace("-", "_");
}
if (!isExplain() && !isTransactionBegin) {
if (targetTable instanceof OlapTable) {
LoadJobSourceType sourceType = LoadJobSourceType.INSERT_STREAMING;
transactionId = Env.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(),
Lists.newArrayList(targetTable.getId()), label,
new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),
sourceType, timeoutSecond);
}
isTransactionBegin = true;
}
if (!isExplain() && targetTable instanceof OlapTable) {
OlapTableSink sink = (OlapTableSink) dataSink;
TUniqueId loadId = analyzer.getContext().queryId();
int sendBatchParallelism = analyzer.getContext().getSessionVariable().getSendBatchParallelism();
sink.init(loadId, transactionId, db.getId(), timeoutSecond, sendBatchParallelism, false);
}
}
private void checkColumnCoverage(Set<String> mentionedCols, List<Column> baseColumns)
throws AnalysisException {
for (Column col : baseColumns) {
if (mentionedCols.contains(col.getName())) {
continue;
}
if (col.getDefaultValue() == null && !col.isAllowNull()) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_COL_NOT_MENTIONED, col.getName());
}
}
}
/**
 * Analyzes the INSERT source (SELECT query or VALUES list) against the target table:
 * resolves the target column list, appends shadow and materialized-view columns,
 * checks column coverage and value counts, and coerces every result expr to the
 * corresponding target column's type.
 */
private void analyzeSubquery(Analyzer analyzer) throws UserException {
    // Resolve the set of target columns; comparison is case-insensitive.
    Set<String> mentionedColumns = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
    if (targetColumnNames == null) {
        // No explicit column list: use the visible base schema in define order.
        for (Column col : targetTable.getBaseSchema(false)) {
            mentionedColumns.add(col.getName());
            targetColumns.add(col);
        }
    } else {
        for (String colName : targetColumnNames) {
            Column col = targetTable.getColumn(colName);
            if (col == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR, colName, targetTable.getName());
            }
            if (!mentionedColumns.add(colName)) {
                // The same column appears twice in the INSERT column list.
                ErrorReport.reportAnalysisException(ErrorCode.ERR_FIELD_SPECIFIED_TWICE, colName);
            }
            targetColumns.add(col);
        }
        // Object-stored columns (e.g. bitmap/hll) cannot be filled implicitly,
        // so they must always be mentioned explicitly.
        for (Column col : targetTable.getBaseSchema()) {
            if (col.getType().isObjectStored() && !mentionedColumns.contains(col.getName())) {
                throw new AnalysisException(" object-stored column " + col.getName()
                        + " mush in insert into columns");
            }
        }
    }
    /*
     * When doing schema change, there may be some shadow columns. we should add
     * them to the end of targetColumns. And use 'origColIdxsForExtendCols' to save
     * the index of column in 'targetColumns' which the shadow column related to.
     * eg: origin targetColumns: (A,B,C), shadow column: __doris_shadow_B after
     * processing, targetColumns: (A, B, C, __doris_shadow_B), and
     * origColIdxsForExtendCols has 1 element: "1", which is the index of column B
     * in targetColumns.
     *
     * Rule A: If the column which the shadow column related to is not mentioned,
     * then do not add the shadow column to targetColumns. They will be filled by
     * null or default value when loading.
     *
     * When table have materialized view, there may be some materialized view columns.
     * we should add them to the end of targetColumns.
     * eg: origin targetColumns: (A,B,C), shadow column: mv_bitmap_union_C
     * after processing, targetColumns: (A, B, C, mv_bitmap_union_C), and
     * origColIdx2MVColumn has 1 element: "2, mv_bitmap_union_C"
     * will be used in as a mapping from queryStmt.getResultExprs() to targetColumns define expr
     */
    List<Pair<Integer, Column>> origColIdxsForExtendCols = Lists.newArrayList();
    for (Column column : targetTable.getFullSchema()) {
        if (column.isNameWithPrefix(SchemaChangeHandler.SHADOW_NAME_PREFIX)) {
            String origName = Column.removeNamePrefix(column.getName());
            for (int i = 0; i < targetColumns.size(); i++) {
                if (targetColumns.get(i).nameEquals(origName, false)) {
                    // Rule A above: only extend when the original column is mentioned.
                    origColIdxsForExtendCols.add(Pair.of(i, null));
                    targetColumns.add(column);
                    break;
                }
            }
        }
        if (column.isNameWithPrefix(CreateMaterializedViewStmt.MATERIALIZED_VIEW_NAME_PREFIX)) {
            SlotRef refColumn = column.getRefColumn();
            if (refColumn == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR,
                        column.getName(), targetTable.getName());
            }
            String origName = refColumn.getColumnName();
            for (int originColumnIdx = 0; originColumnIdx < targetColumns.size(); originColumnIdx++) {
                if (targetColumns.get(originColumnIdx).nameEquals(origName, false)) {
                    origColIdxsForExtendCols.add(Pair.of(originColumnIdx, column));
                    targetColumns.add(column);
                    break;
                }
            }
        }
    }
    // Analyze the source statement in insert mode.
    queryStmt.setFromInsert(true);
    queryStmt.analyze(analyzer);
    // The source must produce exactly one expr per mentioned column
    // (extension columns are appended afterwards, so they are excluded here).
    if (mentionedColumns.size() != queryStmt.getResultExprs().size()) {
        ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_VALUE_COUNT);
    }
    // Unmentioned base columns must be nullable or have a default value.
    checkColumnCoverage(mentionedColumns, targetTable.getBaseSchema());
    if (isValuesOrConstantSelect) {
        SelectStmt selectStmt = (SelectStmt) queryStmt;
        if (selectStmt.getValueList() != null) {
            // INSERT ... VALUES (...), (...): analyze and extend each row independently.
            List<ArrayList<Expr>> rows = selectStmt.getValueList().getRows();
            for (int rowIdx = 0; rowIdx < rows.size(); ++rowIdx) {
                analyzeRow(analyzer, targetColumns, rows, rowIdx, origColIdxsForExtendCols);
            }
            // Re-seed the result exprs from the (possibly extended) first row.
            selectStmt.getResultExprs().clear();
            selectStmt.getBaseTblResultExprs().clear();
            for (int i = 0; i < selectStmt.getValueList().getFirstRow().size(); ++i) {
                selectStmt.getResultExprs().add(selectStmt.getValueList().getFirstRow().get(i));
                selectStmt.getBaseTblResultExprs().add(selectStmt.getValueList().getFirstRow().get(i));
            }
        } else {
            // Constant SELECT without FROM: treat its result exprs as a single row.
            List<ArrayList<Expr>> rows = Lists.newArrayList();
            rows.add(Lists.newArrayList(selectStmt.getResultExprs()));
            analyzeRow(analyzer, targetColumns, rows, 0, origColIdxsForExtendCols);
            selectStmt.getResultExprs().clear();
            for (Expr expr : rows.get(0)) {
                selectStmt.getResultExprs().add(expr);
            }
        }
        isStreaming = true;
    } else {
        // INSERT ... SELECT: extend the result exprs for shadow / MV columns.
        if (!origColIdxsForExtendCols.isEmpty()) {
            for (Pair<Integer, Column> entry : origColIdxsForExtendCols) {
                if (entry.second == null) {
                    // Shadow column: duplicate the expr of the column it shadows.
                    queryStmt.getResultExprs().add(queryStmt.getResultExprs().get(entry.first));
                } else {
                    // MV column: substitute the referenced column inside the
                    // MV define expr with the corresponding source expr.
                    ExprSubstitutionMap smap = new ExprSubstitutionMap();
                    smap.getLhs().add(entry.second.getRefColumn());
                    smap.getRhs().add(queryStmt.getResultExprs().get(entry.first));
                    Expr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
                            smap, analyzer, false).get(0);
                    queryStmt.getResultExprs().add(e);
                }
            }
        }
        // Cast every result expr to its target column's type.
        for (int i = 0; i < targetColumns.size(); ++i) {
            Column column = targetColumns.get(i);
            Expr expr = queryStmt.getResultExprs().get(i);
            queryStmt.getResultExprs().set(i, expr.checkTypeCompatibility(column.getType()));
        }
    }
    if (!origColIdxsForExtendCols.isEmpty()) {
        // Keep base-table result exprs and column labels aligned with result exprs.
        if (queryStmt.getResultExprs().size() != queryStmt.getBaseTblResultExprs().size()) {
            for (Pair<Integer, Column> entry : origColIdxsForExtendCols) {
                if (entry.second == null) {
                    queryStmt.getBaseTblResultExprs().add(queryStmt.getBaseTblResultExprs().get(entry.first));
                } else {
                    ExprSubstitutionMap smap = new ExprSubstitutionMap();
                    smap.getLhs().add(entry.second.getRefColumn());
                    smap.getRhs().add(queryStmt.getResultExprs().get(entry.first));
                    Expr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
                            smap, analyzer, false).get(0);
                    queryStmt.getBaseTblResultExprs().add(e);
                }
            }
        }
        if (queryStmt.getResultExprs().size() != queryStmt.getColLabels().size()) {
            for (Pair<Integer, Column> entry : origColIdxsForExtendCols) {
                queryStmt.getColLabels().add(queryStmt.getColLabels().get(entry.first));
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        for (Expr expr : queryStmt.getResultExprs()) {
            LOG.debug("final result expr: {}, {}", expr, System.identityHashCode(expr));
        }
        for (Expr expr : queryStmt.getBaseTblResultExprs()) {
            LOG.debug("final base table result expr: {}, {}", expr, System.identityHashCode(expr));
        }
        for (String colLabel : queryStmt.getColLabels()) {
            LOG.debug("final col label: {}", colLabel);
        }
    }
}
/**
 * Analyzes one VALUES row: extends it in place with shadow / materialized-view
 * exprs, replaces DEFAULT placeholders with the column's default literal, and
 * casts every expr to its target column type.
 *
 * @param rows all source rows; {@code rows.get(rowIdx)} is replaced by the extended row
 */
private void analyzeRow(Analyzer analyzer, List<Column> targetColumns, List<ArrayList<Expr>> rows,
        int rowIdx, List<Pair<Integer, Column>> origColIdxsForExtendCols) throws AnalysisException {
    // The user-supplied row must match the target columns, excluding appended extension columns.
    if (rows.get(rowIdx).size() != targetColumns.size() - origColIdxsForExtendCols.size()) {
        throw new AnalysisException("Column count doesn't match value count at row " + (rowIdx + 1));
    }
    ArrayList<Expr> row = rows.get(rowIdx);
    if (!origColIdxsForExtendCols.isEmpty()) {
        /**
         * we should extend the row for shadow columns.
         * eg:
         * the origin row has exprs: (expr1, expr2, expr3), and targetColumns is (A, B, C, __doris_shadow_b)
         * after processing, extentedRow is (expr1, expr2, expr3, expr2)
         */
        ArrayList<Expr> extentedRow = Lists.newArrayList();
        extentedRow.addAll(row);
        for (Pair<Integer, Column> entry : origColIdxsForExtendCols) {
            if (entry != null) {
                if (entry.second == null) {
                    // Shadow column: reuse the expr of the column it shadows.
                    extentedRow.add(extentedRow.get(entry.first));
                } else {
                    // MV column: apply the MV define expr to the source expr.
                    ExprSubstitutionMap smap = new ExprSubstitutionMap();
                    smap.getLhs().add(entry.second.getRefColumn());
                    smap.getRhs().add(extentedRow.get(entry.first));
                    extentedRow.add(Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
                            smap, analyzer, false).get(0));
                }
            }
        }
        row = extentedRow;
        rows.set(rowIdx, row);
    }
    for (int i = 0; i < row.size(); ++i) {
        Expr expr = row.get(i);
        Column col = targetColumns.get(i);
        if (expr instanceof DefaultValueExpr) {
            // The DEFAULT keyword: replace with the column's default value literal.
            if (targetColumns.get(i).getDefaultValue() == null) {
                throw new AnalysisException("Column has no default value, column="
                        + targetColumns.get(i).getName());
            }
            expr = new StringLiteral(targetColumns.get(i).getDefaultValue());
        }
        expr.analyze(analyzer);
        // Cast (or reject) the expr according to the target column type.
        row.set(i, expr.checkTypeCompatibility(col.getType()));
    }
}
/**
 * Applies plan hints: SHUFFLE / NOSHUFFLE control repartitioning on a
 * partitioned target table (and may not contradict each other), STREAMING
 * enables streaming insert. Any other hint is rejected.
 */
private void analyzePlanHints(Analyzer analyzer) throws AnalysisException {
    if (planHints == null) {
        return;
    }
    for (String hint : planHints) {
        boolean wantsShuffle = SHUFFLE_HINT.equalsIgnoreCase(hint);
        boolean wantsNoShuffle = NOSHUFFLE_HINT.equalsIgnoreCase(hint);
        if (wantsShuffle || wantsNoShuffle) {
            // Repartition hints only make sense on a partitioned table.
            if (!targetTable.isPartitioned()) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_INSERT_HINT_NOT_SUPPORT);
            }
            // An earlier hint with the opposite meaning is a conflict.
            if (isRepartition != null && isRepartition != wantsShuffle) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_PLAN_HINT_CONFILT, hint);
            }
            isRepartition = wantsShuffle;
        } else if (STREAMING.equalsIgnoreCase(hint)) {
            isStreaming = true;
        } else {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_UNKNOWN_PLAN_HINT, hint);
        }
    }
}
/**
 * Builds {@code resultExprs} with one expr per column of the target table's
 * full schema: mentioned columns take their (type-checked) select exprs, the
 * hidden sequence column reuses the expr of the column it is mapped to, and
 * every other column is filled with NULL or its default value.
 */
public void prepareExpressions() throws UserException {
    List<Expr> selectList = Expr.cloneList(queryStmt.getResultExprs());
    // Type-check the select exprs against the mentioned target columns
    // and index them by column name for the reorder pass below.
    int numCols = targetColumns.size();
    for (int i = 0; i < numCols; ++i) {
        Column col = targetColumns.get(i);
        Expr expr = selectList.get(i).checkTypeCompatibility(col.getType());
        selectList.set(i, expr);
        exprByName.put(col.getName(), expr);
    }
    // Walk the full schema in order, filling in exprs for unmentioned columns.
    for (Column col : targetTable.getFullSchema()) {
        if (exprByName.containsKey(col.getName())) {
            resultExprs.add(exprByName.get(col.getName()));
        } else {
            // The hidden sequence column takes the expr of its mapped column.
            if (targetTable instanceof OlapTable && ((OlapTable) targetTable).hasSequenceCol()
                    && col.getName().equals(Column.SEQUENCE_COL)
                    && ((OlapTable) targetTable).getSequenceMapCol() != null) {
                resultExprs.add(exprByName.get(((OlapTable) targetTable).getSequenceMapCol()));
            } else if (col.getDefaultValue() == null) {
                /*
                The import stmt has been filtered in function checkColumnCoverage when
                the default value of column is null and column is not nullable.
                So the default value of column may simply be null when column is nullable
                */
                Preconditions.checkState(col.isAllowNull());
                resultExprs.add(NullLiteral.create(col.getType()));
            } else {
                if (col.getDefaultValueExprDef() != null) {
                    // Default value is itself an expression (e.g. CURRENT_TIMESTAMP).
                    resultExprs.add(col.getDefaultValueExpr());
                } else {
                    // Plain default value: parse it as a string literal of the column type.
                    StringLiteral defaultValueExpr;
                    defaultValueExpr = new StringLiteral(col.getDefaultValue());
                    resultExprs.add(defaultValueExpr.checkTypeCompatibility(col.getType()));
                }
            }
        }
    }
}
/**
 * Lazily creates the data sink (and its output partition) matching the target
 * table's kind; repeated calls return the sink created first.
 *
 * @return the data sink rows will be written to
 */
private DataSink createDataSink() throws AnalysisException {
    // analyze() may run more than once; keep the sink created the first time.
    if (dataSink != null) {
        return dataSink;
    }
    if (targetTable instanceof OlapTable) {
        // Olap table: write into tablets, partitioned by the table's distribution.
        boolean singleReplicaInsert = analyzer.getContext().getSessionVariable().isEnableSingleReplicaInsert();
        dataSink = new OlapTableSink((OlapTable) targetTable, olapTuple, targetPartitionIds, singleReplicaInsert);
        dataPartition = dataSink.getOutputPartition();
    } else if (targetTable instanceof BrokerTable) {
        // Broker table: export rows as delimited text through the broker.
        BrokerTable brokerTable = (BrokerTable) targetTable;
        BrokerDesc desc = new BrokerDesc(brokerTable.getBrokerName(), brokerTable.getBrokerProperties());
        dataSink = new ExportSink(brokerTable.getWritablePath(), brokerTable.getColumnSeparator(),
                brokerTable.getLineDelimiter(), desc);
        dataPartition = dataSink.getOutputPartition();
    } else {
        // Any other table kind uses the generic sink, unpartitioned.
        dataSink = DataSink.createDataSink(targetTable);
        dataPartition = DataPartition.UNPARTITIONED;
    }
    return dataSink;
}
/**
 * Finalizes the olap sink after planning and registers the target table's
 * indexes on the load transaction. No-op for EXPLAIN or non-olap targets.
 *
 * @throws DdlException if the transaction begun in analyze() no longer exists
 */
public void complete() throws UserException {
    // Nothing to finish for EXPLAIN, or when no olap transaction is involved.
    if (isExplain() || !(targetTable instanceof OlapTable)) {
        return;
    }
    ((OlapTableSink) dataSink).complete();
    // Add table indexes to transaction state.
    TransactionState state = Env.getCurrentGlobalTransactionMgr()
            .getTransactionState(db.getId(), transactionId);
    if (state == null) {
        throw new DdlException("txn does not exist: " + transactionId);
    }
    state.addTableIndexes((OlapTable) targetTable);
}
/** Returns the exprs built by {@link #prepareExpressions()}, one per full-schema column. */
@Override
public ArrayList<Expr> getResultExprs() {
    return resultExprs;
}

/** Returns the output partition chosen when the data sink was created. */
public DataPartition getDataPartition() {
    return dataPartition;
}
/**
 * Resets analysis state so the statement can be analyzed again; the source
 * query stmt is reset as well, but the begun-transaction flag is kept.
 */
@Override
public void reset() {
    super.reset();
    if (targetPartitionIds != null) {
        targetPartitionIds.clear();
    }
    queryStmt.reset();
    resultExprs.clear();
    exprByName.clear();
    dataSink = null;
    dataPartition = null;
    targetColumns.clear();
}
/**
 * EXPLAIN runs locally; a real INSERT is forwarded to master with sync,
 * because it begins a load transaction.
 */
@Override
public RedirectStatus getRedirectStatus() {
    return isExplain() ? RedirectStatus.NO_FORWARD : RedirectStatus.FORWARD_WITH_SYNC;
}
} | class InsertStmt extends DdlStmt {
private static final Logger LOG = LogManager.getLogger(InsertStmt.class);
// Recognized plan-hint names (compared case-insensitively in analyzePlanHints).
public static final String SHUFFLE_HINT = "SHUFFLE";
public static final String NOSHUFFLE_HINT = "NOSHUFFLE";
public static final String STREAMING = "STREAMING";
private final TableName tblName;
private final PartitionNames targetPartitionNames;
private List<Long> targetPartitionIds;
private final List<String> targetColumnNames;
private QueryStmt queryStmt;
private final List<String> planHints;
// null until a SHUFFLE/NOSHUFFLE hint sets it; TRUE = repartition.
private Boolean isRepartition;
private boolean isStreaming = false;
private String label = null;
private Map<Long, Integer> indexIdToSchemaHash = null;
// Filled by prepareExpressions(): one expr per full-schema column.
private ArrayList<Expr> resultExprs = Lists.newArrayList();
// Select exprs keyed by target column name (case-insensitive).
private Map<String, Expr> exprByName = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
private Table targetTable;
private DatabaseIf db;
private long transactionId;
private TupleDescriptor olapTuple;
private DataSink dataSink;
private DataPartition dataPartition;
// Columns assigned by the statement, later extended with shadow/MV columns.
private List<Column> targetColumns = Lists.newArrayList();
/*
 * InsertStmt may be analyzed twice, but transaction must be only begun once.
 * So use a boolean to check if transaction already begun.
 */
private boolean isTransactionBegin = false;
private boolean isValuesOrConstantSelect = false;

/** True for INSERT ... VALUES or a constant SELECT without FROM. */
public boolean isValuesOrConstantSelect() {
    return isValuesOrConstantSelect;
}
/**
 * Builds an INSERT from parsed clauses.
 *
 * @param target table (and optional partitions) to insert into
 * @param label  optional load label; generated in analyze() when absent
 * @param cols   explicit target column names, or null for the full base schema
 * @param source the SELECT query or VALUES list supplying rows
 * @param hints  plan hints (SHUFFLE / NOSHUFFLE / STREAMING), or null
 */
public InsertStmt(InsertTarget target, String label, List<String> cols, InsertSource source, List<String> hints) {
    this.tblName = target.getTblName();
    this.targetPartitionNames = target.getPartitionNames();
    this.label = label;
    this.queryStmt = source.getQueryStmt();
    this.planHints = hints;
    this.targetColumnNames = cols;
    // Constant source (VALUES or SELECT without FROM) is handled row-by-row.
    this.isValuesOrConstantSelect = (queryStmt instanceof SelectStmt
            && ((SelectStmt) queryStmt).getTableRefs().isEmpty());
}

/** Minimal constructor: insert the query's output into the named table. */
public InsertStmt(TableName name, QueryStmt queryStmt) {
    this.tblName = name;
    this.targetPartitionNames = null;
    this.targetColumnNames = null;
    this.queryStmt = queryStmt;
    this.planHints = null;
}
/** Tuple descriptor for the olap sink; set during target-table analysis. */
public TupleDescriptor getOlapTuple() {
    return olapTuple;
}

public Table getTargetTable() {
    return targetTable;
}

public void setTargetTable(Table targetTable) {
    this.targetTable = targetTable;
}

public Map<Long, Integer> getIndexIdToSchemaHash() {
    return this.indexIdToSchemaHash;
}

/** Id of the load transaction begun in analyze(); 0 until then. */
public long getTransactionId() {
    return this.transactionId;
}

/** TRUE/FALSE once a SHUFFLE/NOSHUFFLE hint was seen; null otherwise. */
public Boolean isRepartition() {
    return isRepartition;
}

public String getDb() {
    return tblName.getDb();
}

public String getTbl() {
    return tblName.getTbl();
}
/**
 * Collects all tables referenced by this INSERT (source query plus the target
 * table) into {@code tableMap}, checking LOAD privilege on the target.
 * External catalogs other than JDBC are rejected.
 */
public void getTables(Analyzer analyzer, Map<Long, TableIf> tableMap, Set<String> parentViewNameSet)
        throws AnalysisException {
    // Tables referenced by the source query first.
    queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
    tblName.analyze(analyzer);
    // INSERT into external catalogs is not supported (JDBC is the exception).
    if (analyzer.getEnv().getCurrentCatalog() instanceof ExternalCatalog
            && !(analyzer.getEnv().getCurrentCatalog() instanceof JdbcExternalCatalog)) {
        Util.prohibitExternalCatalog(tblName.getCtl(), this.getClass().getSimpleName());
    }
    String dbName = tblName.getDb();
    String tableName = tblName.getTbl();
    DatabaseIf db = analyzer.getEnv().getCatalogMgr().getCatalog(tblName.getCtl()).getDbOrAnalysisException(dbName);
    TableIf table = db.getTableOrAnalysisException(tblName.getTbl());
    // The current user must hold LOAD privilege on the target table.
    if (!Env.getCurrentEnv().getAuth()
            .checkTblPriv(ConnectContext.get(), dbName, tableName, PrivPredicate.LOAD)) {
        ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD",
                ConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(),
                dbName + ": " + tableName);
    }
    tableMap.put(table.getId(), table);
}
public QueryStmt getQueryStmt() {
    return queryStmt;
}

public void setQueryStmt(QueryStmt queryStmt) {
    this.queryStmt = queryStmt;
}

/** Delegates constant folding to the source query; requires prior analyze(). */
@Override
public void foldConstant(ExprRewriter rewriter) throws AnalysisException {
    Preconditions.checkState(isAnalyzed());
    queryStmt.foldConstant(rewriter);
}

/** Delegates expr rewriting to the source query; requires prior analyze(). */
@Override
public void rewriteExprs(ExprRewriter rewriter) throws AnalysisException {
    Preconditions.checkState(isAnalyzed());
    queryStmt.rewriteExprs(rewriter);
}

/** EXPLAIN status is carried by the source query. */
@Override
public boolean isExplain() {
    return queryStmt.isExplain();
}

public boolean isStreaming() {
    return isStreaming;
}

/** The load label; generated in analyze() when the user supplied none. */
public String getLabel() {
    return label;
}

public DataSink getDataSink() {
    return dataSink;
}

public DatabaseIf getDbObj() {
    return db;
}

/** True once analyze() has begun the load transaction (analyze may run twice). */
public boolean isTransactionBegin() {
    return isTransactionBegin;
}
/**
 * Full analysis of the INSERT: resolves and authorizes the target table,
 * analyzes the source query, applies hints, creates the data sink, begins a
 * load transaction (once, unless EXPLAIN or txn model), and initializes the
 * olap sink. Returns early in transaction model after subquery analysis.
 */
@Override
public void analyze(Analyzer analyzer) throws UserException {
    super.analyze(analyzer);
    if (targetTable == null) {
        tblName.analyze(analyzer);
        // INSERT into external catalogs is not supported (JDBC is the exception).
        if (analyzer.getEnv().getCurrentCatalog() instanceof ExternalCatalog
                && !(analyzer.getEnv().getCurrentCatalog() instanceof JdbcExternalCatalog)) {
            Util.prohibitExternalCatalog(tblName.getCtl(), this.getClass().getSimpleName());
        }
    }
    // The current user must hold LOAD privilege on the target table.
    if (!Env.getCurrentEnv().getAuth().checkTblPriv(ConnectContext.get(), tblName.getDb(),
            tblName.getTbl(), PrivPredicate.LOAD)) {
        ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD",
                ConnectContext.get().getQualifiedUser(),
                ConnectContext.get().getRemoteIP(), tblName.getDb() + ": " + tblName.getTbl());
    }
    if (targetPartitionNames != null) {
        targetPartitionNames.analyze(analyzer);
    }
    analyzeTargetTable(analyzer);
    analyzeSubquery(analyzer);
    analyzePlanHints(analyzer);
    // In transaction model the transaction/sink setup below is not needed here.
    if (analyzer.getContext().isTxnModel()) {
        return;
    }
    createDataSink();
    db = analyzer.getEnv().getCatalogMgr().getCatalog(tblName.getCtl()).getDbOrAnalysisException(tblName.getDb());
    long timeoutSecond = ConnectContext.get().getSessionVariable().getQueryTimeoutS();
    if (Strings.isNullOrEmpty(label)) {
        // No user label: derive one from the query id.
        label = "insert_" + DebugUtil.printId(analyzer.getContext().queryId()).replace("-", "_");
    }
    // Begin the load transaction exactly once, even if analyze() runs twice.
    if (!isExplain() && !isTransactionBegin) {
        if (targetTable instanceof OlapTable) {
            LoadJobSourceType sourceType = LoadJobSourceType.INSERT_STREAMING;
            transactionId = Env.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(),
                    Lists.newArrayList(targetTable.getId()), label,
                    new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),
                    sourceType, timeoutSecond);
        }
        isTransactionBegin = true;
    }
    // Bind the olap sink to the transaction we just (or previously) began.
    if (!isExplain() && targetTable instanceof OlapTable) {
        OlapTableSink sink = (OlapTableSink) dataSink;
        TUniqueId loadId = analyzer.getContext().queryId();
        int sendBatchParallelism = analyzer.getContext().getSessionVariable().getSendBatchParallelism();
        sink.init(loadId, transactionId, db.getId(), timeoutSecond, sendBatchParallelism, false);
    }
}
/**
 * Verifies that every base column not assigned by this INSERT is either
 * nullable or has a default value, so it can be filled implicitly.
 */
private void checkColumnCoverage(Set<String> mentionedCols, List<Column> baseColumns)
        throws AnalysisException {
    for (Column col : baseColumns) {
        if (mentionedCols.contains(col.getName())) {
            continue;
        }
        // Unmentioned and unfillable: reject the statement.
        if (col.getDefaultValue() == null && !col.isAllowNull()) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_COL_NOT_MENTIONED, col.getName());
        }
    }
}
/**
 * Analyzes the INSERT source (SELECT query or VALUES list) against the target table:
 * resolves the target column list, appends shadow and materialized-view columns,
 * checks column coverage and value counts, and coerces every result expr to the
 * corresponding target column's type.
 */
private void analyzeSubquery(Analyzer analyzer) throws UserException {
    // Resolve the set of target columns; comparison is case-insensitive.
    Set<String> mentionedColumns = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
    if (targetColumnNames == null) {
        // No explicit column list: use the visible base schema in define order.
        for (Column col : targetTable.getBaseSchema(false)) {
            mentionedColumns.add(col.getName());
            targetColumns.add(col);
        }
    } else {
        for (String colName : targetColumnNames) {
            Column col = targetTable.getColumn(colName);
            if (col == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR, colName, targetTable.getName());
            }
            if (!mentionedColumns.add(colName)) {
                // The same column appears twice in the INSERT column list.
                ErrorReport.reportAnalysisException(ErrorCode.ERR_FIELD_SPECIFIED_TWICE, colName);
            }
            targetColumns.add(col);
        }
        // Object-stored columns (e.g. bitmap/hll) cannot be filled implicitly,
        // so they must always be mentioned explicitly.
        for (Column col : targetTable.getBaseSchema()) {
            if (col.getType().isObjectStored() && !mentionedColumns.contains(col.getName())) {
                throw new AnalysisException(" object-stored column " + col.getName()
                        + " mush in insert into columns");
            }
        }
    }
    /*
     * When doing schema change, there may be some shadow columns. we should add
     * them to the end of targetColumns. And use 'origColIdxsForExtendCols' to save
     * the index of column in 'targetColumns' which the shadow column related to.
     * eg: origin targetColumns: (A,B,C), shadow column: __doris_shadow_B after
     * processing, targetColumns: (A, B, C, __doris_shadow_B), and
     * origColIdxsForExtendCols has 1 element: "1", which is the index of column B
     * in targetColumns.
     *
     * Rule A: If the column which the shadow column related to is not mentioned,
     * then do not add the shadow column to targetColumns. They will be filled by
     * null or default value when loading.
     *
     * When table have materialized view, there may be some materialized view columns.
     * we should add them to the end of targetColumns.
     * eg: origin targetColumns: (A,B,C), shadow column: mv_bitmap_union_C
     * after processing, targetColumns: (A, B, C, mv_bitmap_union_C), and
     * origColIdx2MVColumn has 1 element: "2, mv_bitmap_union_C"
     * will be used in as a mapping from queryStmt.getResultExprs() to targetColumns define expr
     */
    List<Pair<Integer, Column>> origColIdxsForExtendCols = Lists.newArrayList();
    for (Column column : targetTable.getFullSchema()) {
        if (column.isNameWithPrefix(SchemaChangeHandler.SHADOW_NAME_PREFIX)) {
            String origName = Column.removeNamePrefix(column.getName());
            for (int i = 0; i < targetColumns.size(); i++) {
                if (targetColumns.get(i).nameEquals(origName, false)) {
                    // Rule A above: only extend when the original column is mentioned.
                    origColIdxsForExtendCols.add(Pair.of(i, null));
                    targetColumns.add(column);
                    break;
                }
            }
        }
        if (column.isNameWithPrefix(CreateMaterializedViewStmt.MATERIALIZED_VIEW_NAME_PREFIX)) {
            SlotRef refColumn = column.getRefColumn();
            if (refColumn == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR,
                        column.getName(), targetTable.getName());
            }
            String origName = refColumn.getColumnName();
            for (int originColumnIdx = 0; originColumnIdx < targetColumns.size(); originColumnIdx++) {
                if (targetColumns.get(originColumnIdx).nameEquals(origName, false)) {
                    origColIdxsForExtendCols.add(Pair.of(originColumnIdx, column));
                    targetColumns.add(column);
                    break;
                }
            }
        }
    }
    // Analyze the source statement in insert mode.
    queryStmt.setFromInsert(true);
    queryStmt.analyze(analyzer);
    // The source must produce exactly one expr per mentioned column
    // (extension columns are appended afterwards, so they are excluded here).
    if (mentionedColumns.size() != queryStmt.getResultExprs().size()) {
        ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_VALUE_COUNT);
    }
    // Unmentioned base columns must be nullable or have a default value.
    checkColumnCoverage(mentionedColumns, targetTable.getBaseSchema());
    if (isValuesOrConstantSelect) {
        SelectStmt selectStmt = (SelectStmt) queryStmt;
        if (selectStmt.getValueList() != null) {
            // INSERT ... VALUES (...), (...): analyze and extend each row independently.
            List<ArrayList<Expr>> rows = selectStmt.getValueList().getRows();
            for (int rowIdx = 0; rowIdx < rows.size(); ++rowIdx) {
                analyzeRow(analyzer, targetColumns, rows, rowIdx, origColIdxsForExtendCols);
            }
            // Re-seed the result exprs from the (possibly extended) first row.
            selectStmt.getResultExprs().clear();
            selectStmt.getBaseTblResultExprs().clear();
            for (int i = 0; i < selectStmt.getValueList().getFirstRow().size(); ++i) {
                selectStmt.getResultExprs().add(selectStmt.getValueList().getFirstRow().get(i));
                selectStmt.getBaseTblResultExprs().add(selectStmt.getValueList().getFirstRow().get(i));
            }
        } else {
            // Constant SELECT without FROM: treat its result exprs as a single row.
            List<ArrayList<Expr>> rows = Lists.newArrayList();
            rows.add(Lists.newArrayList(selectStmt.getResultExprs()));
            analyzeRow(analyzer, targetColumns, rows, 0, origColIdxsForExtendCols);
            selectStmt.getResultExprs().clear();
            for (Expr expr : rows.get(0)) {
                selectStmt.getResultExprs().add(expr);
            }
        }
        isStreaming = true;
    } else {
        // INSERT ... SELECT: extend the result exprs for shadow / MV columns.
        if (!origColIdxsForExtendCols.isEmpty()) {
            for (Pair<Integer, Column> entry : origColIdxsForExtendCols) {
                if (entry.second == null) {
                    // Shadow column: duplicate the expr of the column it shadows.
                    queryStmt.getResultExprs().add(queryStmt.getResultExprs().get(entry.first));
                } else {
                    // MV column: substitute the referenced column inside the
                    // MV define expr with the corresponding source expr.
                    ExprSubstitutionMap smap = new ExprSubstitutionMap();
                    smap.getLhs().add(entry.second.getRefColumn());
                    smap.getRhs().add(queryStmt.getResultExprs().get(entry.first));
                    Expr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
                            smap, analyzer, false).get(0);
                    queryStmt.getResultExprs().add(e);
                }
            }
        }
        // Cast every result expr to its target column's type.
        for (int i = 0; i < targetColumns.size(); ++i) {
            Column column = targetColumns.get(i);
            Expr expr = queryStmt.getResultExprs().get(i);
            queryStmt.getResultExprs().set(i, expr.checkTypeCompatibility(column.getType()));
        }
    }
    if (!origColIdxsForExtendCols.isEmpty()) {
        // Keep base-table result exprs and column labels aligned with result exprs.
        if (queryStmt.getResultExprs().size() != queryStmt.getBaseTblResultExprs().size()) {
            for (Pair<Integer, Column> entry : origColIdxsForExtendCols) {
                if (entry.second == null) {
                    queryStmt.getBaseTblResultExprs().add(queryStmt.getBaseTblResultExprs().get(entry.first));
                } else {
                    ExprSubstitutionMap smap = new ExprSubstitutionMap();
                    smap.getLhs().add(entry.second.getRefColumn());
                    smap.getRhs().add(queryStmt.getResultExprs().get(entry.first));
                    Expr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
                            smap, analyzer, false).get(0);
                    queryStmt.getBaseTblResultExprs().add(e);
                }
            }
        }
        if (queryStmt.getResultExprs().size() != queryStmt.getColLabels().size()) {
            for (Pair<Integer, Column> entry : origColIdxsForExtendCols) {
                queryStmt.getColLabels().add(queryStmt.getColLabels().get(entry.first));
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        for (Expr expr : queryStmt.getResultExprs()) {
            LOG.debug("final result expr: {}, {}", expr, System.identityHashCode(expr));
        }
        for (Expr expr : queryStmt.getBaseTblResultExprs()) {
            LOG.debug("final base table result expr: {}, {}", expr, System.identityHashCode(expr));
        }
        for (String colLabel : queryStmt.getColLabels()) {
            LOG.debug("final col label: {}", colLabel);
        }
    }
}
/**
 * Analyzes one VALUES row: extends it in place with shadow / materialized-view
 * exprs, replaces DEFAULT placeholders with the column's default literal, and
 * casts every expr to its target column type.
 *
 * @param rows all source rows; {@code rows.get(rowIdx)} is replaced by the extended row
 */
private void analyzeRow(Analyzer analyzer, List<Column> targetColumns, List<ArrayList<Expr>> rows,
        int rowIdx, List<Pair<Integer, Column>> origColIdxsForExtendCols) throws AnalysisException {
    // The user-supplied row must match the target columns, excluding appended extension columns.
    if (rows.get(rowIdx).size() != targetColumns.size() - origColIdxsForExtendCols.size()) {
        throw new AnalysisException("Column count doesn't match value count at row " + (rowIdx + 1));
    }
    ArrayList<Expr> row = rows.get(rowIdx);
    if (!origColIdxsForExtendCols.isEmpty()) {
        /**
         * we should extend the row for shadow columns.
         * eg:
         * the origin row has exprs: (expr1, expr2, expr3), and targetColumns is (A, B, C, __doris_shadow_b)
         * after processing, extentedRow is (expr1, expr2, expr3, expr2)
         */
        ArrayList<Expr> extentedRow = Lists.newArrayList();
        extentedRow.addAll(row);
        for (Pair<Integer, Column> entry : origColIdxsForExtendCols) {
            if (entry != null) {
                if (entry.second == null) {
                    // Shadow column: reuse the expr of the column it shadows.
                    extentedRow.add(extentedRow.get(entry.first));
                } else {
                    // MV column: apply the MV define expr to the source expr.
                    ExprSubstitutionMap smap = new ExprSubstitutionMap();
                    smap.getLhs().add(entry.second.getRefColumn());
                    smap.getRhs().add(extentedRow.get(entry.first));
                    extentedRow.add(Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
                            smap, analyzer, false).get(0));
                }
            }
        }
        row = extentedRow;
        rows.set(rowIdx, row);
    }
    for (int i = 0; i < row.size(); ++i) {
        Expr expr = row.get(i);
        Column col = targetColumns.get(i);
        if (expr instanceof DefaultValueExpr) {
            // The DEFAULT keyword: replace with the column's default value literal.
            if (targetColumns.get(i).getDefaultValue() == null) {
                throw new AnalysisException("Column has no default value, column="
                        + targetColumns.get(i).getName());
            }
            expr = new StringLiteral(targetColumns.get(i).getDefaultValue());
        }
        expr.analyze(analyzer);
        // Cast (or reject) the expr according to the target column type.
        row.set(i, expr.checkTypeCompatibility(col.getType()));
    }
}
/**
 * Applies plan hints: SHUFFLE / NOSHUFFLE control repartitioning on a
 * partitioned target table (and may not contradict each other), STREAMING
 * enables streaming insert. Any other hint is rejected.
 */
private void analyzePlanHints(Analyzer analyzer) throws AnalysisException {
    if (planHints == null) {
        return;
    }
    for (String hint : planHints) {
        if (SHUFFLE_HINT.equalsIgnoreCase(hint)) {
            // Repartition hints only make sense on a partitioned table.
            if (!targetTable.isPartitioned()) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_INSERT_HINT_NOT_SUPPORT);
            }
            // Conflicts with an earlier NOSHUFFLE hint.
            if (isRepartition != null && !isRepartition) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_PLAN_HINT_CONFILT, hint);
            }
            isRepartition = Boolean.TRUE;
        } else if (NOSHUFFLE_HINT.equalsIgnoreCase(hint)) {
            if (!targetTable.isPartitioned()) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_INSERT_HINT_NOT_SUPPORT);
            }
            // Conflicts with an earlier SHUFFLE hint.
            if (isRepartition != null && isRepartition) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_PLAN_HINT_CONFILT, hint);
            }
            isRepartition = Boolean.FALSE;
        } else if (STREAMING.equalsIgnoreCase(hint)) {
            isStreaming = true;
        } else {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_UNKNOWN_PLAN_HINT, hint);
        }
    }
}
/**
 * Builds {@code resultExprs} with one expr per column of the target table's
 * full schema: mentioned columns take their (type-checked) select exprs, the
 * hidden sequence column reuses the expr of the column it is mapped to, and
 * every other column is filled with NULL or its default value.
 */
public void prepareExpressions() throws UserException {
    List<Expr> selectList = Expr.cloneList(queryStmt.getResultExprs());
    // Type-check the select exprs against the mentioned target columns
    // and index them by column name for the reorder pass below.
    int numCols = targetColumns.size();
    for (int i = 0; i < numCols; ++i) {
        Column col = targetColumns.get(i);
        Expr expr = selectList.get(i).checkTypeCompatibility(col.getType());
        selectList.set(i, expr);
        exprByName.put(col.getName(), expr);
    }
    // Walk the full schema in order, filling in exprs for unmentioned columns.
    for (Column col : targetTable.getFullSchema()) {
        if (exprByName.containsKey(col.getName())) {
            resultExprs.add(exprByName.get(col.getName()));
        } else {
            // The hidden sequence column takes the expr of its mapped column.
            if (targetTable instanceof OlapTable && ((OlapTable) targetTable).hasSequenceCol()
                    && col.getName().equals(Column.SEQUENCE_COL)
                    && ((OlapTable) targetTable).getSequenceMapCol() != null) {
                resultExprs.add(exprByName.get(((OlapTable) targetTable).getSequenceMapCol()));
            } else if (col.getDefaultValue() == null) {
                /*
                The import stmt has been filtered in function checkColumnCoverage when
                the default value of column is null and column is not nullable.
                So the default value of column may simply be null when column is nullable
                */
                Preconditions.checkState(col.isAllowNull());
                resultExprs.add(NullLiteral.create(col.getType()));
            } else {
                if (col.getDefaultValueExprDef() != null) {
                    // Default value is itself an expression (e.g. CURRENT_TIMESTAMP).
                    resultExprs.add(col.getDefaultValueExpr());
                } else {
                    // Plain default value: parse it as a string literal of the column type.
                    StringLiteral defaultValueExpr;
                    defaultValueExpr = new StringLiteral(col.getDefaultValue());
                    resultExprs.add(defaultValueExpr.checkTypeCompatibility(col.getType()));
                }
            }
        }
    }
}
/**
 * Lazily creates the data sink (and its output partition) matching the target
 * table's kind; repeated calls return the sink created first.
 */
private DataSink createDataSink() throws AnalysisException {
    // analyze() may run more than once; keep the sink created the first time.
    if (dataSink != null) {
        return dataSink;
    }
    if (targetTable instanceof OlapTable) {
        // Olap table: write into tablets, partitioned by the table's distribution.
        dataSink = new OlapTableSink((OlapTable) targetTable, olapTuple, targetPartitionIds,
                analyzer.getContext().getSessionVariable().isEnableSingleReplicaInsert());
        dataPartition = dataSink.getOutputPartition();
    } else if (targetTable instanceof BrokerTable) {
        // Broker table: export rows as delimited text through the broker.
        BrokerTable table = (BrokerTable) targetTable;
        BrokerDesc brokerDesc = new BrokerDesc(table.getBrokerName(), table.getBrokerProperties());
        dataSink = new ExportSink(
                table.getWritablePath(),
                table.getColumnSeparator(),
                table.getLineDelimiter(),
                brokerDesc);
        dataPartition = dataSink.getOutputPartition();
    } else {
        // Any other table kind uses the generic sink, unpartitioned.
        dataSink = DataSink.createDataSink(targetTable);
        dataPartition = DataPartition.UNPARTITIONED;
    }
    return dataSink;
}
/**
 * Finalizes the olap sink after planning and registers the target table's
 * indexes on the load transaction. No-op for EXPLAIN or non-olap targets.
 */
public void complete() throws UserException {
    if (!isExplain() && targetTable instanceof OlapTable) {
        ((OlapTableSink) dataSink).complete();
        // The transaction begun in analyze() must still exist to attach indexes.
        TransactionState txnState = Env.getCurrentGlobalTransactionMgr()
                .getTransactionState(db.getId(), transactionId);
        if (txnState == null) {
            throw new DdlException("txn does not exist: " + transactionId);
        }
        txnState.addTableIndexes((OlapTable) targetTable);
    }
}
/** Returns the exprs built by {@link #prepareExpressions()}, one per full-schema column. */
@Override
public ArrayList<Expr> getResultExprs() {
    return resultExprs;
}

/** Returns the output partition chosen when the data sink was created. */
public DataPartition getDataPartition() {
    return dataPartition;
}

/**
 * Resets analysis state so the statement can be analyzed again; the source
 * query stmt is reset as well, but the begun-transaction flag is kept.
 */
@Override
public void reset() {
    super.reset();
    if (targetPartitionIds != null) {
        targetPartitionIds.clear();
    }
    queryStmt.reset();
    resultExprs.clear();
    exprByName.clear();
    dataSink = null;
    dataPartition = null;
    targetColumns.clear();
}

/**
 * EXPLAIN runs locally; a real INSERT is forwarded to master with sync,
 * because it begins a load transaction.
 */
@Override
public RedirectStatus getRedirectStatus() {
    if (isExplain()) {
        return RedirectStatus.NO_FORWARD;
    } else {
        return RedirectStatus.FORWARD_WITH_SYNC;
    }
}
} |
But if I throw a `OIDCException` it is going to retry the discovery, right ? I just want to fail fast. | private TenantConfigContext createTenantContext(Vertx vertx, OidcTenantConfig oidcConfig, String tenantId) {
if (!oidcConfig.tenantEnabled) {
LOG.debugf("%s tenant configuration is disabled", tenantId);
return null;
}
OAuth2ClientOptions options = new OAuth2ClientOptions();
if (oidcConfig.getClientId().isPresent()) {
options.setClientID(oidcConfig.getClientId().get());
}
if (oidcConfig.getToken().issuer.isPresent()) {
options.setValidateIssuer(false);
}
if (oidcConfig.getToken().getExpirationGrace().isPresent()) {
JWTOptions jwtOptions = new JWTOptions();
jwtOptions.setLeeway(oidcConfig.getToken().getExpirationGrace().get());
options.setJWTOptions(jwtOptions);
}
if (oidcConfig.getPublicKey().isPresent()) {
if (oidcConfig.applicationType == ApplicationType.WEB_APP) {
throw new ConfigurationException("'public-key' property can only be used with the 'service' applications");
}
LOG.info("'public-key' property for the local token verification is set,"
+ " no connection to the OIDC server will be created");
options.addPubSecKey(new PubSecKeyOptions()
.setAlgorithm("RS256")
.setPublicKey(oidcConfig.getPublicKey().get()));
return new TenantConfigContext(new OAuth2AuthProviderImpl(vertx, options), oidcConfig);
}
if (!oidcConfig.getAuthServerUrl().isPresent() || !oidcConfig.getClientId().isPresent()) {
throw new ConfigurationException(
"Both 'auth-server-url' and 'client-id' or alterntively 'public-key' must be configured"
+ " when the quarkus-oidc extension is enabled");
}
options.setSite(oidcConfig.getAuthServerUrl().get());
if (oidcConfig.getIntrospectionPath().isPresent()) {
options.setIntrospectionPath(oidcConfig.getIntrospectionPath().get());
}
if (oidcConfig.getJwksPath().isPresent()) {
options.setJwkPath(oidcConfig.getJwksPath().get());
}
Credentials creds = oidcConfig.getCredentials();
if (creds.secret.isPresent() && (creds.clientSecret.value.isPresent() || creds.clientSecret.method.isPresent())) {
throw new ConfigurationException(
"'credentials.secret' and 'credentials.client-secret' properties are mutually exclusive");
}
if (creds.secret.isPresent()
|| creds.clientSecret.value.isPresent()
&& creds.clientSecret.method.orElseGet(() -> Secret.Method.BASIC) == Secret.Method.BASIC) {
options.setClientSecret(creds.secret.orElseGet(() -> creds.clientSecret.value.get()));
} else {
options.setClientSecretParameterName(null);
}
Optional<ProxyOptions> proxyOpt = toProxyOptions(oidcConfig.getProxy());
if (proxyOpt.isPresent()) {
options.setProxyOptions(proxyOpt.get());
}
if (oidcConfig.tls.verification == Verification.NONE) {
options.setTrustAll(true);
options.setVerifyHost(false);
}
final long connectionDelayInSecs = oidcConfig.getConnectionDelay().isPresent()
? oidcConfig.getConnectionDelay().get().toMillis() / 1000
: 0;
final long connectionRetryCount = connectionDelayInSecs > 1 ? connectionDelayInSecs / 2 : 1;
if (connectionRetryCount > 1) {
LOG.infof("Connecting to IDP for up to %d times every 2 seconds", connectionRetryCount);
}
OAuth2Auth auth = null;
for (long i = 0; i < connectionRetryCount; i++) {
try {
CompletableFuture<OAuth2Auth> cf = new CompletableFuture<>();
KeycloakAuth.discover(vertx, options, new Handler<AsyncResult<OAuth2Auth>>() {
@Override
public void handle(AsyncResult<OAuth2Auth> event) {
if (event.failed()) {
cf.completeExceptionally(toOidcException(event.cause()));
} else {
cf.complete(event.result());
}
}
});
auth = cf.join();
String endSessionEndpoint = OAuth2AuthProviderImpl.class.cast(auth).getConfig().getLogoutPath();
if (endSessionEndpoint == null && oidcConfig.logout.path.isPresent()) {
throw new RuntimeException(
"The application supports RP-Initiated Logout but the OpenID Provider does not advertise the end_session_endpoint");
}
break;
} catch (Throwable throwable) {
while (throwable instanceof CompletionException && throwable.getCause() != null) {
throwable = throwable.getCause();
}
if (throwable instanceof OIDCException) {
if (i + 1 < connectionRetryCount) {
try {
Thread.sleep(2000);
} catch (InterruptedException iex) {
}
} else {
throw (OIDCException) throwable;
}
} else {
throw new OIDCException(throwable);
}
}
}
return new TenantConfigContext(auth, oidcConfig);
} | throw new RuntimeException( | private TenantConfigContext createTenantContext(Vertx vertx, OidcTenantConfig oidcConfig, String tenantId) {
if (!oidcConfig.tenantEnabled) {
LOG.debugf("%s tenant configuration is disabled", tenantId);
return null;
}
OAuth2ClientOptions options = new OAuth2ClientOptions();
if (oidcConfig.getClientId().isPresent()) {
options.setClientID(oidcConfig.getClientId().get());
}
if (oidcConfig.getToken().issuer.isPresent()) {
options.setValidateIssuer(false);
}
if (oidcConfig.getToken().getExpirationGrace().isPresent()) {
JWTOptions jwtOptions = new JWTOptions();
jwtOptions.setLeeway(oidcConfig.getToken().getExpirationGrace().get());
options.setJWTOptions(jwtOptions);
}
if (oidcConfig.getPublicKey().isPresent()) {
return createdTenantContextFromPublicKey(options, oidcConfig);
}
if (!oidcConfig.getAuthServerUrl().isPresent() || !oidcConfig.getClientId().isPresent()) {
throw new ConfigurationException(
"Both 'auth-server-url' and 'client-id' or alterntively 'public-key' must be configured"
+ " when the quarkus-oidc extension is enabled");
}
options.setSite(oidcConfig.getAuthServerUrl().get());
if (oidcConfig.getIntrospectionPath().isPresent()) {
options.setIntrospectionPath(oidcConfig.getIntrospectionPath().get());
}
if (oidcConfig.getJwksPath().isPresent()) {
options.setJwkPath(oidcConfig.getJwksPath().get());
}
Credentials creds = oidcConfig.getCredentials();
if (creds.secret.isPresent() && creds.clientSecret.value.isPresent()) {
throw new ConfigurationException(
"'credentials.secret' and 'credentials.client-secret' properties are mutually exclusive");
}
if ((creds.secret.isPresent() || creds.clientSecret.value.isPresent()) && creds.jwt.secret.isPresent()) {
throw new ConfigurationException(
"Use only 'credentials.secret' or 'credentials.client-secret' or 'credentials.jwt.secret' property");
}
if (creds.secret.isPresent() || creds.clientSecret.value.isPresent()
&& creds.clientSecret.method.orElseGet(() -> Secret.Method.BASIC) == Secret.Method.BASIC) {
options.setClientSecret(creds.secret.orElseGet(() -> creds.clientSecret.value.get()));
} else {
options.setClientSecretParameterName(null);
}
Optional<ProxyOptions> proxyOpt = toProxyOptions(oidcConfig.getProxy());
if (proxyOpt.isPresent()) {
options.setProxyOptions(proxyOpt.get());
}
if (oidcConfig.tls.verification == Verification.NONE) {
options.setTrustAll(true);
options.setVerifyHost(false);
}
final long connectionDelayInSecs = oidcConfig.getConnectionDelay().isPresent()
? oidcConfig.getConnectionDelay().get().toMillis() / 1000
: 0;
final long connectionRetryCount = connectionDelayInSecs > 1 ? connectionDelayInSecs / 2 : 1;
if (connectionRetryCount > 1) {
LOG.infof("Connecting to IDP for up to %d times every 2 seconds", connectionRetryCount);
}
OAuth2Auth auth = null;
for (long i = 0; i < connectionRetryCount; i++) {
try {
CompletableFuture<OAuth2Auth> cf = new CompletableFuture<>();
KeycloakAuth.discover(vertx, options, new Handler<AsyncResult<OAuth2Auth>>() {
@Override
public void handle(AsyncResult<OAuth2Auth> event) {
if (event.failed()) {
cf.completeExceptionally(toOidcException(event.cause()));
} else {
cf.complete(event.result());
}
}
});
auth = cf.join();
if (!ApplicationType.WEB_APP.equals(oidcConfig.applicationType)) {
if (oidcConfig.token.refreshExpired) {
throw new RuntimeException(
"The 'token.refresh-expired' property can only be enabled for " + ApplicationType.WEB_APP
+ " application types");
}
if (oidcConfig.logout.path.isPresent()) {
throw new RuntimeException(
"The 'logout.path' property can only be enabled for " + ApplicationType.WEB_APP
+ " application types");
}
}
String endSessionEndpoint = OAuth2AuthProviderImpl.class.cast(auth).getConfig().getLogoutPath();
if (oidcConfig.logout.path.isPresent()) {
if (!oidcConfig.endSessionPath.isPresent() && endSessionEndpoint == null) {
throw new RuntimeException(
"The application supports RP-Initiated Logout but the OpenID Provider does not advertise the end_session_endpoint");
}
}
break;
} catch (Throwable throwable) {
while (throwable instanceof CompletionException && throwable.getCause() != null) {
throwable = throwable.getCause();
}
if (throwable instanceof OIDCException) {
if (i + 1 < connectionRetryCount) {
try {
Thread.sleep(2000);
} catch (InterruptedException iex) {
}
} else {
throw (OIDCException) throwable;
}
} else {
throw new OIDCException(throwable);
}
}
}
return new TenantConfigContext(auth, oidcConfig);
} | class OidcRecorder {
private static final Logger LOG = Logger.getLogger(OidcRecorder.class);
public Supplier<TenantConfigBean> setup(OidcConfig config, Supplier<Vertx> vertx) {
final Vertx vertxValue = vertx.get();
Map<String, TenantConfigContext> tenantsConfig = new HashMap<>();
for (Map.Entry<String, OidcTenantConfig> tenant : config.namedTenants.entrySet()) {
if (config.defaultTenant.getTenantId().isPresent()
&& tenant.getKey().equals(config.defaultTenant.getTenantId().get())) {
throw new OIDCException("tenant-id '" + tenant.getKey() + "' duplicates the default tenant-id");
}
if (tenant.getValue().getTenantId().isPresent() && !tenant.getKey().equals(tenant.getValue().getTenantId().get())) {
throw new OIDCException("Configuration has 2 different tenant-id values: '"
+ tenant.getKey() + "' and '" + tenant.getValue().getTenantId().get() + "'");
}
tenantsConfig.put(tenant.getKey(), createTenantContext(vertxValue, tenant.getValue(), tenant.getKey()));
}
TenantConfigContext tenantContext = createTenantContext(vertxValue, config.defaultTenant, "Default");
return new Supplier<TenantConfigBean>() {
@Override
public TenantConfigBean get() {
return new TenantConfigBean(tenantsConfig, tenantContext,
new Function<OidcTenantConfig, TenantConfigContext>() {
@Override
public TenantConfigContext apply(OidcTenantConfig config) {
return createTenantContext(vertxValue, config, config.getTenantId().get());
}
});
}
};
}
protected static OIDCException toOidcException(Throwable cause) {
final String message = "OIDC server is not available at the 'quarkus.oidc.auth-server-url' URL. "
+ "Please make sure it is correct. Note it has to end with a realm value if you work with Keycloak, for example:"
+ " 'https:
return new OIDCException(message, cause);
}
protected static Optional<ProxyOptions> toProxyOptions(OidcTenantConfig.Proxy proxyConfig) {
if (!proxyConfig.host.isPresent()) {
return Optional.empty();
}
JsonObject jsonOptions = new JsonObject();
jsonOptions.put("host", proxyConfig.host.get());
jsonOptions.put("port", proxyConfig.port);
if (proxyConfig.username.isPresent()) {
jsonOptions.put("username", proxyConfig.username.get());
}
if (proxyConfig.password.isPresent()) {
jsonOptions.put("password", proxyConfig.password.get());
}
return Optional.of(new ProxyOptions(jsonOptions));
}
} | class OidcRecorder {
private static final Logger LOG = Logger.getLogger(OidcRecorder.class);
public Supplier<TenantConfigBean> setup(OidcConfig config, Supplier<Vertx> vertx) {
final Vertx vertxValue = vertx.get();
Map<String, TenantConfigContext> tenantsConfig = new HashMap<>();
for (Map.Entry<String, OidcTenantConfig> tenant : config.namedTenants.entrySet()) {
if (config.defaultTenant.getTenantId().isPresent()
&& tenant.getKey().equals(config.defaultTenant.getTenantId().get())) {
throw new OIDCException("tenant-id '" + tenant.getKey() + "' duplicates the default tenant-id");
}
if (tenant.getValue().getTenantId().isPresent() && !tenant.getKey().equals(tenant.getValue().getTenantId().get())) {
throw new OIDCException("Configuration has 2 different tenant-id values: '"
+ tenant.getKey() + "' and '" + tenant.getValue().getTenantId().get() + "'");
}
tenantsConfig.put(tenant.getKey(), createTenantContext(vertxValue, tenant.getValue(), tenant.getKey()));
}
TenantConfigContext tenantContext = createTenantContext(vertxValue, config.defaultTenant, "Default");
return new Supplier<TenantConfigBean>() {
@Override
public TenantConfigBean get() {
return new TenantConfigBean(tenantsConfig, tenantContext,
new Function<OidcTenantConfig, TenantConfigContext>() {
@Override
public TenantConfigContext apply(OidcTenantConfig config) {
return createTenantContext(vertxValue, config, config.getTenantId().get());
}
});
}
};
}
@SuppressWarnings("deprecation")
private TenantConfigContext createdTenantContextFromPublicKey(OAuth2ClientOptions options, OidcTenantConfig oidcConfig) {
if (oidcConfig.applicationType == ApplicationType.WEB_APP) {
throw new ConfigurationException("'public-key' property can only be used with the 'service' applications");
}
LOG.debug("'public-key' property for the local token verification is set,"
+ " no connection to the OIDC server will be created");
options.addPubSecKey(new PubSecKeyOptions()
.setAlgorithm("RS256")
.setPublicKey(oidcConfig.getPublicKey().get()));
return new TenantConfigContext(new OAuth2AuthProviderImpl(null, options), oidcConfig);
}
protected static OIDCException toOidcException(Throwable cause) {
final String message = "OIDC server is not available at the 'quarkus.oidc.auth-server-url' URL. "
+ "Please make sure it is correct. Note it has to end with a realm value if you work with Keycloak, for example:"
+ " 'https:
return new OIDCException(message, cause);
}
protected static Optional<ProxyOptions> toProxyOptions(OidcTenantConfig.Proxy proxyConfig) {
if (!proxyConfig.host.isPresent()) {
return Optional.empty();
}
JsonObject jsonOptions = new JsonObject();
jsonOptions.put("host", proxyConfig.host.get());
jsonOptions.put("port", proxyConfig.port);
if (proxyConfig.username.isPresent()) {
jsonOptions.put("username", proxyConfig.username.get());
}
if (proxyConfig.password.isPresent()) {
jsonOptions.put("password", proxyConfig.password.get());
}
return Optional.of(new ProxyOptions(jsonOptions));
}
} |
I know we said Java might diverge on this (doing file for sync, url for async), but I think that because you've added a recognizeReceipts and a recognizeReceipts FromURL, you can have the other sync and async functions call the files functions | public static void main(final String[] args) {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("https:
.buildAsyncClient();
PollerFlux<OperationResult, IterableStream<FormPage>> recognizeLayoutPoller =
client.beginRecognizeContentFromUrl("file_source_url");
Mono<IterableStream<FormPage>> layoutPageResults = recognizeLayoutPoller
.last()
.flatMap(trainingOperationResponse -> {
if (trainingOperationResponse.getStatus().isComplete()) {
return trainingOperationResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ trainingOperationResponse.getStatus()));
}
});
layoutPageResults.subscribe(formPages -> formPages.forEach(formPage -> {
System.out.println("----Recognizing content ----");
System.out.printf("Has width: %s and height: %s, measured with unit: %s%n", formPage.getWidth(),
formPage.getHeight(),
formPage.getUnit());
formPage.getTables().forEach(formTable -> {
System.out.printf("Table has %s rows and %s columns.%n", formTable.getRowCount(),
formTable.getColumnCount());
formTable.getCells().forEach(formTableCell -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formTableCell.getBoundingBox() != null) {
formTableCell.getBoundingBox().getPoints().forEach(point ->
boundingBoxStr.append(String.format("[%s, %s]", point.getX(), point.getY())));
}
System.out.printf("Cell has text %s, within bounding box %s.%n", formTableCell.getText(),
boundingBoxStr);
});
System.out.println();
});
}));
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | client.beginRecognizeContentFromUrl("file_source_url"); | public static void main(final String[] args) {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("https:
.buildAsyncClient();
PollerFlux<OperationResult, IterableStream<FormPage>> recognizeLayoutPoller =
client.beginRecognizeContentFromUrl("https:
Mono<IterableStream<FormPage>> layoutPageResults = recognizeLayoutPoller
.last()
.flatMap(trainingOperationResponse -> {
if (trainingOperationResponse.getStatus().isComplete()) {
return trainingOperationResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ trainingOperationResponse.getStatus()));
}
});
layoutPageResults.subscribe(formPages -> formPages.forEach(formPage -> {
System.out.println("----Recognizing content ----");
System.out.printf("Has width: %s and height: %s, measured with unit: %s%n", formPage.getWidth(),
formPage.getHeight(),
formPage.getUnit());
formPage.getTables().forEach(formTable -> {
System.out.printf("Table has %s rows and %s columns.%n", formTable.getRowCount(),
formTable.getColumnCount());
formTable.getCells().forEach(formTableCell -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formTableCell.getBoundingBox() != null) {
formTableCell.getBoundingBox().getPoints().forEach(point ->
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(), point.getY())));
}
System.out.printf("Cell has text %s, within bounding box %s.%n", formTableCell.getText(),
boundingBoxStr);
});
System.out.println();
});
}));
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class RecognizeContentAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} | class RecognizeContentAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} |
@steveniemitz Please follow up with a separate PR if this is not addressed. | public List<String> detect(ClassLoader classLoader) {
List<File> classpathContents =
classGraph
.disableNestedJarScanning()
.addClassLoader(classLoader)
.scan(1)
.getClasspathFiles();
return classpathContents.stream().map(File::getAbsolutePath).collect(Collectors.toList());
} | .scan(1) | public List<String> detect(ClassLoader classLoader) {
List<File> classpathContents =
classGraph
.disableNestedJarScanning()
.addClassLoader(classLoader)
.scan(1)
.getClasspathFiles();
return classpathContents.stream().map(File::getAbsolutePath).collect(Collectors.toList());
} | class loader uses.
*/
@Override | class loader uses.
*/
@Override |
en, don't effect, because analyze will use real name | public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!super.equals(o)) {
return false;
}
SubfieldExpr that = (SubfieldExpr) o;
return Objects.equals(fieldNames, that.fieldNames);
} | return Objects.equals(fieldNames, that.fieldNames); | public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!super.equals(o)) {
return false;
}
SubfieldExpr that = (SubfieldExpr) o;
return Objects.equals(fieldNames, that.fieldNames);
} | class SubfieldExpr extends Expr {
private List<String> fieldNames;
public SubfieldExpr(Expr child, List<String> fieldNames) {
this(child, null, fieldNames);
}
public SubfieldExpr(Expr child, List<String> fieldNames, NodePosition pos) {
this(child, null, fieldNames, pos);
}
public SubfieldExpr(Expr child, Type type, List<String> fieldNames) {
this(child, type, fieldNames, NodePosition.ZERO);
}
public SubfieldExpr(Expr child, Type type, List<String> fieldNames, NodePosition pos) {
super(pos);
if (type != null) {
Preconditions.checkArgument(child.getType().isStructType());
}
children.add(child);
this.type = type;
this.fieldNames = ImmutableList.copyOf(fieldNames);
}
public SubfieldExpr(SubfieldExpr other) {
super(other);
fieldNames = other.fieldNames;
}
public void setFieldNames(List<String> fieldNames) {
this.fieldNames = ImmutableList.copyOf(fieldNames);
}
public List<String> getFieldNames() {
return fieldNames;
}
public <R, C> R accept(AstVisitor<R, C> visitor, C context) {
return visitor.visitSubfieldExpr(this, context);
}
@Override
protected void analyzeImpl(Analyzer analyzer) throws AnalysisException {
Preconditions.checkState(false, "unreachable");
}
@Override
protected String toSqlImpl() {
return getChild(0).toSqlImpl() + "." + Joiner.on('.').join(fieldNames);
}
@Override
protected void toThrift(TExprNode msg) {
msg.setNode_type(TExprNodeType.SUBFIELD_EXPR);
msg.setUsed_subfield_names(fieldNames);
}
@Override
public Expr clone() {
return new SubfieldExpr(this);
}
@Override
public boolean isSelfMonotonic() {
return children.get(0).isSelfMonotonic();
}
@Override
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), fieldNames);
}
} | class SubfieldExpr extends Expr {
private List<String> fieldNames;
public SubfieldExpr(Expr child, List<String> fieldNames) {
this(child, null, fieldNames);
}
public SubfieldExpr(Expr child, List<String> fieldNames, NodePosition pos) {
this(child, null, fieldNames, pos);
}
public SubfieldExpr(Expr child, Type type, List<String> fieldNames) {
this(child, type, fieldNames, NodePosition.ZERO);
}
public SubfieldExpr(Expr child, Type type, List<String> fieldNames, NodePosition pos) {
super(pos);
if (type != null) {
Preconditions.checkArgument(child.getType().isStructType());
}
children.add(child);
this.type = type;
this.fieldNames = ImmutableList.copyOf(fieldNames);
}
public SubfieldExpr(SubfieldExpr other) {
super(other);
fieldNames = other.fieldNames;
}
public void setFieldNames(List<String> fieldNames) {
this.fieldNames = ImmutableList.copyOf(fieldNames);
}
public List<String> getFieldNames() {
return fieldNames;
}
public <R, C> R accept(AstVisitor<R, C> visitor, C context) {
return visitor.visitSubfieldExpr(this, context);
}
@Override
protected void analyzeImpl(Analyzer analyzer) throws AnalysisException {
Preconditions.checkState(false, "unreachable");
}
@Override
protected String toSqlImpl() {
return getChild(0).toSqlImpl() + "." + Joiner.on('.').join(fieldNames);
}
@Override
protected void toThrift(TExprNode msg) {
msg.setNode_type(TExprNodeType.SUBFIELD_EXPR);
msg.setUsed_subfield_names(fieldNames);
}
@Override
public Expr clone() {
return new SubfieldExpr(this);
}
@Override
public boolean isSelfMonotonic() {
return children.get(0).isSelfMonotonic();
}
@Override
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), fieldNames);
}
} |
I think it's a good suggestion. Done. | public CheckpointableKeyedStateBackend<K> getRestoredKeyedStateBackend() {
return new AbstractKeyedStateBackend<K>(keyedStateBackend) {
@Override
public void setCurrentKey(K newKey) {
keyedStateBackend.setCurrentKey(newKey);
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
keyedStateBackend.notifyCheckpointComplete(checkpointId);
}
@Nonnull
@Override
public SavepointResources<K> savepoint() throws Exception {
return keyedStateBackend.savepoint();
}
@Override
public int numKeyValueStateEntries() {
return keyedStateBackend.numKeyValueStateEntries();
}
@Override
public <N> Stream<K> getKeys(String state, N namespace) {
return keyedStateBackend.getKeys(state, namespace);
}
@Override
public <N> Stream<Tuple2<K, N>> getKeysAndNamespaces(String state) {
return keyedStateBackend.getKeysAndNamespaces(state);
}
@Nonnull
@Override
public <N, SV, SEV, S extends State, IS extends S> IS createInternalState(
@Nonnull TypeSerializer<N> namespaceSerializer,
@Nonnull StateDescriptor<S, SV> stateDesc,
@Nonnull
StateSnapshotTransformer.StateSnapshotTransformFactory<SEV>
snapshotTransformFactory)
throws Exception {
return keyedStateBackend.createInternalState(
namespaceSerializer, stateDesc, snapshotTransformFactory);
}
@Override
public <N, S extends State> S getPartitionedState(
N namespace,
TypeSerializer<N> namespaceSerializer,
StateDescriptor<S, ?> stateDescriptor)
throws Exception {
S partitionedState =
keyedStateBackend.getPartitionedState(
namespace, namespaceSerializer, stateDescriptor);
functionDelegationHelper.addOrUpdate(stateDescriptor);
return partitionedState;
}
@Override
public <N, S extends State, V> S getOrCreateKeyedState(
TypeSerializer<N> namespaceSerializer, StateDescriptor<S, V> stateDescriptor)
throws Exception {
S keyedState =
keyedStateBackend.getOrCreateKeyedState(
namespaceSerializer, stateDescriptor);
functionDelegationHelper.addOrUpdate(stateDescriptor);
return keyedState;
}
@Nonnull
@Override
@SuppressWarnings("unchecked")
public <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> create(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer) {
ChangelogKeyGroupedPriorityQueue<T> existingState =
(ChangelogKeyGroupedPriorityQueue<T>)
changelogStateFactory.getExistingState(
stateName,
StateMetaInfoSnapshot.BackendStateType.PRIORITY_QUEUE);
return existingState == null
? keyedStateBackend.create(stateName, byteOrderedElementSerializer)
: existingState;
}
@Nonnull
@Override
public RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot(
long checkpointId,
long timestamp,
@Nonnull CheckpointStreamFactory streamFactory,
@Nonnull CheckpointOptions checkpointOptions)
throws Exception {
return keyedStateBackend.snapshot(
checkpointId, timestamp, streamFactory, checkpointOptions);
}
@Override
public void dispose() {
super.dispose();
changelogStateFactory.dispose();
}
};
} | @Nonnull TypeSerializer<T> byteOrderedElementSerializer) { | public CheckpointableKeyedStateBackend<K> getRestoredKeyedStateBackend() {
return wrapKeyedStateBackend(
keyedStateBackend, changelogStateFactory, functionDelegationHelper);
} | class ChangelogMigrationRestoreTarget<K> implements ChangelogRestoreTarget<K> {
private final AbstractKeyedStateBackend<K> keyedStateBackend;
private final ChangelogStateFactory changelogStateFactory;
private final FunctionDelegationHelper functionDelegationHelper =
new FunctionDelegationHelper();
public ChangelogMigrationRestoreTarget(
AbstractKeyedStateBackend<K> keyedStateBackend,
ChangelogStateFactory changelogStateFactory) {
this.keyedStateBackend = keyedStateBackend;
this.changelogStateFactory = changelogStateFactory;
}
@Override
public KeyGroupRange getKeyGroupRange() {
return keyedStateBackend.getKeyGroupRange();
}
@Override
@SuppressWarnings("unchecked")
public <N, S extends State, V> S createKeyedState(
TypeSerializer<N> namespaceSerializer, StateDescriptor<S, V> stateDescriptor)
throws Exception {
S keyedState =
keyedStateBackend.getOrCreateKeyedState(namespaceSerializer, stateDescriptor);
functionDelegationHelper.addOrUpdate(stateDescriptor);
final InternalKvState<K, N, V> kvState = (InternalKvState<K, N, V>) keyedState;
ChangelogState changelogState =
changelogStateFactory.create(stateDescriptor, kvState, keyedStateBackend);
return (S) changelogState;
}
@Nonnull
@Override
@SuppressWarnings("unchecked")
public <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> createPqState(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer) {
ChangelogKeyGroupedPriorityQueue<T> queue =
(ChangelogKeyGroupedPriorityQueue<T>)
changelogStateFactory.getExistingState(
stateName, StateMetaInfoSnapshot.BackendStateType.PRIORITY_QUEUE);
if (queue == null) {
KeyGroupedInternalPriorityQueue<T> internalPriorityQueue =
keyedStateBackend.create(stateName, byteOrderedElementSerializer);
queue =
changelogStateFactory.create(
stateName, internalPriorityQueue, byteOrderedElementSerializer);
}
return queue;
}
@Override
public ChangelogState getExistingState(
String name, StateMetaInfoSnapshot.BackendStateType type) {
return changelogStateFactory.getExistingState(name, type);
}
@Override
} | class ChangelogMigrationRestoreTarget<K> implements ChangelogRestoreTarget<K> {
private final AbstractKeyedStateBackend<K> keyedStateBackend;
private final ChangelogStateFactory changelogStateFactory;
private final FunctionDelegationHelper functionDelegationHelper =
new FunctionDelegationHelper();
public ChangelogMigrationRestoreTarget(
AbstractKeyedStateBackend<K> keyedStateBackend,
ChangelogStateFactory changelogStateFactory) {
this.keyedStateBackend = keyedStateBackend;
this.changelogStateFactory = changelogStateFactory;
}
@Override
public KeyGroupRange getKeyGroupRange() {
return keyedStateBackend.getKeyGroupRange();
}
@Override
@SuppressWarnings("unchecked")
public <N, S extends State, V> S createKeyedState(
TypeSerializer<N> namespaceSerializer, StateDescriptor<S, V> stateDescriptor)
throws Exception {
S keyedState =
keyedStateBackend.getOrCreateKeyedState(namespaceSerializer, stateDescriptor);
functionDelegationHelper.addOrUpdate(stateDescriptor);
final InternalKvState<K, N, V> kvState = (InternalKvState<K, N, V>) keyedState;
ChangelogState changelogState =
changelogStateFactory.create(
stateDescriptor,
kvState,
VoidStateChangeLogger.getInstance(),
keyedStateBackend);
return (S) changelogState;
}
@Nonnull
@Override
@SuppressWarnings("unchecked")
public <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> createPqState(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer) {
ChangelogKeyGroupedPriorityQueue<T> queue =
(ChangelogKeyGroupedPriorityQueue<T>)
changelogStateFactory.getExistingState(
stateName, StateMetaInfoSnapshot.BackendStateType.PRIORITY_QUEUE);
if (queue == null) {
KeyGroupedInternalPriorityQueue<T> internalPriorityQueue =
keyedStateBackend.create(stateName, byteOrderedElementSerializer);
queue =
changelogStateFactory.create(
stateName,
internalPriorityQueue,
VoidStateChangeLogger.getInstance(),
byteOrderedElementSerializer);
}
return queue;
}
@Override
public ChangelogState getExistingState(
String name, StateMetaInfoSnapshot.BackendStateType type) {
return changelogStateFactory.getExistingState(name, type);
}
@Override
private static class VoidStateChangeLogger<Value, Namespace>
implements KvStateChangeLogger<Value, Namespace>, StateChangeLogger<Value, Namespace> {
private static final VoidStateChangeLogger<Object, Object> INSTANCE =
new VoidStateChangeLogger<>();
@SuppressWarnings("unchecked")
public static <Value, Namespace> VoidStateChangeLogger<Value, Namespace> getInstance() {
return (VoidStateChangeLogger<Value, Namespace>) INSTANCE;
}
private VoidStateChangeLogger() {}
@Override
public void namespacesMerged(Namespace target, Collection<Namespace> sources)
throws IOException {}
@Override
public void valueUpdated(Value newValue, Namespace ns) throws IOException {}
@Override
public void valueUpdatedInternal(Value newValue, Namespace ns) throws IOException {}
@Override
public void valueAdded(Value addedValue, Namespace ns) throws IOException {}
@Override
public void valueCleared(Namespace ns) throws IOException {}
@Override
public void valueElementAdded(
ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataSerializer,
Namespace ns)
throws IOException {}
@Override
public void valueElementAddedOrUpdated(
ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataSerializer,
Namespace ns)
throws IOException {}
@Override
public void valueElementRemoved(
ThrowingConsumer<DataOutputViewStreamWrapper, IOException> dataSerializer,
Namespace ns)
throws IOException {}
@Override
public void resetWritingMetaFlag() {}
@Override
public void close() throws IOException {}
}
/**
 * Wraps a keyed state backend so that:
 * <ul>
 *   <li>priority-queue creation first consults the changelog state factory and
 *       reuses an already-registered changelog-backed queue if present,</li>
 *   <li>state descriptors obtained via {@code getPartitionedState} /
 *       {@code getOrCreateKeyedState} are registered with the
 *       {@code FunctionDelegationHelper},</li>
 *   <li>{@code dispose()} also disposes the changelog state factory.</li>
 * </ul>
 * All other calls are forwarded unchanged to the wrapped backend.
 */
private static <K> AbstractKeyedStateBackend<K> wrapKeyedStateBackend(
        AbstractKeyedStateBackend<K> keyedStateBackend,
        ChangelogStateFactory changelogStateFactory,
        FunctionDelegationHelper functionDelegationHelper) {
    return new AbstractKeyedStateBackend<K>(keyedStateBackend) {
        @Override
        public void setCurrentKey(K newKey) {
            keyedStateBackend.setCurrentKey(newKey);
        }
        @Override
        public void notifyCheckpointComplete(long checkpointId) throws Exception {
            keyedStateBackend.notifyCheckpointComplete(checkpointId);
        }
        @Nonnull
        @Override
        public SavepointResources<K> savepoint() throws Exception {
            return keyedStateBackend.savepoint();
        }
        @Override
        public int numKeyValueStateEntries() {
            return keyedStateBackend.numKeyValueStateEntries();
        }
        @Override
        public <N> Stream<K> getKeys(String state, N namespace) {
            return keyedStateBackend.getKeys(state, namespace);
        }
        @Override
        public <N> Stream<Tuple2<K, N>> getKeysAndNamespaces(String state) {
            return keyedStateBackend.getKeysAndNamespaces(state);
        }
        @Nonnull
        @Override
        public <N, SV, SEV, S extends State, IS extends S> IS createInternalState(
                @Nonnull TypeSerializer<N> namespaceSerializer,
                @Nonnull StateDescriptor<S, SV> stateDesc,
                @Nonnull
                        StateSnapshotTransformer.StateSnapshotTransformFactory<SEV>
                                snapshotTransformFactory)
                throws Exception {
            return keyedStateBackend.createInternalState(
                    namespaceSerializer, stateDesc, snapshotTransformFactory);
        }
        @Override
        public <N, S extends State> S getPartitionedState(
                N namespace,
                TypeSerializer<N> namespaceSerializer,
                StateDescriptor<S, ?> stateDescriptor)
                throws Exception {
            S partitionedState =
                    keyedStateBackend.getPartitionedState(
                            namespace, namespaceSerializer, stateDescriptor);
            // Record the descriptor so function delegation can be kept up to date.
            functionDelegationHelper.addOrUpdate(stateDescriptor);
            return partitionedState;
        }
        @Override
        public <N, S extends State, V> S getOrCreateKeyedState(
                TypeSerializer<N> namespaceSerializer, StateDescriptor<S, V> stateDescriptor)
                throws Exception {
            S keyedState =
                    keyedStateBackend.getOrCreateKeyedState(
                            namespaceSerializer, stateDescriptor);
            // Same descriptor bookkeeping as in getPartitionedState above.
            functionDelegationHelper.addOrUpdate(stateDescriptor);
            return keyedState;
        }
        @Nonnull
        @Override
        @SuppressWarnings("unchecked")
        public <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
                KeyGroupedInternalPriorityQueue<T> create(
                        @Nonnull String stateName,
                        @Nonnull TypeSerializer<T> byteOrderedElementSerializer) {
            // Prefer an already-registered changelog-backed queue; fall back to the
            // wrapped backend only when no such state exists yet.
            ChangelogKeyGroupedPriorityQueue<T> existingState =
                    (ChangelogKeyGroupedPriorityQueue<T>)
                            changelogStateFactory.getExistingState(
                                    stateName,
                                    StateMetaInfoSnapshot.BackendStateType.PRIORITY_QUEUE);
            return existingState == null
                    ? keyedStateBackend.create(stateName, byteOrderedElementSerializer)
                    : existingState;
        }
        @Nonnull
        @Override
        public RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot(
                long checkpointId,
                long timestamp,
                @Nonnull CheckpointStreamFactory streamFactory,
                @Nonnull CheckpointOptions checkpointOptions)
                throws Exception {
            return keyedStateBackend.snapshot(
                    checkpointId, timestamp, streamFactory, checkpointOptions);
        }
        @Override
        public void dispose() {
            // Dispose the wrapped backend and the factory-owned changelog states.
            keyedStateBackend.dispose();
            changelogStateFactory.dispose();
        }
    };
}
} |
We can add a negative test like ```ballerina record { byte i; int j?; } rec = let int v = 160 in {i: v}; ``` Without this fix, the error was something like ``` incompatible types: expected 'record {| byte i; int j?; anydata...; |}', found 'record {| int i; |}' ``` With this fix I believe it'll be something like ``` incompatible types: expected 'byte', found 'int' ``` | public void testLetExpressionNegative() {
int i = 0;
BAssertUtil.validateError(negativeResult, i++, "redeclared symbol 'x'", 19, 21);
BAssertUtil.validateError(negativeResult, i++, "undefined symbol 'y'", 23, 27);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 27, 25);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 28, 39);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found 'int'", 29, 28);
BAssertUtil.validateError(negativeResult, i, "incompatible types: expected 'string', found 'int'", 30, 42);
} | BAssertUtil.validateError(negativeResult, i, "incompatible types: expected 'string', found 'int'", 30, 42); | public void testLetExpressionNegative() {
int i = 0;
BAssertUtil.validateError(negativeResult, i++, "redeclared symbol 'x'", 19, 21);
BAssertUtil.validateError(negativeResult, i++, "undefined symbol 'y'", 23, 27);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 27, 25);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 28, 39);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found 'int'", 29, 28);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found 'int'", 30, 42);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'byte', found 'int'", 41, 37);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 46, 48);
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'float'", 48, 14);
BAssertUtil.validateError(negativeResult, i, "too many arguments in call to 'new()'", 57, 37);
} | class LetExpressionTest {
private CompileResult compileResult, negativeResult, notSupportedResult;
@BeforeClass
// Compiles the three shared Ballerina test sources once for all test methods:
// positive cases, negative (compile-error) cases, and not-yet-supported cases.
public void setup() {
    compileResult = BCompileUtil.compile("test-src/expressions/let/let-expression-test.bal");
    negativeResult = BCompileUtil.compile("test-src/expressions/let/let-expression-negative.bal");
    notSupportedResult = BCompileUtil.compile("test-src/expressions/let/let-not-supported.bal");
}
@Test(description = "Positive tests for let expression", dataProvider = "FunctionList")
// Invokes each Ballerina function named by the "FunctionList" data provider
// against the successfully compiled sources; assertions live in the .bal file.
public void testLetExpression(String funcName) {
    BRunUtil.invoke(compileResult, funcName);
}
@Test(description = "Negative test cases for let expression")
@Test(description = "Test cases for scenarios where let expression is not yet supported")
public void testLetExpressionNotSupported() {
    // Expected compile-time errors, in order; the (line, column) pairs point
    // into let-not-supported.bal.
    int i = 0;
    BAssertUtil.validateError(notSupportedResult, i++, "let expressions are not yet supported for record fields",
            18, 17);
    BAssertUtil.validateError(notSupportedResult, i, "let expressions are not yet supported for object fields",
            22, 22);
}
@DataProvider(name = "FunctionList")
// Names of the Ballerina test functions (defined in let-expression-test.bal)
// run one at a time by testLetExpression.
public Object[][] getTestFunctions() {
    return new Object[][]{
            {"testBasicLetExpr"},
            {"testBasicLetExprVar"},
            {"testMultipleVarDeclLetExpr"},
            {"testFunctionCallInVarDeclLetExpr"},
            {"testFunctionCallInLetExpr"},
            {"testMultipleVarDeclReuseLetExpr"},
            {"testGloballyDefinedLetExpr"},
            {"testLetExprAsFunctionArg"},
            {"testLetExprInIfStatement"},
            {"testLetExprInWhileStatement"},
            {"testLetExprInCompoundStatement"},
            {"testLetExpressionInMatch"},
            {"testLetExpressionInReturn"},
            {"testLetExprInElvis"},
            {"testLetExprInUnion"},
            {"testLetExprInJSON"},
            {"testLetExpresionInArrays"},
            {"testLetExpresionInTuples"},
            {"testLetExprInMap"},
            {"testLetExpressionTupleSimple"},
            {"testLetExpressionTupleBinding"},
            {"testLetExpressionTupleComplex"},
            {"testLetExpressionTupleBindingComplex"},
            {"testLetExpressionTupleBindingRef"},
            {"testLetExpressionRecordBindingSimple"},
            {"testLetExpressionRecordBindingComplexVar"},
            {"testAnonymousRecordWithLetExpression"}
    };
}
@AfterClass
// Drop references to the compile results so they can be garbage-collected
// after this class's tests finish.
public void tearDown() {
    compileResult = null;
    negativeResult = null;
    notSupportedResult = null;
}
} | class LetExpressionTest {
private CompileResult compileResult, negativeResult, notSupportedResult;
@BeforeClass
public void setup() {
compileResult = BCompileUtil.compile("test-src/expressions/let/let-expression-test.bal");
negativeResult = BCompileUtil.compile("test-src/expressions/let/let-expression-negative.bal");
notSupportedResult = BCompileUtil.compile("test-src/expressions/let/let-not-supported.bal");
}
@Test(description = "Positive tests for let expression", dataProvider = "FunctionList")
public void testLetExpression(String funcName) {
BRunUtil.invoke(compileResult, funcName);
}
@Test(description = "Negative test cases for let expression")
@Test(description = "Test cases for scenarios where let expression is not yet supported")
public void testLetExpressionNotSupported() {
int i = 0;
BAssertUtil.validateError(notSupportedResult, i++, "let expressions are not yet supported for record fields",
18, 17);
BAssertUtil.validateError(notSupportedResult, i, "let expressions are not yet supported for object fields",
22, 22);
}
@DataProvider(name = "FunctionList")
public Object[][] getTestFunctions() {
return new Object[][]{
{"testBasicLetExpr"},
{"testBasicLetExprVar"},
{"testMultipleVarDeclLetExpr"},
{"testFunctionCallInVarDeclLetExpr"},
{"testFunctionCallInLetExpr"},
{"testMultipleVarDeclReuseLetExpr"},
{"testGloballyDefinedLetExpr"},
{"testLetExprAsFunctionArg"},
{"testLetExprInIfStatement"},
{"testLetExprInWhileStatement"},
{"testLetExprInCompoundStatement"},
{"testLetExpressionInMatch"},
{"testLetExpressionInReturn"},
{"testLetExprInElvis"},
{"testLetExprInUnion"},
{"testLetExprInJSON"},
{"testLetExpresionInArrays"},
{"testLetExpresionInTuples"},
{"testLetExprInMap"},
{"testLetExpressionTupleSimple"},
{"testLetExpressionTupleBinding"},
{"testLetExpressionTupleComplex"},
{"testLetExpressionTupleBindingComplex"},
{"testLetExpressionTupleBindingRef"},
{"testLetExpressionRecordBindingSimple"},
{"testLetExpressionRecordBindingComplexVar"},
{"testAnonymousRecordWithLetExpression"},
{"testRecordWithLetExpression"},
{"testLetWithClass"}
};
}
@AfterClass
public void tearDown() {
compileResult = null;
negativeResult = null;
notSupportedResult = null;
}
} |
Agreed to merge in offline discussion | public void requireThatConnectionIsClosedAfterXRequests() throws Exception {
final int MAX_REQUESTS = 10;
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
ConnectorConfig.Builder connectorConfig = new ConnectorConfig.Builder()
.maxRequestsPerConnection(MAX_REQUESTS)
.ssl(new ConnectorConfig.Ssl.Builder()
.enabled(true)
.clientAuth(ConnectorConfig.Ssl.ClientAuth.Enum.NEED_AUTH)
.privateKeyFile(privateKeyFile.toString())
.certificateFile(certificateFile.toString())
.caCertificateFile(certificateFile.toString()));
ServerConfig.Builder serverConfig = new ServerConfig.Builder()
.connectionLog(new ServerConfig.ConnectionLog.Builder().enabled(true));
JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
new EchoRequestHandler(),
serverConfig,
connectorConfig,
binder -> {});
for (int i = 0; i < MAX_REQUESTS - 1; i++) {
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectNoHeader(CONNECTION);
}
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectHeader(CONNECTION, is(CLOSE));
try (CloseableHttpAsyncClient client = createHttp2Client(driver)) {
String uri = "https:
for (int i = 0; i < MAX_REQUESTS - 1; i++) {
SimpleHttpResponse response = client.execute(SimpleRequestBuilder.get(uri).build(), null).get();
assertEquals(OK, response.getCode());
}
try {
client.execute(SimpleRequestBuilder.get(uri).build(), null).get();
fail();
} catch (ExecutionException e) {
assertEquals(e.getMessage(), "org.apache.hc.core5.http2.H2StreamResetException: Stream refused");
}
}
assertTrue(driver.close());
} | public void requireThatConnectionIsClosedAfterXRequests() throws Exception {
final int MAX_REQUESTS = 10;
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
ConnectorConfig.Builder connectorConfig = new ConnectorConfig.Builder()
.maxRequestsPerConnection(MAX_REQUESTS)
.ssl(new ConnectorConfig.Ssl.Builder()
.enabled(true)
.clientAuth(ConnectorConfig.Ssl.ClientAuth.Enum.NEED_AUTH)
.privateKeyFile(privateKeyFile.toString())
.certificateFile(certificateFile.toString())
.caCertificateFile(certificateFile.toString()));
ServerConfig.Builder serverConfig = new ServerConfig.Builder()
.connectionLog(new ServerConfig.ConnectionLog.Builder().enabled(true));
JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
new EchoRequestHandler(),
serverConfig,
connectorConfig,
binder -> {});
for (int i = 0; i < MAX_REQUESTS - 1; i++) {
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectNoHeader(CONNECTION);
}
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectHeader(CONNECTION, is(CLOSE));
try (CloseableHttpAsyncClient client = createHttp2Client(driver)) {
String uri = "https:
for (int i = 0; i < MAX_REQUESTS - 1; i++) {
SimpleHttpResponse response = client.execute(SimpleRequestBuilder.get(uri).build(), null).get();
assertEquals(OK, response.getCode());
}
try {
client.execute(SimpleRequestBuilder.get(uri).build(), null).get();
fail();
} catch (ExecutionException e) {
assertEquals(e.getMessage(), "org.apache.hc.core5.http2.H2StreamResetException: Stream refused");
}
}
assertTrue(driver.close());
} | class HttpServerTest {
@Rule
public TemporaryFolder tmpFolder = new TemporaryFolder();
@Test
public void requireThatServerCanListenToRandomPort() throws Exception {
    // With no explicit listen port configured the server picks a free port;
    // the bound port must therefore be a real (non-zero) port number.
    final JettyTestDriver driver = JettyTestDriver.newInstance(mockRequestHandler());
    assertNotEquals(0, driver.server().getListenPort());
    assertTrue(driver.close());
}
@Test
// Binds one server to an ephemeral port, then verifies that a second server
// configured for the same port fails to start with a BindException.
public void requireThatServerCanNotListenToBoundPort() throws Exception {
    final JettyTestDriver driver = JettyTestDriver.newInstance(mockRequestHandler());
    // Bug fix: the original try/catch passed silently when no exception was
    // thrown at all; track whether the expected failure actually occurred.
    boolean portConflictDetected = false;
    try {
        JettyTestDriver.newConfiguredInstance(
                mockRequestHandler(),
                new ServerConfig.Builder(),
                new ConnectorConfig.Builder()
                        .listenPort(driver.server().getListenPort())
        );
    } catch (final Throwable t) {
        assertThat(t.getCause(), instanceOf(BindException.class));
        portConflictDetected = true;
    }
    assertTrue("Expected a BindException when listening to an already-bound port",
            portConflictDetected);
    assertTrue(driver.close());
}
@Test
public void requireThatBindingSetNotFoundReturns404() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
mockRequestHandler(),
new ServerConfig.Builder()
.developerMode(true),
new ConnectorConfig.Builder(),
newBindingSetSelector("unknown"));
driver.client().get("/status.html")
.expectStatusCode(is(NOT_FOUND))
.expectContent(containsPattern(Pattern.compile(
Pattern.quote(BindingSetNotFoundException.class.getName()) +
": No binding set named 'unknown'\\.\n\tat .+",
Pattern.DOTALL | Pattern.MULTILINE)));
assertTrue(driver.close());
}
@Test
public void requireThatTooLongInitLineReturns414() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
mockRequestHandler(),
new ServerConfig.Builder(),
new ConnectorConfig.Builder()
.requestHeaderSize(1));
driver.client().get("/status.html")
.expectStatusCode(is(REQUEST_URI_TOO_LONG));
assertTrue(driver.close());
}
@Test
public void requireThatAccessLogIsCalledForRequestRejectedByJetty() throws Exception {
    // A 1-byte request-header limit forces Jetty itself (not the request
    // handler) to reject the request; the rejection must still show up in the
    // request log with the 414 status.
    BlockingQueueRequestLog requestLogMock = new BlockingQueueRequestLog();
    final JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
            mockRequestHandler(),
            new ServerConfig.Builder(),
            new ConnectorConfig.Builder().requestHeaderSize(1),
            binder -> binder.bind(RequestLog.class).toInstance(requestLogMock));
    driver.client().get("/status.html")
            .expectStatusCode(is(REQUEST_URI_TOO_LONG));
    // Log entries are written asynchronously; wait up to 5 seconds for one.
    RequestLogEntry entry = requestLogMock.poll(Duration.ofSeconds(5));
    assertEquals(414, entry.statusCode().getAsInt());
    assertThat(driver.close(), is(true));
}
@Test
public void requireThatServerCanEcho() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoRequestHandler());
driver.client().get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatServerCanEchoCompressed() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoRequestHandler());
SimpleHttpClient client = driver.newClient(true);
client.get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatServerCanHandleMultipleRequests() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoRequestHandler());
driver.client().get("/status.html")
.expectStatusCode(is(OK));
driver.client().get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostWorks() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final String requestContent = generateContent('a', 30);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent(requestContent)
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith('{' + requestContent + "=[]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostDoesNotRemoveContentByDefault() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("foo=bar")
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{foo=[bar]}foo=bar"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostKeepsContentWhenConfiguredTo() throws Exception {
final JettyTestDriver driver = newDriverWithFormPostContentRemoved(new ParameterPrinterRequestHandler(), false);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("foo=bar")
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{foo=[bar]}foo=bar"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostRemovesContentWhenConfiguredTo() throws Exception {
final JettyTestDriver driver = newDriverWithFormPostContentRemoved(new ParameterPrinterRequestHandler(), true);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("foo=bar")
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{foo=[bar]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostWithCharsetSpecifiedWorks() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final String requestContent = generateContent('a', 30);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(X_DISABLE_CHUNKING, "true")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED + ";charset=UTF-8")
.setContent(requestContent)
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith('{' + requestContent + "=[]}"));
assertTrue(driver.close());
}
@Test
public void requireThatEmptyFormPostWorks() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormParametersAreParsed() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("a=b&c=d")
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith("{a=[b], c=[d]}"));
assertTrue(driver.close());
}
@Test
public void requireThatUriParametersAreParsed() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html?a=b&c=d")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{a=[b], c=[d]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormAndUriParametersAreMerged() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html?a=b&c=d1")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("c=d2&e=f")
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith("{a=[b], c=[d1, d2], e=[f]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormCharsetIsHonored() throws Exception {
final JettyTestDriver driver = newDriverWithFormPostContentRemoved(new ParameterPrinterRequestHandler(), true);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED + ";charset=ISO-8859-1")
.setBinaryContent(new byte[]{66, (byte) 230, 114, 61, 98, 108, (byte) 229})
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{B\u00e6r=[bl\u00e5]}"));
assertTrue(driver.close());
}
@Test
public void requireThatUnknownFormCharsetIsTreatedAsBadRequest() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED + ";charset=FLARBA-GARBA-7")
.setContent("a=b")
.execute();
response.expectStatusCode(is(UNSUPPORTED_MEDIA_TYPE));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostWithPercentEncodedContentIsDecoded() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("%20%3D%C3%98=%22%25+")
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith("{ =\u00d8=[\"% ]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostWithThrowingHandlerIsExceptionSafe() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ThrowingHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("a=b")
.execute();
response.expectStatusCode(is(INTERNAL_SERVER_ERROR));
assertTrue(driver.close());
}
@Test
public void requireThatMultiPostWorks() throws Exception {
final String startTxtContent = "this is a test for POST.";
final String updaterConfContent
= "identifier = updater\n"
+ "server_type = gds\n";
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.setMultipartContent(
newFileBody("start.txt", startTxtContent),
newFileBody("updater.conf", updaterConfContent))
.execute();
response.expectStatusCode(is(OK))
.expectContent(containsString(startTxtContent))
.expectContent(containsString(updaterConfContent));
}
@Test
public void requireThatRequestCookiesAreReceived() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new CookiePrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(COOKIE, "foo=bar")
.execute();
response.expectStatusCode(is(OK))
.expectContent(containsString("[foo=bar]"));
assertTrue(driver.close());
}
@Test
public void requireThatSetCookieHeaderIsCorrect() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new CookieSetterRequestHandler(
new Cookie("foo", "bar")
.setDomain(".localhost")
.setHttpOnly(true)
.setPath("/foopath")
.setSecure(true)));
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectHeader("Set-Cookie",
is("foo=bar; Path=/foopath; Domain=.localhost; Secure; HttpOnly"));
assertTrue(driver.close());
}
@Test
public void requireThatTimeoutWorks() throws Exception {
final UnresponsiveHandler requestHandler = new UnresponsiveHandler();
final JettyTestDriver driver = JettyTestDriver.newInstance(requestHandler);
driver.client().get("/status.html")
.expectStatusCode(is(GATEWAY_TIMEOUT));
ResponseDispatch.newInstance(OK).dispatch(requestHandler.responseHandler);
assertTrue(driver.close());
}
@Test
public void requireThatHeaderWithNullValueIsOmitted() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoWithHeaderRequestHandler("X-Foo", null));
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectNoHeader("X-Foo");
assertTrue(driver.close());
}
@Test
public void requireThatHeaderWithEmptyValueIsAllowed() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoWithHeaderRequestHandler("X-Foo", ""));
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectHeader("X-Foo", is(""));
assertTrue(driver.close());
}
@Test
public void requireThatNoConnectionHeaderMeansKeepAliveInHttp11KeepAliveDisabled() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoWithHeaderRequestHandler(CONNECTION, CLOSE));
driver.client().get("/status.html")
.expectHeader(CONNECTION, is(CLOSE));
assertThat(driver.close(), is(true));
}
@Test
@Test
public void requireThatServerCanRespondToSslRequest() throws Exception {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
final JettyTestDriver driver = JettyTestDriver.newInstanceWithSsl(new EchoRequestHandler(), certificateFile, privateKeyFile, TlsClientAuth.WANT);
driver.client().get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatServerCanRespondToHttp2Request() throws Exception {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
MetricConsumerMock metricConsumer = new MetricConsumerMock();
InMemoryConnectionLog connectionLog = new InMemoryConnectionLog();
JettyTestDriver driver = createSslTestDriver(certificateFile, privateKeyFile, metricConsumer, connectionLog);
try (CloseableHttpAsyncClient client = createHttp2Client(driver)) {
String uri = "https:
SimpleHttpResponse response = client.execute(SimpleRequestBuilder.get(uri).build(), null).get();
assertNull(response.getBodyText());
assertEquals(OK, response.getCode());
}
assertTrue(driver.close());
ConnectionLogEntry entry = connectionLog.logEntries().get(0);
assertEquals("HTTP/2.0", entry.httpProtocol().get());
}
@Test
public void requireThatTlsClientAuthenticationEnforcerRejectsRequestsForNonWhitelistedPaths() throws IOException {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
JettyTestDriver driver = createSslWithTlsClientAuthenticationEnforcer(certificateFile, privateKeyFile);
SSLContext trustStoreOnlyCtx = new SslContextBuilder()
.withTrustStore(certificateFile)
.build();
new SimpleHttpClient(trustStoreOnlyCtx, driver.server().getListenPort(), false)
.get("/dummy.html")
.expectStatusCode(is(UNAUTHORIZED));
assertTrue(driver.close());
}
@Test
public void requireThatTlsClientAuthenticationEnforcerAllowsRequestForWhitelistedPaths() throws IOException {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
JettyTestDriver driver = JettyTestDriver.newInstanceWithSsl(new EchoRequestHandler(), certificateFile, privateKeyFile, TlsClientAuth.WANT);
SSLContext trustStoreOnlyCtx = new SslContextBuilder()
.withTrustStore(certificateFile)
.build();
new SimpleHttpClient(trustStoreOnlyCtx, driver.server().getListenPort(), false)
.get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatConnectedAtReturnsNonZero() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ConnectedAtRequestHandler());
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectContent(matchesPattern("\\d{13,}"));
assertThat(driver.close(), is(true));
}
@Test
public void requireThatGzipEncodingRequestsAreAutomaticallyDecompressed() throws Exception {
JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
String requestContent = generateContent('a', 30);
ResponseValidator response = driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setGzipContent(requestContent)
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith('{' + requestContent + "=[]}"));
assertTrue(driver.close());
}
@Test
public void requireThatResponseStatsAreCollected() throws Exception {
RequestTypeHandler handler = new RequestTypeHandler();
JettyTestDriver driver = JettyTestDriver.newInstance(handler);
HttpResponseStatisticsCollector statisticsCollector = ((AbstractHandlerContainer) driver.server().server().getHandler())
.getChildHandlerByClass(HttpResponseStatisticsCollector.class);
{
List<HttpResponseStatisticsCollector.StatisticsEntry> stats = statisticsCollector.takeStatistics();
assertEquals(0, stats.size());
}
{
driver.client().newPost("/status.html").execute();
var entry = waitForStatistics(statisticsCollector);
assertEquals("http", entry.scheme);
assertEquals("POST", entry.method);
assertEquals("http.status.2xx", entry.name);
assertEquals("write", entry.requestType);
assertEquals(1, entry.value);
}
{
driver.client().newGet("/status.html").execute();
var entry = waitForStatistics(statisticsCollector);
assertEquals("http", entry.scheme);
assertEquals("GET", entry.method);
assertEquals("http.status.2xx", entry.name);
assertEquals("read", entry.requestType);
assertEquals(1, entry.value);
}
{
handler.setRequestType(Request.RequestType.READ);
driver.client().newPost("/status.html").execute();
var entry = waitForStatistics(statisticsCollector);
assertEquals("Handler overrides request type", "read", entry.requestType);
}
assertTrue(driver.close());
}
// Polls the collector until a statistics entry appears (entries are produced
// asynchronously after the response completes), then asserts exactly one entry
// was collected and returns it.
private HttpResponseStatisticsCollector.StatisticsEntry waitForStatistics(HttpResponseStatisticsCollector
        statisticsCollector) {
    List<HttpResponseStatisticsCollector.StatisticsEntry> entries = Collections.emptyList();
    int tries = 0;
    while (entries.isEmpty() && tries < 10000) {
        entries = statisticsCollector.takeStatistics();
        if (entries.isEmpty()) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // Bug fix: the interrupt was previously swallowed, leaving the
                // thread spinning for up to ~17 minutes. Restore the interrupt
                // flag and stop waiting; the assertion below will then fail fast.
                Thread.currentThread().interrupt();
                break;
            }
        }
        tries++;
    }
    assertEquals(1, entries.size());
    return entries.get(0);
}
@Test
public void requireThatConnectionThrottleDoesNotBlockConnectionsBelowThreshold() throws Exception {
JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
new EchoRequestHandler(),
new ServerConfig.Builder(),
new ConnectorConfig.Builder()
.throttling(new Throttling.Builder()
.enabled(true)
.maxAcceptRate(10)
.maxHeapUtilization(1.0)
.maxConnections(10)));
driver.client().get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatConnectionIsTrackedInConnectionLog() throws Exception {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
InMemoryConnectionLog connectionLog = new InMemoryConnectionLog();
Module overrideModule = binder -> binder.bind(ConnectionLog.class).toInstance(connectionLog);
JettyTestDriver driver = JettyTestDriver.newInstanceWithSsl(new OkRequestHandler(), certificateFile, privateKeyFile, TlsClientAuth.NEED, overrideModule);
int listenPort = driver.server().getListenPort();
StringBuilder builder = new StringBuilder();
for (int i = 0; i < 1000; i++) {
builder.append(i);
}
byte[] content = builder.toString().getBytes();
for (int i = 0; i < 100; i++) {
driver.client().newPost("/status.html").setBinaryContent(content).execute()
.expectStatusCode(is(OK));
}
assertTrue(driver.close());
List<ConnectionLogEntry> logEntries = connectionLog.logEntries();
Assertions.assertThat(logEntries).hasSize(1);
ConnectionLogEntry logEntry = logEntries.get(0);
assertEquals(4, UUID.fromString(logEntry.id()).version());
Assertions.assertThat(logEntry.timestamp()).isAfter(Instant.EPOCH);
Assertions.assertThat(logEntry.requests()).hasValue(100L);
Assertions.assertThat(logEntry.responses()).hasValue(100L);
Assertions.assertThat(logEntry.peerAddress()).hasValue("127.0.0.1");
Assertions.assertThat(logEntry.localAddress()).hasValue("127.0.0.1");
Assertions.assertThat(logEntry.localPort()).hasValue(listenPort);
Assertions.assertThat(logEntry.httpBytesReceived()).hasValueSatisfying(value -> Assertions.assertThat(value).isGreaterThan(100000L));
Assertions.assertThat(logEntry.httpBytesSent()).hasValueSatisfying(value -> Assertions.assertThat(value).isGreaterThan(10000L));
Assertions.assertThat(logEntry.sslProtocol()).hasValueSatisfying(TlsContext.ALLOWED_PROTOCOLS::contains);
Assertions.assertThat(logEntry.sslPeerSubject()).hasValue("CN=localhost");
Assertions.assertThat(logEntry.sslCipherSuite()).hasValueSatisfying(cipher -> Assertions.assertThat(cipher).isNotBlank());
Assertions.assertThat(logEntry.sslSessionId()).hasValueSatisfying(sessionId -> Assertions.assertThat(sessionId).hasSize(64));
Assertions.assertThat(logEntry.sslPeerNotBefore()).hasValue(Instant.EPOCH);
Assertions.assertThat(logEntry.sslPeerNotAfter()).hasValue(Instant.EPOCH.plus(100_000, ChronoUnit.DAYS));
}
    /** Verifies that a handled request is recorded in the access log with status code and request size. */
    @Test
    public void requireThatRequestIsTrackedInAccessLog() throws IOException, InterruptedException {
        BlockingQueueRequestLog requestLogMock = new BlockingQueueRequestLog();
        JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
                new EchoRequestHandler(),
                new ServerConfig.Builder(),
                new ConnectorConfig.Builder(),
                binder -> binder.bind(RequestLog.class).toInstance(requestLogMock));
        driver.client().newPost("/status.html").setContent("abcdef").execute().expectStatusCode(is(OK));
        // The log entry is produced asynchronously; wait up to 5 seconds for it.
        RequestLogEntry entry = requestLogMock.poll(Duration.ofSeconds(5));
        Assertions.assertThat(entry.statusCode()).hasValue(200);
        Assertions.assertThat(entry.requestSize()).hasValue(6); // "abcdef" is 6 bytes
        assertThat(driver.close(), is(true));
    }
    /** Verifies the requests-per-connection metric is reported after one request on a single connection. */
    @Test
    public void requireThatRequestsPerConnectionMetricIsAggregated() throws IOException {
        Path privateKeyFile = tmpFolder.newFile().toPath();
        Path certificateFile = tmpFolder.newFile().toPath();
        generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
        var metricConsumer = new MetricConsumerMock();
        InMemoryConnectionLog connectionLog = new InMemoryConnectionLog();
        JettyTestDriver driver = createSslTestDriver(certificateFile, privateKeyFile, metricConsumer, connectionLog);
        driver.client().get("/").expectStatusCode(is(OK));
        assertThat(driver.close(), is(true));
        // Exactly one request was issued on the connection, so the aggregated value must be 1.
        verify(metricConsumer.mockitoMock(), atLeast(1))
                .set(MetricDefinitions.REQUESTS_PER_CONNECTION, 1L, MetricConsumerMock.STATIC_CONTEXT);
    }
@Test
public void uriWithEmptyPathSegmentIsAllowed() throws Exception {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
MetricConsumerMock metricConsumer = new MetricConsumerMock();
InMemoryConnectionLog connectionLog = new InMemoryConnectionLog();
JettyTestDriver driver = createSslTestDriver(certificateFile, privateKeyFile, metricConsumer, connectionLog);
String uriPath = "/path/with/empty
driver.client().get(uriPath).expectStatusCode(is(OK));
try (CloseableHttpAsyncClient client = createHttp2Client(driver)) {
String uri = "https:
SimpleHttpResponse response = client.execute(SimpleRequestBuilder.get(uri).build(), null).get();
assertEquals(OK, response.getCode());
}
assertTrue(driver.close());
}
private static CloseableHttpAsyncClient createHttp2Client(JettyTestDriver driver) {
TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create()
.setSslContext(driver.sslContext())
.build();
var client = H2AsyncClientBuilder.create()
.disableAutomaticRetries()
.setTlsStrategy(tlsStrategy)
.build();
client.start();
return client;
}
    /**
     * Builds a test driver with TLS client-auth enforcement enabled: WANT_AUTH at the TLS layer,
     * with "/status.html" whitelisted so it stays reachable without a client certificate.
     */
    private static JettyTestDriver createSslWithTlsClientAuthenticationEnforcer(Path certificateFile, Path privateKeyFile) {
        ConnectorConfig.Builder connectorConfig = new ConnectorConfig.Builder()
                .tlsClientAuthEnforcer(
                        new ConnectorConfig.TlsClientAuthEnforcer.Builder()
                                .enable(true)
                                .pathWhitelist("/status.html"))
                .ssl(new ConnectorConfig.Ssl.Builder()
                        .enabled(true)
                        .clientAuth(ConnectorConfig.Ssl.ClientAuth.Enum.WANT_AUTH)
                        .privateKeyFile(privateKeyFile.toString())
                        .certificateFile(certificateFile.toString())
                        // Self-signed certificate doubles as CA for client-cert validation.
                        .caCertificateFile(certificateFile.toString()));
        return JettyTestDriver.newConfiguredInstance(
                new EchoRequestHandler(),
                new ServerConfig.Builder().connectionLog(new ServerConfig.ConnectionLog.Builder().enabled(true)),
                connectorConfig,
                binder -> {});
    }
    /** Mockito mock whose refer() returns a no-op reference so the server can manage its lifecycle. */
    private static RequestHandler mockRequestHandler() {
        final RequestHandler mockRequestHandler = mock(RequestHandler.class);
        when(mockRequestHandler.refer()).thenReturn(References.NOOP_REFERENCE);
        return mockRequestHandler;
    }
private static String generateContent(final char c, final int len) {
final StringBuilder ret = new StringBuilder(len);
for (int i = 0; i < len; ++i) {
ret.append(c);
}
return ret.toString();
}
    /** Creates a driver whose server config controls whether form-POST bodies are removed after parsing. */
    private static JettyTestDriver newDriverWithFormPostContentRemoved(RequestHandler requestHandler,
                                       boolean removeFormPostBody) throws Exception {
        return JettyTestDriver.newConfiguredInstance(
                requestHandler,
                new ServerConfig.Builder()
                        .removeRawPostBodyForWwwUrlEncodedPost(removeFormPostBody),
                new ConnectorConfig.Builder());
    }
    /** Builds a multipart form-body part presenting the given text content as a named plain-text file. */
    private static FormBodyPart newFileBody(final String fileName, final String fileContent) {
        return FormBodyPartBuilder.create()
                .setBody(
                        new StringBody(fileContent, ContentType.TEXT_PLAIN) {
                            // Override metadata so the string body looks like a file upload.
                            @Override public String getFilename() { return fileName; }
                            @Override public String getMimeType() { return ""; }
                            @Override public String getCharset() { return null; }
                        })
                .setName(fileName)
                .build();
    }
    /** Responds 200 OK with the request's connection-establishment time (epoch millis) as the body. */
    private static class ConnectedAtRequestHandler extends AbstractRequestHandler {
        @Override
        public ContentChannel handleRequest(final Request request, final ResponseHandler handler) {
            final HttpRequest httpRequest = (HttpRequest)request;
            final String connectedAt = String.valueOf(httpRequest.getConnectedAt(TimeUnit.MILLISECONDS));
            final ContentChannel ch = handler.handleResponse(new Response(OK));
            ch.write(ByteBuffer.wrap(connectedAt.getBytes(StandardCharsets.UTF_8)), null);
            ch.close(null);
            return null;
        }
    }
    /** Responds 200 OK with a Set-Cookie header for the single cookie given at construction. */
    private static class CookieSetterRequestHandler extends AbstractRequestHandler {
        final Cookie cookie;
        CookieSetterRequestHandler(final Cookie cookie) {
            this.cookie = cookie;
        }
        @Override
        public ContentChannel handleRequest(final Request request, final ResponseHandler handler) {
            final HttpResponse response = HttpResponse.newInstance(OK);
            response.encodeSetCookieHeader(Collections.singletonList(cookie));
            ResponseDispatch.newInstance(response).dispatch(handler);
            return null;
        }
    }
private static class CookiePrinterRequestHandler extends AbstractRequestHandler {
@Override
public ContentChannel handleRequest(final Request request, final ResponseHandler handler) {
final List<Cookie> cookies = new ArrayList<>(((HttpRequest)request).decodeCookieHeader());
Collections.sort(cookies, new CookieComparator());
final ContentChannel out = ResponseDispatch.newInstance(Response.Status.OK).connect(handler);
out.write(StandardCharsets.UTF_8.encode(cookies.toString()), null);
out.close(null);
return null;
}
}
    /** Responds 200 OK with the request's parameters rendered via Map.toString(), sorted by key. */
    private static class ParameterPrinterRequestHandler extends AbstractRequestHandler {
        private static final CompletionHandler NULL_COMPLETION_HANDLER = null;
        @Override
        public ContentChannel handleRequest(Request request, ResponseHandler handler) {
            // TreeMap gives a deterministic (key-sorted) rendering of the parameter map.
            Map<String, List<String>> parameters = new TreeMap<>(((HttpRequest)request).parameters());
            ContentChannel responseContentChannel = ResponseDispatch.newInstance(Response.Status.OK).connect(handler);
            responseContentChannel.write(ByteBuffer.wrap(parameters.toString().getBytes(StandardCharsets.UTF_8)),
                                         NULL_COMPLETION_HANDLER);
            // Returned open: any request content is appended after the parameter map
            // (the tests above expect e.g. "{foo=[bar]}foo=bar").
            return responseContentChannel;
        }
    }
    /** Responds 200 OK with a configurable request type, for testing request-type classification. */
    private static class RequestTypeHandler extends AbstractRequestHandler {
        // null means "do not override"; the server then classifies by HTTP method.
        private Request.RequestType requestType = null;
        public void setRequestType(Request.RequestType requestType) {
            this.requestType = requestType;
        }
        @Override
        public ContentChannel handleRequest(Request request, ResponseHandler handler) {
            Response response = new Response(OK);
            response.setRequestType(requestType);
            return handler.handleResponse(response);
        }
    }
    /** Handler that always throws, for testing server-side exception handling (expects 500). */
    private static class ThrowingHandler extends AbstractRequestHandler {
        @Override
        public ContentChannel handleRequest(final Request request, final ResponseHandler handler) {
            throw new RuntimeException("Deliberately thrown exception");
        }
    }
    /** Handler that never responds: sets a 100 ms request timeout and stashes the handler for later dispatch. */
    private static class UnresponsiveHandler extends AbstractRequestHandler {
        ResponseHandler responseHandler;
        @Override
        public ContentChannel handleRequest(final Request request, final ResponseHandler handler) {
            request.setTimeout(100, TimeUnit.MILLISECONDS);
            responseHandler = handler;
            return null;
        }
    }
    /** Minimal handler that immediately responds 200 OK with an empty body. */
    private static class OkRequestHandler extends AbstractRequestHandler {
        @Override
        public ContentChannel handleRequest(Request request, ResponseHandler handler) {
            Response response = new Response(OK);
            handler.handleResponse(response).close(null);
            return NullContent.INSTANCE;
        }
    }
    /** Handler that responds 200 OK with one fixed header added (value may be null or empty). */
    private static class EchoWithHeaderRequestHandler extends AbstractRequestHandler {
        final String headerName;
        final String headerValue;
        EchoWithHeaderRequestHandler(final String headerName, final String headerValue) {
            this.headerName = headerName;
            this.headerValue = headerValue;
        }
        @Override
        public ContentChannel handleRequest(final Request request, final ResponseHandler handler) {
            final Response response = new Response(OK);
            response.headers().add(headerName, headerValue);
            return handler.handleResponse(response);
        }
    }
    /** Guice module binding a BindingSetSelector that always selects the given binding-set name. */
    private static Module newBindingSetSelector(final String setName) {
        return new AbstractModule() {
            @Override
            protected void configure() {
                bind(BindingSetSelector.class).toInstance(new BindingSetSelector() {
                    @Override
                    public String select(final URI uri) {
                        return setName; // same set regardless of URI
                    }
                });
            }
        };
    }
private static class CookieComparator implements Comparator<Cookie> {
@Override
public int compare(final Cookie lhs, final Cookie rhs) {
return lhs.getName().compareTo(rhs.getName());
}
}
}
@Rule
public TemporaryFolder tmpFolder = new TemporaryFolder();
@Test
public void requireThatServerCanListenToRandomPort() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(mockRequestHandler());
assertNotEquals(0, driver.server().getListenPort());
assertTrue(driver.close());
}
@Test
public void requireThatServerCanNotListenToBoundPort() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(mockRequestHandler());
try {
JettyTestDriver.newConfiguredInstance(
mockRequestHandler(),
new ServerConfig.Builder(),
new ConnectorConfig.Builder()
.listenPort(driver.server().getListenPort())
);
} catch (final Throwable t) {
assertThat(t.getCause(), instanceOf(BindException.class));
}
assertTrue(driver.close());
}
@Test
public void requireThatBindingSetNotFoundReturns404() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
mockRequestHandler(),
new ServerConfig.Builder()
.developerMode(true),
new ConnectorConfig.Builder(),
newBindingSetSelector("unknown"));
driver.client().get("/status.html")
.expectStatusCode(is(NOT_FOUND))
.expectContent(containsPattern(Pattern.compile(
Pattern.quote(BindingSetNotFoundException.class.getName()) +
": No binding set named 'unknown'\\.\n\tat .+",
Pattern.DOTALL | Pattern.MULTILINE)));
assertTrue(driver.close());
}
@Test
public void requireThatTooLongInitLineReturns414() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
mockRequestHandler(),
new ServerConfig.Builder(),
new ConnectorConfig.Builder()
.requestHeaderSize(1));
driver.client().get("/status.html")
.expectStatusCode(is(REQUEST_URI_TOO_LONG));
assertTrue(driver.close());
}
@Test
public void requireThatAccessLogIsCalledForRequestRejectedByJetty() throws Exception {
BlockingQueueRequestLog requestLogMock = new BlockingQueueRequestLog();
final JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
mockRequestHandler(),
new ServerConfig.Builder(),
new ConnectorConfig.Builder().requestHeaderSize(1),
binder -> binder.bind(RequestLog.class).toInstance(requestLogMock));
driver.client().get("/status.html")
.expectStatusCode(is(REQUEST_URI_TOO_LONG));
RequestLogEntry entry = requestLogMock.poll(Duration.ofSeconds(5));
assertEquals(414, entry.statusCode().getAsInt());
assertThat(driver.close(), is(true));
}
@Test
public void requireThatServerCanEcho() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoRequestHandler());
driver.client().get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatServerCanEchoCompressed() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoRequestHandler());
SimpleHttpClient client = driver.newClient(true);
client.get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatServerCanHandleMultipleRequests() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoRequestHandler());
driver.client().get("/status.html")
.expectStatusCode(is(OK));
driver.client().get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostWorks() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final String requestContent = generateContent('a', 30);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent(requestContent)
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith('{' + requestContent + "=[]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostDoesNotRemoveContentByDefault() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("foo=bar")
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{foo=[bar]}foo=bar"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostKeepsContentWhenConfiguredTo() throws Exception {
final JettyTestDriver driver = newDriverWithFormPostContentRemoved(new ParameterPrinterRequestHandler(), false);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("foo=bar")
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{foo=[bar]}foo=bar"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostRemovesContentWhenConfiguredTo() throws Exception {
final JettyTestDriver driver = newDriverWithFormPostContentRemoved(new ParameterPrinterRequestHandler(), true);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("foo=bar")
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{foo=[bar]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostWithCharsetSpecifiedWorks() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final String requestContent = generateContent('a', 30);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(X_DISABLE_CHUNKING, "true")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED + ";charset=UTF-8")
.setContent(requestContent)
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith('{' + requestContent + "=[]}"));
assertTrue(driver.close());
}
@Test
public void requireThatEmptyFormPostWorks() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormParametersAreParsed() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("a=b&c=d")
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith("{a=[b], c=[d]}"));
assertTrue(driver.close());
}
@Test
public void requireThatUriParametersAreParsed() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html?a=b&c=d")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{a=[b], c=[d]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormAndUriParametersAreMerged() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html?a=b&c=d1")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("c=d2&e=f")
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith("{a=[b], c=[d1, d2], e=[f]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormCharsetIsHonored() throws Exception {
final JettyTestDriver driver = newDriverWithFormPostContentRemoved(new ParameterPrinterRequestHandler(), true);
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED + ";charset=ISO-8859-1")
.setBinaryContent(new byte[]{66, (byte) 230, 114, 61, 98, 108, (byte) 229})
.execute();
response.expectStatusCode(is(OK))
.expectContent(is("{B\u00e6r=[bl\u00e5]}"));
assertTrue(driver.close());
}
@Test
public void requireThatUnknownFormCharsetIsTreatedAsBadRequest() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED + ";charset=FLARBA-GARBA-7")
.setContent("a=b")
.execute();
response.expectStatusCode(is(UNSUPPORTED_MEDIA_TYPE));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostWithPercentEncodedContentIsDecoded() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("%20%3D%C3%98=%22%25+")
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith("{ =\u00d8=[\"% ]}"));
assertTrue(driver.close());
}
@Test
public void requireThatFormPostWithThrowingHandlerIsExceptionSafe() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ThrowingHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setContent("a=b")
.execute();
response.expectStatusCode(is(INTERNAL_SERVER_ERROR));
assertTrue(driver.close());
}
@Test
public void requireThatMultiPostWorks() throws Exception {
final String startTxtContent = "this is a test for POST.";
final String updaterConfContent
= "identifier = updater\n"
+ "server_type = gds\n";
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.setMultipartContent(
newFileBody("start.txt", startTxtContent),
newFileBody("updater.conf", updaterConfContent))
.execute();
response.expectStatusCode(is(OK))
.expectContent(containsString(startTxtContent))
.expectContent(containsString(updaterConfContent));
}
@Test
public void requireThatRequestCookiesAreReceived() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new CookiePrinterRequestHandler());
final ResponseValidator response =
driver.client().newPost("/status.html")
.addHeader(COOKIE, "foo=bar")
.execute();
response.expectStatusCode(is(OK))
.expectContent(containsString("[foo=bar]"));
assertTrue(driver.close());
}
@Test
public void requireThatSetCookieHeaderIsCorrect() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new CookieSetterRequestHandler(
new Cookie("foo", "bar")
.setDomain(".localhost")
.setHttpOnly(true)
.setPath("/foopath")
.setSecure(true)));
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectHeader("Set-Cookie",
is("foo=bar; Path=/foopath; Domain=.localhost; Secure; HttpOnly"));
assertTrue(driver.close());
}
@Test
public void requireThatTimeoutWorks() throws Exception {
final UnresponsiveHandler requestHandler = new UnresponsiveHandler();
final JettyTestDriver driver = JettyTestDriver.newInstance(requestHandler);
driver.client().get("/status.html")
.expectStatusCode(is(GATEWAY_TIMEOUT));
ResponseDispatch.newInstance(OK).dispatch(requestHandler.responseHandler);
assertTrue(driver.close());
}
@Test
public void requireThatHeaderWithNullValueIsOmitted() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoWithHeaderRequestHandler("X-Foo", null));
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectNoHeader("X-Foo");
assertTrue(driver.close());
}
@Test
public void requireThatHeaderWithEmptyValueIsAllowed() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoWithHeaderRequestHandler("X-Foo", ""));
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectHeader("X-Foo", is(""));
assertTrue(driver.close());
}
@Test
public void requireThatNoConnectionHeaderMeansKeepAliveInHttp11KeepAliveDisabled() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new EchoWithHeaderRequestHandler(CONNECTION, CLOSE));
driver.client().get("/status.html")
.expectHeader(CONNECTION, is(CLOSE));
assertThat(driver.close(), is(true));
}
@Test
@Test
public void requireThatServerCanRespondToSslRequest() throws Exception {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
final JettyTestDriver driver = JettyTestDriver.newInstanceWithSsl(new EchoRequestHandler(), certificateFile, privateKeyFile, TlsClientAuth.WANT);
driver.client().get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatServerCanRespondToHttp2Request() throws Exception {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
MetricConsumerMock metricConsumer = new MetricConsumerMock();
InMemoryConnectionLog connectionLog = new InMemoryConnectionLog();
JettyTestDriver driver = createSslTestDriver(certificateFile, privateKeyFile, metricConsumer, connectionLog);
try (CloseableHttpAsyncClient client = createHttp2Client(driver)) {
String uri = "https:
SimpleHttpResponse response = client.execute(SimpleRequestBuilder.get(uri).build(), null).get();
assertNull(response.getBodyText());
assertEquals(OK, response.getCode());
}
assertTrue(driver.close());
ConnectionLogEntry entry = connectionLog.logEntries().get(0);
assertEquals("HTTP/2.0", entry.httpProtocol().get());
}
@Test
public void requireThatTlsClientAuthenticationEnforcerRejectsRequestsForNonWhitelistedPaths() throws IOException {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
JettyTestDriver driver = createSslWithTlsClientAuthenticationEnforcer(certificateFile, privateKeyFile);
SSLContext trustStoreOnlyCtx = new SslContextBuilder()
.withTrustStore(certificateFile)
.build();
new SimpleHttpClient(trustStoreOnlyCtx, driver.server().getListenPort(), false)
.get("/dummy.html")
.expectStatusCode(is(UNAUTHORIZED));
assertTrue(driver.close());
}
@Test
public void requireThatTlsClientAuthenticationEnforcerAllowsRequestForWhitelistedPaths() throws IOException {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
JettyTestDriver driver = JettyTestDriver.newInstanceWithSsl(new EchoRequestHandler(), certificateFile, privateKeyFile, TlsClientAuth.WANT);
SSLContext trustStoreOnlyCtx = new SslContextBuilder()
.withTrustStore(certificateFile)
.build();
new SimpleHttpClient(trustStoreOnlyCtx, driver.server().getListenPort(), false)
.get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
@Test
public void requireThatConnectedAtReturnsNonZero() throws Exception {
final JettyTestDriver driver = JettyTestDriver.newInstance(new ConnectedAtRequestHandler());
driver.client().get("/status.html")
.expectStatusCode(is(OK))
.expectContent(matchesPattern("\\d{13,}"));
assertThat(driver.close(), is(true));
}
@Test
public void requireThatGzipEncodingRequestsAreAutomaticallyDecompressed() throws Exception {
JettyTestDriver driver = JettyTestDriver.newInstance(new ParameterPrinterRequestHandler());
String requestContent = generateContent('a', 30);
ResponseValidator response = driver.client().newPost("/status.html")
.addHeader(CONTENT_TYPE, APPLICATION_X_WWW_FORM_URLENCODED)
.setGzipContent(requestContent)
.execute();
response.expectStatusCode(is(OK))
.expectContent(startsWith('{' + requestContent + "=[]}"));
assertTrue(driver.close());
}
@Test
public void requireThatResponseStatsAreCollected() throws Exception {
RequestTypeHandler handler = new RequestTypeHandler();
JettyTestDriver driver = JettyTestDriver.newInstance(handler);
HttpResponseStatisticsCollector statisticsCollector = ((AbstractHandlerContainer) driver.server().server().getHandler())
.getChildHandlerByClass(HttpResponseStatisticsCollector.class);
{
List<HttpResponseStatisticsCollector.StatisticsEntry> stats = statisticsCollector.takeStatistics();
assertEquals(0, stats.size());
}
{
driver.client().newPost("/status.html").execute();
var entry = waitForStatistics(statisticsCollector);
assertEquals("http", entry.scheme);
assertEquals("POST", entry.method);
assertEquals("http.status.2xx", entry.name);
assertEquals("write", entry.requestType);
assertEquals(1, entry.value);
}
{
driver.client().newGet("/status.html").execute();
var entry = waitForStatistics(statisticsCollector);
assertEquals("http", entry.scheme);
assertEquals("GET", entry.method);
assertEquals("http.status.2xx", entry.name);
assertEquals("read", entry.requestType);
assertEquals(1, entry.value);
}
{
handler.setRequestType(Request.RequestType.READ);
driver.client().newPost("/status.html").execute();
var entry = waitForStatistics(statisticsCollector);
assertEquals("Handler overrides request type", "read", entry.requestType);
}
assertTrue(driver.close());
}
private HttpResponseStatisticsCollector.StatisticsEntry waitForStatistics(HttpResponseStatisticsCollector
statisticsCollector) {
List<HttpResponseStatisticsCollector.StatisticsEntry> entries = Collections.emptyList();
int tries = 0;
while (entries.isEmpty() && tries < 10000) {
entries = statisticsCollector.takeStatistics();
if (entries.isEmpty())
try {Thread.sleep(100); } catch (InterruptedException e) {}
tries++;
}
assertEquals(1, entries.size());
return entries.get(0);
}
@Test
public void requireThatConnectionThrottleDoesNotBlockConnectionsBelowThreshold() throws Exception {
JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
new EchoRequestHandler(),
new ServerConfig.Builder(),
new ConnectorConfig.Builder()
.throttling(new Throttling.Builder()
.enabled(true)
.maxAcceptRate(10)
.maxHeapUtilization(1.0)
.maxConnections(10)));
driver.client().get("/status.html")
.expectStatusCode(is(OK));
assertTrue(driver.close());
}
    /** Verifies a closed TLS connection produces a single, fully-populated connection-log entry. */
    @Test
    public void requireThatConnectionIsTrackedInConnectionLog() throws Exception {
        Path privateKeyFile = tmpFolder.newFile().toPath();
        Path certificateFile = tmpFolder.newFile().toPath();
        generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
        InMemoryConnectionLog connectionLog = new InMemoryConnectionLog();
        Module overrideModule = binder -> binder.bind(ConnectionLog.class).toInstance(connectionLog);
        JettyTestDriver driver = JettyTestDriver.newInstanceWithSsl(new OkRequestHandler(), certificateFile, privateKeyFile, TlsClientAuth.NEED, overrideModule);
        int listenPort = driver.server().getListenPort();
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < 1000; i++) {
            builder.append(i);
        }
        byte[] content = builder.toString().getBytes();
        // 100 POSTs through the same client; the single log entry asserted below implies
        // they were all served on one connection.
        for (int i = 0; i < 100; i++) {
            driver.client().newPost("/status.html").setBinaryContent(content).execute()
                    .expectStatusCode(is(OK));
        }
        assertTrue(driver.close());
        List<ConnectionLogEntry> logEntries = connectionLog.logEntries();
        Assertions.assertThat(logEntries).hasSize(1);
        ConnectionLogEntry logEntry = logEntries.get(0);
        assertEquals(4, UUID.fromString(logEntry.id()).version()); // version 4 == random UUID
        Assertions.assertThat(logEntry.timestamp()).isAfter(Instant.EPOCH);
        Assertions.assertThat(logEntry.requests()).hasValue(100L);
        Assertions.assertThat(logEntry.responses()).hasValue(100L);
        Assertions.assertThat(logEntry.peerAddress()).hasValue("127.0.0.1");
        Assertions.assertThat(logEntry.localAddress()).hasValue("127.0.0.1");
        Assertions.assertThat(logEntry.localPort()).hasValue(listenPort);
        Assertions.assertThat(logEntry.httpBytesReceived()).hasValueSatisfying(value -> Assertions.assertThat(value).isGreaterThan(100000L));
        Assertions.assertThat(logEntry.httpBytesSent()).hasValueSatisfying(value -> Assertions.assertThat(value).isGreaterThan(10000L));
        Assertions.assertThat(logEntry.sslProtocol()).hasValueSatisfying(TlsContext.ALLOWED_PROTOCOLS::contains);
        Assertions.assertThat(logEntry.sslPeerSubject()).hasValue("CN=localhost");
        Assertions.assertThat(logEntry.sslCipherSuite()).hasValueSatisfying(cipher -> Assertions.assertThat(cipher).isNotBlank());
        Assertions.assertThat(logEntry.sslSessionId()).hasValueSatisfying(sessionId -> Assertions.assertThat(sessionId).hasSize(64));
        // Expected validity matches what generatePrivateKeyAndCertificate issues.
        Assertions.assertThat(logEntry.sslPeerNotBefore()).hasValue(Instant.EPOCH);
        Assertions.assertThat(logEntry.sslPeerNotAfter()).hasValue(Instant.EPOCH.plus(100_000, ChronoUnit.DAYS));
    }
    /** Verifies that a handled request is recorded in the access log with status code and request size. */
    @Test
    public void requireThatRequestIsTrackedInAccessLog() throws IOException, InterruptedException {
        BlockingQueueRequestLog requestLogMock = new BlockingQueueRequestLog();
        JettyTestDriver driver = JettyTestDriver.newConfiguredInstance(
                new EchoRequestHandler(),
                new ServerConfig.Builder(),
                new ConnectorConfig.Builder(),
                binder -> binder.bind(RequestLog.class).toInstance(requestLogMock));
        driver.client().newPost("/status.html").setContent("abcdef").execute().expectStatusCode(is(OK));
        // The log entry is produced asynchronously; wait up to 5 seconds for it.
        RequestLogEntry entry = requestLogMock.poll(Duration.ofSeconds(5));
        Assertions.assertThat(entry.statusCode()).hasValue(200);
        Assertions.assertThat(entry.requestSize()).hasValue(6); // "abcdef" is 6 bytes
        assertThat(driver.close(), is(true));
    }
    /** Verifies the requests-per-connection metric is reported after one request on a single connection. */
    @Test
    public void requireThatRequestsPerConnectionMetricIsAggregated() throws IOException {
        Path privateKeyFile = tmpFolder.newFile().toPath();
        Path certificateFile = tmpFolder.newFile().toPath();
        generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
        var metricConsumer = new MetricConsumerMock();
        InMemoryConnectionLog connectionLog = new InMemoryConnectionLog();
        JettyTestDriver driver = createSslTestDriver(certificateFile, privateKeyFile, metricConsumer, connectionLog);
        driver.client().get("/").expectStatusCode(is(OK));
        assertThat(driver.close(), is(true));
        // Exactly one request was issued on the connection, so the aggregated value must be 1.
        verify(metricConsumer.mockitoMock(), atLeast(1))
                .set(MetricDefinitions.REQUESTS_PER_CONNECTION, 1L, MetricConsumerMock.STATIC_CONTEXT);
    }
@Test
public void uriWithEmptyPathSegmentIsAllowed() throws Exception {
Path privateKeyFile = tmpFolder.newFile().toPath();
Path certificateFile = tmpFolder.newFile().toPath();
generatePrivateKeyAndCertificate(privateKeyFile, certificateFile);
MetricConsumerMock metricConsumer = new MetricConsumerMock();
InMemoryConnectionLog connectionLog = new InMemoryConnectionLog();
JettyTestDriver driver = createSslTestDriver(certificateFile, privateKeyFile, metricConsumer, connectionLog);
String uriPath = "/path/with/empty
driver.client().get(uriPath).expectStatusCode(is(OK));
try (CloseableHttpAsyncClient client = createHttp2Client(driver)) {
String uri = "https:
SimpleHttpResponse response = client.execute(SimpleRequestBuilder.get(uri).build(), null).get();
assertEquals(OK, response.getCode());
}
assertTrue(driver.close());
}
private static CloseableHttpAsyncClient createHttp2Client(JettyTestDriver driver) {
TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create()
.setSslContext(driver.sslContext())
.build();
var client = H2AsyncClientBuilder.create()
.disableAutomaticRetries()
.setTlsStrategy(tlsStrategy)
.build();
client.start();
return client;
}
// Driver with a WANT_AUTH TLS connector plus an enforcer that requires client
// certificates everywhere except the whitelisted /status.html path.
private static JettyTestDriver createSslWithTlsClientAuthenticationEnforcer(Path certificateFile, Path privateKeyFile) {
    ConnectorConfig.TlsClientAuthEnforcer.Builder enforcer = new ConnectorConfig.TlsClientAuthEnforcer.Builder()
            .enable(true)
            .pathWhitelist("/status.html");
    ConnectorConfig.Ssl.Builder ssl = new ConnectorConfig.Ssl.Builder()
            .enabled(true)
            .clientAuth(ConnectorConfig.Ssl.ClientAuth.Enum.WANT_AUTH)
            .privateKeyFile(privateKeyFile.toString())
            .certificateFile(certificateFile.toString())
            .caCertificateFile(certificateFile.toString());
    return JettyTestDriver.newConfiguredInstance(
            new EchoRequestHandler(),
            new ServerConfig.Builder().connectionLog(new ServerConfig.ConnectionLog.Builder().enabled(true)),
            new ConnectorConfig.Builder().tlsClientAuthEnforcer(enforcer).ssl(ssl),
            binder -> {});
}
// Mockito stub whose refer() hands out a no-op reference, so the container's
// reference counting does not interfere with the test.
private static RequestHandler mockRequestHandler() {
    RequestHandler handler = mock(RequestHandler.class);
    when(handler.refer()).thenReturn(References.NOOP_REFERENCE);
    return handler;
}
// Returns a string consisting of the character c repeated len times.
private static String generateContent(final char c, final int len) {
    final StringBuilder content = new StringBuilder(len);
    int remaining = len;
    while (remaining-- > 0) {
        content.append(c);
    }
    return content.toString();
}
// Creates a driver whose server strips (or keeps, per the flag) the raw body
// of application/x-www-form-urlencoded POST requests.
private static JettyTestDriver newDriverWithFormPostContentRemoved(RequestHandler requestHandler,
                                                                   boolean removeFormPostBody) throws Exception {
    ServerConfig.Builder serverConfig = new ServerConfig.Builder()
            .removeRawPostBodyForWwwUrlEncodedPost(removeFormPostBody);
    return JettyTestDriver.newConfiguredInstance(requestHandler, serverConfig, new ConnectorConfig.Builder());
}
// Builds a multipart form-data part carrying fileContent as an attached file
// named fileName; MIME type and charset are deliberately left blank/absent to
// mimic minimal upload clients.
private static FormBodyPart newFileBody(final String fileName, final String fileContent) {
    StringBody body = new StringBody(fileContent, ContentType.TEXT_PLAIN) {
        @Override public String getFilename() { return fileName; }
        @Override public String getMimeType() { return ""; }
        @Override public String getCharset() { return null; }
    };
    return FormBodyPartBuilder.create()
            .setBody(body)
            .setName(fileName)
            .build();
}
// Responds 200 OK with the connection-establishment timestamp (epoch millis)
// of the underlying socket as the response body.
private static class ConnectedAtRequestHandler extends AbstractRequestHandler {
    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        HttpRequest httpRequest = (HttpRequest) request;
        String connectedAt = String.valueOf(httpRequest.getConnectedAt(TimeUnit.MILLISECONDS));
        ContentChannel out = handler.handleResponse(new Response(OK));
        out.write(ByteBuffer.wrap(connectedAt.getBytes(StandardCharsets.UTF_8)), null);
        out.close(null);
        return null; // no request content is consumed
    }
}
// Sets a single pre-configured cookie on every response via Set-Cookie.
private static class CookieSetterRequestHandler extends AbstractRequestHandler {
    final Cookie cookie;

    CookieSetterRequestHandler(Cookie cookie) {
        this.cookie = cookie;
    }

    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        HttpResponse response = HttpResponse.newInstance(OK);
        response.encodeSetCookieHeader(Collections.singletonList(cookie));
        ResponseDispatch.newInstance(response).dispatch(handler);
        return null;
    }
}
// Echoes the request's cookies, sorted by name, as the response body.
private static class CookiePrinterRequestHandler extends AbstractRequestHandler {
    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        List<Cookie> cookies = new ArrayList<>(((HttpRequest) request).decodeCookieHeader());
        cookies.sort(new CookieComparator()); // deterministic order for assertions
        ContentChannel out = ResponseDispatch.newInstance(Response.Status.OK).connect(handler);
        out.write(StandardCharsets.UTF_8.encode(cookies.toString()), null);
        out.close(null);
        return null;
    }
}
// Writes the request's query/form parameters, sorted by name, to the response.
private static class ParameterPrinterRequestHandler extends AbstractRequestHandler {
    private static final CompletionHandler NULL_COMPLETION_HANDLER = null;

    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        // TreeMap gives a stable, alphabetical rendering of the parameter map.
        Map<String, List<String>> sortedParameters = new TreeMap<>(((HttpRequest) request).parameters());
        ContentChannel out = ResponseDispatch.newInstance(Response.Status.OK).connect(handler);
        out.write(ByteBuffer.wrap(sortedParameters.toString().getBytes(StandardCharsets.UTF_8)),
                  NULL_COMPLETION_HANDLER);
        return out;
    }
}
// Responds OK and stamps the response with a configurable request type.
private static class RequestTypeHandler extends AbstractRequestHandler {
    private Request.RequestType requestType = null;

    public void setRequestType(Request.RequestType requestType) {
        this.requestType = requestType;
    }

    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        Response okResponse = new Response(OK);
        okResponse.setRequestType(requestType);
        return handler.handleResponse(okResponse);
    }
}
// Always fails: used to verify the container's handling of handlers that
// throw synchronously from handleRequest.
private static class ThrowingHandler extends AbstractRequestHandler {
    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        throw new RuntimeException("Deliberately thrown exception");
    }
}
// Never responds: arms a short request timeout and stashes the ResponseHandler
// so tests can assert timeout behavior.
private static class UnresponsiveHandler extends AbstractRequestHandler {
    ResponseHandler responseHandler;

    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        request.setTimeout(100, TimeUnit.MILLISECONDS);
        responseHandler = handler; // deliberately left unanswered
        return null;
    }
}
// Immediately completes every request with an empty 200 OK response.
private static class OkRequestHandler extends AbstractRequestHandler {
    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        handler.handleResponse(new Response(OK)).close(null);
        return NullContent.INSTANCE;
    }
}
// Responds OK with one fixed header attached; used to assert header passthrough.
private static class EchoWithHeaderRequestHandler extends AbstractRequestHandler {
    final String headerName;
    final String headerValue;

    EchoWithHeaderRequestHandler(String headerName, String headerValue) {
        this.headerName = headerName;
        this.headerValue = headerValue;
    }

    @Override
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        Response response = new Response(OK);
        response.headers().add(headerName, headerValue);
        return handler.handleResponse(response);
    }
}
// Guice module that routes every request to the named binding set,
// regardless of the request URI.
private static Module newBindingSetSelector(final String setName) {
    return new AbstractModule() {
        @Override
        protected void configure() {
            BindingSetSelector selector = new BindingSetSelector() {
                @Override
                public String select(URI uri) {
                    return setName; // ignore the URI; always pick the same set
                }
            };
            bind(BindingSetSelector.class).toInstance(selector);
        }
    };
}
// Orders cookies alphabetically by name so rendered output is deterministic.
private static class CookieComparator implements Comparator<Cookie> {
    @Override
    public int compare(Cookie lhs, Cookie rhs) {
        String left = lhs.getName();
        String right = rhs.getName();
        return left.compareTo(right);
    }
}
} |
|
No biggie, but the null check could be hoisted to be the first branch of this `if-else` chain. | public CompletionStage<ResultNode> resolve(SectionResolutionContext context) {
// Resolves a {#for}/{#each} loop section: evaluates the iterable expression,
// renders the section body once per element, and joins the per-element stages
// into a single MultiResultNode.
return context.resolutionContext().evaluate(iterable).thenCompose(it -> {
List<CompletionStage<ResultNode>> results = new ArrayList<>();
// Normalize the supported container shapes to a plain Iterator.
Iterator<?> iterator;
if (it instanceof Iterable) {
iterator = ((Iterable<?>) it).iterator();
} else if (it instanceof Iterator) {
iterator = (Iterator<?>) it;
} else if (it instanceof Map) {
// Maps iterate over their entry set so both key and value are available.
iterator = ((Map<?, ?>) it).entrySet().iterator();
} else if (it instanceof Stream) {
iterator = ((Stream<?>) it).sequential().iterator();
} else if (it instanceof Integer) {
// An integer n behaves like the inclusive range 1..n.
iterator = IntStream.rangeClosed(1, (Integer) it).iterator();
// NOTE(review): the null guard is buried in this late branch; hoisting an
// explicit null check to the top of the chain would give a clearer error.
} else if (it != null && it.getClass().isArray()) {
iterator = Arrays.stream((Object[]) it).iterator();
} else {
// Falls through for null and any unsupported type.
throw new IllegalStateException(
String.format("Cannot iterate over [%s] resolved for [%s] in template %s on line %s", it,
iterable.toOriginalString(), iterable.origin.getTemplateId(), iterable.origin.getLine()));
}
int idx = 0;
while (iterator.hasNext()) {
// hasNext() is re-queried so each element knows whether it is the last one.
results.add(nextElement(iterator.next(), idx++, iterator.hasNext(), context));
}
if (results.isEmpty()) {
return CompletableFuture.completedFuture(ResultNode.NOOP);
}
// Complete the returned future once every element future has settled.
CompletableFuture<ResultNode> result = new CompletableFuture<>();
CompletableFuture<ResultNode>[] all = new CompletableFuture[results.size()];
idx = 0;
for (CompletionStage<ResultNode> r : results) {
all[idx++] = r.toCompletableFuture();
}
CompletableFuture
.allOf(all)
.whenComplete((v, t) -> {
if (t != null) {
result.completeExceptionally(t);
} else {
result.complete(new MultiResultNode(all));
}
});
return result;
});
} | } else if (it != null && it.getClass().isArray()) { | public CompletionStage<ResultNode> resolve(SectionResolutionContext context) {
// Post-review version: the null check now runs first and iterator extraction
// lives in the extractIterator helper.
return context.resolutionContext().evaluate(iterable).thenCompose(it -> {
// Fail fast with a template-located error when the expression resolved to null.
if (it == null) {
throw new TemplateException(String.format(
"Loop section error in template %s on line %s: [%s] resolved to [null] which is not iterable",
iterable.origin.getTemplateId(), iterable.origin.getLine(), iterable.toOriginalString()));
}
List<CompletionStage<ResultNode>> results = new ArrayList<>();
Iterator<?> iterator = extractIterator(it);
int idx = 0;
while (iterator.hasNext()) {
// hasNext() is re-queried so each element knows whether it is the last one.
results.add(nextElement(iterator.next(), idx++, iterator.hasNext(), context));
}
if (results.isEmpty()) {
return CompletableFuture.completedFuture(ResultNode.NOOP);
}
// Complete the returned future once every element future has settled.
CompletableFuture<ResultNode> result = new CompletableFuture<>();
CompletableFuture<ResultNode>[] all = new CompletableFuture[results.size()];
idx = 0;
for (CompletionStage<ResultNode> r : results) {
all[idx++] = r.toCompletableFuture();
}
CompletableFuture
.allOf(all)
.whenComplete((v, t) -> {
if (t != null) {
result.completeExceptionally(t);
} else {
result.complete(new MultiResultNode(all));
}
});
return result;
});
} | class LoopSectionHelper implements SectionHelper {
private static final String DEFAULT_ALIAS = "it"; // implicit name when no alias is given
private final String alias; // name under which the current element is exposed to the body
private final Expression iterable; // expression that resolves to the container being iterated
LoopSectionHelper(String alias, Expression iterable) {
    // Fall back to the implicit "it" alias when none was given in the template.
    if (alias.equals(Parameter.EMPTY)) {
        this.alias = DEFAULT_ALIAS;
    } else {
        this.alias = alias;
    }
    this.iterable = Objects.requireNonNull(iterable);
}
@SuppressWarnings("unchecked")
@Override
// NOTE(review): the two annotations above look orphaned in this extract — they
// appear to belong to the resolve(...) method that this dataset moved into a
// separate column; nextElement itself overrides nothing. TODO confirm.
// Creates a child resolution context exposing one iteration element under the
// configured alias, then renders the section body against it.
CompletionStage<ResultNode> nextElement(Object element, int index, boolean hasNext, SectionResolutionContext context) {
AtomicReference<ResolutionContext> resolutionContextHolder = new AtomicReference<>();
ResolutionContext child = context.resolutionContext().createChild(new IterationElement(alias, element, index, hasNext),
null);
// NOTE(review): resolutionContextHolder is written but never read — likely dead code.
resolutionContextHolder.set(child);
return context.execute(child);
}
// Factory wiring for the loop section: declares the {#for}/{#each} aliases,
// the "alias in iterable" parameter shape, and per-block type information
// used for type-safe expression validation.
public static class Factory implements SectionHelperFactory<LoopSectionHelper> {
public static final String HINT = "<for-element>";
private static final String ALIAS = "alias";
private static final String IN = "in";
private static final String ITERABLE = "iterable";
@Override
public List<String> getDefaultAliases() {
return ImmutableList.of("for", "each");
}
@Override
public ParametersInfo getParameters() {
// Shape: {#for alias in iterable} — only "iterable" is mandatory.
return ParametersInfo.builder()
.addParameter(ALIAS, EMPTY)
.addParameter(IN, EMPTY)
.addParameter(new Parameter(ITERABLE, null, true))
.build();
}
@Override
public LoopSectionHelper initialize(SectionInitContext context) {
return new LoopSectionHelper(context.getParameter(ALIAS), context.getExpression(ITERABLE));
}
@Override
public Map<String, String> initializeBlock(Map<String, String> outerNameTypeInfos, BlockInfo block) {
if (block.getLabel().equals(MAIN_BLOCK_NAME)) {
String iterable = block.getParameters().get(ITERABLE);
if (iterable == null) {
// A loop with no parameter iterates the current context object.
iterable = ValueResolvers.THIS;
}
Expression iterableExpr = block.addExpression(ITERABLE, iterable);
String alias = block.getParameters().get(ALIAS);
if (iterableExpr.typeCheckInfo != null) {
alias = alias.equals(Parameter.EMPTY) ? DEFAULT_ALIAS : alias;
Map<String, String> typeInfos = new HashMap<String, String>(outerNameTypeInfos);
// The HINT suffix marks the alias as "element of" the iterable's type.
typeInfos.put(alias, iterableExpr.typeCheckInfo + HINT);
return typeInfos;
} else {
// No type info — register the alias as untyped.
// NOTE(review): unlike the typed branch, 'alias' is not normalized here
// and may still be Parameter.EMPTY.
Map<String, String> typeInfos = new HashMap<String, String>(outerNameTypeInfos);
typeInfos.put(alias, null);
return typeInfos;
}
} else {
return Collections.emptyMap();
}
}
}
// Read-only view of one loop iteration: exposes the element under the alias
// plus positional metadata (count/index/parity/hasNext).
static class IterationElement implements Mapper {
    final String alias;
    final Object element;
    final int index;
    final boolean hasNext;

    public IterationElement(String alias, Object element, int index, boolean hasNext) {
        this.alias = alias;
        this.element = element;
        this.index = index;
        this.hasNext = hasNext;
    }

    @Override
    public Object get(String key) {
        if (alias.equals(key)) {
            return element;
        }
        boolean zeroBasedEven = (index % 2 == 0);
        switch (key) {
            case "count":
                return index + 1;                      // 1-based position
            case "index":
                return index;                          // 0-based position
            case "indexParity":
                return zeroBasedEven ? "odd" : "even"; // parity of the 1-based count
            case "hasNext":
                return hasNext;
            case "isOdd":
            case "odd":
                return zeroBasedEven;                  // 1-based count is odd
            case "isEven":
            case "even":
                return !zeroBasedEven;                 // 1-based count is even
            default:
                return Result.NOT_FOUND;
        }
    }
}
} | class LoopSectionHelper implements SectionHelper {
private static final String DEFAULT_ALIAS = "it";
private final String alias;
private final Expression iterable;
LoopSectionHelper(String alias, Expression iterable) {
this.alias = alias.equals(Parameter.EMPTY) ? DEFAULT_ALIAS : alias;
this.iterable = Objects.requireNonNull(iterable);
}
@SuppressWarnings("unchecked")
@Override
// NOTE(review): @Override on a private helper that overrides nothing is a
// compile error — these annotations evidently belong to the resolve(...)
// method that this dataset extract moved into a separate column.
// Normalizes the supported container shapes (Iterable, Iterator, Map entry
// set, sequential Stream, Integer range 1..n, Object array) to a plain
// Iterator, or fails with a TemplateException naming the template location.
private Iterator<?> extractIterator(Object it) {
if (it instanceof Iterable) {
return ((Iterable<?>) it).iterator();
} else if (it instanceof Iterator) {
return (Iterator<?>) it;
} else if (it instanceof Map) {
// Iterate over entries so both key and value are accessible in the body.
return ((Map<?, ?>) it).entrySet().iterator();
} else if (it instanceof Stream) {
return ((Stream<?>) it).sequential().iterator();
} else if (it instanceof Integer) {
// An integer n behaves like the inclusive range 1..n.
return IntStream.rangeClosed(1, (Integer) it).iterator();
} else if (it.getClass().isArray()) {
// The caller (resolve) has already rejected null, so getClass() is safe here.
return Arrays.stream((Object[]) it).iterator();
} else {
throw new TemplateException(String.format(
"Loop section error in template %s on line %s: [%s] resolved to [%s] which is not iterable",
iterable.origin.getTemplateId(), iterable.origin.getLine(), iterable.toOriginalString(),
it.getClass().getName()));
}
}
CompletionStage<ResultNode> nextElement(Object element, int index, boolean hasNext, SectionResolutionContext context) {
AtomicReference<ResolutionContext> resolutionContextHolder = new AtomicReference<>();
ResolutionContext child = context.resolutionContext().createChild(new IterationElement(alias, element, index, hasNext),
null);
resolutionContextHolder.set(child);
return context.execute(child);
}
public static class Factory implements SectionHelperFactory<LoopSectionHelper> {
public static final String HINT = "<for-element>";
private static final String ALIAS = "alias";
private static final String IN = "in";
private static final String ITERABLE = "iterable";
@Override
public List<String> getDefaultAliases() {
return ImmutableList.of("for", "each");
}
@Override
public ParametersInfo getParameters() {
return ParametersInfo.builder()
.addParameter(ALIAS, EMPTY)
.addParameter(IN, EMPTY)
.addParameter(new Parameter(ITERABLE, null, true))
.build();
}
@Override
public LoopSectionHelper initialize(SectionInitContext context) {
return new LoopSectionHelper(context.getParameter(ALIAS), context.getExpression(ITERABLE));
}
@Override
public Map<String, String> initializeBlock(Map<String, String> outerNameTypeInfos, BlockInfo block) {
if (block.getLabel().equals(MAIN_BLOCK_NAME)) {
String iterable = block.getParameters().get(ITERABLE);
if (iterable == null) {
iterable = ValueResolvers.THIS;
}
Expression iterableExpr = block.addExpression(ITERABLE, iterable);
String alias = block.getParameters().get(ALIAS);
if (iterableExpr.typeCheckInfo != null) {
alias = alias.equals(Parameter.EMPTY) ? DEFAULT_ALIAS : alias;
Map<String, String> typeInfos = new HashMap<String, String>(outerNameTypeInfos);
typeInfos.put(alias, iterableExpr.typeCheckInfo + HINT);
return typeInfos;
} else {
Map<String, String> typeInfos = new HashMap<String, String>(outerNameTypeInfos);
typeInfos.put(alias, null);
return typeInfos;
}
} else {
return Collections.emptyMap();
}
}
}
static class IterationElement implements Mapper {
final String alias;
final Object element;
final int index;
final boolean hasNext;
public IterationElement(String alias, Object element, int index, boolean hasNext) {
this.alias = alias;
this.element = element;
this.index = index;
this.hasNext = hasNext;
}
@Override
public Object get(String key) {
if (alias.equals(key)) {
return element;
}
switch (key) {
case "count":
return index + 1;
case "index":
return index;
case "indexParity":
return index % 2 != 0 ? "even" : "odd";
case "hasNext":
return hasNext;
case "isOdd":
case "odd":
return index % 2 == 0;
case "isEven":
case "even":
return index % 2 != 0;
default:
return Result.NOT_FOUND;
}
}
}
} |
Is there a library in Java, like C#'s `Uri`, that does this for us? | private String getHostNameFromEndpoint() {
return endpoint.replace("https:
} | return endpoint.replace("https: | private String getHostNameFromEndpoint() throws MalformedURLException {
return new URL(endpoint).getHost();
} | class CallingServerClientBuilder {
private static final String SDK_NAME = "name"; // key into the SDK properties file
private static final String SDK_VERSION = "version"; // key into the SDK properties file
private static final String APP_CONFIG_PROPERTIES = "azure-communication-callingserver.properties";
private final ClientLogger logger = new ClientLogger(CallingServerClientBuilder.class);
private String connectionString; // mutually exclusive with endpoint + credential (see createServiceImpl)
private String endpoint;
private String hostName; // derived from the endpoint; used by the token-auth host-header policy
private AzureKeyCredential azureKeyCredential; // HMAC auth; mutually exclusive with tokenCredential
private TokenCredential tokenCredential; // token auth; mutually exclusive with azureKeyCredential
private HttpClient httpClient;
private HttpLogOptions httpLogOptions = new HttpLogOptions();
private HttpPipeline pipeline; // when set, used verbatim instead of building one
private Configuration configuration;
private final Map<String, String> properties = CoreUtils.getProperties(APP_CONFIG_PROPERTIES);
private final List<HttpPipelinePolicy> customPolicies = new ArrayList<>();
private ClientOptions clientOptions;
private RetryPolicy retryPolicy; // defaults to new RetryPolicy() when unset
/**
 * Sets the service endpoint.
 *
 * @param endpoint url of the service.
 * @return CallingServerClientBuilder object.
 */
public CallingServerClientBuilder endpoint(String endpoint) {
    Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
    this.endpoint = endpoint;
    return this;
}
/**
 * Sets the {@link HttpPipeline} to use for sending requests. When a pipeline
 * is supplied it is used verbatim; otherwise the credential and httpClient
 * fields must be set so a pipeline can be built internally.
 *
 * <p>Fix: the previous Javadoc was copy-pasted from {@code endpoint(...)} and
 * incorrectly described this method as setting the endpoint.
 *
 * @param pipeline HttpPipeline to use.
 * @return CallingServerClientBuilder object.
 * @throws NullPointerException If {@code pipeline} is null.
 */
public CallingServerClientBuilder pipeline(HttpPipeline pipeline) {
    this.pipeline = Objects.requireNonNull(pipeline, "'pipeline' cannot be null.");
    return this;
}
/**
 * Sets the {@link TokenCredential} used to authenticate HTTP requests.
 *
 * @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests.
 * @return Updated {@link CallingServerClientBuilder} object.
 * @throws NullPointerException If {@code tokenCredential} is null.
 */
public CallingServerClientBuilder credential(TokenCredential tokenCredential) {
    Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null.");
    this.tokenCredential = tokenCredential;
    return this;
}
/**
 * Sets the {@link AzureKeyCredential} used to authenticate HTTP requests.
 *
 * @param keyCredential The {@link AzureKeyCredential} used to authenticate HTTP requests.
 * @return Updated {@link CallingServerClientBuilder} object.
 * @throws NullPointerException If {@code keyCredential} is null.
 */
CallingServerClientBuilder credential(AzureKeyCredential keyCredential) {
    Objects.requireNonNull(keyCredential, "'keyCredential' cannot be null.");
    this.azureKeyCredential = keyCredential;
    return this;
}
/**
 * Sets the connection string, from which the endpoint and access key are
 * derived when the client is built.
 *
 * @param connectionString connection string to set.
 * @return Updated {@link CallingServerClientBuilder} object.
 */
public CallingServerClientBuilder connectionString(String connectionString) {
    this.connectionString = Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
    return this;
}
/**
 * Sets the retry policy applied when the pipeline is built.
 *
 * @param retryPolicy object to be applied
 * @return Updated {@link CallingServerClientBuilder} object.
 */
public CallingServerClientBuilder retryPolicy(RetryPolicy retryPolicy) {
    Objects.requireNonNull(retryPolicy, "'retryPolicy' cannot be null.");
    this.retryPolicy = retryPolicy;
    return this;
}
/**
 * Sets the configuration store used to read environment values while the
 * client is being built.
 *
 * @param configuration Configuration store used to retrieve environment configurations.
 * @return Updated {@link CallingServerClientBuilder} object.
 */
public CallingServerClientBuilder configuration(Configuration configuration) {
    Objects.requireNonNull(configuration, "'configuration' cannot be null.");
    this.configuration = configuration;
    return this;
}
/**
 * Sets the {@link HttpLogOptions} applied when sending and receiving HTTP
 * requests/responses.
 *
 * @param logOptions The logging configuration to use.
 * @return The updated {@link CallingServerClientBuilder} object.
 */
public CallingServerClientBuilder httpLogOptions(HttpLogOptions logOptions) {
    Objects.requireNonNull(logOptions, "'logOptions' cannot be null.");
    this.httpLogOptions = logOptions;
    return this;
}
/**
 * Sets the {@link CallingServerServiceVersion} that is used when making API requests.
 * <p>
 * If a service version is not provided, the service version that will be used will be the latest known service
 * version based on the version of the client library being used. If no service version is specified, updating to a
 * newer version of the client library will have the result of potentially moving to a newer service version.
 * <p>
 * Targeting a specific service version may also mean that the service will return an error for newer APIs.
 *
 * @param version {@link CallingServerServiceVersion} of the service to be used when making requests.
 * @return Updated CallingServerClientBuilder object
 */
public CallingServerClientBuilder serviceVersion(CallingServerServiceVersion version) {
// NOTE(review): 'version' is accepted for API-surface compatibility but is
// intentionally unused here — the builder always targets its built-in service version.
return this;
}
/**
 * Sets the transport {@link HttpClient}; ignored when an explicit pipeline is supplied.
 *
 * @param httpClient httpClient to use.
 * @return Updated {@link CallingServerClientBuilder} object.
 */
public CallingServerClientBuilder httpClient(HttpClient httpClient) {
    Objects.requireNonNull(httpClient, "'httpClient' cannot be null.");
    this.httpClient = httpClient;
    return this;
}
/**
 * Adds a custom {@link HttpPipelinePolicy}, applied after the auth, user agent,
 * retry and cookie policies.
 *
 * @param customPolicy HttpPipelinePolicy object to be applied.
 * @return Updated {@link CallingServerClientBuilder} object.
 */
public CallingServerClientBuilder addPolicy(HttpPipelinePolicy customPolicy) {
    Objects.requireNonNull(customPolicy, "'customPolicy' cannot be null.");
    this.customPolicies.add(customPolicy);
    return this;
}
/**
 * Creates the asynchronous client. The underlying pipeline applies the
 * authentication policy, UserAgentPolicy, RetryPolicy and CookiePolicy,
 * followed by any additional custom policies.
 *
 * @return a new {@link CallingServerAsyncClient}.
 */
public CallingServerAsyncClient buildAsyncClient() {
    AzureCommunicationCallingServerServiceImpl serviceImpl = createServiceImpl();
    return new CallingServerAsyncClient(serviceImpl);
}
/**
 * Creates the synchronous client, which wraps a freshly built asynchronous client.
 *
 * @return a new {@link CallingServerClient}.
 */
public CallingServerClient buildClient() {
    CallingServerAsyncClient asyncClient = buildAsyncClient();
    return new CallingServerClient(asyncClient);
}
/**
 * Validates the configured authentication sources and builds the generated
 * service implementation with either the supplied or a newly built pipeline.
 *
 * @return the configured {@link AzureCommunicationCallingServerServiceImpl}.
 * @throws IllegalArgumentException if more than one authentication source is configured.
 * @throws NullPointerException if no endpoint (or, without a pipeline, no httpClient) is set.
 */
private AzureCommunicationCallingServerServiceImpl createServiceImpl() {
    boolean isConnectionStringSet = connectionString != null && !connectionString.trim().isEmpty();
    boolean isEndpointSet = endpoint != null && !endpoint.trim().isEmpty();
    boolean isAzureKeyCredentialSet = azureKeyCredential != null;
    boolean isTokenCredentialSet = tokenCredential != null;
    // A connection string already carries endpoint + key, so it is mutually
    // exclusive with every other authentication-related setting.
    if (isConnectionStringSet && isEndpointSet) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "Both 'connectionString' and 'endpoint' are set. Just one may be used."));
    }
    if (isConnectionStringSet && isAzureKeyCredentialSet) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "Both 'connectionString' and 'keyCredential' are set. Just one may be used."));
    }
    if (isConnectionStringSet && isTokenCredentialSet) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "Both 'connectionString' and 'tokenCredential' are set. Just one may be used."));
    }
    if (isAzureKeyCredentialSet && isTokenCredentialSet) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "Both 'tokenCredential' and 'keyCredential' are set. Just one may be used."));
    }
    if (isConnectionStringSet) {
        CommunicationConnectionString connectionStringObject = new CommunicationConnectionString(connectionString);
        // Fix: renamed from 'endpoint' so the local no longer shadows the builder field.
        String parsedEndpoint = connectionStringObject.getEndpoint();
        String accessKey = connectionStringObject.getAccessKey();
        endpoint(parsedEndpoint).credential(new AzureKeyCredential(accessKey));
    }
    Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
    if (isTokenCredentialSet) {
        // The bearer-token pipeline needs the host name for its host-header policy.
        hostName = getHostNameFromEndpoint();
    }
    if (pipeline == null) {
        Objects.requireNonNull(httpClient, "'httpClient' cannot be null when no pipeline is supplied.");
    }
    HttpPipeline builderPipeline = (pipeline != null) ? pipeline : createHttpPipeline(httpClient);
    AzureCommunicationCallingServerServiceImplBuilder clientBuilder = new AzureCommunicationCallingServerServiceImplBuilder();
    clientBuilder.endpoint(endpoint).pipeline(builderPipeline);
    return clientBuilder.buildClient();
}
/**
 * Sets client-related options such as the user-agent application id and headers.
 * A null value is permitted; defaults are applied when the pipeline is built.
 *
 * @param clientOptions object to be applied.
 * @return Updated {@link CallingServerClientBuilder} object.
 */
public CallingServerClientBuilder clientOptions(ClientOptions clientOptions) {
    this.clientOptions = clientOptions; // intentionally no null check — createHttpPipeline handles null
    return this;
}
// Builds the authentication-related pipeline policies: bearer token + host
// header for token credentials, or HMAC request signing for key credentials.
private List<HttpPipelinePolicy> createHttpPipelineAuthPolicies() {
// Defensive re-check; createServiceImpl() has already validated exclusivity.
if (tokenCredential != null && azureKeyCredential != null) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Both 'credential' and 'keyCredential' are set. Just one may be used."));
}
List<HttpPipelinePolicy> pipelinePolicies = new ArrayList<>();
if (tokenCredential != null) {
pipelinePolicies.add(new BearerTokenAuthenticationPolicy(tokenCredential,
// NOTE(review): the scope literal below is truncated in this extract
// ("https:" ...) — confirm the full AAD scope URL against the original source.
"https:
pipelinePolicies.add(new TokenCredentialAddHostHeaderPolicy(hostName));
} else if (azureKeyCredential != null) {
pipelinePolicies.add(new HmacAuthenticationPolicy(azureKeyCredential));
} else {
throw logger.logExceptionAsError(
new IllegalArgumentException("Missing credential information while building a client."));
}
return pipelinePolicies;
}
/**
 * Builds the HTTP pipeline: user agent, request id, retry, redirect,
 * authentication, cookies, custom policies and finally logging.
 *
 * @param httpClient transport to attach to the pipeline.
 * @return the configured {@link HttpPipeline}, or the user-supplied one verbatim.
 */
private HttpPipeline createHttpPipeline(HttpClient httpClient) {
    if (pipeline != null) {
        return pipeline;
    }
    List<HttpPipelinePolicy> policyList = new ArrayList<>();
    ClientOptions buildClientOptions = (clientOptions == null) ? new ClientOptions() : clientOptions;
    HttpLogOptions buildLogOptions = (httpLogOptions == null) ? new HttpLogOptions() : httpLogOptions;
    // ClientOptions wins over HttpLogOptions when both carry an application id.
    String applicationId = null;
    if (!CoreUtils.isNullOrEmpty(buildClientOptions.getApplicationId())) {
        applicationId = buildClientOptions.getApplicationId();
    } else if (!CoreUtils.isNullOrEmpty(buildLogOptions.getApplicationId())) {
        applicationId = buildLogOptions.getApplicationId();
    }
    String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
    String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
    policyList.add(new UserAgentPolicy(applicationId, clientName, clientVersion, configuration));
    policyList.add(new RequestIdPolicy());
    policyList.add((retryPolicy == null) ? new RetryPolicy() : retryPolicy);
    policyList.add(new RedirectPolicy());
    policyList.addAll(createHttpPipelineAuthPolicies());
    policyList.add(new CookiePolicy());
    if (!customPolicies.isEmpty()) {
        policyList.addAll(customPolicies);
    }
    // Fix: reuse the already-normalized buildLogOptions instead of calling
    // getHttpLogOptions() a second time (same non-null guarantee, one source of truth).
    policyList.add(new HttpLoggingPolicy(buildLogOptions));
    return new HttpPipelineBuilder().policies(policyList.toArray(new HttpPipelinePolicy[0])).httpClient(httpClient)
        .build();
}
// Lazily creates default log options so callers always receive a non-null value.
private HttpLogOptions getHttpLogOptions() {
    HttpLogOptions options = httpLogOptions;
    if (options == null) {
        options = new HttpLogOptions();
        httpLogOptions = options;
    }
    return options;
}
} | class CallingServerClientBuilder {
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private static final String APP_CONFIG_PROPERTIES = "azure-communication-callingserver.properties";
private final ClientLogger logger = new ClientLogger(CallingServerClientBuilder.class);
private String connectionString;
private String endpoint;
private String hostName;
private AzureKeyCredential azureKeyCredential;
private TokenCredential tokenCredential;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions = new HttpLogOptions();
private HttpPipeline pipeline;
private Configuration configuration;
private final Map<String, String> properties = CoreUtils.getProperties(APP_CONFIG_PROPERTIES);
private final List<HttpPipelinePolicy> customPolicies = new ArrayList<>();
private ClientOptions clientOptions;
private RetryPolicy retryPolicy;
/**
* Set endpoint of the service.
*
* @param endpoint url of the service.
* @return CallingServerClientBuilder object.
*/
public CallingServerClientBuilder endpoint(String endpoint) {
this.endpoint = Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
return this;
}
/**
* Set endpoint of the service.
*
* @param pipeline HttpPipeline to use, if a pipeline is not supplied, the
* credential and httpClient fields must be set.
* @return CallingServerClientBuilder object.
*/
public CallingServerClientBuilder pipeline(HttpPipeline pipeline) {
this.pipeline = Objects.requireNonNull(pipeline, "'pipeline' cannot be null.");
return this;
}
/**
* Sets the {@link TokenCredential} used to authenticate HTTP requests.
*
* @param tokenCredential {@link TokenCredential} used to authenticate HTTP
* requests.
* @return Updated {@link CallingServerClientBuilder} object.
* @throws NullPointerException If {@code tokenCredential} is null.
*/
public CallingServerClientBuilder credential(TokenCredential tokenCredential) {
this.tokenCredential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null.");
return this;
}
/**
* Sets the {@link AzureKeyCredential} used to authenticate HTTP requests.
*
* @param keyCredential The {@link AzureKeyCredential} used to authenticate HTTP
* requests.
* @return Updated {@link CallingServerClientBuilder} object.
* @throws NullPointerException If {@code keyCredential} is null.
*/
CallingServerClientBuilder credential(AzureKeyCredential keyCredential) {
this.azureKeyCredential = Objects.requireNonNull(keyCredential, "'keyCredential' cannot be null.");
return this;
}
/**
* Set connectionString to use.
*
* @param connectionString connection string to set.
* @return Updated {@link CallingServerClientBuilder} object.
*/
public CallingServerClientBuilder connectionString(String connectionString) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
this.connectionString = connectionString;
return this;
}
/**
* Sets the retry policy to use (using the RetryPolicy type).
*
* @param retryPolicy object to be applied
* @return Updated {@link CallingServerClientBuilder} object.
*/
public CallingServerClientBuilder retryPolicy(RetryPolicy retryPolicy) {
this.retryPolicy = Objects.requireNonNull(retryPolicy, "'retryPolicy' cannot be null.");
return this;
}
/**
* Sets the configuration object used to retrieve environment configuration
* values during building of the client.
*
* @param configuration Configuration store used to retrieve environment
* configurations.
* @return Updated {@link CallingServerClientBuilder} object.
*/
public CallingServerClientBuilder configuration(Configuration configuration) {
this.configuration = Objects.requireNonNull(configuration, "'configuration' cannot be null.");
return this;
}
/**
* Sets the {@link HttpLogOptions} for service requests.
*
* @param logOptions The logging configuration to use when sending and receiving
* HTTP requests/responses.
* @return The updated {@link CallingServerClientBuilder} object.
*/
public CallingServerClientBuilder httpLogOptions(HttpLogOptions logOptions) {
this.httpLogOptions = Objects.requireNonNull(logOptions, "'logOptions' cannot be null.");
return this;
}
/**
* Sets the {@link CallingServerServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version of the client library will have the result of potentially moving to a newer service version.
* <p>
* Targeting a specific service version may also mean that the service will return an error for newer APIs.
*
* @param version {@link CallingServerServiceVersion} of the service to be used when making requests.
* @return Updated CallingServerClientBuilder object
*/
public CallingServerClientBuilder serviceVersion(CallingServerServiceVersion version) {
return this;
}
/**
* Set httpClient to use
*
* @param httpClient httpClient to use, overridden by the pipeline field.
* @return Updated {@link CallingServerClientBuilder} object.
*/
public CallingServerClientBuilder httpClient(HttpClient httpClient) {
this.httpClient = Objects.requireNonNull(httpClient, "'httpClient' cannot be null.");
return this;
}
/**
* Apply additional HttpPipelinePolicy
*
* @param customPolicy HttpPipelinePolicy object to be applied after
* AzureKeyCredentialPolicy, UserAgentPolicy, RetryPolicy, and CookiePolicy.
* @return Updated {@link CallingServerClientBuilder} object.
*/
public CallingServerClientBuilder addPolicy(HttpPipelinePolicy customPolicy) {
this.customPolicies.add(Objects.requireNonNull(customPolicy, "'customPolicy' cannot be null."));
return this;
}
/**
* Create asynchronous client applying HMACAuthenticationPolicy,
* UserAgentPolicy, RetryPolicy, and CookiePolicy. Additional HttpPolicies
* specified by additionalPolicies will be applied after them
*
* @return The updated {@link CallingServerClientBuilder} object.
*/
public CallingServerAsyncClient buildAsyncClient() {
return new CallingServerAsyncClient(createServiceImpl());
}
/**
* Create synchronous client applying HmacAuthenticationPolicy, UserAgentPolicy,
* RetryPolicy, and CookiePolicy. Additional HttpPolicies specified by
* additionalPolicies will be applied after them.
*
* @return Updated {@link CallingServerClientBuilder} object.
*/
public CallingServerClient buildClient() {
return new CallingServerClient(buildAsyncClient());
}
private AzureCommunicationCallingServerServiceImpl createServiceImpl() {
boolean isConnectionStringSet = connectionString != null && !connectionString.trim().isEmpty();
boolean isEndpointSet = endpoint != null && !endpoint.trim().isEmpty();
boolean isAzureKeyCredentialSet = azureKeyCredential != null;
boolean isTokenCredentialSet = tokenCredential != null;
if (isConnectionStringSet && isEndpointSet) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Both 'connectionString' and 'endpoint' are set. Just one may be used."));
}
if (isConnectionStringSet && isAzureKeyCredentialSet) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Both 'connectionString' and 'keyCredential' are set. Just one may be used."));
}
if (isConnectionStringSet && isTokenCredentialSet) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Both 'connectionString' and 'tokenCredential' are set. Just one may be used."));
}
if (isAzureKeyCredentialSet && isTokenCredentialSet) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Both 'tokenCredential' and 'keyCredential' are set. Just one may be used."));
}
if (isConnectionStringSet) {
CommunicationConnectionString connectionStringObject = new CommunicationConnectionString(connectionString);
String endpoint = connectionStringObject.getEndpoint();
String accessKey = connectionStringObject.getAccessKey();
endpoint(endpoint).credential(new AzureKeyCredential(accessKey));
}
Objects.requireNonNull(endpoint);
if (isTokenCredentialSet) {
try {
hostName = getHostNameFromEndpoint();
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new RuntimeException(e.getMessage()));
}
}
if (pipeline == null) {
Objects.requireNonNull(httpClient);
}
HttpPipeline builderPipeline = pipeline;
if (pipeline == null) {
builderPipeline = createHttpPipeline(httpClient);
}
AzureCommunicationCallingServerServiceImplBuilder clientBuilder = new AzureCommunicationCallingServerServiceImplBuilder();
clientBuilder.endpoint(endpoint).pipeline(builderPipeline);
return clientBuilder.buildClient();
}
/**
* Allows the user to set a variety of client-related options, such as
* user-agent string, headers, etc.
*
* @param clientOptions object to be applied.
* @return Updated {@link CallingServerClientBuilder} object.
*/
public CallingServerClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
private List<HttpPipelinePolicy> createHttpPipelineAuthPolicies() {
if (tokenCredential != null && azureKeyCredential != null) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Both 'credential' and 'keyCredential' are set. Just one may be used."));
}
List<HttpPipelinePolicy> pipelinePolicies = new ArrayList<>();
if (tokenCredential != null) {
pipelinePolicies.add(new BearerTokenAuthenticationPolicy(tokenCredential,
"https:
pipelinePolicies.add(new TokenCredentialAddHostHeaderPolicy(hostName));
} else if (azureKeyCredential != null) {
pipelinePolicies.add(new HmacAuthenticationPolicy(azureKeyCredential));
} else {
throw logger.logExceptionAsError(
new IllegalArgumentException("Missing credential information while building a client."));
}
return pipelinePolicies;
}
private HttpPipeline createHttpPipeline(HttpClient httpClient) {
if (pipeline != null) {
return pipeline;
}
List<HttpPipelinePolicy> policyList = new ArrayList<>();
ClientOptions buildClientOptions = (clientOptions == null) ? new ClientOptions() : clientOptions;
HttpLogOptions buildLogOptions = (httpLogOptions == null) ? new HttpLogOptions() : httpLogOptions;
String applicationId = null;
if (!CoreUtils.isNullOrEmpty(buildClientOptions.getApplicationId())) {
applicationId = buildClientOptions.getApplicationId();
} else if (!CoreUtils.isNullOrEmpty(buildLogOptions.getApplicationId())) {
applicationId = buildLogOptions.getApplicationId();
}
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
policyList.add(new UserAgentPolicy(applicationId, clientName, clientVersion, configuration));
policyList.add(new RequestIdPolicy());
policyList.add((retryPolicy == null) ? new RetryPolicy() : retryPolicy);
policyList.add(new RedirectPolicy());
policyList.addAll(createHttpPipelineAuthPolicies());
policyList.add(new CookiePolicy());
if (!customPolicies.isEmpty()) {
policyList.addAll(customPolicies);
}
policyList.add(new HttpLoggingPolicy(getHttpLogOptions()));
return new HttpPipelineBuilder().policies(policyList.toArray(new HttpPipelinePolicy[0])).httpClient(httpClient)
.build();
}
private HttpLogOptions getHttpLogOptions() {
if (httpLogOptions == null) {
httpLogOptions = new HttpLogOptions();
}
return httpLogOptions;
}
} |
Could probably use auto-(un)boxing of bools for these if desired, i.e. `if (alreadySending && ...)`, `isSending.set(true)`. etc | private void sendNextInSequence(long seqId) {
Message msg = null;
synchronized (this) {
Queue<Message> queue = seqMap.get(seqId);
if (queue == null || queue.isEmpty()) {
seqMap.remove(seqId);
} else {
msg = queue.remove();
}
}
if (msg != null) {
Boolean alreadySending = isSending.get();
if ((alreadySending == Boolean.TRUE) && (msn != null)) {
msn.enqueue(new SequencedSendTask(msg));
} else {
isSending.set(Boolean.TRUE);
sequencedSend(msg);
}
isSending.set(Boolean.FALSE);
}
} | if ((alreadySending == Boolean.TRUE) && (msn != null)) { | private void sendNextInSequence(long seqId) {
Message msg = null;
synchronized (this) {
Queue<Message> queue = seqMap.get(seqId);
if (queue == null || queue.isEmpty()) {
seqMap.remove(seqId);
} else {
msg = queue.remove();
}
}
if (msg != null) {
Boolean alreadySending = isSending.get();
if (alreadySending && (msn != null)) {
msn.enqueue(new SequencedSendTask(msg));
} else {
isSending.set(Boolean.TRUE);
sequencedSend(msg);
}
isSending.set(Boolean.FALSE);
}
} | class SequencedSendTask implements Messenger.Task {
private final Message msg;
SequencedSendTask(Message msg) { this.msg = msg; }
@Override public void run() { sequencedSend(msg); }
@Override public void destroy() { msg.discard(); }
} | class SequencedSendTask implements Messenger.Task {
private final Message msg;
SequencedSendTask(Message msg) { this.msg = msg; }
@Override public void run() { sequencedSend(msg); }
@Override public void destroy() { msg.discard(); }
} |
If someone however augment identity inside non-JAXRS HTTP Perms, then there of course is different event. Because it is different authorization. This is authorization. | Uni<SecurityIdentity> getPermissionCheck(ResteasyReactiveRequestContext requestContext, SecurityIdentity identity) {
final RoutingContext routingContext = requestContext.unwrap(RoutingContext.class);
if (routingContext == null) {
throw new IllegalStateException(
"HTTP Security policy applied only on Quarkus REST cannot be run as 'RoutingContext' is null");
}
record SecurityCheckWithIdentity(SecurityIdentity identity, HttpSecurityPolicy.CheckResult checkResult) {
}
return jaxRsPathMatchingPolicy
.checkPermission(routingContext, identity == null ? getDeferredIdentity() : Uni.createFrom().item(identity),
authorizationRequestContext)
.flatMap(new Function<HttpSecurityPolicy.CheckResult, Uni<? extends SecurityCheckWithIdentity>>() {
@Override
public Uni<SecurityCheckWithIdentity> apply(HttpSecurityPolicy.CheckResult checkResult) {
if (identity != null) {
return Uni.createFrom().item(new SecurityCheckWithIdentity(identity, checkResult));
}
if (checkResult.isPermitted() && checkResult.getAugmentedIdentity() == null) {
return Uni.createFrom().item(new SecurityCheckWithIdentity(null, checkResult));
}
return getDeferredIdentity().map(new Function<SecurityIdentity, SecurityCheckWithIdentity>() {
@Override
public SecurityCheckWithIdentity apply(SecurityIdentity identity1) {
return new SecurityCheckWithIdentity(identity1, checkResult);
}
});
}
})
.map(new Function<SecurityCheckWithIdentity, SecurityIdentity>() {
@Override
public SecurityIdentity apply(SecurityCheckWithIdentity checkWithIdentity) {
final HttpSecurityPolicy.CheckResult checkResult = checkWithIdentity.checkResult();
final SecurityIdentity newIdentity;
if (checkResult.getAugmentedIdentity() == null) {
newIdentity = checkWithIdentity.identity();
} else if (checkResult.getAugmentedIdentity() != checkWithIdentity.identity()) {
newIdentity = checkResult.getAugmentedIdentity();
QuarkusHttpUser.setIdentity(newIdentity, routingContext);
identityAssociation.get().setIdentity(newIdentity);
} else {
newIdentity = checkResult.getAugmentedIdentity();
}
if (checkResult.isPermitted()) {
if (eventHelper.fireEventOnSuccess()) {
eventHelper.fireSuccessEvent(new AuthorizationSuccessEvent(newIdentity,
AbstractPathMatchingHttpSecurityPolicy.class.getName(),
Map.of(RoutingContext.class.getName(), routingContext,
AuthorizationSuccessEvent.SECURITY_IDENTITY_AUGMENTED,
isIdentityAugmented(checkWithIdentity, checkResult))));
}
return newIdentity;
}
final RuntimeException exception;
if (newIdentity.isAnonymous()) {
exception = new UnauthorizedException();
} else {
exception = new ForbiddenException();
}
if (eventHelper.fireEventOnFailure()) {
eventHelper.fireFailureEvent(new AuthorizationFailureEvent(newIdentity, exception,
AbstractPathMatchingHttpSecurityPolicy.class.getName(),
Map.of(RoutingContext.class.getName(), routingContext,
AuthorizationFailureEvent.SECURITY_IDENTITY_AUGMENTED,
isIdentityAugmented(checkWithIdentity, checkResult))));
}
throw exception;
}
private static boolean isIdentityAugmented(SecurityCheckWithIdentity checkWithIdentity,
HttpSecurityPolicy.CheckResult checkResult) {
return checkResult.getAugmentedIdentity() != null && !checkResult.getAugmentedIdentity().isAnonymous()
&& checkResult.getAugmentedIdentity() != checkWithIdentity.identity();
}
});
} | final RuntimeException exception; | Uni<SecurityIdentity> getPermissionCheck(ResteasyReactiveRequestContext requestContext, SecurityIdentity identity) {
final RoutingContext routingContext = requestContext.unwrap(RoutingContext.class);
if (routingContext == null) {
throw new IllegalStateException(
"HTTP Security policy applied only on Quarkus REST cannot be run as 'RoutingContext' is null");
}
record SecurityCheckWithIdentity(SecurityIdentity identity, HttpSecurityPolicy.CheckResult checkResult) {
}
return jaxRsPathMatchingPolicy
.checkPermission(routingContext, identity == null ? getDeferredIdentity() : Uni.createFrom().item(identity),
authorizationRequestContext)
.flatMap(new Function<HttpSecurityPolicy.CheckResult, Uni<? extends SecurityCheckWithIdentity>>() {
@Override
public Uni<SecurityCheckWithIdentity> apply(HttpSecurityPolicy.CheckResult checkResult) {
if (identity != null) {
return Uni.createFrom().item(new SecurityCheckWithIdentity(identity, checkResult));
}
if (checkResult.isPermitted() && checkResult.getAugmentedIdentity() == null) {
return Uni.createFrom().item(new SecurityCheckWithIdentity(null, checkResult));
}
return getDeferredIdentity().map(new Function<SecurityIdentity, SecurityCheckWithIdentity>() {
@Override
public SecurityCheckWithIdentity apply(SecurityIdentity identity1) {
return new SecurityCheckWithIdentity(identity1, checkResult);
}
});
}
})
.map(new Function<SecurityCheckWithIdentity, SecurityIdentity>() {
@Override
public SecurityIdentity apply(SecurityCheckWithIdentity checkWithIdentity) {
final HttpSecurityPolicy.CheckResult checkResult = checkWithIdentity.checkResult();
final SecurityIdentity newIdentity;
if (checkResult.getAugmentedIdentity() == null) {
newIdentity = checkWithIdentity.identity();
} else if (checkResult.getAugmentedIdentity() != checkWithIdentity.identity()) {
newIdentity = checkResult.getAugmentedIdentity();
QuarkusHttpUser.setIdentity(newIdentity, routingContext);
identityAssociation.get().setIdentity(newIdentity);
} else {
newIdentity = checkResult.getAugmentedIdentity();
}
if (checkResult.isPermitted()) {
if (eventHelper.fireEventOnSuccess()) {
eventHelper.fireSuccessEvent(new AuthorizationSuccessEvent(newIdentity,
AbstractPathMatchingHttpSecurityPolicy.class.getName(),
Map.of(RoutingContext.class.getName(), routingContext)));
}
return newIdentity;
}
final RuntimeException exception;
if (newIdentity.isAnonymous()) {
exception = new UnauthorizedException();
} else {
exception = new ForbiddenException();
}
if (eventHelper.fireEventOnFailure()) {
eventHelper.fireFailureEvent(new AuthorizationFailureEvent(newIdentity, exception,
AbstractPathMatchingHttpSecurityPolicy.class.getName(),
Map.of(RoutingContext.class.getName(), routingContext)));
}
throw exception;
}
});
} | class EagerSecurityContext {
static EagerSecurityContext instance = null;
private final HttpSecurityPolicy.AuthorizationRequestContext authorizationRequestContext;
final AbstractPathMatchingHttpSecurityPolicy jaxRsPathMatchingPolicy;
final SecurityEventHelper<AuthorizationSuccessEvent, AuthorizationFailureEvent> eventHelper;
final InjectableInstance<CurrentIdentityAssociation> identityAssociation;
final AuthorizationController authorizationController;
final SecurityCheckStorage securityCheckStorage;
final boolean doNotRunPermissionSecurityCheck;
EagerSecurityContext(Event<AuthorizationFailureEvent> authorizationFailureEvent,
@ConfigProperty(name = "quarkus.security.events.enabled") boolean securityEventsEnabled,
Event<AuthorizationSuccessEvent> authorizationSuccessEvent, BeanManager beanManager,
InjectableInstance<CurrentIdentityAssociation> identityAssociation, AuthorizationController authorizationController,
SecurityCheckStorage securityCheckStorage, HttpConfiguration httpConfig, BlockingSecurityExecutor blockingExecutor,
HttpBuildTimeConfig buildTimeConfig, Instance<HttpSecurityPolicy> installedPolicies) {
this.identityAssociation = identityAssociation;
this.authorizationController = authorizationController;
this.securityCheckStorage = securityCheckStorage;
this.eventHelper = new SecurityEventHelper<>(authorizationSuccessEvent, authorizationFailureEvent,
AUTHORIZATION_SUCCESS, AUTHORIZATION_FAILURE, beanManager, securityEventsEnabled);
var jaxRsPathMatchingPolicy = new AbstractPathMatchingHttpSecurityPolicy(httpConfig.auth.permissions,
httpConfig.auth.rolePolicy, buildTimeConfig.rootPath, installedPolicies, JAXRS);
if (jaxRsPathMatchingPolicy.hasNoPermissions()) {
this.jaxRsPathMatchingPolicy = null;
this.authorizationRequestContext = null;
this.doNotRunPermissionSecurityCheck = true;
} else {
this.jaxRsPathMatchingPolicy = jaxRsPathMatchingPolicy;
this.authorizationRequestContext = new DefaultAuthorizationRequestContext(blockingExecutor);
this.doNotRunPermissionSecurityCheck = false;
}
}
void initSingleton(@Observes StartupEvent event) {
instance = this;
}
void destroySingleton(@Observes ShutdownEvent event) {
instance = null;
}
Uni<SecurityIdentity> getDeferredIdentity() {
return Uni.createFrom().deferred(new Supplier<Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> get() {
return EagerSecurityContext.instance.identityAssociation.get().getDeferredIdentity();
}
});
}
static MethodDescription lazyMethodToMethodDescription(ResteasyReactiveResourceInfo lazyMethod) {
return new MethodDescription(lazyMethod.getActualDeclaringClassName(),
lazyMethod.getName(), MethodDescription.typesAsStrings(lazyMethod.getParameterTypes()));
}
} | class EagerSecurityContext {
static EagerSecurityContext instance = null;
private final HttpSecurityPolicy.AuthorizationRequestContext authorizationRequestContext;
final AbstractPathMatchingHttpSecurityPolicy jaxRsPathMatchingPolicy;
final SecurityEventHelper<AuthorizationSuccessEvent, AuthorizationFailureEvent> eventHelper;
final InjectableInstance<CurrentIdentityAssociation> identityAssociation;
final EagerSecurityInterceptorStorage interceptorStorage;
final AuthorizationController authorizationController;
final SecurityCheckStorage securityCheckStorage;
final boolean doNotRunPermissionSecurityCheck;
final boolean isProactiveAuthDisabled;
EagerSecurityContext(Event<AuthorizationFailureEvent> authorizationFailureEvent,
@ConfigProperty(name = "quarkus.security.events.enabled") boolean securityEventsEnabled,
Event<AuthorizationSuccessEvent> authorizationSuccessEvent, BeanManager beanManager,
InjectableInstance<CurrentIdentityAssociation> identityAssociation, AuthorizationController authorizationController,
SecurityCheckStorage securityCheckStorage, HttpConfiguration httpConfig, BlockingSecurityExecutor blockingExecutor,
HttpBuildTimeConfig buildTimeConfig, Instance<HttpSecurityPolicy> installedPolicies) {
var interceptorStorageHandle = Arc.container().instance(EagerSecurityInterceptorStorage.class);
this.interceptorStorage = interceptorStorageHandle.isAvailable() ? interceptorStorageHandle.get() : null;
this.isProactiveAuthDisabled = !buildTimeConfig.auth.proactive;
this.identityAssociation = identityAssociation;
this.authorizationController = authorizationController;
this.securityCheckStorage = securityCheckStorage;
this.eventHelper = new SecurityEventHelper<>(authorizationSuccessEvent, authorizationFailureEvent,
AUTHORIZATION_SUCCESS, AUTHORIZATION_FAILURE, beanManager, securityEventsEnabled);
var jaxRsPathMatchingPolicy = new AbstractPathMatchingHttpSecurityPolicy(httpConfig.auth.permissions,
httpConfig.auth.rolePolicy, buildTimeConfig.rootPath, installedPolicies, JAXRS);
if (jaxRsPathMatchingPolicy.hasNoPermissions()) {
this.jaxRsPathMatchingPolicy = null;
this.authorizationRequestContext = null;
this.doNotRunPermissionSecurityCheck = true;
} else {
this.jaxRsPathMatchingPolicy = jaxRsPathMatchingPolicy;
this.authorizationRequestContext = new DefaultAuthorizationRequestContext(blockingExecutor);
this.doNotRunPermissionSecurityCheck = false;
}
}
void initSingleton(@Observes StartupEvent event) {
instance = this;
}
void destroySingleton(@Observes ShutdownEvent event) {
instance = null;
}
Uni<SecurityIdentity> getDeferredIdentity() {
return Uni.createFrom().deferred(new Supplier<Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> get() {
return EagerSecurityContext.instance.identityAssociation.get().getDeferredIdentity();
}
});
}
static MethodDescription lazyMethodToMethodDescription(ResteasyReactiveResourceInfo lazyMethod) {
return new MethodDescription(lazyMethod.getActualDeclaringClassName(),
lazyMethod.getName(), MethodDescription.typesAsStrings(lazyMethod.getParameterTypes()));
}
} |
File reading will have problems when packaged into jar | public byte[] toConfigurationBytes() {
try (InputStream stream = inClasspath ? ShardingSphereDriverURL.class.getResourceAsStream("/" + file) : Files.newInputStream(new File(file).toPath())) {
LineProcessor<byte[]> lineProcessor = new LineProcessor<byte[]>() {
private final StringBuilder builder = new StringBuilder();
@Override
public boolean processLine(final String line) {
if (line.startsWith("
return true;
}
builder.append(line);
return true;
}
@Override
public byte[] getResult() {
return builder.toString().getBytes(StandardCharsets.UTF_8);
}
};
final byte[] result = CharStreams.readLines(new InputStreamReader(stream, Charsets.UTF_8), lineProcessor);
Objects.requireNonNull(stream, String.format("Can not find configuration file `%s`.", file)).read(result);
return result;
}
} | public byte[] toConfigurationBytes() {
try (InputStream stream = inClasspath ? ShardingSphereDriverURL.class.getResourceAsStream("/" + file) : Files.newInputStream(new File(file).toPath())) {
Objects.requireNonNull(stream, String.format("Can not find configuration file `%s`.", file));
BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
StringBuilder builder = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
if (!line.startsWith("
builder.append(line);
}
}
return builder.toString().getBytes(StandardCharsets.UTF_8);
}
} | class ShardingSphereDriverURL {
private static final String CLASSPATH_TYPE = "classpath:";
private final String file;
private final boolean inClasspath;
public ShardingSphereDriverURL(final String url) {
String configuredFile = url.substring("jdbc:shardingsphere:".length(), url.contains("?") ? url.indexOf("?") : url.length());
if (configuredFile.startsWith(CLASSPATH_TYPE)) {
file = configuredFile.substring(CLASSPATH_TYPE.length());
inClasspath = true;
} else {
file = configuredFile;
inClasspath = false;
}
Preconditions.checkArgument(!file.isEmpty(), "Configuration file is required in ShardingSphere driver URL.");
}
/**
* Generate to configuration bytes.
*
* @return generated configuration bytes
*/
@SneakyThrows(IOException.class)
} | class ShardingSphereDriverURL {
private static final String CLASSPATH_TYPE = "classpath:";
private final String file;
private final boolean inClasspath;
public ShardingSphereDriverURL(final String url) {
String configuredFile = url.substring("jdbc:shardingsphere:".length(), url.contains("?") ? url.indexOf("?") : url.length());
if (configuredFile.startsWith(CLASSPATH_TYPE)) {
file = configuredFile.substring(CLASSPATH_TYPE.length());
inClasspath = true;
} else {
file = configuredFile;
inClasspath = false;
}
Preconditions.checkArgument(!file.isEmpty(), "Configuration file is required in ShardingSphere driver URL.");
}
/**
* Generate to configuration bytes.
*
* @return generated configuration bytes
*/
@SneakyThrows(IOException.class)
} |
|
Is this really necessary? I vaguely remember removing this because it was deprecated. | public EntityManagerFactory build() {
try {
final SessionFactoryOptionsBuilder optionsBuilder = metadata.buildSessionFactoryOptionsBuilder();
populate(persistenceUnitName, optionsBuilder, standardServiceRegistry);
return new SessionFactoryImpl(metadata, optionsBuilder.buildOptions(),
metadata.getTypeConfiguration().getMetadataBuildingContext().getBootstrapContext());
} catch (Exception e) {
throw persistenceException("Unable to build Hibernate SessionFactory", e);
}
} | metadata.getTypeConfiguration().getMetadataBuildingContext().getBootstrapContext()); | public EntityManagerFactory build() {
try {
final SessionFactoryOptionsBuilder optionsBuilder = metadata.buildSessionFactoryOptionsBuilder();
populate(persistenceUnitName, optionsBuilder, standardServiceRegistry);
return new SessionFactoryImpl(metadata, optionsBuilder.buildOptions(),
metadata.getTypeConfiguration().getMetadataBuildingContext().getBootstrapContext());
} catch (Exception e) {
throw persistenceException("Unable to build Hibernate SessionFactory", e);
}
} | class FastBootEntityManagerFactoryBuilder implements EntityManagerFactoryBuilder {
protected final PrevalidatedQuarkusMetadata metadata;
protected final String persistenceUnitName;
protected final StandardServiceRegistry standardServiceRegistry;
private final RuntimeSettings runtimeSettings;
private final Object validatorFactory;
private final Object cdiBeanManager;
protected final MultiTenancyStrategy multiTenancyStrategy;
public FastBootEntityManagerFactoryBuilder(
PrevalidatedQuarkusMetadata metadata, String persistenceUnitName,
StandardServiceRegistry standardServiceRegistry, RuntimeSettings runtimeSettings, Object validatorFactory,
Object cdiBeanManager, MultiTenancyStrategy multiTenancyStrategy) {
this.metadata = metadata;
this.persistenceUnitName = persistenceUnitName;
this.standardServiceRegistry = standardServiceRegistry;
this.runtimeSettings = runtimeSettings;
this.validatorFactory = validatorFactory;
this.cdiBeanManager = cdiBeanManager;
this.multiTenancyStrategy = multiTenancyStrategy;
}
@Override
public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory) {
return null;
}
@Override
public EntityManagerFactoryBuilder withDataSource(DataSource dataSource) {
return null;
}
@Override
@Override
public void cancel() {
}
@Override
public void generateSchema() {
try {
SchemaManagementToolCoordinator.process(metadata, standardServiceRegistry, runtimeSettings.getSettings(),
DelayedDropRegistryNotAvailableImpl.INSTANCE);
} catch (Exception e) {
throw persistenceException("Error performing schema management", e);
}
cancel();
}
protected PersistenceException persistenceException(String message, Exception cause) {
Throwable t = cause;
while (t != null) {
if (t instanceof NoSuchAlgorithmException) {
message += "Unable to enable SSL support. You might be in the case where you used the `quarkus.ssl.native=false` configuration"
+ " and SSL was not disabled automatically for your driver.";
break;
}
if (t instanceof CommandAcceptanceException) {
message = "Invalid import file. Make sure your statements are valid and properly separated by a semi-colon.";
break;
}
t = t.getCause();
}
return new PersistenceException(getExceptionHeader() + message, cause);
}
private String getExceptionHeader() {
return "[PersistenceUnit: " + persistenceUnitName + "] ";
}
protected void populate(String persistenceUnitName, SessionFactoryOptionsBuilder options, StandardServiceRegistry ssr) {
final boolean jtaTransactionAccessEnabled = runtimeSettings.getBoolean(
org.hibernate.cfg.AvailableSettings.ALLOW_JTA_TRANSACTION_ACCESS);
if (!jtaTransactionAccessEnabled) {
options.disableJtaTransactionAccess();
}
final boolean allowRefreshDetachedEntity = runtimeSettings.getBoolean(
org.hibernate.cfg.AvailableSettings.ALLOW_REFRESH_DETACHED_ENTITY);
if (!allowRefreshDetachedEntity) {
options.disableRefreshDetachedEntity();
}
final Object legacyObserver = runtimeSettings.get("hibernate.ejb.session_factory_observer");
if (legacyObserver != null) {
throw new HibernateException("Legacy setting being used: 'hibernate.ejb.session_factory_observer' was replaced by '"
+ org.hibernate.cfg.AvailableSettings.SESSION_FACTORY_OBSERVER + "'. Please update your configuration.");
}
final Object sessionFactoryObserverSetting = runtimeSettings
.get(org.hibernate.cfg.AvailableSettings.SESSION_FACTORY_OBSERVER);
if (sessionFactoryObserverSetting != null) {
final StrategySelector strategySelector = ssr.getService(StrategySelector.class);
final SessionFactoryObserver suppliedSessionFactoryObserver = strategySelector
.resolveStrategy(SessionFactoryObserver.class, sessionFactoryObserverSetting);
options.addSessionFactoryObservers(suppliedSessionFactoryObserver);
}
options.addSessionFactoryObservers(new ServiceRegistryCloser());
options.addSessionFactoryObservers(new SessionFactoryObserverForNamedQueryValidation(metadata));
options.addSessionFactoryObservers(new SessionFactoryObserverForSchemaExport(metadata));
options.addSessionFactoryObservers(new QuarkusSessionFactoryObserverForDbVersionCheck());
options.applyEntityNotFoundDelegate(new JpaEntityNotFoundDelegate());
options.enableCollectionInDefaultFetchGroup(true);
if (this.validatorFactory != null) {
options.applyValidatorFactory(validatorFactory);
}
if (this.cdiBeanManager != null) {
options.applyBeanManager(cdiBeanManager);
}
BytecodeProvider bytecodeProvider = ssr.getService(BytecodeProvider.class);
options.addSessionFactoryObservers(new SessionFactoryObserverForBytecodeEnhancer(bytecodeProvider));
if (options.isMultiTenancyEnabled()
|| (multiTenancyStrategy != null && multiTenancyStrategy != MultiTenancyStrategy.NONE)) {
options.applyCurrentTenantIdentifierResolver(new HibernateCurrentTenantIdentifierResolver(persistenceUnitName));
}
InjectableInstance<Interceptor> interceptorInstance = PersistenceUnitUtil.singleExtensionInstanceForPersistenceUnit(
Interceptor.class, persistenceUnitName);
if (!interceptorInstance.isUnsatisfied()) {
options.applyStatelessInterceptorSupplier(interceptorInstance::get);
}
InjectableInstance<StatementInspector> statementInspectorInstance = PersistenceUnitUtil
.singleExtensionInstanceForPersistenceUnit(StatementInspector.class, persistenceUnitName);
if (!statementInspectorInstance.isUnsatisfied()) {
options.applyStatementInspector(statementInspectorInstance.get());
}
}
private static class ServiceRegistryCloser implements SessionFactoryObserver {
@Override
public void sessionFactoryCreated(SessionFactory sessionFactory) {
}
@Override
public void sessionFactoryClosed(SessionFactory sessionFactory) {
SessionFactoryImplementor sfi = ((SessionFactoryImplementor) sessionFactory);
sfi.getServiceRegistry().destroy();
ServiceRegistry basicRegistry = sfi.getServiceRegistry().getParentServiceRegistry();
((ServiceRegistryImplementor) basicRegistry).destroy();
}
}
private static class JpaEntityNotFoundDelegate implements EntityNotFoundDelegate, Serializable {
public void handleEntityNotFound(String entityName, Object id) {
throw new EntityNotFoundException("Unable to find " + entityName + " with id " + id);
}
}
@Override
public ManagedResources getManagedResources() {
throw new IllegalStateException("This method is not available at runtime in Quarkus");
}
@Override
public MetadataImplementor metadata() {
return metadata;
}
} | class FastBootEntityManagerFactoryBuilder implements EntityManagerFactoryBuilder {
protected final PrevalidatedQuarkusMetadata metadata;
protected final String persistenceUnitName;
protected final StandardServiceRegistry standardServiceRegistry;
private final RuntimeSettings runtimeSettings;
private final Object validatorFactory;
private final Object cdiBeanManager;
protected final MultiTenancyStrategy multiTenancyStrategy;
public FastBootEntityManagerFactoryBuilder(
PrevalidatedQuarkusMetadata metadata, String persistenceUnitName,
StandardServiceRegistry standardServiceRegistry, RuntimeSettings runtimeSettings, Object validatorFactory,
Object cdiBeanManager, MultiTenancyStrategy multiTenancyStrategy) {
this.metadata = metadata;
this.persistenceUnitName = persistenceUnitName;
this.standardServiceRegistry = standardServiceRegistry;
this.runtimeSettings = runtimeSettings;
this.validatorFactory = validatorFactory;
this.cdiBeanManager = cdiBeanManager;
this.multiTenancyStrategy = multiTenancyStrategy;
}
@Override
public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory) {
return null;
}
@Override
public EntityManagerFactoryBuilder withDataSource(DataSource dataSource) {
return null;
}
@Override
@Override
public void cancel() {
}
@Override
public void generateSchema() {
try {
SchemaManagementToolCoordinator.process(metadata, standardServiceRegistry, runtimeSettings.getSettings(),
DelayedDropRegistryNotAvailableImpl.INSTANCE);
} catch (Exception e) {
throw persistenceException("Error performing schema management", e);
}
cancel();
}
/**
 * Wraps a failure into a {@link PersistenceException} whose message is prefixed
 * with the persistence-unit name, specializing the message for well-known root
 * causes found anywhere in the cause chain.
 *
 * @param message context describing the operation that failed
 * @param cause the original failure; its cause chain is inspected
 * @return the exception to throw
 */
protected PersistenceException persistenceException(String message, Exception cause) {
    Throwable t = cause;
    while (t != null) {
        if (t instanceof NoSuchAlgorithmException) {
            // FIX: prepend a space so the appended hint does not run into the
            // preceding message ("...schema managementUnable to enable...").
            message += " Unable to enable SSL support. You might be in the case where you used the `quarkus.ssl.native=false` configuration"
                    + " and SSL was not disabled automatically for your driver.";
            break;
        }
        if (t instanceof CommandAcceptanceException) {
            // Import-script failures replace the message entirely.
            message = "Invalid import file. Make sure your statements are valid and properly separated by a semi-colon.";
            break;
        }
        t = t.getCause();
    }
    // Keep the original cause attached so the full stack is preserved.
    return new PersistenceException(getExceptionHeader() + message, cause);
}
/**
 * Builds the "[PersistenceUnit: <name>] " prefix used on exception messages.
 */
private String getExceptionHeader() {
    return String.format("[PersistenceUnit: %s] ", persistenceUnitName);
}
/**
 * Transfers runtime settings and Quarkus-specific integrations onto the
 * SessionFactoryOptionsBuilder before the session factory is created.
 *
 * @param persistenceUnitName persistence unit being configured
 * @param options session factory options to populate
 * @param ssr service registry used to resolve strategies and services
 */
protected void populate(String persistenceUnitName, SessionFactoryOptionsBuilder options, StandardServiceRegistry ssr) {
    // JTA transaction access stays enabled unless explicitly disabled.
    final boolean jtaTransactionAccessEnabled = runtimeSettings.getBoolean(
            org.hibernate.cfg.AvailableSettings.ALLOW_JTA_TRANSACTION_ACCESS);
    if (!jtaTransactionAccessEnabled) {
        options.disableJtaTransactionAccess();
    }
    final boolean allowRefreshDetachedEntity = runtimeSettings.getBoolean(
            org.hibernate.cfg.AvailableSettings.ALLOW_REFRESH_DETACHED_ENTITY);
    if (!allowRefreshDetachedEntity) {
        options.disableRefreshDetachedEntity();
    }
    // Fail fast on the legacy 'hibernate.ejb.*' observer property.
    final Object legacyObserver = runtimeSettings.get("hibernate.ejb.session_factory_observer");
    if (legacyObserver != null) {
        throw new HibernateException("Legacy setting being used: 'hibernate.ejb.session_factory_observer' was replaced by '"
                + org.hibernate.cfg.AvailableSettings.SESSION_FACTORY_OBSERVER + "'. Please update your configuration.");
    }
    // Register a user-supplied SessionFactoryObserver, resolved via the strategy selector.
    final Object sessionFactoryObserverSetting = runtimeSettings
            .get(org.hibernate.cfg.AvailableSettings.SESSION_FACTORY_OBSERVER);
    if (sessionFactoryObserverSetting != null) {
        final StrategySelector strategySelector = ssr.getService(StrategySelector.class);
        final SessionFactoryObserver suppliedSessionFactoryObserver = strategySelector
                .resolveStrategy(SessionFactoryObserver.class, sessionFactoryObserverSetting);
        options.addSessionFactoryObservers(suppliedSessionFactoryObserver);
    }
    // Built-in observers: registry cleanup, named-query validation, schema export,
    // and database version checking.
    options.addSessionFactoryObservers(new ServiceRegistryCloser());
    options.addSessionFactoryObservers(new SessionFactoryObserverForNamedQueryValidation(metadata));
    options.addSessionFactoryObservers(new SessionFactoryObserverForSchemaExport(metadata));
    options.addSessionFactoryObservers(new QuarkusSessionFactoryObserverForDbVersionCheck());
    options.applyEntityNotFoundDelegate(new JpaEntityNotFoundDelegate());
    options.enableCollectionInDefaultFetchGroup(true);
    if (this.validatorFactory != null) {
        options.applyValidatorFactory(validatorFactory);
    }
    if (this.cdiBeanManager != null) {
        options.applyBeanManager(cdiBeanManager);
    }
    // Wire the bytecode enhancer through an observer.
    BytecodeProvider bytecodeProvider = ssr.getService(BytecodeProvider.class);
    options.addSessionFactoryObservers(new SessionFactoryObserverForBytecodeEnhancer(bytecodeProvider));
    // Install the tenant-id resolver when any multi-tenancy strategy is active.
    if (options.isMultiTenancyEnabled()
            || (multiTenancyStrategy != null && multiTenancyStrategy != MultiTenancyStrategy.NONE)) {
        options.applyCurrentTenantIdentifierResolver(new HibernateCurrentTenantIdentifierResolver(persistenceUnitName));
    }
    // Optional per-persistence-unit CDI extension points: interceptor and statement inspector.
    InjectableInstance<Interceptor> interceptorInstance = PersistenceUnitUtil.singleExtensionInstanceForPersistenceUnit(
            Interceptor.class, persistenceUnitName);
    if (!interceptorInstance.isUnsatisfied()) {
        options.applyStatelessInterceptorSupplier(interceptorInstance::get);
    }
    InjectableInstance<StatementInspector> statementInspectorInstance = PersistenceUnitUtil
            .singleExtensionInstanceForPersistenceUnit(StatementInspector.class, persistenceUnitName);
    if (!statementInspectorInstance.isUnsatisfied()) {
        options.applyStatementInspector(statementInspectorInstance.get());
    }
}
/**
 * Observer that tears down the session factory's service registry, and
 * explicitly also its parent registry, when the factory is closed.
 */
private static class ServiceRegistryCloser implements SessionFactoryObserver {
    @Override
    public void sessionFactoryCreated(SessionFactory sessionFactory) {
        // Nothing to do on creation.
    }
    @Override
    public void sessionFactoryClosed(SessionFactory sessionFactory) {
        SessionFactoryImplementor sfi = ((SessionFactoryImplementor) sessionFactory);
        sfi.getServiceRegistry().destroy();
        // Destroy the parent registry as well; it is obtained and destroyed explicitly here.
        ServiceRegistry basicRegistry = sfi.getServiceRegistry().getParentServiceRegistry();
        ((ServiceRegistryImplementor) basicRegistry).destroy();
    }
}
/**
 * Delegate invoked by Hibernate when a referenced entity cannot be loaded;
 * converts the miss into the JPA-standard {@link EntityNotFoundException}.
 */
private static class JpaEntityNotFoundDelegate implements EntityNotFoundDelegate, Serializable {
    public void handleEntityNotFound(String entityName, Object id) {
        String detail = "Unable to find " + entityName + " with id " + id;
        throw new EntityNotFoundException(detail);
    }
}
@Override
public ManagedResources getManagedResources() {
    // Managed resources are only produced during Quarkus build-time augmentation;
    // they are deliberately unavailable from this runtime builder.
    throw new IllegalStateException("This method is not available at runtime in Quarkus");
}
@Override
public MetadataImplementor metadata() {
    // Expose the metadata that was pre-validated at build time.
    return metadata;
}
} |
Let's add some comments to explain what we are doing here. Also, why do we omit this check if `hasInvalidFieldIndex` is true? | public void visit(BLangRecordLiteral recordLiteral) {
BType actualType = symTable.errType;
int expTypeTag = expType.tag;
BType originalExpType = expType;
if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.ANY) {
expType = symTable.mapType;
}
if (expTypeTag == TypeTags.ANY
|| (expTypeTag == TypeTags.MAP && recordLiteral.keyValuePairs.isEmpty())
|| expTypeTag == TypeTags.OBJECT) {
dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_RECORD_LITERAL, originalExpType);
resultType = symTable.errType;
return;
}
List<BType> matchedTypeList = getRecordCompatibleType(expType, recordLiteral);
boolean hasInvalidFieldIndex = false;
if (matchedTypeList.isEmpty()) {
dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, expType);
} else if (matchedTypeList.size() > 1) {
dlog.error(recordLiteral.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
} else {
for (BLangRecordKeyValue keyValuePair : recordLiteral.keyValuePairs) {
BType type = checkRecLiteralKeyValue(keyValuePair, matchedTypeList.get(0));
hasInvalidFieldIndex = type.tag == TypeTags.ERROR;
}
actualType = matchedTypeList.get(0);
}
resultType = types.checkType(recordLiteral, actualType, expType);
if (recordLiteral.type.tag == TypeTags.RECORD && !hasInvalidFieldIndex) {
BRecordType recordType = (BRecordType) recordLiteral.type;
int maskOptional = Flags.asMask(EnumSet.of(Flag.OPTIONAL));
int maskDefaultable = Flags.asMask(EnumSet.of(Flag.DEFAULTABLE));
recordType.fields.forEach(field -> {
boolean hasField = recordLiteral.keyValuePairs.stream().anyMatch(
keyVal -> field.name.value.equals(((BLangSimpleVarRef) keyVal.key.expr).variableName.value));
if (!hasField && !Symbols.isFlagOn(field.symbol.flags, maskOptional) &&
(!types.defaultValueExists(recordLiteral.pos, field.type) &&
!Symbols.isFlagOn(field.symbol.flags, maskDefaultable))) {
dlog.error(recordLiteral.pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
}
});
}
} | if (recordLiteral.type.tag == TypeTags.RECORD && !hasInvalidFieldIndex) { | public void visit(BLangRecordLiteral recordLiteral) {
BType actualType = symTable.errType;
int expTypeTag = expType.tag;
BType originalExpType = expType;
if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.ANY) {
expType = symTable.mapType;
}
if (expTypeTag == TypeTags.ANY || expTypeTag == TypeTags.OBJECT) {
dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_RECORD_LITERAL, originalExpType);
resultType = symTable.errType;
return;
}
List<BType> matchedTypeList = getRecordCompatibleType(expType, recordLiteral);
if (matchedTypeList.isEmpty()) {
dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, expType);
} else if (matchedTypeList.size() > 1) {
dlog.error(recordLiteral.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
} else {
recordLiteral.keyValuePairs
.forEach(keyValuePair -> checkRecLiteralKeyValue(keyValuePair, matchedTypeList.get(0)));
actualType = matchedTypeList.get(0);
}
resultType = types.checkType(recordLiteral, actualType, expType);
if (recordLiteral.type.tag == TypeTags.RECORD) {
checkMissingRequiredFields((BRecordType) recordLiteral.type, recordLiteral.keyValuePairs,
recordLiteral.pos);
}
} | class TypeChecker extends BLangNodeVisitor {
// Context key under which the single TypeChecker instance is registered.
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY =
        new CompilerContext.Key<>();
// Collaborating compiler services, resolved from the CompilerContext in the constructor.
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private Types types;
private IterableAnalyzer iterableAnalyzer;
private SemanticAnalyzer semanticAnalyzer;
private BLangDiagnosticLog dlog;
// Current symbol environment while visiting an expression.
private SymbolEnv env;
/**
 * Expected types or inherited types.
 */
private BType expType;
// Type computed by the most recent visit(...); returned by checkExpr.
private BType resultType;
// Diagnostic code reported when the checked type does not match expType.
private DiagnosticCode diagCode;
/**
 * Returns the {@code TypeChecker} registered in the given compiler context,
 * creating (and thereby registering) one on first use.
 */
public static TypeChecker getInstance(CompilerContext context) {
    TypeChecker registered = context.get(TYPE_CHECKER_KEY);
    // The constructor puts the new instance into the context itself.
    return registered != null ? registered : new TypeChecker(context);
}
/**
 * Creates the type checker, registers it in the compiler context, and resolves
 * the collaborating compiler services it delegates to.
 */
public TypeChecker(CompilerContext context) {
    context.put(TYPE_CHECKER_KEY, this);
    this.names = Names.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.types = Types.getInstance(context);
    this.iterableAnalyzer = IterableAnalyzer.getInstance(context);
    this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
}
/**
 * Type checks the given expression with no particular expected type
 * (i.e. against {@code symTable.noType}).
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
    return checkExpr(expr, env, symTable.noType);
}
/**
 * Type checks the given expression against the expected type, reporting
 * mismatches with the generic INCOMPATIBLE_TYPES diagnostic.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
    return checkExpr(expr, env, expType, DiagnosticCode.INCOMPATIBLE_TYPES);
}
/**
 * Check the given list of expressions against the given expected type.
 *
 * @param exprs list of expressions to be analyzed
 * @param env current symbol environment
 * @param expType expected type
 * @return the actual types of the given list of expressions
 */
public List<BType> checkExprs(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
    List<BType> actualTypes = new ArrayList<>(exprs.size());
    for (int i = 0; i < exprs.size(); i++) {
        actualTypes.add(checkExpr(exprs.get(i), env, expType));
    }
    return actualTypes;
}
/**
 * Type checks an expression against an expected type, reporting mismatches with
 * the given diagnostic code. The visitor state (env, expType, diagCode) is
 * saved and restored around the visit so nested checks do not clobber the
 * caller's state.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    this.expType = expType;
    // Dispatch to the matching visit(...) overload; it sets this.resultType.
    expr.accept(this);
    expr.type = resultType;
    this.env = prevEnv;
    this.expType = preExpType;
    this.diagCode = preDiagCode;
    return resultType;
}
/**
 * Type checks a literal: applies implicit int-to-float and int-to-byte
 * conversions driven by the expected type, and matches the literal against
 * finite/union expected types before falling back to a plain type check.
 */
public void visit(BLangLiteral literalExpr) {
    BType literalType = symTable.getTypeFromTag(literalExpr.typeTag);
    Object literalValue = literalExpr.value;
    // An int literal in a float context becomes a float literal.
    if (TypeTags.FLOAT == expType.tag && TypeTags.INT == literalType.tag) {
        literalType = symTable.floatType;
        literalExpr.value = ((Long) literalValue).doubleValue();
    }
    // An int literal in a byte context must fit the byte value range.
    if (TypeTags.BYTE == expType.tag && TypeTags.INT == literalType.tag) {
        if (!isByteLiteralValue((Long) literalValue)) {
            dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, literalType);
            return;
        }
        literalType = symTable.byteType;
        literalExpr.value = ((Long) literalValue).byteValue();
    }
    // Byte-array literals get the type byte[].
    if (TypeTags.BYTE_ARRAY == literalExpr.typeTag) {
        literalType = new BArrayType(symTable.byteType);
    }
    if (this.expType.tag == TypeTags.FINITE) {
        // A literal matches a finite type when it is assignable to one of its members.
        BFiniteType expType = (BFiniteType) this.expType;
        boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr);
        if (foundMember) {
            types.setImplicitCastExpr(literalExpr, literalType, this.expType);
            resultType = literalType;
            return;
        }
    } else if (this.expType.tag == TypeTags.UNION) {
        // For a union it is enough that any member accepts the literal.
        BUnionType unionType = (BUnionType) this.expType;
        boolean foundMember = unionType.memberTypes
                .stream()
                .map(memberType -> types.isAssignableToFiniteType(memberType, literalExpr))
                .anyMatch(foundType -> foundType);
        if (foundMember) {
            types.setImplicitCastExpr(literalExpr, literalType, this.expType);
            resultType = literalType;
            return;
        }
    }
    resultType = types.checkType(literalExpr, literalType, expType);
}
/**
 * Checks whether the given literal value is within the byte value range
 * [BBYTE_MIN_VALUE, BBYTE_MAX_VALUE].
 * FIX: compare the full long value instead of {@code intValue()}, which silently
 * truncates — e.g. 4294967296L (2^32) has an intValue() of 0 and would wrongly
 * pass the range check.
 */
private static boolean isByteLiteralValue(Long longObject) {
    long value = longObject;
    return (value >= BBYTE_MIN_VALUE && value <= BBYTE_MAX_VALUE);
}
/**
 * Type checks a table literal: requires a table type with a constraint,
 * validates the declared columns against it, then checks each data row.
 */
public void visit(BLangTableLiteral tableLiteral) {
    if (expType.tag == symTable.errType.tag) {
        return;
    }
    BType tableConstraint = ((BTableType) expType).getConstraint();
    if (tableConstraint.tag == TypeTags.NONE) {
        // A table without a constraint cannot be created from a literal.
        dlog.error(tableLiteral.pos, DiagnosticCode.TABLE_CANNOT_BE_CREATED_WITHOUT_CONSTRAINT);
        return;
    }
    validateTableColumns(tableConstraint, tableLiteral);
    // Each row must be assignable to the constraint type.
    checkExprs(tableLiteral.tableDataRows, this.env, tableConstraint);
    resultType = types.checkType(tableLiteral, expType, symTable.noType);
}
/**
 * Verifies that every column named in the table literal exists as a field of the
 * table's record constraint, logging an error for each unknown column. Skipped
 * entirely when the constraint itself failed to resolve.
 */
private void validateTableColumns(BType tableConstraint, BLangTableLiteral tableLiteral) {
    if (tableConstraint.tag == TypeTags.ERROR) {
        return;
    }
    List<String> columnNames = new ArrayList<>();
    for (BField field : ((BRecordType) tableConstraint).fields) {
        columnNames.add(field.getName().getValue());
    }
    for (BLangTableLiteral.BLangTableColumn column : tableLiteral.columns) {
        if (!columnNames.contains(column.columnName)) {
            dlog.error(tableLiteral.pos, DiagnosticCode.UNDEFINED_TABLE_COLUMN, column.columnName,
                    tableConstraint);
        }
    }
}
/**
 * Type checks an array literal against json, array, or inferred types. Sealed
 * arrays require a matching length (open-sealed arrays adopt the literal's
 * length); with no usable expected type, an element super type is inferred
 * from the member expressions.
 */
public void visit(BLangArrayLiteral arrayLiteral) {
    BType actualType = symTable.errType;
    if (expType.tag == TypeTags.ANY) {
        dlog.error(arrayLiteral.pos, DiagnosticCode.INVALID_ARRAY_LITERAL, expType);
        resultType = symTable.errType;
        return;
    }
    int expTypeTag = expType.tag;
    if (expTypeTag == TypeTags.JSON) {
        // Every member must itself be json-compatible.
        checkExprs(arrayLiteral.exprs, this.env, expType);
        actualType = expType;
    } else if (expTypeTag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) expType;
        if (arrayType.state == BArrayState.OPEN_SEALED) {
            // An open-sealed array adopts the literal's size and becomes closed-sealed.
            arrayType.size = arrayLiteral.exprs.size();
            arrayType.state = BArrayState.CLOSED_SEALED;
        } else if (arrayType.state != BArrayState.UNSEALED && arrayType.size != arrayLiteral.exprs.size()) {
            dlog.error(arrayLiteral.pos,
                    DiagnosticCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, arrayLiteral.exprs.size());
            resultType = symTable.errType;
            return;
        }
        checkExprs(arrayLiteral.exprs, this.env, arrayType.eType);
        actualType = arrayType;
    } else if (expTypeTag != TypeTags.ERROR) {
        // No usable expected type: infer the element type from the members.
        List<BType> resTypes = checkExprs(arrayLiteral.exprs, this.env, symTable.noType);
        Set<BType> arrayLitExprTypeSet = new HashSet<>(resTypes);
        BType[] uniqueExprTypes = arrayLitExprTypeSet.toArray(new BType[0]);
        if (uniqueExprTypes.length == 0) {
            actualType = symTable.anyType;
        } else if (uniqueExprTypes.length == 1) {
            actualType = resTypes.get(0);
        } else {
            // Pick the widest mutually-assignable member type, else fall back to any.
            BType superType = uniqueExprTypes[0];
            for (int i = 1; i < uniqueExprTypes.length; i++) {
                if (types.isAssignable(superType, uniqueExprTypes[i])) {
                    superType = uniqueExprTypes[i];
                } else if (!types.isAssignable(uniqueExprTypes[i], superType)) {
                    superType = symTable.anyType;
                    break;
                }
            }
            actualType = superType;
        }
        actualType = new BArrayType(actualType, null, arrayLiteral.exprs.size(), BArrayState.UNSEALED);
        List<BType> arrayCompatibleType = getArrayCompatibleTypes(expType, actualType);
        if (arrayCompatibleType.isEmpty()) {
            dlog.error(arrayLiteral.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, actualType);
        } else if (arrayCompatibleType.size() > 1) {
            dlog.error(arrayLiteral.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
        } else if (arrayCompatibleType.get(0).tag == TypeTags.ANY) {
            dlog.error(arrayLiteral.pos, DiagnosticCode.INVALID_ARRAY_LITERAL, expType);
        } else if (arrayCompatibleType.get(0).tag == TypeTags.ARRAY) {
            // Re-check members against the resolved element type.
            checkExprs(arrayLiteral.exprs, this.env, ((BArrayType) arrayCompatibleType.get(0)).eType);
        }
    }
    resultType = types.checkType(arrayLiteral, actualType, expType);
}
/**
 * Returns the member types of {@code bType} (or {@code bType} itself) with
 * which the given record literal could be compatible: json, map, open records,
 * and sealed records whose fields cover every key in the literal.
 *
 * @param bType candidate type (possibly a union)
 * @param recordLiteral the literal being checked
 * @return the compatible candidate types; empty when none match
 */
private List<BType> getRecordCompatibleType(BType bType, BLangRecordLiteral recordLiteral) {
    if (bType.tag == TypeTags.UNION) {
        Set<BType> expTypes = ((BUnionType) bType).memberTypes;
        return expTypes.stream()
                .filter(type -> type.tag == TypeTags.JSON ||
                        type.tag == TypeTags.MAP ||
                        (type.tag == TypeTags.RECORD && !((BRecordType) type).sealed) ||
                        (type.tag == TypeTags.RECORD
                                && ((BRecordType) type).sealed
                                && isRecordLiteralCompatible((BRecordType) type, recordLiteral)))
                .collect(Collectors.toList());
    } else {
        // FIX: use the bType parameter consistently instead of the visitor's
        // expType field, so the helper behaves correctly for any argument.
        switch (bType.tag) {
            case TypeTags.JSON:
            case TypeTags.MAP:
            case TypeTags.RECORD:
                return new ArrayList<>(Collections.singleton(bType));
            default:
                return Collections.emptyList();
        }
    }
}
/**
 * Returns true when every key in the record literal names a field of the given
 * record type (used to decide whether a sealed record can accept the literal).
 */
private boolean isRecordLiteralCompatible(BRecordType bRecordType, BLangRecordLiteral recordLiteral) {
    for (BLangRecordKeyValue literalKeyValuePair : recordLiteral.getKeyValuePairs()) {
        String keyName = ((BLangSimpleVarRef) literalKeyValuePair.getKey()).variableName.value;
        boolean hasMatchingField = bRecordType.getFields().stream()
                .anyMatch(field -> keyName.equals(field.getName().getValue()));
        if (!hasMatchingField) {
            return false;
        }
    }
    return true;
}
/**
 * Returns the member types of {@code expType} (or {@code expType} itself) that
 * can accept the given actual array type, including the permissive none/any tags.
 */
private List<BType> getArrayCompatibleTypes(BType expType, BType actualType) {
    // FIX: replaced double-brace initialization — it creates an anonymous
    // HashSet subclass that captures the enclosing instance.
    Set<BType> expTypes = expType.tag == TypeTags.UNION
            ? ((BUnionType) expType).memberTypes
            : new HashSet<>(Collections.singletonList(expType));
    return expTypes.stream()
            .filter(type -> types.isAssignable(actualType, type) ||
                    type.tag == TypeTags.NONE ||
                    type.tag == TypeTags.ANY)
            .collect(Collectors.toList());
}
/**
 * Resolves a simple variable reference to its symbol and type. Handles '_'
 * (only legal on the LHS), XML namespace references, closure-variable capture
 * for lambdas and arrow functions, and type names used as values.
 */
public void visit(BLangSimpleVarRef varRefExpr) {
    BType actualType = symTable.errType;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    if (varName == Names.IGNORE) {
        // '_' may only be assigned to, never read.
        if (varRefExpr.lhsVar) {
            varRefExpr.type = this.symTable.noType;
        } else {
            varRefExpr.type = this.symTable.errType;
            dlog.error(varRefExpr.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);
        }
        varRefExpr.symbol = new BVarSymbol(0, varName, env.enclPkg.symbol.pkgID, actualType, env.scope.owner);
        resultType = varRefExpr.type;
        return;
    }
    varRefExpr.pkgSymbol = symResolver.resolveImportSymbol(varRefExpr.pos,
            env, names.fromIdNode(varRefExpr.pkgAlias));
    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // An XML namespace reference is typed as string.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupSymbolInPackage(varRefExpr.pos, env,
                names.fromIdNode(varRefExpr.pkgAlias), varName, SymTag.VARIABLE_NAME);
        if (symbol == symTable.notFoundSymbol && env.enclTypeDefinition != null) {
            // Fall back to an attached function of the enclosing type definition.
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(env.enclTypeDefinition.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                    env.enclTypeDefinition.symbol.type.tsymbol);
        }
        if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSefReferences(varRefExpr.pos, env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            BLangInvokableNode encInvokable = env.enclInvokable;
            // Record closure variables captured by a lambda, unless the symbol is
            // package-level or one of the lambda's own parameters.
            if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) &&
                    !(symbol.owner instanceof BPackageSymbol)) {
                SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                BSymbol closureVarSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                        SymTag.VARIABLE_NAME);
                if (closureVarSymbol != symTable.notFoundSymbol &&
                        !isFunctionArgument(closureVarSymbol, env.enclInvokable.requiredParams)) {
                    ((BLangFunction) env.enclInvokable).closureVarSymbols.add((BVarSymbol) closureVarSymbol);
                }
            }
            // Same capture bookkeeping when type checking inside an arrow expression.
            if (env.node.getKind() == NodeKind.ARROW_EXPR) {
                SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                BSymbol closureVarSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                        SymTag.VARIABLE_NAME);
                if (closureVarSymbol != symTable.notFoundSymbol &&
                        !isFunctionArgument(closureVarSymbol, ((BLangArrowFunction) env.node).params)) {
                    ((BLangArrowFunction) env.node).closureVarSymbols.add((BVarSymbol) closureVarSymbol);
                }
            }
        } else if ((symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            // A type name used as a value has type typedesc.
            actualType = symTable.typeDesc;
            varRefExpr.symbol = symbol;
        } else {
            dlog.error(varRefExpr.pos, DiagnosticCode.UNDEFINED_SYMBOL, varName.toString());
        }
    }
    // A plain variable reference cannot fix the size of an open-sealed array type.
    if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
        dlog.error(varRefExpr.pos, DiagnosticCode.SEALED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
        return;
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Checks whether a (possibly multidimensional) array type has at least one
 * open sealed dimension, walking element types iteratively instead of recursing.
 *
 * @param arrayType array to check if open sealed
 * @return true if at least one dimension is open sealed
 */
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    BArrayType current = arrayType;
    while (true) {
        if (current.state == BArrayState.OPEN_SEALED) {
            return true;
        }
        if (current.eType.tag != TypeTags.ARRAY) {
            return false;
        }
        current = (BArrayType) current.eType;
    }
}
/**
 * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the
 * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the
 * enclosing invokable node's environment, which are outside of the scope of a lambda function.
 *
 * @param env environment to start walking outwards from
 * @param encInvokable enclosing invokable (lambda) whose outer scope is wanted
 * @return the first enclosing environment outside the given invokable
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    // An enclosing arrow expression's environment is the closest lookup scope.
    if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
        return env.enclEnv;
    }
    // Keep walking outwards while still inside the same invokable.
    if (env.enclInvokable == encInvokable) {
        return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
    }
    return env;
}
/**
 * Returns true when the given symbol is one of the listed function parameters,
 * matched by both name and type tag.
 */
private boolean isFunctionArgument(BSymbol symbol, List<BLangVariable> params) {
    for (BLangVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.type.tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
/**
 * Type checks a field access expression. Safe navigation is rejected on the
 * LHS, and '.*' (all fields) access is only valid on xml values.
 */
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    fieldAccessExpr.expr.lhsVar = fieldAccessExpr.lhsVar;
    BType varRefType = getTypeOfExprInFieldAccess(fieldAccessExpr.expr);
    // '.*' access is only meaningful for xml values.
    if (fieldAccessExpr.fieldKind == FieldKind.ALL && varRefType.tag != TypeTags.XML) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_GET_ALL_FIELDS, varRefType);
    }
    if (fieldAccessExpr.lhsVar && fieldAccessExpr.safeNavigate) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.INVALID_ERROR_LIFTING_ON_LHS);
        resultType = symTable.errType;
        return;
    }
    // Reduce the accessed type via getSafeType before resolving the field.
    varRefType = getSafeType(varRefType, fieldAccessExpr);
    Name fieldName = names.fromIdNode(fieldAccessExpr.field);
    BType actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
    if (fieldAccessExpr.lhsVar) {
        // On the LHS the raw field type is the result; no final-type adjustment applies.
        fieldAccessExpr.originalType = actualType;
        fieldAccessExpr.type = actualType;
        resultType = actualType;
        return;
    }
    actualType = getAccessExprFinalType(fieldAccessExpr, actualType);
    resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
/**
 * Type checks an index access expression: checks the container expression,
 * reduces its type via getSafeType, then validates the index against it.
 */
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
    indexBasedAccessExpr.expr.lhsVar = indexBasedAccessExpr.lhsVar;
    checkExpr(indexBasedAccessExpr.expr, this.env, symTable.noType);
    BType varRefType = indexBasedAccessExpr.expr.type;
    varRefType = getSafeType(varRefType, indexBasedAccessExpr);
    BType actualType = checkIndexAccessExpr(indexBasedAccessExpr, varRefType);
    if (indexBasedAccessExpr.lhsVar) {
        // On the LHS the raw member type is the result; no final-type adjustment applies.
        indexBasedAccessExpr.originalType = actualType;
        indexBasedAccessExpr.type = actualType;
        resultType = actualType;
        return;
    }
    actualType = getAccessExprFinalType(indexBasedAccessExpr, actualType);
    this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
}
/**
 * Type checks an invocation. Plain function calls, iterable operations, and
 * action invocations are dispatched directly; otherwise the receiver type
 * selects how the attached function symbol is looked up.
 */
public void visit(BLangInvocation iExpr) {
    if (iExpr.expr == null) {
        // No receiver: plain function invocation.
        checkFunctionInvocationExpr(iExpr);
        return;
    }
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    if (pkgAlias != Names.EMPTY) {
        dlog.error(iExpr.pos, DiagnosticCode.PKG_ALIAS_NOT_ALLOWED_HERE);
        return;
    }
    // Type check the receiver expression first.
    final BType exprType = checkExpr(iExpr.expr, this.env, symTable.noType);
    if (isIterableOperationInvocation(iExpr)) {
        // Iterable operation chain (handled by the iterable analyzer).
        iExpr.iterableOperationInvocation = true;
        iterableAnalyzer.handlerIterableOperation(iExpr, expType, env);
        resultType = iExpr.iContext.operations.getLast().resultType;
        return;
    }
    if (iExpr.actionInvocation) {
        checkActionInvocationExpr(iExpr, exprType);
        return;
    }
    BType varRefType = iExpr.expr.type;
    varRefType = getSafeType(varRefType, iExpr);
    // Dispatch on the receiver type to resolve the attached function.
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            checkFunctionInvocationExpr(iExpr, (BStructureType) varRefType);
            break;
        case TypeTags.BOOLEAN:
        case TypeTags.STRING:
        case TypeTags.INT:
        case TypeTags.FLOAT:
        case TypeTags.XML:
            checkFunctionInvocationExpr(iExpr, varRefType);
            break;
        case TypeTags.JSON:
            checkFunctionInvocationExpr(iExpr, symTable.jsonType);
            break;
        case TypeTags.TABLE:
            checkFunctionInvocationExpr(iExpr, symTable.tableType);
            break;
        case TypeTags.STREAM:
            checkFunctionInvocationExpr(iExpr, symTable.streamType);
            break;
        case TypeTags.FUTURE:
            checkFunctionInvocationExpr(iExpr, symTable.futureType);
            break;
        case TypeTags.NONE:
            dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, iExpr.name);
            break;
        case TypeTags.MAP:
            checkFunctionInvocationExpr(iExpr, this.symTable.mapType);
            break;
        case TypeTags.ERROR:
            // Receiver already failed to type check; stay silent to avoid cascading errors.
            break;
        case TypeTags.INTERMEDIATE_COLLECTION:
            dlog.error(iExpr.pos, DiagnosticCode.INVALID_FUNCTION_INVOCATION_WITH_NAME, iExpr.name,
                    iExpr.expr.type);
            resultType = symTable.errType;
            break;
        default:
            dlog.error(iExpr.pos, DiagnosticCode.INVALID_FUNCTION_INVOCATION, iExpr.expr.type);
            resultType = symTable.errType;
            break;
    }
    // Record the declared return type of the resolved function symbol, if any.
    if (iExpr.symbol != null) {
        iExpr.originalType = ((BInvokableSymbol) iExpr.symbol).type.getReturnType();
    } else {
        iExpr.originalType = iExpr.type;
    }
}
/**
 * Type checks an object initialization expression. The target must resolve to
 * a non-abstract object type; constructor arguments are checked against the
 * object's initializer function when one exists.
 */
public void visit(BLangTypeInit cIExpr) {
    if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null)
            || expType.tag == TypeTags.RECORD) {
        // The object type cannot be inferred from 'any', and records are not constructible.
        dlog.error(cIExpr.pos, DiagnosticCode.INVALID_TYPE_NEW_LITERAL, expType);
        resultType = symTable.errType;
        return;
    }
    BType actualType;
    if (cIExpr.userDefinedType != null) {
        actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
    } else {
        // No explicit type name: take the type from the expected (LHS) type.
        actualType = expType;
    }
    if (actualType == symTable.errType) {
        resultType = symTable.errType;
        return;
    }
    if (actualType.tag != TypeTags.OBJECT) {
        dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
        resultType = symTable.errType;
        return;
    }
    if ((actualType.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {
        dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol);
        // Still check the argument expressions so they get types and diagnostics.
        cIExpr.objectInitInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
        resultType = symTable.errType;
        return;
    }
    if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
        cIExpr.objectInitInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
        checkInvocationParam(cIExpr.objectInitInvocation);
    } else if (cIExpr.objectInitInvocation.argExprs.size() > 0) {
        // No initializer function: passing any arguments is an error.
        dlog.error(cIExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.objectInitInvocation.exprSymbol);
        cIExpr.objectInitInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
        resultType = symTable.errType;
        return;
    }
    cIExpr.objectInitInvocation.type = symTable.nilType;
    resultType = types.checkType(cIExpr, actualType, expType);
}
/**
 * Type checks a conditional expression. Without a contextual expected type the
 * two branches must agree on a single type.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
    BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);
    BType thenType = checkExpr(ternaryExpr.thenExpr, env, expType);
    BType elseType = checkExpr(ternaryExpr.elseExpr, env, expType);
    if (condExprType == symTable.errType || thenType == symTable.errType || elseType == symTable.errType) {
        resultType = symTable.errType;
    } else if (expType == symTable.noType) {
        // No expected type: both branches must resolve to the same type instance.
        if (thenType == elseType) {
            resultType = thenType;
        } else {
            dlog.error(ternaryExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, thenType, elseType);
            resultType = symTable.errType;
        }
    } else {
        resultType = expType;
    }
}
/**
 * Type checks an await expression: the awaited expression must be a
 * {@code future<T>}, and awaiting it yields {@code T}.
 */
public void visit(BLangAwaitExpr awaitExpr) {
    BType actualType;
    BType expType = checkExpr(awaitExpr.expr, env, this.symTable.noType);
    if (expType == symTable.errType) {
        actualType = symTable.errType;
    } else if (expType.tag == TypeTags.FUTURE) {
        // Awaiting future<T> produces the constraint type T.
        actualType = ((BFutureType) expType).constraint;
    } else {
        dlog.error(awaitExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.futureType, expType);
        // FIX: mark the result as erroneous before returning; previously the
        // method returned with resultType still holding the sub-expression's
        // type from the checkExpr call above, so the await expression silently
        // adopted that stale type.
        resultType = symTable.errType;
        return;
    }
    resultType = types.checkType(awaitExpr, actualType, this.expType);
}
/**
 * Type checks a binary expression: resolves the operator for the operand
 * types, falling back to reference-equality handling for ==/!= over
 * intersecting type sets.
 */
public void visit(BLangBinaryExpr binaryExpr) {
    BType lhsType = checkExpr(binaryExpr.lhsExpr, env);
    BType rhsType = checkExpr(binaryExpr.rhsExpr, env);
    BType actualType = symTable.errType;
    if (lhsType != symTable.errType && rhsType != symTable.errType) {
        BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
        if (opSymbol == symTable.notFoundSymbol) {
            // ==/!= may still apply when the operand type sets intersect.
            opSymbol = getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                    binaryExpr);
        }
        if (opSymbol == symTable.notFoundSymbol) {
            dlog.error(binaryExpr.pos, DiagnosticCode.BINARY_OP_INCOMPATIBLE_TYPES,
                    binaryExpr.opKind, lhsType, rhsType);
        } else {
            binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
            actualType = opSymbol.type.getReturnType();
        }
    }
    resultType = types.checkType(binaryExpr, actualType, expType);
}
/**
 * Builds an equality operator (== / !=) for operand types that intersect but
 * have no predefined operator. Mixed value/reference operands are implicitly
 * cast to 'any' first so a single reference-equality operator applies.
 *
 * @return the synthesized operator symbol, or notFoundSymbol when inapplicable
 */
private BSymbol getBinaryEqualityForTypeSets(OperatorKind opKind, BType lhsType,
        BType rhsType, BLangBinaryExpr binaryExpr) {
    if (opKind != OperatorKind.EQUAL && opKind != OperatorKind.NOT_EQUAL) {
        return symTable.notFoundSymbol;
    }
    if (types.isIntersectionExist(lhsType, rhsType)) {
        if ((!types.isValueType(lhsType) && !types.isValueType(rhsType)) ||
                (types.isValueType(lhsType) && types.isValueType(rhsType))) {
            // Both sides are reference types, or both are value types: compare directly.
            return symResolver.createReferenceEqualityOperator(opKind, lhsType, rhsType);
        } else {
            // Exactly one side is a value type: lift both to 'any' before comparing.
            types.setImplicitCastExpr(binaryExpr.rhsExpr, rhsType, symTable.anyType);
            types.setImplicitCastExpr(binaryExpr.lhsExpr, lhsType, symTable.anyType);
            return symResolver.createReferenceEqualityOperator(opKind, symTable.anyType, symTable.anyType);
        }
    } else {
        return symTable.notFoundSymbol;
    }
}
/**
 * Type checks an elvis expression. The LHS must be a nullable union; its type
 * minus nil is the LHS contribution, and without an expected type both sides
 * must agree on the same type.
 */
public void visit(BLangElvisExpr elvisExpr) {
    BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
    BType actualType = symTable.errType;
    if (lhsType != symTable.errType) {
        if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
            // Remove nil from the union: the type when the LHS value is present.
            BUnionType unionType = (BUnionType) lhsType;
            HashSet<BType> memberTypes = new HashSet<BType>();
            Iterator<BType> iterator = unionType.getMemberTypes().iterator();
            while (iterator.hasNext()) {
                BType memberType = iterator.next();
                if (memberType != symTable.nilType) {
                    memberTypes.add(memberType);
                }
            }
            if (memberTypes.size() == 1) {
                // A single remaining member collapses to that member type.
                BType[] memberArray = new BType[1];
                memberTypes.toArray(memberArray);
                actualType = memberArray[0];
            } else {
                actualType = new BUnionType(null, memberTypes, false);
            }
        } else {
            dlog.error(elvisExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED,
                    OperatorKind.ELVIS, lhsType);
        }
    }
    BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
    BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
            DiagnosticCode.INCOMPATIBLE_TYPES);
    if (rhsReturnType == symTable.errType || lhsReturnType == symTable.errType) {
        resultType = symTable.errType;
    } else if (expType == symTable.noType) {
        // No expected type: both contributions must be the same type.
        if (types.isSameType(rhsReturnType, lhsReturnType)) {
            resultType = lhsReturnType;
        } else {
            dlog.error(elvisExpr.rhsExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType);
            resultType = symTable.errType;
        }
    } else {
        resultType = expType;
    }
}
@Override
public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) {
    // A parenthesized expression is syntactically ambiguous: it may be a braced
    // (grouping) expression, a tuple literal, or a typedesc expression. The
    // expected type disambiguates.
    if (expType.tag == TypeTags.TUPLE) {
        // Expected type is a tuple: check each member against the matching
        // tuple member type.
        BTupleType tupleType = (BTupleType) this.expType;
        List<BType> expTypes = getListWithErrorTypes(bracedOrTupleExpr.expressions.size());
        if (tupleType.tupleTypes.size() != bracedOrTupleExpr.expressions.size()) {
            dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.SYNTAX_ERROR,
                    "tuple and expression size does not match");
        } else {
            expTypes = tupleType.tupleTypes;
        }
        List<BType> results = new ArrayList<>();
        for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) {
            BType expType = expTypes.get(i);
            BType actualType = checkExpr(bracedOrTupleExpr.expressions.get(i), env, expType);
            // Prefer the declared member type; fall back to the inferred one.
            results.add(expType.tag != TypeTags.NONE ? expType : actualType);
        }
        resultType = new BTupleType(results);
        return;
    }
    // No tuple expectation: infer each member type independently first.
    List<BType> results = new ArrayList<>();
    for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) {
        results.add(checkExpr(bracedOrTupleExpr.expressions.get(i), env, symTable.noType));
    }
    if (expType.tag == TypeTags.TYPEDESC) {
        // Interpreted as a typedesc value, e.g. (int, string).
        bracedOrTupleExpr.isTypedescExpr = true;
        List<BType> actualTypes = new ArrayList<>();
        for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) {
            final BLangExpression expr = bracedOrTupleExpr.expressions.get(i);
            if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
                actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
            } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
            } else {
                actualTypes.add(results.get(i));
            }
        }
        if (actualTypes.size() == 1) {
            bracedOrTupleExpr.typedescType = actualTypes.get(0);
        } else {
            bracedOrTupleExpr.typedescType = new BTupleType(actualTypes);
        }
        resultType = symTable.typeDesc;
    } else if (bracedOrTupleExpr.expressions.size() > 1) {
        // More than one member: must be a tuple literal.
        BType actualType = new BTupleType(results);
        if (expType.tag == TypeTags.ANY) {
            dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.INVALID_TUPLE_LITERAL, expType);
            resultType = symTable.errType;
            return;
        }
        List<BType> tupleCompatibleType = getArrayCompatibleTypes(expType, actualType);
        if (tupleCompatibleType.isEmpty()) {
            dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, actualType);
        } else if (tupleCompatibleType.size() > 1) {
            dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
        } else if (tupleCompatibleType.get(0).tag == TypeTags.ANY) {
            dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.INVALID_TUPLE_LITERAL, expType);
        } else {
            resultType = types.checkType(bracedOrTupleExpr, actualType, expType);
        }
    } else {
        // Exactly one member and no special expectation: a plain braced expression.
        bracedOrTupleExpr.isBracedExpr = true;
        final BType actualType = results.get(0);
        BLangExpression expression = bracedOrTupleExpr.expressions.get(0);
        resultType = types.checkType(expression, actualType, expType);
    }
}
/** Type-checks a typedesc expression; its static type is always {@code typedesc}. */
public void visit(BLangTypedescExpr accessExpr) {
    BType actualType = symTable.typeDesc;
    // Resolve the referenced type node so downstream phases can use it.
    accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    resultType = types.checkType(accessExpr, actualType, expType);
}
/**
 * Type-checks a unary expression. {@code untaint} passes the operand type through
 * unchanged; every other operator is resolved against the operand type.
 */
public void visit(BLangUnaryExpr unaryExpr) {
    BType actualType = symTable.errType;
    // Both operator paths begin by checking the operand, so do it once up front.
    BType operandType = checkExpr(unaryExpr.expr, env);
    if (operandType != symTable.errType) {
        if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
            // untaint does not change the static type of its operand.
            actualType = operandType;
        } else {
            BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, operandType);
            if (symbol == symTable.notFoundSymbol) {
                dlog.error(unaryExpr.pos, DiagnosticCode.UNARY_OP_INCOMPATIBLE_TYPES,
                        unaryExpr.operator, operandType);
            } else {
                unaryExpr.opSymbol = (BOperatorSymbol) symbol;
                actualType = symbol.type.getReturnType();
            }
        }
    }
    resultType = types.checkType(unaryExpr, actualType, expType);
}
/**
 * Type-checks an explicit conversion expression by resolving a conversion
 * operator from the source type to the target type.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
    BType actualType = symTable.errType;
    BType targetType = symResolver.resolveTypeNode(conversionExpr.typeNode, env);
    conversionExpr.targetType = targetType;
    BType sourceType = checkExpr(conversionExpr.expr, env, symTable.noType);
    BSymbol symbol = symResolver.resolveConversionOperator(sourceType, targetType);
    if (symbol == symTable.notFoundSymbol) {
        dlog.error(conversionExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CONVERSION, sourceType, targetType);
    } else {
        // Record the resolved operator for code generation.
        BConversionOperatorSymbol conversionSym = (BConversionOperatorSymbol) symbol;
        conversionExpr.conversionSymbol = conversionSym;
        actualType = conversionSym.type.getReturnType();
    }
    resultType = types.checkType(conversionExpr, actualType, expType);
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // The lambda's type was established when its function symbol was defined;
    // here the body is analyzed and the type is checked against the expectation.
    BLangFunction lambdaFunction = bLangLambdaFunction.function;
    bLangLambdaFunction.type = lambdaFunction.symbol.type;
    semanticAnalyzer.analyzeDef(lambdaFunction, env);
    // Mark so the function is not type-checked a second time in a later pass.
    lambdaFunction.isTypeChecked = true;
    resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.type, expType);
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Arrow functions have no declared signature; everything is inferred from
    // the expected (LHS) invokable type.
    if (expType.tag != TypeTags.INVOKABLE) {
        dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
        resultType = symTable.errType;
        return;
    }
    BInvokableType expectedInvocation = (BInvokableType) this.expType;
    populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
    bLangArrowFunction.expression.type = populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType);
    if (expectedInvocation.retType.tag == TypeTags.NONE) {
        // No expected return type: adopt the inferred type of the body expression.
        expectedInvocation.retType = bLangArrowFunction.expression.type;
    }
    resultType = bLangArrowFunction.funcType = expectedInvocation;
}
/**
 * Type-checks an XML qualified name, resolving its namespace prefix.
 * {@code xmlns} usages inside attributes are namespace declarations, not lookups.
 */
public void visit(BLangXMLQName bLangXMLQName) {
    String prefix = bLangXMLQName.prefix.value;
    resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
    // xmlns="..." declares the default namespace.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
            && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // xmlns:foo="..." declares a prefixed namespace.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // Outside a declaration, "xmlns" is a reserved prefix and may not be used.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        dlog.error(bLangXMLQName.pos, DiagnosticCode.INVALID_NAMESPACE_PREFIX, prefix);
        bLangXMLQName.type = symTable.errType;
        return;
    }
    BSymbol xmlnsSymbol = symResolver.lookupSymbol(env, names.fromIdNode(bLangXMLQName.prefix), SymTag.XMLNS);
    // An empty prefix with no default namespace in scope is legal (unqualified name).
    if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        return;
    }
    if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        dlog.error(bLangXMLQName.pos, DiagnosticCode.UNDEFINED_SYMBOL, prefix);
        bLangXMLQName.type = symTable.errType;
        return;
    }
    bLangXMLQName.namespaceURI = ((BXMLNSSymbol) xmlnsSymbol).namespaceURI;
    bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
}
/**
 * Type-checks an XML attribute: both the name and the value must be strings,
 * then the attribute is defined in the enclosing scope.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv attributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
    checkExpr(bLangXMLAttribute.name, attributeEnv, symTable.stringType);
    checkExpr(bLangXMLAttribute.value, attributeEnv, symTable.stringType);
    // Registers the attribute (and any namespace it declares) in the scope.
    symbolEnter.defineNode(bLangXMLAttribute, env);
}
/**
 * Type-checks an XML element literal: namespace-declaring attributes first (so
 * later lookups see them), then ordinary attributes, tags and children.
 */
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
    // Pass 1: xmlns attributes, so the namespaces they declare are in scope below.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        if (attribute.name.getKind() == NodeKind.XML_QNAME
                && ((BLangXMLQName) attribute.name).prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    // Pass 2: all remaining attributes.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        if (attribute.name.getKind() != NodeKind.XML_QNAME
                || !((BLangXMLQName) attribute.name).prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
    Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
    // The default namespace is tracked separately from prefixed ones.
    if (namespaces.containsKey(defaultNs)) {
        bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
    }
    bLangXMLElementLiteral.namespacesInScope.putAll(namespaces);
    validateTags(bLangXMLElementLiteral, xmlElementEnv);
    // Merge consecutive text children into single text nodes.
    bLangXMLElementLiteral.modifiedChildren =
            concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
    resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlType, expType);
}
/** Type-checks an XML text literal; its fragments concatenate to one string. */
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
    bLangXMLTextLiteral.concatExpr = getStringTemplateConcatExpr(bLangXMLTextLiteral.textFragments);
    resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlType, expType);
}
/** Type-checks an XML comment literal; its fragments concatenate to one string. */
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
    bLangXMLCommentLiteral.concatExpr = getStringTemplateConcatExpr(bLangXMLCommentLiteral.textFragments);
    resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlType, expType);
}
/** Type-checks an XML processing-instruction literal: string target plus string data. */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    bLangXMLProcInsLiteral.dataConcatExpr = getStringTemplateConcatExpr(bLangXMLProcInsLiteral.dataFragments);
    resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlType, expType);
}
/** Type-checks a quoted string inside XML; the result type is string, not xml. */
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    bLangXMLQuotedString.concatExpr = getStringTemplateConcatExpr(bLangXMLQuotedString.textFragments);
    resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}
/**
 * Type-checks XML attribute access ({@code x@} or {@code x@[key]}).
 * Without an index the whole attribute map is produced (read-only); with a
 * string index a single attribute value (string) is produced.
 */
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    BType actualType = symTable.errType;
    checkExpr(xmlAttributeAccessExpr.expr, env, symTable.xmlType);
    BLangExpression indexExpr = xmlAttributeAccessExpr.indexExpr;
    if (indexExpr == null) {
        // x@ form: the attribute map itself; assigning to it is not allowed.
        if (xmlAttributeAccessExpr.lhsVar) {
            dlog.error(xmlAttributeAccessExpr.pos, DiagnosticCode.XML_ATTRIBUTE_MAP_UPDATE_NOT_ALLOWED);
        } else {
            actualType = symTable.xmlAttributesType;
        }
        resultType = types.checkType(xmlAttributeAccessExpr, actualType, expType);
        return;
    }
    // x@[key] form: the key must be a string.
    checkExpr(indexExpr, env, symTable.stringType);
    if (indexExpr.type.tag == TypeTags.STRING) {
        actualType = symTable.stringType;
    }
    // Capture namespaces in scope for runtime QName resolution of the key.
    xmlAttributeAccessExpr.namespaces.putAll(symResolver.resolveAllNamespaces(env));
    resultType = types.checkType(xmlAttributeAccessExpr, actualType, expType);
}
/** Type-checks a string template literal; all fragments concatenate to a string. */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    stringTemplateLiteral.concatExpr = getStringTemplateConcatExpr(stringTemplateLiteral.exprs);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    // Both bounds must be ints; the range itself is typed as int[].
    checkExpr(intRangeExpression.startExpr, env, symTable.intType);
    checkExpr(intRangeExpression.endExpr, env, symTable.intType);
    resultType = new BArrayType(symTable.intType);
}
@Override
public void visit(BLangTableQueryExpression tableQueryExpression) {
    // A table query's type comes entirely from the expected type, which must be
    // a table (or already an error, which is not re-reported).
    BType actualType = symTable.errType;
    int expTypeTag = expType.tag;
    if (expTypeTag == TypeTags.TABLE) {
        actualType = expType;
    } else if (expTypeTag != TypeTags.ERROR) {
        dlog.error(tableQueryExpression.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CONVERSION, expType);
    }
    // Visit the query's clauses so their variable references get checked.
    BLangTableQuery tableQuery = (BLangTableQuery) tableQueryExpression.getTableQuery();
    tableQuery.accept(this);
    resultType = types.checkType(tableQueryExpression, actualType, expType);
}
@Override
public void visit(BLangTableQuery tableQuery) {
    // Check the primary streaming input, then the optional join input.
    ((BLangStreamingInput) tableQuery.getStreamingInput()).accept(this);
    BLangJoinStreamingInput joinInput = (BLangJoinStreamingInput) tableQuery.getJoinStreamingInput();
    if (joinInput == null) {
        return;
    }
    joinInput.accept(this);
}
@Override
public void visit(BLangSelectClause selectClause) {
    // Check every select expression, then the optional group-by and having clauses.
    for (SelectExpressionNode selectExprNode : selectClause.getSelectExpressions()) {
        ((BLangSelectExpression) selectExprNode).accept(this);
    }
    BLangGroupBy groupByClause = (BLangGroupBy) selectClause.getGroupBy();
    if (groupByClause != null) {
        groupByClause.accept(this);
    }
    BLangHaving havingClause = (BLangHaving) selectClause.getHaving();
    if (havingClause != null) {
        havingClause.accept(this);
    }
}
@Override
public void visit(BLangSelectExpression selectExpression) {
    // Delegate straight to the wrapped expression.
    ((BLangExpression) selectExpression.getExpression()).accept(this);
}
@Override
public void visit(BLangGroupBy groupBy) {
    // Visit every grouping variable expression in turn.
    for (Object groupingVar : groupBy.getVariables()) {
        ((BLangExpression) groupingVar).accept(this);
    }
}
@Override
public void visit(BLangHaving having) {
    // Delegate straight to the having-clause predicate expression.
    ((BLangExpression) having.getExpression()).accept(this);
}
@Override
public void visit(BLangOrderBy orderBy) {
    // Visit each order-by variable in declaration order.
    orderBy.getVariables().forEach(orderByVar -> ((BLangOrderByVariable) orderByVar).accept(this));
}
@Override
public void visit(BLangOrderByVariable orderByVariable) {
    // Delegate straight to the referenced variable expression.
    ((BLangExpression) orderByVariable.getVariableReference()).accept(this);
}
@Override
public void visit(BLangJoinStreamingInput joinStreamingInput) {
    // Delegate to the join's underlying streaming input.
    ((BLangStreamingInput) joinStreamingInput.getStreamingInput()).accept(this);
}
@Override
public void visit(BLangStreamingInput streamingInput) {
    // Delegate to the stream/table reference expression.
    ((BLangExpression) streamingInput.getStreamReference()).accept(this);
}
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
    // A rest-args expansion (...expr) takes the type of the wrapped expression.
    resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named argument (name = expr) takes the type of its value expression.
    resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
    bLangNamedArgsExpression.type = bLangNamedArgsExpression.expr.type;
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Pattern variables are scoped to the match expression, so use a dedicated env.
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv);
    bLangMatchExpression.patternClauses.forEach(pattern -> {
        // '_' patterns are not bound to a symbol.
        if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(pattern.variable, matchExprEnv);
        }
        checkExpr(pattern.expr, matchExprEnv, expType);
        pattern.variable.type = symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv);
    });
    // The expression's type is the union of all clause result types.
    Set<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
    BType actualType;
    if (matchExprTypes.contains(symTable.errType)) {
        actualType = symTable.errType;
    } else if (matchExprTypes.size() == 1) {
        actualType = matchExprTypes.toArray(new BType[matchExprTypes.size()])[0];
    } else {
        actualType = new BUnionType(null, matchExprTypes, matchExprTypes.contains(symTable.nilType));
    }
    resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // 'check expr' requires expr's type to be a union containing both error and
    // non-error members; the errors are rethrown and the non-error part remains.
    BType exprType = checkExpr(checkedExpr.expr, env, symTable.noType);
    if (exprType.tag != TypeTags.UNION) {
        if (types.isAssignable(exprType, symTable.errStructType)) {
            // Only errors: nothing would ever flow past the check.
            dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS);
        } else {
            dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS);
        }
        checkedExpr.type = symTable.errType;
        return;
    }
    // Partition the union's members into error-assignable (true) and the rest (false).
    BUnionType unionType = (BUnionType) exprType;
    Map<Boolean, List<BType>> resultTypeMap = unionType.memberTypes.stream()
            .collect(Collectors.groupingBy(memberType -> types.isAssignable(memberType, symTable.errStructType)));
    checkedExpr.equivalentErrorTypeList = resultTypeMap.get(true);
    if (checkedExpr.equivalentErrorTypeList == null ||
            checkedExpr.equivalentErrorTypeList.size() == 0) {
        dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS);
        checkedExpr.type = symTable.errType;
        return;
    }
    List<BType> nonErrorTypeList = resultTypeMap.get(false);
    if (nonErrorTypeList == null || nonErrorTypeList.size() == 0) {
        dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS);
        checkedExpr.type = symTable.errType;
        return;
    }
    // The checked expression's type is the non-error remainder of the union.
    BType actualType;
    if (nonErrorTypeList.size() == 1) {
        actualType = nonErrorTypeList.get(0);
    } else {
        actualType = new BUnionType(null, new LinkedHashSet<>(nonErrorTypeList),
                nonErrorTypeList.contains(symTable.nilType));
    }
    resultType = types.checkType(checkedExpr, actualType, expType);
}
/**
 * Defines the arrow function's parameters in a fresh env and checks its body
 * expression against the expected return type; returns the body's checked type.
 */
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv));
    return checkExpr(bLangArrowFunction.expression, arrowFunctionEnv, expectedRetType);
}
/**
 * Assigns the expected parameter types to the arrow function's (untyped)
 * parameters. On an arity mismatch, all parameters become error-typed.
 */
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
    if (paramTypes.size() != bLangArrowFunction.params.size()) {
        dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                paramTypes.size(), bLangArrowFunction.params.size());
        resultType = symTable.errType;
        bLangArrowFunction.params.forEach(param -> param.type = symTable.errType);
        return;
    }
    for (int i = 0; i < bLangArrowFunction.params.size(); i++) {
        BLangVariable paramIdentifier = bLangArrowFunction.params.get(i);
        BType bType = paramTypes.get(i);
        // Synthesize a type node so the parameter looks fully declared downstream.
        BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        valueTypeNode.setTypeKind(bType.getKind());
        paramIdentifier.setTypeNode(valueTypeNode);
        paramIdentifier.type = bType;
    }
}
// Reports an error if a variable's initializer refers to the variable itself.
// NOTE(review): method name has a typo ("Sef" -> "Self"); kept as-is since
// callers elsewhere in the file depend on it.
private void checkSefReferences(DiagnosticPos pos, SymbolEnv env, BVarSymbol varSymbol) {
    if (env.enclVarSym == varSymbol) {
        dlog.error(pos, DiagnosticCode.SELF_REFERENCE_VAR, varSymbol.name);
    }
}
/**
 * Builds a list of {@code count} error-type placeholders, used as the default
 * expected types before the real ones are known.
 */
public List<BType> getListWithErrorTypes(int count) {
    List<BType> errorTypes = new ArrayList<>(count);
    int remaining = count;
    while (remaining > 0) {
        errorTypes.add(symTable.errType);
        remaining--;
    }
    return errorTypes;
}
/**
 * Resolves and checks a plain function invocation. Inside a type definition,
 * an unqualified name is first tried as an attached function of that type
 * (implicit self call) before falling back to a package-level lookup.
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
    Name funcName = names.fromIdNode(iExpr.name);
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    BSymbol funcSymbol = symTable.notFoundSymbol;
    if (pkgAlias == Names.EMPTY && env.enclTypeDefinition != null) {
        // Try an attached function of the enclosing type first.
        Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(
                env.enclTypeDefinition.name.value, iExpr.name.value));
        funcSymbol = symResolver.resolveStructField(iExpr.pos, env, objFuncName,
                env.enclTypeDefinition.symbol.type.tsymbol);
        if (funcSymbol != symTable.notFoundSymbol) {
            // Implicit receiver: the call goes through 'self'.
            iExpr.exprSymbol = symResolver.lookupSymbol(env, Names.SELF, SymTag.VARIABLE);
        }
    }
    if (funcSymbol == symTable.notFoundSymbol) {
        funcSymbol = symResolver.lookupSymbolInPackage(iExpr.pos, env, pkgAlias, funcName, SymTag.VARIABLE);
    }
    if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
        dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, funcName);
        resultType = symTable.errType;
        return;
    }
    // A variable of function type is a function-pointer invocation.
    if (funcSymbol.tag == SymTag.VARIABLE) {
        iExpr.functionPointerInvocation = true;
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Resolves and checks an invocation on a struct/object value: first as an
 * attached function, then as a function-typed field (function pointer).
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr, BStructureType structType) {
    Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(structType
            .tsymbol.name.value, iExpr.name.value));
    BSymbol funcSymbol = symResolver.resolveStructField(iExpr.pos, env, objFuncName, structType.tsymbol);
    if (funcSymbol == symTable.notFoundSymbol) {
        // Not an attached function: try a field holding a function pointer.
        funcSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(iExpr.name),
                structType.tsymbol);
        if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
            dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION_IN_OBJECT, iExpr.name.value, structType);
            resultType = symTable.errType;
            return;
        }
        if ((funcSymbol.flags & Flags.ATTACHED) != Flags.ATTACHED) {
            iExpr.functionPointerInvocation = true;
        }
    } else {
        // Record initializers must not be invoked explicitly by the user.
        if (structType.tag == TypeTags.RECORD) {
            BAttachedFunction initializerFunc = ((BRecordTypeSymbol) structType.tsymbol).initializerFunc;
            if (initializerFunc != null && initializerFunc.funcName.value.equals(iExpr.name.value)) {
                dlog.error(iExpr.pos, DiagnosticCode.RECORD_INITIALIZER_INVOKED, structType.tsymbol.toString());
            }
        }
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Resolves and checks an invocation attached to a built-in/value type
 * (e.g. string.length()) by looking it up in the type's owning package scope.
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr, BType bType) {
    Name funcName = names.fromString(
            Symbols.getAttachedFuncSymbolName(bType.toString(), iExpr.name.value));
    BPackageSymbol packageSymbol = (BPackageSymbol) bType.tsymbol.owner;
    BSymbol funcSymbol = symResolver.lookupMemberSymbol(iExpr.pos, packageSymbol.scope, this.env,
            funcName, SymTag.FUNCTION);
    if (funcSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, funcName);
        resultType = symTable.errType;
        return;
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Returns true when the invocation is a recognized iterable operation
 * (map/filter/...) on an iterable-capable receiver type. XML supports all
 * iterable operations except select.
 */
private boolean isIterableOperationInvocation(BLangInvocation iExpr) {
    final IterableKind iterableKind = IterableKind.getFromString(iExpr.name.value);
    // Unknown operation names are never iterable operations, whatever the receiver.
    if (iterableKind == IterableKind.UNDEFINED) {
        return false;
    }
    switch (iExpr.expr.type.tag) {
        case TypeTags.ARRAY:
        case TypeTags.MAP:
        case TypeTags.RECORD:
        case TypeTags.JSON:
        case TypeTags.STREAM:
        case TypeTags.TABLE:
        case TypeTags.INTERMEDIATE_COLLECTION:
            return true;
        case TypeTags.XML:
            return iterableKind != IterableKind.SELECT;
        default:
            return false;
    }
}
/**
 * Checks an invocation's arguments, then checks its (possibly field-access
 * adjusted) return type against the expected type.
 */
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
    BType actualType = checkInvocationParam(iExpr);
    if (iExpr.expr != null) {
        // Invoked through an access chain: the final type may be lifted (e.g. nilable).
        actualType = getAccessExprFinalType(iExpr, actualType);
    }
    resultType = types.checkType(iExpr, actualType, this.expType);
}
/**
 * Buckets the raw argument expressions into required, named, and rest
 * arguments, then delegates to {@link #checkInvocationArgs} and returns the
 * invocation's actual type.
 */
private BType checkInvocationParam(BLangInvocation iExpr) {
    List<BType> paramTypes = ((BInvokableType) iExpr.symbol.type).getParameterTypes();
    int requiredParamsCount;
    if (iExpr.symbol.tag == SymTag.VARIABLE) {
        // Function pointers carry no defaultable/rest info: all params are required.
        requiredParamsCount = paramTypes.size();
    } else {
        requiredParamsCount = ((BInvokableSymbol) iExpr.symbol).params.size();
    }
    int i = 0;
    BLangExpression vararg = null;
    for (BLangExpression expr : iExpr.argExprs) {
        switch (expr.getKind()) {
            case NAMED_ARGS_EXPR:
                iExpr.namedArgs.add(expr);
                break;
            case REST_ARGS_EXPR:
                // '...expr' expansion; at most one is meaningful (validated later).
                vararg = expr;
                break;
            default:
                // Positional: first fill required params, overflow goes to rest args.
                if (i < requiredParamsCount) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    iExpr.restArgs.add(expr);
                }
                i++;
                break;
        }
    }
    return checkInvocationArgs(iExpr, paramTypes, requiredParamsCount, vararg);
}
/**
 * Validates argument counts, checks required/named/rest arguments against
 * their parameter types, and returns the call's result type (a future type
 * for async invocations).
 */
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, int requiredParamsCount,
                                  BLangExpression vararg) {
    BType actualType = symTable.errType;
    BInvokableSymbol invocableSymbol = (BInvokableSymbol) iExpr.symbol;
    if (requiredParamsCount > iExpr.requiredArgs.size()) {
        dlog.error(iExpr.pos, DiagnosticCode.NOT_ENOUGH_ARGS_FUNC_CALL, iExpr.name.value);
        return actualType;
    } else if (invocableSymbol.restParam == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
        // Extra positional args without a rest param: either plainly too many, or
        // an attempt to pass a defaultable param positionally.
        if (invocableSymbol.defaultableParams.isEmpty()) {
            dlog.error(iExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        } else {
            dlog.error(iExpr.pos, DiagnosticCode.DEFAULTABLE_ARG_PASSED_AS_REQUIRED_ARG, iExpr.name.value);
        }
        return actualType;
    }
    checkRequiredArgs(iExpr.requiredArgs, paramTypes);
    checkNamedArgs(iExpr.namedArgs, invocableSymbol.defaultableParams);
    checkRestArgs(iExpr.restArgs, vararg, invocableSymbol.restParam);
    if (iExpr.async) {
        // 'start foo()' yields future<R> rather than R.
        return this.generateFutureType(invocableSymbol);
    } else {
        return invocableSymbol.type.getReturnType();
    }
}
/** Wraps the invokable's return type in a future type, for async invocations. */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol) {
    return new BFutureType(TypeTags.FUTURE, invocableSymbol.type.getReturnType(), null);
}
/** Checks each required argument positionally against its parameter type. */
private void checkRequiredArgs(List<BLangExpression> requiredArgExprs, List<BType> requiredParamTypes) {
    int position = 0;
    for (BLangExpression argExpr : requiredArgExprs) {
        checkExpr(argExpr, this.env, requiredParamTypes.get(position));
        position++;
    }
}
/**
 * Checks each named argument against the matching defaultable parameter.
 * On the first unknown argument name an error is reported and the remaining
 * named arguments are skipped (the break intentionally stops further checking).
 */
private void checkNamedArgs(List<BLangExpression> namedArgExprs, List<BVarSymbol> defaultableParams) {
    for (BLangExpression expr : namedArgExprs) {
        BLangIdentifier argName = ((NamedArgNode) expr).getName();
        BVarSymbol varSym = defaultableParams.stream()
                .filter(param -> param.getName().value.equals(argName.value))
                .findAny()
                .orElse(null);
        if (varSym == null) {
            dlog.error(expr.pos, DiagnosticCode.UNDEFINED_PARAMETER, argName);
            break;
        }
        checkExpr(expr, this.env, varSym.type);
    }
}
/**
 * Checks rest arguments: either a single '...expr' expansion checked against
 * the rest parameter's array type, or individual args checked against its
 * element type. Mixing both forms is an error.
 */
private void checkRestArgs(List<BLangExpression> restArgExprs, BLangExpression vararg, BVarSymbol restParam) {
    if (vararg != null && !restArgExprs.isEmpty()) {
        dlog.error(vararg.pos, DiagnosticCode.INVALID_REST_ARGS);
        return;
    }
    if (vararg != null) {
        // The expansion must match the whole rest-array type.
        checkExpr(vararg, this.env, restParam.type);
        restArgExprs.add(vararg);
        return;
    }
    for (BLangExpression arg : restArgExprs) {
        checkExpr(arg, this.env, ((BArrayType) restParam.type).eType);
    }
}
/**
 * Resolves and checks an endpoint action invocation (ep->action(...)):
 * validates the endpoint, finds its client object's action symbol, and checks
 * the call like a normal invocation.
 */
private void checkActionInvocationExpr(BLangInvocation iExpr, BType conType) {
    BType actualType = symTable.errType;
    // The receiver must be an endpoint variable of object type.
    if (conType == symTable.errType || conType.tag != TypeTags.OBJECT
            || iExpr.expr.symbol.tag != SymTag.ENDPOINT) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION);
        resultType = actualType;
        return;
    }
    final BEndpointVarSymbol epSymbol = (BEndpointVarSymbol) iExpr.expr.symbol;
    if (!epSymbol.interactable) {
        dlog.error(iExpr.pos, DiagnosticCode.ENDPOINT_NOT_SUPPORT_INTERACTIONS, epSymbol.name);
        resultType = actualType;
        return;
    }
    // Actions live on the endpoint's client object/record type.
    BSymbol conSymbol = epSymbol.clientSymbol;
    if (conSymbol == null
            || conSymbol == symTable.notFoundSymbol
            || conSymbol == symTable.errSymbol
            || !(conSymbol.type.tag == TypeTags.OBJECT || conSymbol.type.tag == TypeTags.RECORD)) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION);
        resultType = actualType;
        return;
    }
    Name actionName = names.fromIdNode(iExpr.name);
    Name uniqueFuncName = names.fromString(
            Symbols.getAttachedFuncSymbolName(conSymbol.name.value, actionName.value));
    BPackageSymbol packageSymbol = (BPackageSymbol) conSymbol.owner;
    BSymbol actionSym = symResolver.lookupMemberSymbol(iExpr.pos, packageSymbol.scope, this.env,
            uniqueFuncName, SymTag.FUNCTION);
    if (actionSym == symTable.notFoundSymbol) {
        // Fall back to a field lookup on the client type itself.
        actionSym = symResolver.resolveStructField(iExpr.pos, env, uniqueFuncName, (BTypeSymbol) conSymbol);
    }
    if (actionSym == symTable.errSymbol || actionSym == symTable.notFoundSymbol) {
        dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_ACTION, actionName, epSymbol.name, conSymbol.type);
        resultType = actualType;
        return;
    }
    iExpr.symbol = actionSym;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Checks one key-value pair of a record/map/json literal. The key is resolved
 * against the containing type to find the expected field type, then the value
 * is checked against it. JSON handles its own value checking (with implicit
 * conversions) and returns early.
 */
private BType checkRecLiteralKeyValue(BLangRecordKeyValue keyValuePair, BType recType) {
    BType fieldType = symTable.errType;
    BLangExpression valueExpr = keyValuePair.valueExpr;
    switch (recType.tag) {
        case TypeTags.RECORD:
            fieldType = checkStructLiteralKeyExpr(keyValuePair.key, recType);
            break;
        case TypeTags.MAP:
            fieldType = checkMapLiteralKeyExpr(keyValuePair.key.expr, recType, RecordKind.MAP);
            break;
        case TypeTags.JSON:
            fieldType = checkJSONLiteralKeyExpr(keyValuePair.key, recType, RecordKind.JSON);
            // Struct-typed constraints are wrapped as constrained JSON.
            if (fieldType.tag == TypeTags.OBJECT || fieldType.tag == TypeTags.RECORD) {
                fieldType = new BJSONType(TypeTags.JSON, fieldType, symTable.jsonType.tsymbol);
            }
            checkExpr(valueExpr, this.env, fieldType);
            // Ensure the value is json-compatible, going through the implicit
            // conversion expression when one was inserted.
            if (valueExpr.impConversionExpr == null) {
                types.checkTypes(valueExpr, Lists.of(valueExpr.type), Lists.of(symTable.jsonType));
            } else {
                BType valueType = valueExpr.type;
                types.checkType(valueExpr, valueExpr.impConversionExpr.type, symTable.jsonType);
                valueExpr.type = valueType;
            }
            resultType = valueExpr.type;
            return resultType;
    }
    return checkExpr(valueExpr, this.env, fieldType);
}
/**
 * Resolves a record-literal key against a record type and returns the field's
 * type. Unknown keys on open records take the rest-field type; on sealed
 * records they are an error.
 */
private BType checkStructLiteralKeyExpr(BLangRecordKey key, BType recordType) {
    Name fieldName;
    BLangExpression keyExpr = key.expr;
    // Record-literal keys must be plain identifiers, not computed expressions.
    if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
        fieldName = names.fromIdNode(varRef.variableName);
    } else {
        dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY);
        return symTable.errType;
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env,
            fieldName, recordType.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        if (((BRecordType) recordType).sealed) {
            dlog.error(keyExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, fieldName,
                    recordType.tsymbol.type.getKind().typeName(), recordType.tsymbol);
            return symTable.errType;
        }
        // Open record: unknown keys are typed by the rest-field type.
        return ((BRecordType) recordType).restFieldType;
    }
    return fieldSymbol.type;
}
/**
 * Resolves a json-literal key. Constrained JSON delegates to the constraint's
 * record rules; unconstrained JSON only requires a string key and yields json.
 */
private BType checkJSONLiteralKeyExpr(BLangRecordKey key, BType recordType, RecordKind recKind) {
    BJSONType type = (BJSONType) recordType;
    if (type.constraint.tag != TypeTags.NONE && type.constraint.tag != TypeTags.ERROR) {
        return checkStructLiteralKeyExpr(key, type.constraint);
    }
    if (checkRecLiteralKeyExpr(key.expr, recKind).tag != TypeTags.STRING) {
        return symTable.errType;
    }
    return symTable.jsonType;
}
/**
 * Resolves a map-literal key: must be a string; the value type is the map's
 * constraint type.
 */
private BType checkMapLiteralKeyExpr(BLangExpression keyExpr, BType recordType, RecordKind recKind) {
    if (checkRecLiteralKeyExpr(keyExpr, recKind).tag != TypeTags.STRING) {
        return symTable.errType;
    }
    return ((BMapType) recordType).constraint;
}
/**
 * Types a literal key expression. Bare identifiers are treated directly as
 * string keys (no symbol lookup); anything else is checked as a string expression.
 */
private BType checkRecLiteralKeyExpr(BLangExpression keyExpr, RecordKind recKind) {
    if (keyExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return checkExpr(keyExpr, this.env, symTable.stringType);
    }
    keyExpr.type = symTable.stringType;
    return keyExpr.type;
}
/**
 * Validates an index expression used for struct field access: only string
 * literals are permitted; otherwise an error is reported.
 */
private BType checkIndexExprForStructFieldAccess(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.LITERAL) {
        return checkExpr(indexExpr, this.env, symTable.stringType);
    }
    dlog.error(indexExpr.pos, DiagnosticCode.INVALID_INDEX_EXPR_STRUCT_FIELD_ACCESS);
    return symTable.errType;
}
/**
 * Resolves a field access on a struct-like type. Falls back to attached
 * functions for objects, and to the rest-field type for open records; sealed
 * records report an error for unknown fields.
 */
private BType checkStructFieldAccess(BLangVariableReference varReferExpr, Name fieldName, BType structType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, structType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        varReferExpr.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    if (structType.tag == TypeTags.OBJECT) {
        // Not a field: try an attached function of the object.
        Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(structType.tsymbol.name.value,
                fieldName.value));
        fieldSymbol = symResolver.resolveObjectField(varReferExpr.pos, env, objFuncName, structType.tsymbol);
        if (fieldSymbol == symTable.notFoundSymbol) {
            dlog.error(varReferExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, fieldName,
                    structType.tsymbol.type.getKind().typeName(), structType.tsymbol);
            return symTable.errType;
        }
        varReferExpr.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    // Record: unknown fields are an error only when the record is sealed.
    if (((BRecordType) structType).sealed) {
        dlog.error(varReferExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, fieldName,
                structType.tsymbol.type.getKind().typeName(), structType.tsymbol);
        return symTable.errType;
    }
    return ((BRecordType) structType).restFieldType;
}
/**
 * Returns the member type of a tuple at the given index, or the error type
 * (with a diagnostic) when the index is outside the tuple's bounds.
 */
private BType checkTupleFieldType(BLangIndexBasedAccess indexBasedAccessExpr, BType varRefType, int indexValue) {
    List<BType> memberTypes = ((BTupleType) varRefType).tupleTypes;
    boolean withinBounds = indexValue >= 0 && indexValue < memberTypes.size();
    if (!withinBounds) {
        dlog.error(indexBasedAccessExpr.pos,
                DiagnosticCode.TUPLE_INDEX_OUT_OF_RANGE, indexValue, memberTypes.size());
        return symTable.errType;
    }
    return memberTypes.get(indexValue);
}
/**
 * Validates an index expression used for tuple member access: only integer
 * literals are permitted; otherwise an error is reported.
 */
private BType checkIndexExprForTupleFieldAccess(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.LITERAL) {
        return checkExpr(indexExpr, this.env, symTable.intType);
    }
    dlog.error(indexExpr.pos, DiagnosticCode.INVALID_INDEX_EXPR_TUPLE_FIELD_ACCESS);
    return symTable.errType;
}
/**
 * Validates that the start and end tag names of an XML element literal match.
 * Matching QNames pass; when neither tag is a QName (both are string
 * templates) the comparison is deferred to runtime; any other combination is
 * reported as a tag mismatch.
 *
 * BUG FIX: the original tested {@code startTagName.getKind()} twice in both
 * compound conditions, so the end tag's kind was never inspected — a
 * non-QName start tag with a QName end tag (and vice versa) could silently
 * skip the mismatch diagnostic.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element: nothing to match.
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME
            && startTagName.equals(endTagName)) {
        return;
    }
    if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
        // Both tags are string templates; equality can only be checked at runtime.
        return;
    }
    dlog.error(startTagName.pos, DiagnosticCode.XML_TAGS_MISMATCH);
}
/**
 * Folds the fragments of a string template into one left-associated binary '+'
 * expression. Each fragment is type-checked; fragments that cannot be concatenated
 * with a string are reported (silently skipped for the error type).
 */
private BLangExpression getStringTemplateConcatExpr(List<BLangExpression> exprs) {
    BLangExpression result = null;
    for (BLangExpression fragment : exprs) {
        checkExpr(fragment, env);
        if (result == null) {
            result = fragment;
        } else {
            BSymbol addOp = symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.stringType,
                    fragment.type);
            if (addOp == symTable.notFoundSymbol && fragment.type != symTable.errType) {
                dlog.error(fragment.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.stringType, fragment.type);
            }
            result = getBinaryAddExpr(result, fragment, addOp);
        }
    }
    return result;
}
/**
 * Concatenate the consecutive text type nodes, and get the reduced set of children.
 * Runs of non-XML children are accumulated into a single string-concatenation
 * expression and re-emitted as one XML text literal; XML-typed children are kept
 * as-is, flushing any pending text run before them.
 *
 * @param exprs Child nodes
 * @param xmlElementEnv environment used to type-check each child
 * @return Reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
    List<BLangExpression> newChildren = new ArrayList<>();
    // Running concatenation of the current text run; null when no run is open.
    BLangExpression strConcatExpr = null;
    for (BLangExpression expr : exprs) {
        BType exprType = checkExpr(expr, xmlElementEnv);
        if (exprType == symTable.xmlType) {
            // An XML child terminates the current text run: emit the run first.
            if (strConcatExpr != null) {
                newChildren.add(getXMLTextLiteral(strConcatExpr));
                strConcatExpr = null;
            }
            newChildren.add(expr);
            continue;
        }
        // Non-XML children must be string-concatenable; report otherwise
        // (already-errored expressions stay silent to avoid cascading diagnostics).
        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.stringType, exprType);
        if (opSymbol == symTable.notFoundSymbol && exprType != symTable.errType) {
            dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.xmlType, exprType);
        }
        if (strConcatExpr == null) {
            // First fragment of a new text run.
            strConcatExpr = expr;
            continue;
        }
        strConcatExpr = getBinaryAddExpr(strConcatExpr, expr, opSymbol);
    }
    // Flush a trailing text run, if any.
    if (strConcatExpr != null) {
        newChildren.add(getXMLTextLiteral(strConcatExpr));
    }
    return newChildren;
}
/**
 * Builds a binary '+' AST node joining {@code lExpr} and {@code rExpr}, wiring in
 * the resolved operator symbol (or the error type when resolution failed) and
 * checking the resulting type against string.
 */
private BLangExpression getBinaryAddExpr(BLangExpression lExpr, BLangExpression rExpr, BSymbol opSymbol) {
    BLangBinaryExpr addExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    addExpr.lhsExpr = lExpr;
    addExpr.rhsExpr = rExpr;
    addExpr.pos = rExpr.pos;
    addExpr.opKind = OperatorKind.ADD;
    boolean resolved = opSymbol != symTable.notFoundSymbol;
    if (resolved) {
        addExpr.opSymbol = (BOperatorSymbol) opSymbol;
        addExpr.type = opSymbol.type.getReturnType();
    } else {
        addExpr.type = symTable.errType;
    }
    types.checkType(addExpr, addExpr.type, symTable.stringType);
    return addExpr;
}
/**
 * Wraps an already-built concatenation expression in an XML text literal node,
 * carrying over the source position of the content expression.
 */
private BLangExpression getXMLTextLiteral(BLangExpression contentExpr) {
    BLangXMLTextLiteral textNode = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textNode.pos = contentExpr.pos;
    textNode.concatExpr = contentExpr;
    return textNode;
}
/**
 * Type-checks the receiver of a field access with no expected type and returns the
 * inferred type (checkExpr caches the checked type on the node itself).
 */
private BType getTypeOfExprInFieldAccess(BLangExpression expr) {
    checkExpr(expr, this.env, symTable.noType);
    return expr.type;
}
/**
 * Computes the final type of an access expression by widening the resolved member
 * type with nil (when the access may yield nil) and the error struct type (when safe
 * navigation is used over a possibly-erroneous receiver), collapsing trivial unions.
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    accessExpr.originalType = actualType;
    BUnionType unionType = new BUnionType(null, new LinkedHashSet<>(), false);
    if (actualType.tag == TypeTags.UNION) {
        // Flatten an already-union actual type into the working union.
        unionType.memberTypes.addAll(((BUnionType) actualType).memberTypes);
        unionType.setNullable(actualType.isNullable());
    } else {
        unionType.memberTypes.add(actualType);
    }
    if (returnsNull(accessExpr)) {
        // Map/nullable access can produce nil at runtime.
        unionType.memberTypes.add(symTable.nilType);
        unionType.setNullable(true);
    }
    BType parentType = accessExpr.expr.type;
    if (accessExpr.safeNavigate && (parentType.tag == TypeTags.ERROR || (parentType.tag == TypeTags.UNION &&
            ((BUnionType) parentType).memberTypes.contains(symTable.errStructType)))) {
        // Safe navigation over an error-bearing receiver propagates the error type.
        unionType.memberTypes.add(symTable.errStructType);
    }
    // A single-member union degrades back to the member itself.
    if (unionType.memberTypes.size() == 1 &&
            (!unionType.isNullable() || unionType.memberTypes.contains(symTable.nilType))) {
        return unionType.memberTypes.toArray(new BType[0])[0];
    }
    return unionType;
}
/**
 * Determines whether this access expression may evaluate to nil: either the receiver
 * itself is nullable (JSON excepted), or it is an index access on a map whose
 * constraint is a concrete type other than any/JSON.
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.type;
    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }
    // Beyond nullability, only map member access can introduce nil.
    if (parentType.tag != TypeTags.MAP) {
        return false;
    }
    boolean mapIndexAccess = accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            && accessExpr.expr.type.tag == TypeTags.MAP;
    if (!mapIndexAccess) {
        return false;
    }
    BType constraintType = ((BMapType) accessExpr.expr.type).constraint;
    return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
}
/**
 * Resolves the type produced by a dot field access for each category of receiver
 * type; unsupported receivers get a diagnostic and the error type.
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    BType actualType = symTable.errType;
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            actualType = checkStructFieldAccess(fieldAccessExpr, fieldName, varRefType);
            break;
        case TypeTags.MAP:
            // Map field access yields the map's constraint type.
            actualType = ((BMapType) varRefType).getConstraint();
            break;
        case TypeTags.STREAM:
            // Only record-constrained streams support field access.
            BType streamConstraintType = ((BStreamType) varRefType).constraint;
            if (streamConstraintType.tag == TypeTags.RECORD) {
                actualType = checkStructFieldAccess(fieldAccessExpr, fieldName, streamConstraintType);
            }
            break;
        case TypeTags.JSON:
            BType constraintType = ((BJSONType) varRefType).constraint;
            if (constraintType.tag == TypeTags.OBJECT || constraintType.tag == TypeTags.RECORD) {
                BType fieldType = checkStructFieldAccess(fieldAccessExpr, fieldName, constraintType);
                // A structured field stays constrained JSON; anything else widens to plain JSON.
                if (fieldType.tag == TypeTags.OBJECT || fieldType.tag == TypeTags.RECORD) {
                    actualType = new BJSONType(TypeTags.JSON, fieldType, symTable.jsonType.tsymbol);
                    break;
                }
            }
            actualType = symTable.jsonType;
            break;
        case TypeTags.XML:
            // XML sequences are read-only through field access.
            if (fieldAccessExpr.lhsVar) {
                dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
                break;
            }
            actualType = symTable.xmlType;
            break;
        case TypeTags.ERROR:
            // Receiver already failed to check; stay silent to avoid cascading errors.
            break;
        default:
            dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
                    varRefType);
    }
    return actualType;
}
/**
 * Resolves the type produced by an index-based access ({@code x[i]}) for each
 * category of receiver type, validating the index expression's type in each case.
 * Unsupported receivers get a diagnostic and the error type.
 */
private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr, BType varRefType) {
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    BType actualType = symTable.errType;
    BType indexExprType;
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            // Structs require a string-literal index naming a field.
            indexExprType = checkIndexExprForStructFieldAccess(indexExpr);
            if (indexExprType.tag == TypeTags.STRING) {
                String fieldName = (String) ((BLangLiteral) indexExpr).value;
                actualType = checkStructFieldAccess(indexBasedAccessExpr, names.fromString(fieldName), varRefType);
            }
            break;
        case TypeTags.MAP:
            indexExprType = checkExpr(indexExpr, this.env, symTable.stringType);
            if (indexExprType.tag == TypeTags.STRING) {
                actualType = ((BMapType) varRefType).getConstraint();
                // A concrete (non-any/JSON) constraint becomes nullable: the key may be absent.
                if (actualType.tag != TypeTags.ANY && actualType.tag != TypeTags.JSON) {
                    actualType = new BUnionType(null, new LinkedHashSet<>(getTypesList(actualType)), true);
                }
            }
            break;
        case TypeTags.JSON:
            BType constraintType = ((BJSONType) varRefType).constraint;
            if (constraintType.tag == TypeTags.OBJECT || constraintType.tag == TypeTags.RECORD) {
                // Constrained JSON behaves like struct access with a string-literal index.
                indexExprType = checkIndexExprForStructFieldAccess(indexExpr);
                if (indexExprType.tag != TypeTags.STRING) {
                    break;
                }
                String fieldName = (String) ((BLangLiteral) indexExpr).value;
                BType fieldType =
                        checkStructFieldAccess(indexBasedAccessExpr, names.fromString(fieldName), constraintType);
                // A structured field stays constrained JSON; otherwise fall through to plain JSON.
                if (fieldType.tag == TypeTags.OBJECT || fieldType.tag == TypeTags.RECORD) {
                    actualType = new BJSONType(TypeTags.JSON, fieldType, symTable.jsonType.tsymbol);
                    break;
                }
            } else {
                // Unconstrained JSON accepts string or int indexes.
                indexExprType = checkExpr(indexExpr, this.env, symTable.noType);
                if (indexExprType.tag != TypeTags.STRING && indexExprType.tag != TypeTags.INT) {
                    dlog.error(indexExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.stringType,
                            indexExprType);
                    break;
                }
            }
            actualType = symTable.jsonType;
            break;
        case TypeTags.ARRAY:
            indexExprType = checkExpr(indexExpr, this.env, symTable.intType);
            if (indexExprType.tag == TypeTags.INT) {
                actualType = ((BArrayType) varRefType).getElementType();
            }
            break;
        case TypeTags.XML:
            // XML sequences are read-only through index access.
            if (indexBasedAccessExpr.lhsVar) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
                break;
            }
            checkExpr(indexExpr, this.env);
            actualType = symTable.xmlType;
            break;
        case TypeTags.TUPLE:
            // Tuples require an int literal so the member type is statically known.
            indexExprType = checkIndexExprForTupleFieldAccess(indexExpr);
            if (indexExprType.tag == TypeTags.INT) {
                int indexValue = ((Long) ((BLangLiteral) indexExpr).value).intValue();
                actualType = checkTupleFieldType(indexBasedAccessExpr, varRefType, indexValue);
            }
            break;
        case TypeTags.ERROR:
            // Receiver already failed to check; stay silent to avoid cascading errors.
            break;
        default:
            dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING,
                    indexBasedAccessExpr.expr.type);
    }
    return actualType;
}
/**
 * Strips nil (and, for safe navigation, the error struct type) from a union receiver
 * type, diagnosing safe navigation applied where it is not required.
 */
private BType getSafeType(BType type, BLangAccessExpression accessExpr) {
    // Safe-navigating a bare error type is pointless: nothing remains after lifting.
    if (accessExpr.safeNavigate && type == symTable.errStructType) {
        dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
        return symTable.errType;
    }
    if (type.tag != TypeTags.UNION) {
        return type;
    }
    Set<BType> varRefMemberTypes = ((BUnionType) type).memberTypes;
    List<BType> lhsTypes;
    // NOTE(review): `nullable` is initialized to false and never reassigned, so the
    // rebuilt union below is always non-nullable — confirm this is intended.
    boolean nullable = false;
    if (accessExpr.safeNavigate) {
        // Safe navigation requires an error member to lift...
        if (!varRefMemberTypes.contains(symTable.errStructType)) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.errType;
        }
        // ...and drops both error and nil from the receiver type.
        lhsTypes = varRefMemberTypes.stream().filter(memberType -> {
            return memberType != symTable.errStructType && memberType != symTable.nilType;
        }).collect(Collectors.toList());
        if (lhsTypes.isEmpty()) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.errType;
        }
    } else {
        // Plain access only drops nil.
        lhsTypes = varRefMemberTypes.stream().filter(memberType -> {
            return memberType != symTable.nilType;
        }).collect(Collectors.toList());
    }
    if (lhsTypes.size() == 1) {
        return lhsTypes.get(0);
    }
    return new BUnionType(null, new LinkedHashSet<>(lhsTypes), nullable);
}
/**
 * Flattens a type into a list of member types: unions expand to their members,
 * every other type becomes a singleton list.
 */
private List<BType> getTypesList(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    return new ArrayList<>(((BUnionType) type).memberTypes);
}
/**
 * Computes the possible result types of a match expression: the union of all pattern
 * result types, plus any matched-expression member type that no pattern variable can
 * accept (such a value falls through unchanged). An error type anywhere collapses
 * the whole set to the error type.
 */
private Set<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.type);
    Set<BType> matchExprTypes = new LinkedHashSet<>();
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternExprType = pattern.expr.type;
            // Every pattern's result type is a possible result of the match expression.
            matchExprTypes.addAll(getTypesList(patternExprType));
            if (type.tag == TypeTags.ERROR || patternExprType.tag == TypeTags.ERROR) {
                return new HashSet<>(Lists.of(symTable.errType));
            }
            assignable = this.types.isAssignable(type, pattern.variable.type);
            if (assignable) {
                break;
            }
        }
        // No pattern captures this member: the value passes through with its own type.
        if (!assignable) {
            matchExprTypes.add(type);
        }
    }
    return matchExprTypes;
}
} | class TypeChecker extends BLangNodeVisitor {
// Context key used to register/look up the singleton TypeChecker per compilation.
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY =
        new CompilerContext.Key<>();
// Collaborating compiler services, resolved from the CompilerContext in the constructor.
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private Types types;
private IterableAnalyzer iterableAnalyzer;
private BLangDiagnosticLog dlog;
// Mutable per-check state, saved and restored around each nested checkExpr call.
private SymbolEnv env;
/**
 * Expected types or inherited types.
 */
private BType expType;
// Type produced by the most recent visit; read back by checkExpr.
private BType resultType;
// Diagnostic code reported when the actual type does not match expType.
private DiagnosticCode diagCode;
/**
 * Returns the per-compilation TypeChecker, creating one on first use (the
 * constructor registers the new instance in the context).
 */
public static TypeChecker getInstance(CompilerContext context) {
    TypeChecker instance = context.get(TYPE_CHECKER_KEY);
    return instance != null ? instance : new TypeChecker(context);
}
/**
 * Registers this checker in the compiler context and resolves the collaborating
 * compiler services from it.
 */
public TypeChecker(CompilerContext context) {
    // Register first so getInstance sees this instance even during construction.
    context.put(TYPE_CHECKER_KEY, this);
    this.names = Names.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.types = Types.getInstance(context);
    this.iterableAnalyzer = IterableAnalyzer.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
}
/**
 * Type-checks an expression with no particular expected type.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
    return checkExpr(expr, env, symTable.noType);
}
/**
 * Type-checks an expression against an expected type, reporting mismatches with the
 * default INCOMPATIBLE_TYPES diagnostic.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
    return checkExpr(expr, env, expType, DiagnosticCode.INCOMPATIBLE_TYPES);
}
/**
 * Check the given list of expressions against the given expected type.
 *
 * @param exprs list of expressions to be analyzed
 * @param env current symbol environment
 * @param expType expected type
 * @return the actual types of the given list of expressions, in order
 */
public List<BType> checkExprs(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
    List<BType> actualTypes = new ArrayList<>(exprs.size());
    exprs.forEach(expr -> actualTypes.add(checkExpr(expr, env, expType)));
    return actualTypes;
}
/**
 * Core entry point of the checker: visits {@code expr} with the given expected type
 * and diagnostic code, caching the result on the node. The checker's mutable state
 * (env, expected type, diagnostic code) is saved before the visit and restored after,
 * so nested checks triggered inside a visit do not clobber the caller's state.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    this.expType = expType;
    // The visitor communicates its answer through this.resultType.
    expr.accept(this);
    expr.type = resultType;
    this.env = prevEnv;
    this.expType = preExpType;
    this.diagCode = preDiagCode;
    return resultType;
}
/**
 * Type-checks a literal: applies implicit int-to-float and int-to-byte conversions
 * driven by the expected type, wraps byte-array literals, and handles finite/union
 * expected types by membership testing before the general type check.
 */
public void visit(BLangLiteral literalExpr) {
    BType literalType = symTable.getTypeFromTag(literalExpr.typeTag);
    Object literalValue = literalExpr.value;
    // An int literal in float context is converted to a float literal in place.
    if (TypeTags.FLOAT == expType.tag && TypeTags.INT == literalType.tag) {
        literalType = symTable.floatType;
        literalExpr.value = ((Long) literalValue).doubleValue();
    }
    // An int literal in byte context must fit the byte range.
    if (TypeTags.BYTE == expType.tag && TypeTags.INT == literalType.tag) {
        if (!isByteLiteralValue((Long) literalValue)) {
            dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, literalType);
            return;
        }
        literalType = symTable.byteType;
        literalExpr.value = ((Long) literalValue).byteValue();
    }
    if (TypeTags.BYTE_ARRAY == literalExpr.typeTag) {
        // Base64/base16 literals type as byte arrays.
        literalType = new BArrayType(symTable.byteType);
    }
    if (this.expType.tag == TypeTags.FINITE) {
        // A finite expected type accepts the literal only if it is one of its value-space members.
        BFiniteType expType = (BFiniteType) this.expType;
        boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr);
        if (foundMember) {
            types.setImplicitCastExpr(literalExpr, literalType, this.expType);
            resultType = literalType;
            return;
        }
    } else if (this.expType.tag == TypeTags.UNION) {
        // A union expected type accepts the literal if any member (finite) type accepts it.
        BUnionType unionType = (BUnionType) this.expType;
        boolean foundMember = unionType.memberTypes
                .stream()
                .map(memberType -> types.isAssignableToFiniteType(memberType, literalExpr))
                .anyMatch(foundType -> foundType);
        if (foundMember) {
            types.setImplicitCastExpr(literalExpr, literalType, this.expType);
            resultType = literalType;
            return;
        }
    }
    resultType = types.checkType(literalExpr, literalType, expType);
}
/**
 * Returns true when the given long literal value fits within the byte value range
 * [BBYTE_MIN_VALUE, BBYTE_MAX_VALUE].
 * <p>
 * Fix: the original compared {@code longObject.intValue()}, which truncates the
 * upper 32 bits — e.g. 4294967396L truncates to 100 and would wrongly pass the
 * range check. Compare the full long value instead.
 */
private static boolean isByteLiteralValue(Long longObject) {
    long value = longObject.longValue();
    return value >= BBYTE_MIN_VALUE && value <= BBYTE_MAX_VALUE;
}
/**
 * Type-checks a table literal: the expected type must be a constrained table; each
 * declared column and each data row is validated against that constraint.
 */
public void visit(BLangTableLiteral tableLiteral) {
    if (expType.tag == symTable.errType.tag) {
        return;
    }
    // NOTE(review): expType is cast to BTableType unconditionally — presumably an
    // earlier phase guarantees a table type reaches here; confirm, otherwise this
    // can throw ClassCastException for a non-table expected type.
    BType tableConstraint = ((BTableType) expType).getConstraint();
    if (tableConstraint.tag == TypeTags.NONE) {
        dlog.error(tableLiteral.pos, DiagnosticCode.TABLE_CANNOT_BE_CREATED_WITHOUT_CONSTRAINT);
        return;
    }
    validateTableColumns(tableConstraint, tableLiteral);
    // Each data row must be assignable to the constraint record type.
    checkExprs(tableLiteral.tableDataRows, this.env, tableConstraint);
    resultType = types.checkType(tableLiteral, expType, symTable.noType);
}
/**
 * Reports an error for every column declared in the table literal whose name does
 * not match a field of the constraint record type. Skipped entirely when the
 * constraint itself is the error type.
 */
private void validateTableColumns(BType tableConstraint, BLangTableLiteral tableLiteral) {
    if (tableConstraint.tag == TypeTags.ERROR) {
        return;
    }
    List<String> fieldNames = new ArrayList<>();
    for (BField field : ((BRecordType) tableConstraint).fields) {
        fieldNames.add(field.getName().getValue());
    }
    for (BLangTableLiteral.BLangTableColumn column : tableLiteral.columns) {
        if (!fieldNames.contains(column.columnName)) {
            dlog.error(tableLiteral.pos, DiagnosticCode.UNDEFINED_TABLE_COLUMN, column.columnName,
                    tableConstraint);
        }
    }
}
/**
 * Type-checks an array literal against the expected type: JSON context checks
 * members as JSON; an array context checks members against the element type
 * (resolving open-sealed sizes from the literal); otherwise the common supertype of
 * the members is inferred and matched against the compatible expected types.
 */
public void visit(BLangArrayLiteral arrayLiteral) {
    BType actualType = symTable.errType;
    // 'any' gives no element type to check against.
    if (expType.tag == TypeTags.ANY) {
        dlog.error(arrayLiteral.pos, DiagnosticCode.INVALID_ARRAY_LITERAL, expType);
        resultType = symTable.errType;
        return;
    }
    int expTypeTag = expType.tag;
    if (expTypeTag == TypeTags.JSON) {
        checkExprs(arrayLiteral.exprs, this.env, expType);
        actualType = expType;
    } else if (expTypeTag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) expType;
        if (arrayType.state == BArrayState.OPEN_SEALED) {
            // '[!...]' infers its size from the literal and becomes closed-sealed.
            arrayType.size = arrayLiteral.exprs.size();
            arrayType.state = BArrayState.CLOSED_SEALED;
        } else if (arrayType.state != BArrayState.UNSEALED && arrayType.size != arrayLiteral.exprs.size()) {
            dlog.error(arrayLiteral.pos,
                    DiagnosticCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, arrayLiteral.exprs.size());
            resultType = symTable.errType;
            return;
        }
        checkExprs(arrayLiteral.exprs, this.env, arrayType.eType);
        actualType = arrayType;
    } else if (expTypeTag != TypeTags.ERROR) {
        // No concrete array context: infer the members' common supertype.
        List<BType> resTypes = checkExprs(arrayLiteral.exprs, this.env, symTable.noType);
        Set<BType> arrayLitExprTypeSet = new HashSet<>(resTypes);
        BType[] uniqueExprTypes = arrayLitExprTypeSet.toArray(new BType[0]);
        if (uniqueExprTypes.length == 0) {
            actualType = symTable.anyType;
        } else if (uniqueExprTypes.length == 1) {
            actualType = resTypes.get(0);
        } else {
            // Widen towards the most general member type; mixed unrelated types fall back to any.
            BType superType = uniqueExprTypes[0];
            for (int i = 1; i < uniqueExprTypes.length; i++) {
                if (types.isAssignable(superType, uniqueExprTypes[i])) {
                    superType = uniqueExprTypes[i];
                } else if (!types.isAssignable(uniqueExprTypes[i], superType)) {
                    superType = symTable.anyType;
                    break;
                }
            }
            actualType = superType;
        }
        actualType = new BArrayType(actualType, null, arrayLiteral.exprs.size(), BArrayState.UNSEALED);
        // Match the inferred array type against the expected type's candidates.
        List<BType> arrayCompatibleType = getArrayCompatibleTypes(expType, actualType);
        if (arrayCompatibleType.isEmpty()) {
            dlog.error(arrayLiteral.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, actualType);
        } else if (arrayCompatibleType.size() > 1) {
            dlog.error(arrayLiteral.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
        } else if (arrayCompatibleType.get(0).tag == TypeTags.ANY) {
            dlog.error(arrayLiteral.pos, DiagnosticCode.INVALID_ARRAY_LITERAL, expType);
        } else if (arrayCompatibleType.get(0).tag == TypeTags.ARRAY) {
            // Re-check members against the single concrete element type found.
            checkExprs(arrayLiteral.exprs, this.env, ((BArrayType) arrayCompatibleType.get(0)).eType);
        }
    }
    resultType = types.checkType(arrayLiteral, actualType, expType);
}
/**
 * Collects the member types of {@code bType} that a record literal could take:
 * JSON, map, any open record, or a sealed record whose fields cover the literal's
 * keys. For a non-union type, the type itself is returned when it is one of the
 * mapping-like kinds.
 * <p>
 * Fix: the non-union branch previously switched on the {@code expType} field and
 * returned it, silently ignoring the {@code bType} parameter.
 */
private List<BType> getRecordCompatibleType(BType bType, BLangRecordLiteral recordLiteral) {
    if (bType.tag == TypeTags.UNION) {
        Set<BType> expTypes = ((BUnionType) bType).memberTypes;
        return expTypes.stream()
                .filter(type -> type.tag == TypeTags.JSON ||
                        type.tag == TypeTags.MAP ||
                        (type.tag == TypeTags.RECORD && !((BRecordType) type).sealed) ||
                        (type.tag == TypeTags.RECORD
                                && ((BRecordType) type).sealed
                                && isRecordLiteralCompatible((BRecordType) type, recordLiteral)))
                .collect(Collectors.toList());
    } else {
        switch (bType.tag) {
            case TypeTags.JSON:
            case TypeTags.MAP:
            case TypeTags.RECORD:
                return new ArrayList<>(Collections.singleton(bType));
            default:
                return Collections.emptyList();
        }
    }
}
/**
 * Returns true when every key in the record literal names a field of the given
 * record type (i.e. the literal is representable by a sealed record of that type).
 */
private boolean isRecordLiteralCompatible(BRecordType bRecordType, BLangRecordLiteral recordLiteral) {
    for (BLangRecordKeyValue keyValue : recordLiteral.getKeyValuePairs()) {
        String literalKey = ((BLangSimpleVarRef) keyValue.getKey()).variableName.value;
        boolean found = false;
        for (BField field : bRecordType.getFields()) {
            if (literalKey.equals(field.getName().getValue())) {
                found = true;
                break;
            }
        }
        if (!found) {
            return false;
        }
    }
    return true;
}
/**
 * Reports an error for every record field that must be supplied (not optional, not
 * defaultable, no implicit default value for its type) but is absent from the
 * literal's key-value pairs.
 */
private void checkMissingRequiredFields(BRecordType type, List<BLangRecordKeyValue> keyValuePairs,
                                        DiagnosticPos pos) {
    for (BField field : type.fields) {
        boolean present = false;
        for (BLangRecordKeyValue keyVal : keyValuePairs) {
            if (keyVal.key.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                    && field.name.value.equals(((BLangSimpleVarRef) keyVal.key.expr).variableName.value)) {
                present = true;
                break;
            }
        }
        if (!present && !Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)
                && (!types.defaultValueExists(pos, field.type)
                        && !Symbols.isFlagOn(field.symbol.flags, Flags.DEFAULTABLE))) {
            dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Collects the members of {@code expType} (or the type itself when it is not a
 * union) that {@code actualType} is assignable to, always admitting the wildcard
 * tags NONE and ANY.
 */
private List<BType> getArrayCompatibleTypes(BType expType, BType actualType) {
    Set<BType> candidates;
    if (expType.tag == TypeTags.UNION) {
        candidates = ((BUnionType) expType).memberTypes;
    } else {
        candidates = new HashSet<>();
        candidates.add(expType);
    }
    List<BType> compatible = new ArrayList<>();
    for (BType type : candidates) {
        if (types.isAssignable(actualType, type) || type.tag == TypeTags.NONE || type.tag == TypeTags.ANY) {
            compatible.add(type);
        }
    }
    return compatible;
}
/**
 * Type-checks a simple variable reference: handles the '_' sink, XMLNS-qualified
 * names, and ordinary symbol lookup (falling back to attached-function lookup inside
 * a type definition). Also records closure variables for lambdas and arrow
 * expressions whose bodies capture outer-scope variables.
 */
public void visit(BLangSimpleVarRef varRefExpr) {
    BType actualType = symTable.errType;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    if (varName == Names.IGNORE) {
        // '_' is only legal as an assignment target.
        if (varRefExpr.lhsVar) {
            varRefExpr.type = this.symTable.noType;
        } else {
            varRefExpr.type = this.symTable.errType;
            dlog.error(varRefExpr.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);
        }
        varRefExpr.symbol = new BVarSymbol(0, varName, env.enclPkg.symbol.pkgID, actualType, env.scope.owner);
        resultType = varRefExpr.type;
        return;
    }
    varRefExpr.pkgSymbol = symResolver.resolveImportSymbol(varRefExpr.pos,
            env, names.fromIdNode(varRefExpr.pkgAlias));
    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // An XML namespace prefix used as a value is its namespace URI string.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupSymbolInPackage(varRefExpr.pos, env,
                names.fromIdNode(varRefExpr.pkgAlias), varName, SymTag.VARIABLE_NAME);
        // Inside a type definition, also try the name as an attached function.
        if (symbol == symTable.notFoundSymbol && env.enclTypeDefinition != null) {
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(env.enclTypeDefinition.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                    env.enclTypeDefinition.symbol.type.tsymbol);
        }
        if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSefReferences(varRefExpr.pos, env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            BLangInvokableNode encInvokable = env.enclInvokable;
            // Inside a lambda, a non-package-level variable from an outer scope is a closure var.
            if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) &&
                    !(symbol.owner instanceof BPackageSymbol)) {
                SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                BSymbol closureVarSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                        SymTag.VARIABLE_NAME);
                if (closureVarSymbol != symTable.notFoundSymbol &&
                        !isFunctionArgument(closureVarSymbol, env.enclInvokable.requiredParams)) {
                    ((BLangFunction) env.enclInvokable).closureVarSymbols.add((BVarSymbol) closureVarSymbol);
                }
            }
            // Arrow expressions capture closure vars similarly, re-homing the symbol's owner.
            if (env.node.getKind() == NodeKind.ARROW_EXPR && !(symbol.owner instanceof BPackageSymbol)) {
                symbol.owner = Symbols.createInvokableSymbol(SymTag.FUNCTION, 0, null,
                        env.enclPkg.packageID, null, symbol.owner);
                SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                BSymbol closureVarSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                        SymTag.VARIABLE_NAME);
                if (closureVarSymbol != symTable.notFoundSymbol &&
                        !isFunctionArgument(closureVarSymbol, ((BLangArrowFunction) env.node).params)) {
                    ((BLangArrowFunction) env.node).closureVarSymbols.add((BVarSymbol) closureVarSymbol);
                }
            }
        } else if ((symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            // A type name used as a value has typedesc type.
            actualType = symTable.typeDesc;
            varRefExpr.symbol = symbol;
        } else {
            dlog.error(varRefExpr.pos, DiagnosticCode.UNDEFINED_SYMBOL, varName.toString());
        }
    }
    // An open-sealed array's size cannot be inferred from a bare variable reference.
    if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
        dlog.error(varRefExpr.pos, DiagnosticCode.SEALED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
        return;
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Walks the dimensions of a (possibly multidimensional) array type, checking
 * whether at least one dimension is open sealed.
 *
 * @param arrayType array to check if open sealed
 * @return true if at least one dimension is open sealed
 */
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    BType current = arrayType;
    while (current.tag == TypeTags.ARRAY) {
        BArrayType dimension = (BArrayType) current;
        if (dimension.state == BArrayState.OPEN_SEALED) {
            return true;
        }
        current = dimension.eType;
    }
    return false;
}
/**
 * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the
 * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the
 * enclosing invokable node's environment, which are outside of the scope of a lambda function.
 *
 * @param env          environment to start climbing from
 * @param encInvokable the enclosing lambda whose scope must be escaped
 * @return the first environment outside the given invokable (or an arrow-expression env)
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    // An arrow expression's parent environment is the lookup boundary.
    if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
        return env.enclEnv;
    }
    // Keep climbing while still inside the given invokable's scope.
    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
    }
    return env;
}
/**
 * Returns true when the given symbol matches one of the parameters by both name and
 * type tag (i.e. it is a function argument rather than a captured closure variable).
 */
private boolean isFunctionArgument(BSymbol symbol, List<BLangVariable> params) {
    for (BLangVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.type.tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
/**
 * Type-checks a dot field access: checks the receiver, rejects '.*' on non-XML and
 * error lifting on assignment targets, strips nil/error from the receiver type, and
 * resolves the accessed field's type.
 */
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Propagate assignment-target status to the receiver.
    fieldAccessExpr.expr.lhsVar = fieldAccessExpr.lhsVar;
    BType varRefType = getTypeOfExprInFieldAccess(fieldAccessExpr.expr);
    if (fieldAccessExpr.fieldKind == FieldKind.ALL && varRefType.tag != TypeTags.XML) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_GET_ALL_FIELDS, varRefType);
    }
    // Safe navigation ('!.') cannot appear on the left-hand side of an assignment.
    if (fieldAccessExpr.lhsVar && fieldAccessExpr.safeNavigate) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.INVALID_ERROR_LIFTING_ON_LHS);
        resultType = symTable.errType;
        return;
    }
    varRefType = getSafeType(varRefType, fieldAccessExpr);
    Name fieldName = names.fromIdNode(fieldAccessExpr.field);
    BType actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
    if (fieldAccessExpr.lhsVar) {
        // Assignment targets keep the raw member type (no nil/error widening).
        fieldAccessExpr.originalType = actualType;
        fieldAccessExpr.type = actualType;
        resultType = actualType;
        return;
    }
    actualType = getAccessExprFinalType(fieldAccessExpr, actualType);
    resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
/**
 * Type-checks an index-based access: checks the receiver, strips nil/error from its
 * type, resolves the member type, and widens it with nil/error for non-assignment
 * contexts.
 */
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
    // Propagate assignment-target status to the receiver.
    indexBasedAccessExpr.expr.lhsVar = indexBasedAccessExpr.lhsVar;
    checkExpr(indexBasedAccessExpr.expr, this.env, symTable.noType);
    BType varRefType = indexBasedAccessExpr.expr.type;
    varRefType = getSafeType(varRefType, indexBasedAccessExpr);
    BType actualType = checkIndexAccessExpr(indexBasedAccessExpr, varRefType);
    if (indexBasedAccessExpr.lhsVar) {
        // Assignment targets keep the raw member type (no nil/error widening).
        indexBasedAccessExpr.originalType = actualType;
        indexBasedAccessExpr.type = actualType;
        resultType = actualType;
        return;
    }
    actualType = getAccessExprFinalType(indexBasedAccessExpr, actualType);
    this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
}
/**
 * Type-checks an invocation. Plain function calls are delegated directly; for
 * attached calls ({@code expr.name(...)}) the receiver is checked first, iterable
 * operations and action invocations are intercepted, and otherwise the call is
 * resolved against the receiver's type category.
 */
public void visit(BLangInvocation iExpr) {
    if (iExpr.expr == null) {
        // Plain (non-attached) function invocation.
        checkFunctionInvocationExpr(iExpr);
        return;
    }
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    if (pkgAlias != Names.EMPTY) {
        dlog.error(iExpr.pos, DiagnosticCode.PKG_ALIAS_NOT_ALLOWED_HERE);
        return;
    }
    final BType exprType = checkExpr(iExpr.expr, this.env, symTable.noType);
    if (isIterableOperationInvocation(iExpr)) {
        // map/filter/foreach-style operations are handled by the iterable analyzer.
        iExpr.iterableOperationInvocation = true;
        iterableAnalyzer.handlerIterableOperation(iExpr, expType, env);
        resultType = iExpr.iContext.operations.getLast().resultType;
        return;
    }
    if (iExpr.actionInvocation) {
        checkActionInvocationExpr(iExpr, exprType);
        return;
    }
    BType varRefType = iExpr.expr.type;
    varRefType = getSafeType(varRefType, iExpr);
    // Dispatch on the receiver's type category to resolve the attached function.
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            checkFunctionInvocationExpr(iExpr, (BStructureType) varRefType);
            break;
        case TypeTags.BOOLEAN:
        case TypeTags.STRING:
        case TypeTags.INT:
        case TypeTags.FLOAT:
        case TypeTags.XML:
            checkFunctionInvocationExpr(iExpr, varRefType);
            break;
        case TypeTags.JSON:
            checkFunctionInvocationExpr(iExpr, symTable.jsonType);
            break;
        case TypeTags.TABLE:
            checkFunctionInvocationExpr(iExpr, symTable.tableType);
            break;
        case TypeTags.STREAM:
            checkFunctionInvocationExpr(iExpr, symTable.streamType);
            break;
        case TypeTags.FUTURE:
            checkFunctionInvocationExpr(iExpr, symTable.futureType);
            break;
        case TypeTags.NONE:
            dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, iExpr.name);
            break;
        case TypeTags.MAP:
            checkFunctionInvocationExpr(iExpr, this.symTable.mapType);
            break;
        case TypeTags.ERROR:
            // Receiver already failed to check; stay silent to avoid cascading errors.
            break;
        case TypeTags.INTERMEDIATE_COLLECTION:
            dlog.error(iExpr.pos, DiagnosticCode.INVALID_FUNCTION_INVOCATION_WITH_NAME, iExpr.name,
                    iExpr.expr.type);
            resultType = symTable.errType;
            break;
        default:
            dlog.error(iExpr.pos, DiagnosticCode.INVALID_FUNCTION_INVOCATION, iExpr.expr.type);
            resultType = symTable.errType;
            break;
    }
    // Record the un-widened return type for later safe-navigation rewriting.
    if (iExpr.symbol != null) {
        iExpr.originalType = ((BInvokableSymbol) iExpr.symbol).type.getReturnType();
    } else {
        iExpr.originalType = iExpr.type;
    }
}
/**
 * Type-checks a {@code new} expression: resolves the object type (from the explicit
 * user-defined type or the expected type), rejects non-object and abstract targets,
 * and checks the init-function arguments when an initializer exists.
 */
public void visit(BLangTypeInit cIExpr) {
    // 'new' without an explicit type cannot be inferred from 'any' or a record type.
    if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null)
            || expType.tag == TypeTags.RECORD) {
        dlog.error(cIExpr.pos, DiagnosticCode.INVALID_TYPE_NEW_LITERAL, expType);
        resultType = symTable.errType;
        return;
    }
    BType actualType;
    if (cIExpr.userDefinedType != null) {
        actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
    } else {
        // Infer the constructed type from the expected type.
        actualType = expType;
    }
    if (actualType == symTable.errType) {
        resultType = symTable.errType;
        return;
    }
    if (actualType.tag != TypeTags.OBJECT) {
        dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
        resultType = symTable.errType;
        return;
    }
    if ((actualType.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {
        dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol);
        // Still check the arguments so they get their own diagnostics.
        cIExpr.objectInitInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
        resultType = symTable.errType;
        return;
    }
    if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
        cIExpr.objectInitInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
        checkInvocationParam(cIExpr.objectInitInvocation);
    } else if (cIExpr.objectInitInvocation.argExprs.size() > 0) {
        // No initializer declared, yet arguments were supplied.
        dlog.error(cIExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.objectInitInvocation.exprSymbol);
        cIExpr.objectInitInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
        resultType = symTable.errType;
        return;
    }
    cIExpr.objectInitInvocation.type = symTable.nilType;
    resultType = types.checkType(cIExpr, actualType, expType);
}
/**
 * Type-checks a ternary conditional: the condition must be boolean, both branches
 * are checked against the expected type, and with no expected type the two branch
 * types must be identical.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
    BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);
    BType thenType = checkExpr(ternaryExpr.thenExpr, env, expType);
    BType elseType = checkExpr(ternaryExpr.elseExpr, env, expType);
    if (condExprType == symTable.errType || thenType == symTable.errType || elseType == symTable.errType) {
        resultType = symTable.errType;
    } else if (expType == symTable.noType) {
        // With no context, the branch types must agree exactly (reference equality on BTypes).
        if (thenType == elseType) {
            resultType = thenType;
        } else {
            dlog.error(ternaryExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, thenType, elseType);
            resultType = symTable.errType;
        }
    } else {
        resultType = expType;
    }
}
/**
 * Type-checks an {@code await} expression: the awaited expression must be a future,
 * and the result type is the future's constraint.
 * <p>
 * Fix: the incompatible-type branch previously returned without assigning
 * {@code resultType}, leaving it stale from the nested checkExpr call; it now
 * explicitly yields the error type.
 */
public void visit(BLangAwaitExpr awaitExpr) {
    BType actualType;
    BType expType = checkExpr(awaitExpr.expr, env, this.symTable.noType);
    if (expType == symTable.errType) {
        actualType = symTable.errType;
    } else if (expType.tag == TypeTags.FUTURE) {
        // 'await f' produces the value the future resolves to.
        actualType = ((BFutureType) expType).constraint;
    } else {
        dlog.error(awaitExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.futureType, expType);
        resultType = symTable.errType;
        return;
    }
    resultType = types.checkType(awaitExpr, actualType, this.expType);
}
/**
 * Type-checks a binary expression: both operands are checked, the operator is
 * resolved against their types (with a fallback for ==/!= over intersecting type
 * sets), and the operator's return type becomes the actual type.
 */
public void visit(BLangBinaryExpr binaryExpr) {
    BType lhsType = checkExpr(binaryExpr.lhsExpr, env);
    BType rhsType = checkExpr(binaryExpr.rhsExpr, env);
    BType actualType = symTable.errType;
    // Only attempt operator resolution when both operands checked cleanly.
    if (lhsType != symTable.errType && rhsType != symTable.errType) {
        BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
        if (opSymbol == symTable.notFoundSymbol) {
            // Equality over overlapping type sets gets a synthesized reference-equality operator.
            opSymbol = getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                    binaryExpr);
        }
        if (opSymbol == symTable.notFoundSymbol) {
            dlog.error(binaryExpr.pos, DiagnosticCode.BINARY_OP_INCOMPATIBLE_TYPES,
                    binaryExpr.opKind, lhsType, rhsType);
        } else {
            binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
            actualType = opSymbol.type.getReturnType();
        }
    }
    resultType = types.checkType(binaryExpr, actualType, expType);
}
// Synthesizes a reference-equality operator for ==/!= when the operand type
// sets intersect. Mixed value/reference operands are lifted to 'any' via
// implicit casts so a single any==any operator can be used.
// Returns notFoundSymbol for other operators or non-intersecting types.
private BSymbol getBinaryEqualityForTypeSets(OperatorKind opKind, BType lhsType,
BType rhsType, BLangBinaryExpr binaryExpr) {
if (opKind != OperatorKind.EQUAL && opKind != OperatorKind.NOT_EQUAL) {
return symTable.notFoundSymbol;
}
if (types.isIntersectionExist(lhsType, rhsType)) {
// Both value types or both reference types: compare directly.
if ((!types.isValueType(lhsType) && !types.isValueType(rhsType)) ||
(types.isValueType(lhsType) && types.isValueType(rhsType))) {
return symResolver.createReferenceEqualityOperator(opKind, lhsType, rhsType);
} else {
// One side is a value type: box both sides up to 'any' first.
types.setImplicitCastExpr(binaryExpr.rhsExpr, rhsType, symTable.anyType);
types.setImplicitCastExpr(binaryExpr.lhsExpr, lhsType, symTable.anyType);
return symResolver.createReferenceEqualityOperator(opKind, symTable.anyType, symTable.anyType);
}
} else {
return symTable.notFoundSymbol;
}
}
// Type-checks an elvis expression (lhs ?: rhs). The LHS must be a nullable
// union; its nil member is stripped to form the LHS contribution to the result.
public void visit(BLangElvisExpr elvisExpr) {
BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
BType actualType = symTable.errType;
if (lhsType != symTable.errType) {
if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
BUnionType unionType = (BUnionType) lhsType;
// Collect all non-nil members of the union.
HashSet<BType> memberTypes = new HashSet<BType>();
Iterator<BType> iterator = unionType.getMemberTypes().iterator();
while (iterator.hasNext()) {
BType memberType = iterator.next();
if (memberType != symTable.nilType) {
memberTypes.add(memberType);
}
}
if (memberTypes.size() == 1) {
// A single remaining member collapses to that plain type.
BType[] memberArray = new BType[1];
memberTypes.toArray(memberArray);
actualType = memberArray[0];
} else {
// Otherwise rebuild a (non-nullable) union of the remaining members.
actualType = new BUnionType(null, memberTypes, false);
}
} else {
// Elvis is only meaningful on nullable unions.
dlog.error(elvisExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED,
OperatorKind.ELVIS, lhsType);
}
}
BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
DiagnosticCode.INCOMPATIBLE_TYPES);
if (rhsReturnType == symTable.errType || lhsReturnType == symTable.errType) {
resultType = symTable.errType;
} else if (expType == symTable.noType) {
// No context type: both sides must agree structurally.
if (types.isSameType(rhsReturnType, lhsReturnType)) {
resultType = lhsReturnType;
} else {
dlog.error(elvisExpr.rhsExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType);
resultType = symTable.errType;
}
} else {
resultType = expType;
}
}
// Disambiguates a parenthesized expression: it may be a tuple literal, a
// typedesc expression, or a plain braced (grouping) expression, depending on
// the expected type and the number of sub-expressions.
@Override
public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) {
if (expType.tag == TypeTags.TUPLE) {
// Expected type is a tuple: check each element against the member type.
BTupleType tupleType = (BTupleType) this.expType;
List<BType> expTypes = getListWithErrorTypes(bracedOrTupleExpr.expressions.size());
if (tupleType.tupleTypes.size() != bracedOrTupleExpr.expressions.size()) {
dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.SYNTAX_ERROR,
"tuple and expression size does not match");
} else {
expTypes = tupleType.tupleTypes;
}
List<BType> results = new ArrayList<>();
for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) {
// NOTE: this local deliberately shadows the field 'expType'.
BType expType = expTypes.get(i);
BType actualType = checkExpr(bracedOrTupleExpr.expressions.get(i), env, expType);
// Prefer the declared member type; fall back to the inferred type.
results.add(expType.tag != TypeTags.NONE ? expType : actualType);
}
resultType = new BTupleType(results);
return;
}
// Otherwise infer each sub-expression with no expected type.
List<BType> results = new ArrayList<>();
for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) {
results.add(checkExpr(bracedOrTupleExpr.expressions.get(i), env, symTable.noType));
}
if (expType.tag == TypeTags.TYPEDESC) {
// Context expects a typedesc: reinterpret the expression(s) as type references.
bracedOrTupleExpr.isTypedescExpr = true;
List<BType> actualTypes = new ArrayList<>();
for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) {
final BLangExpression expr = bracedOrTupleExpr.expressions.get(i);
if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
} else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
} else {
actualTypes.add(results.get(i));
}
}
if (actualTypes.size() == 1) {
bracedOrTupleExpr.typedescType = actualTypes.get(0);
} else {
bracedOrTupleExpr.typedescType = new BTupleType(actualTypes);
}
resultType = symTable.typeDesc;
} else if (bracedOrTupleExpr.expressions.size() > 1) {
// More than one expression without a tuple/typedesc context: a tuple literal.
BType actualType = new BTupleType(results);
if (expType.tag == TypeTags.ANY) {
dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.INVALID_TUPLE_LITERAL, expType);
resultType = symTable.errType;
return;
}
// Resolve against array-compatible candidates; ambiguity is an error.
List<BType> tupleCompatibleType = getArrayCompatibleTypes(expType, actualType);
if (tupleCompatibleType.isEmpty()) {
dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, actualType);
} else if (tupleCompatibleType.size() > 1) {
dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
} else if (tupleCompatibleType.get(0).tag == TypeTags.ANY) {
dlog.error(bracedOrTupleExpr.pos, DiagnosticCode.INVALID_TUPLE_LITERAL, expType);
} else {
resultType = types.checkType(bracedOrTupleExpr, actualType, expType);
}
} else {
// Exactly one expression: a plain parenthesized (braced) expression.
bracedOrTupleExpr.isBracedExpr = true;
final BType actualType = results.get(0);
BLangExpression expression = bracedOrTupleExpr.expressions.get(0);
resultType = types.checkType(expression, actualType, expType);
}
}
// A typedesc expression always has the built-in typedesc type; resolve the
// referenced type node so later phases can use it.
public void visit(BLangTypedescExpr accessExpr) {
    accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    resultType = types.checkType(accessExpr, symTable.typeDesc, expType);
}
// Type-checks a unary expression. 'untaint' is special-cased: it keeps the
// operand's type; all other operators are resolved as unary operator symbols.
public void visit(BLangUnaryExpr unaryExpr) {
BType exprType;
BType actualType = symTable.errType;
if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
// untaint is a taint-analysis marker; the static type is unchanged.
exprType = checkExpr(unaryExpr.expr, env);
if (exprType != symTable.errType) {
actualType = exprType;
}
} else {
exprType = checkExpr(unaryExpr.expr, env);
if (exprType != symTable.errType) {
BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType);
if (symbol == symTable.notFoundSymbol) {
dlog.error(unaryExpr.pos, DiagnosticCode.UNARY_OP_INCOMPATIBLE_TYPES,
unaryExpr.operator, exprType);
} else {
unaryExpr.opSymbol = (BOperatorSymbol) symbol;
actualType = symbol.type.getReturnType();
}
}
}
resultType = types.checkType(unaryExpr, actualType, expType);
}
// Type-checks an explicit conversion <T>expr by resolving a conversion
// operator from the inferred source type to the target type.
public void visit(BLangTypeConversionExpr conversionExpr) {
BType actualType = symTable.errType;
BType targetType = symResolver.resolveTypeNode(conversionExpr.typeNode, env);
conversionExpr.targetType = targetType;
BType sourceType = checkExpr(conversionExpr.expr, env, symTable.noType);
BSymbol symbol = symResolver.resolveConversionOperator(sourceType, targetType);
if (symbol == symTable.notFoundSymbol) {
dlog.error(conversionExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CONVERSION, sourceType, targetType);
} else {
BConversionOperatorSymbol conversionSym = (BConversionOperatorSymbol) symbol;
conversionExpr.conversionSymbol = conversionSym;
// The conversion's result type comes from the operator (it may include error).
actualType = conversionSym.type.getReturnType();
}
resultType = types.checkType(conversionExpr, actualType, expType);
}
// A lambda's static type is the type of the function it wraps. The defining
// environment is cloned and cached for later closure desugaring, and the
// lambda is registered on the enclosing package.
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    final BType lambdaType = bLangLambdaFunction.function.symbol.type;
    bLangLambdaFunction.type = lambdaType;
    bLangLambdaFunction.cachedEnv = env.createClone();
    env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    resultType = types.checkType(bLangLambdaFunction, lambdaType, expType);
}
// Type-checks an arrow function. Parameter and return types are inferred
// entirely from the expected (LHS) invokable type; without one, inference fails.
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
if (expType.tag != TypeTags.INVOKABLE) {
dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
resultType = symTable.errType;
return;
}
BInvokableType expectedInvocation = (BInvokableType) this.expType;
populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
bLangArrowFunction.expression.type = populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType);
if (expectedInvocation.retType.tag == TypeTags.NONE) {
// NOTE(review): this mutates the expected type in place when no return type
// was declared, backfilling it with the inferred body type.
expectedInvocation.retType = bLangArrowFunction.expression.type;
}
resultType = bLangArrowFunction.funcType = expectedInvocation;
}
// Type-checks an XML qualified name and resolves its namespace prefix.
// The early-return ladder handles xmlns declarations and invalid prefixes
// before attempting the symbol lookup.
public void visit(BLangXMLQName bLangXMLQName) {
String prefix = bLangXMLQName.prefix.value;
resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
// Inside an XML attribute, 'xmlns' (bare or as prefix) marks a namespace declaration.
if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
&& bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
return;
}
if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
return;
}
// Outside an attribute, 'xmlns' is not a usable prefix.
if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
dlog.error(bLangXMLQName.pos, DiagnosticCode.INVALID_NAMESPACE_PREFIX, prefix);
bLangXMLQName.type = symTable.errType;
return;
}
BSymbol xmlnsSymbol = symResolver.lookupSymbol(env, names.fromIdNode(bLangXMLQName.prefix), SymTag.XMLNS);
// An empty prefix with no default namespace in scope is fine (unqualified name).
if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
return;
}
// A non-empty prefix must resolve to a declared namespace.
if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
dlog.error(bLangXMLQName.pos, DiagnosticCode.UNDEFINED_SYMBOL, prefix);
bLangXMLQName.type = symTable.errType;
return;
}
bLangXMLQName.namespaceURI = ((BXMLNSSymbol) xmlnsSymbol).namespaceURI;
bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
}
// Type-checks an XML attribute: both name and value must be strings, checked
// inside a dedicated XML-attribute environment; the attribute symbol is then
// entered into the enclosing scope.
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv attributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
    checkExpr(bLangXMLAttribute.name, attributeEnv, symTable.stringType);
    checkExpr(bLangXMLAttribute.value, attributeEnv, symTable.stringType);
    symbolEnter.defineNode(bLangXMLAttribute, env);
}
// Type-checks an XML element literal. Namespace-declaring attributes are
// checked FIRST so later attributes and tag names can resolve those prefixes;
// then the remaining attributes, in-scope namespaces, tags, and children.
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
// Pass 1: xmlns-prefixed attributes (namespace declarations).
bLangXMLElementLiteral.attributes.forEach(attribute -> {
if (attribute.name.getKind() == NodeKind.XML_QNAME
&& ((BLangXMLQName) attribute.name).prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
checkExpr(attribute, xmlElementEnv, symTable.noType);
}
});
// Pass 2: all other attributes, now that namespaces are in scope.
bLangXMLElementLiteral.attributes.forEach(attribute -> {
if (attribute.name.getKind() != NodeKind.XML_QNAME
|| !((BLangXMLQName) attribute.name).prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
checkExpr(attribute, xmlElementEnv, symTable.noType);
}
});
Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
// The default namespace is stored separately from the prefixed ones.
if (namespaces.containsKey(defaultNs)) {
bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
}
bLangXMLElementLiteral.namespacesInScope.putAll(namespaces);
validateTags(bLangXMLElementLiteral, xmlElementEnv);
// Merge adjacent children of the same kind (e.g. consecutive text nodes).
bLangXMLElementLiteral.modifiedChildren =
concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlType, expType);
}
// An XML text literal always has type xml; its fragments are folded into a
// single concatenation expression for the desugar phase.
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
    bLangXMLTextLiteral.concatExpr =
            getStringTemplateConcatExpr(bLangXMLTextLiteral.textFragments);
    resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlType, expType);
}
// An XML comment literal always has type xml; its fragments are folded into a
// single concatenation expression for the desugar phase.
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
    bLangXMLCommentLiteral.concatExpr =
            getStringTemplateConcatExpr(bLangXMLCommentLiteral.textFragments);
    resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlType, expType);
}
// An XML processing-instruction literal has type xml; its target must be a
// string and its data fragments are folded into one concatenation expression.
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    bLangXMLProcInsLiteral.dataConcatExpr =
            getStringTemplateConcatExpr(bLangXMLProcInsLiteral.dataFragments);
    resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlType, expType);
}
// A quoted string inside an XML literal is a plain string; its fragments are
// folded into one concatenation expression for the desugar phase.
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    bLangXMLQuotedString.concatExpr =
            getStringTemplateConcatExpr(bLangXMLQuotedString.textFragments);
    resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}
// Type-checks xml@[index] attribute access. Without an index, the whole
// attribute map is produced (read-only); with a string index, a string value.
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
BType actualType = symTable.errType;
checkExpr(xmlAttributeAccessExpr.expr, env, symTable.xmlType);
BLangExpression indexExpr = xmlAttributeAccessExpr.indexExpr;
if (indexExpr == null) {
// xml@ with no index: the full attribute map, which cannot be assigned to.
if (xmlAttributeAccessExpr.lhsVar) {
dlog.error(xmlAttributeAccessExpr.pos, DiagnosticCode.XML_ATTRIBUTE_MAP_UPDATE_NOT_ALLOWED);
} else {
actualType = symTable.xmlAttributesType;
}
resultType = types.checkType(xmlAttributeAccessExpr, actualType, expType);
return;
}
checkExpr(indexExpr, env, symTable.stringType);
if (indexExpr.type.tag == TypeTags.STRING) {
actualType = symTable.stringType;
}
// Record the namespaces in scope so the attribute name can be expanded later.
xmlAttributeAccessExpr.namespaces.putAll(symResolver.resolveAllNamespaces(env));
resultType = types.checkType(xmlAttributeAccessExpr, actualType, expType);
}
// A string template literal always has type string; its parts are folded into
// a single concatenation expression for the desugar phase.
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    stringTemplateLiteral.concatExpr =
            getStringTemplateConcatExpr(stringTemplateLiteral.exprs);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
// Type-checks an integer range expression: both bounds must be ints, and the
// range itself evaluates to an int array.
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    BType boundType = symTable.intType;
    checkExpr(intRangeExpression.startExpr, env, boundType);
    checkExpr(intRangeExpression.endExpr, env, boundType);
    resultType = new BArrayType(boundType);
}
// Type-checks a table query expression. The result takes the expected table
// type; any other non-error expected type is a conversion error.
@Override
public void visit(BLangTableQueryExpression tableQueryExpression) {
BType actualType = symTable.errType;
int expTypeTag = expType.tag;
if (expTypeTag == TypeTags.TABLE) {
actualType = expType;
} else if (expTypeTag != TypeTags.ERROR) {
dlog.error(tableQueryExpression.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CONVERSION, expType);
}
// Recursively type-check the query's clauses.
BLangTableQuery tableQuery = (BLangTableQuery) tableQueryExpression.getTableQuery();
tableQuery.accept(this);
resultType = types.checkType(tableQueryExpression, actualType, expType);
}
// Type-checks a table query: the mandatory streaming input first, then the
// optional join input.
@Override
public void visit(BLangTableQuery tableQuery) {
    ((BLangStreamingInput) tableQuery.getStreamingInput()).accept(this);
    BLangJoinStreamingInput joinInput = (BLangJoinStreamingInput) tableQuery.getJoinStreamingInput();
    if (joinInput != null) {
        joinInput.accept(this);
    }
}
// Type-checks a select clause: every select expression, then the optional
// group-by and having clauses.
@Override
public void visit(BLangSelectClause selectClause) {
    for (SelectExpressionNode selectExpr : selectClause.getSelectExpressions()) {
        ((BLangSelectExpression) selectExpr).accept(this);
    }
    BLangGroupBy groupBy = (BLangGroupBy) selectClause.getGroupBy();
    if (groupBy != null) {
        groupBy.accept(this);
    }
    BLangHaving having = (BLangHaving) selectClause.getHaving();
    if (having != null) {
        having.accept(this);
    }
}
// A select expression simply delegates to the expression it wraps.
@Override
public void visit(BLangSelectExpression selectExpression) {
    ((BLangExpression) selectExpression.getExpression()).accept(this);
}
// Type-checks every grouping variable reference in the group-by clause.
@Override
public void visit(BLangGroupBy groupBy) {
    groupBy.getVariables().forEach(variable -> ((BLangExpression) variable).accept(this));
}
// The having clause is just an expression to type-check.
@Override
public void visit(BLangHaving having) {
    ((BLangExpression) having.getExpression()).accept(this);
}
// Type-checks each order-by variable in turn.
@Override
public void visit(BLangOrderBy orderBy) {
    orderBy.getVariables().forEach(variable -> ((BLangOrderByVariable) variable).accept(this));
}
// An order-by variable delegates to its underlying variable reference.
@Override
public void visit(BLangOrderByVariable orderByVariable) {
    ((BLangExpression) orderByVariable.getVariableReference()).accept(this);
}
// A join input delegates to the streaming input it joins with.
@Override
public void visit(BLangJoinStreamingInput joinStreamingInput) {
    ((BLangStreamingInput) joinStreamingInput.getStreamingInput()).accept(this);
}
// A streaming input delegates to the stream reference expression.
@Override
public void visit(BLangStreamingInput streamingInput) {
    ((BLangExpression) streamingInput.getStreamReference()).accept(this);
}
// A rest-arg (...expr) expression takes on whatever type the call site expects.
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
// A named argument's type is the type of its underlying expression, which is
// checked against the corresponding parameter's type.
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    BLangExpression argExpr = bLangNamedArgsExpression.expr;
    resultType = checkExpr(argExpr, env, expType);
    bLangNamedArgsExpression.type = argExpr.type;
}
// Type-checks a match expression. Pattern variables are defined in a private
// block scope; the result type is the union of all pattern-clause types.
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
checkExpr(bLangMatchExpression.expr, matchExprEnv);
bLangMatchExpression.patternClauses.forEach(pattern -> {
// '_'-style ignored variables are not entered into the scope.
if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
symbolEnter.defineNode(pattern.variable, matchExprEnv);
}
checkExpr(pattern.expr, matchExprEnv, expType);
pattern.variable.type = symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv);
});
Set<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
BType actualType;
if (matchExprTypes.contains(symTable.errType)) {
actualType = symTable.errType;
} else if (matchExprTypes.size() == 1) {
// Single possible type: no union needed.
actualType = matchExprTypes.toArray(new BType[matchExprTypes.size()])[0];
} else {
actualType = new BUnionType(null, matchExprTypes, matchExprTypes.contains(symTable.nilType));
}
resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
// Type-checks a 'check' expression. The checked expression must be a union
// containing BOTH error and non-error members; the result is the union of the
// non-error members (errors are propagated to the caller at runtime).
@Override
public void visit(BLangCheckedExpr checkedExpr) {
BType exprType = checkExpr(checkedExpr.expr, env, symTable.noType);
if (exprType.tag != TypeTags.UNION) {
// A non-union operand is either all-error or has no error at all; both invalid.
if (types.isAssignable(exprType, symTable.errStructType)) {
dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS);
} else {
dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS);
}
checkedExpr.type = symTable.errType;
return;
}
// Partition the union's members into error-assignable (true) and not (false).
BUnionType unionType = (BUnionType) exprType;
Map<Boolean, List<BType>> resultTypeMap = unionType.memberTypes.stream()
.collect(Collectors.groupingBy(memberType -> types.isAssignable(memberType, symTable.errStructType)));
checkedExpr.equivalentErrorTypeList = resultTypeMap.get(true);
if (checkedExpr.equivalentErrorTypeList == null ||
checkedExpr.equivalentErrorTypeList.size() == 0) {
dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS);
checkedExpr.type = symTable.errType;
return;
}
List<BType> nonErrorTypeList = resultTypeMap.get(false);
if (nonErrorTypeList == null || nonErrorTypeList.size() == 0) {
dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS);
checkedExpr.type = symTable.errType;
return;
}
BType actualType;
if (nonErrorTypeList.size() == 1) {
actualType = nonErrorTypeList.get(0);
} else {
actualType = new BUnionType(null, new LinkedHashSet<>(nonErrorTypeList),
nonErrorTypeList.contains(symTable.nilType));
}
resultType = types.checkType(checkedExpr, actualType, expType);
}
// Defines the arrow function's parameters in a fresh arrow-function scope and
// type-checks its body expression against the expected return type.
// Returns the body expression's checked type.
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    SymbolEnv arrowEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    for (BLangVariable param : bLangArrowFunction.params) {
        symbolEnter.defineNode(param, arrowEnv);
    }
    return checkExpr(bLangArrowFunction.expression, arrowEnv, expectedRetType);
}
// Assigns the expected parameter types (from the LHS invokable type) onto the
// arrow function's declared parameters, synthesizing value-type nodes for them.
// On a parameter-count mismatch, all parameters and the result become errType.
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
if (paramTypes.size() != bLangArrowFunction.params.size()) {
dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
paramTypes.size(), bLangArrowFunction.params.size());
resultType = symTable.errType;
bLangArrowFunction.params.forEach(param -> param.type = symTable.errType);
return;
}
for (int i = 0; i < bLangArrowFunction.params.size(); i++) {
BLangVariable paramIdentifier = bLangArrowFunction.params.get(i);
BType bType = paramTypes.get(i);
// Build a synthetic type node so later phases see a typed parameter.
BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
valueTypeNode.setTypeKind(bType.getKind());
paramIdentifier.setTypeNode(valueTypeNode);
paramIdentifier.type = bType;
}
}
// Reports an error when a variable's initializer refers to the variable being
// defined (self-reference). NOTE(review): the name has a typo ("Sef"); kept
// because callers outside this view use it.
private void checkSefReferences(DiagnosticPos pos, SymbolEnv env, BVarSymbol varSymbol) {
if (env.enclVarSym == varSymbol) {
dlog.error(pos, DiagnosticCode.SELF_REFERENCE_VAR, varSymbol.name);
}
}
// Builds a mutable list containing {@code count} copies of the error type,
// used as a safe default before real types are known.
public List<BType> getListWithErrorTypes(int count) {
    List<BType> errorTypes = new ArrayList<>(count);
    int remaining = count;
    while (remaining-- > 0) {
        errorTypes.add(symTable.errType);
    }
    return errorTypes;
}
// Resolves and type-checks a plain function invocation. Unqualified calls
// inside a type definition first try the type's attached functions (with an
// implicit 'self'); otherwise the name is looked up in the package scope.
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
Name funcName = names.fromIdNode(iExpr.name);
Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
BSymbol funcSymbol = symTable.notFoundSymbol;
if (pkgAlias == Names.EMPTY && env.enclTypeDefinition != null) {
// Try an attached function of the enclosing type first.
Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(
env.enclTypeDefinition.name.value, iExpr.name.value));
funcSymbol = symResolver.resolveStructField(iExpr.pos, env, objFuncName,
env.enclTypeDefinition.symbol.type.tsymbol);
if (funcSymbol != symTable.notFoundSymbol) {
// Attached call: bind the implicit 'self' receiver.
iExpr.exprSymbol = symResolver.lookupSymbol(env, Names.SELF, SymTag.VARIABLE);
}
}
if (funcSymbol == symTable.notFoundSymbol) {
funcSymbol = symResolver.lookupSymbolInPackage(iExpr.pos, env, pkgAlias, funcName, SymTag.VARIABLE);
}
if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, funcName);
resultType = symTable.errType;
return;
}
// A VARIABLE-tagged symbol means a function-pointer call, not a direct call.
if (funcSymbol.tag == SymTag.VARIABLE) {
iExpr.functionPointerInvocation = true;
}
iExpr.symbol = funcSymbol;
checkInvocationParamAndReturnType(iExpr);
}
// Resolves and type-checks an invocation on a struct/object/record value.
// First looks for an attached function; failing that, a function-typed field
// (function-pointer invocation). Calling a record's initializer is rejected.
private void checkFunctionInvocationExpr(BLangInvocation iExpr, BStructureType structType) {
Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(structType
.tsymbol.name.value, iExpr.name.value));
BSymbol funcSymbol = symResolver.resolveStructField(iExpr.pos, env, objFuncName, structType.tsymbol);
if (funcSymbol == symTable.notFoundSymbol) {
// No attached function: fall back to a field holding a function value.
funcSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(iExpr.name),
structType.tsymbol);
if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION_IN_OBJECT, iExpr.name.value, structType);
resultType = symTable.errType;
return;
}
if ((funcSymbol.flags & Flags.ATTACHED) != Flags.ATTACHED) {
iExpr.functionPointerInvocation = true;
}
} else {
// Direct invocation of a record's initializer is not allowed.
if (structType.tag == TypeTags.RECORD) {
BAttachedFunction initializerFunc = ((BRecordTypeSymbol) structType.tsymbol).initializerFunc;
if (initializerFunc != null && initializerFunc.funcName.value.equals(iExpr.name.value)) {
dlog.error(iExpr.pos, DiagnosticCode.RECORD_INITIALIZER_INVOKED, structType.tsymbol.toString());
}
}
}
iExpr.symbol = funcSymbol;
checkInvocationParamAndReturnType(iExpr);
}
// Resolves and type-checks an invocation attached to a built-in type (e.g.
// value.someFunc()), looking up the mangled attached-function name in the
// owning package's scope.
private void checkFunctionInvocationExpr(BLangInvocation iExpr, BType bType) {
Name funcName = names.fromString(
Symbols.getAttachedFuncSymbolName(bType.toString(), iExpr.name.value));
BPackageSymbol packageSymbol = (BPackageSymbol) bType.tsymbol.owner;
BSymbol funcSymbol = symResolver.lookupMemberSymbol(iExpr.pos, packageSymbol.scope, this.env,
funcName, SymTag.FUNCTION);
if (funcSymbol == symTable.notFoundSymbol) {
dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, funcName);
resultType = symTable.errType;
return;
}
iExpr.symbol = funcSymbol;
checkInvocationParamAndReturnType(iExpr);
}
// Decides whether this invocation names a built-in iterable operation on the
// receiver's type rather than a regular function call. On xml values the
// 'select' operation is additionally excluded.
private boolean isIterableOperationInvocation(BLangInvocation iExpr) {
    final IterableKind iterableKind = IterableKind.getFromString(iExpr.name.value);
    final int typeTag = iExpr.expr.type.tag;
    if (typeTag == TypeTags.ARRAY || typeTag == TypeTags.MAP || typeTag == TypeTags.RECORD
            || typeTag == TypeTags.JSON || typeTag == TypeTags.STREAM || typeTag == TypeTags.TABLE
            || typeTag == TypeTags.INTERMEDIATE_COLLECTION) {
        return iterableKind != IterableKind.UNDEFINED;
    }
    if (typeTag == TypeTags.XML) {
        return iterableKind != IterableKind.SELECT && iterableKind != IterableKind.UNDEFINED;
    }
    return false;
}
// Validates the invocation's arguments and computes its result type; for
// invocations in an access-expression chain (iExpr.expr != null) the type is
// folded through getAccessExprFinalType (e.g. safe navigation).
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
BType actualType = checkInvocationParam(iExpr);
if (iExpr.expr != null) {
actualType = getAccessExprFinalType(iExpr, actualType);
}
resultType = types.checkType(iExpr, actualType, this.expType);
}
// Classifies the invocation's argument expressions into required, named, and
// rest buckets, then delegates to checkInvocationArgs for type checking.
// Returns the invocation's computed result type.
private BType checkInvocationParam(BLangInvocation iExpr) {
List<BType> paramTypes = ((BInvokableType) iExpr.symbol.type).getParameterTypes();
int requiredParamsCount;
if (iExpr.symbol.tag == SymTag.VARIABLE) {
// Function pointer: all declared parameters are required.
requiredParamsCount = paramTypes.size();
} else {
requiredParamsCount = ((BInvokableSymbol) iExpr.symbol).params.size();
}
// Partition the arguments; 'i' counts positional arguments only.
int i = 0;
BLangExpression vararg = null;
for (BLangExpression expr : iExpr.argExprs) {
switch (expr.getKind()) {
case NAMED_ARGS_EXPR:
iExpr.namedArgs.add(expr);
break;
case REST_ARGS_EXPR:
vararg = expr;
break;
default:
if (i < requiredParamsCount) {
iExpr.requiredArgs.add(expr);
} else {
iExpr.restArgs.add(expr);
}
i++;
break;
}
}
return checkInvocationArgs(iExpr, paramTypes, requiredParamsCount, vararg);
}
// Type-checks the already-partitioned argument lists against the callee's
// signature. Returns the call's result type — wrapped in future<T> for
// async invocations — or errType on arity errors.
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, int requiredParamsCount,
BLangExpression vararg) {
BType actualType = symTable.errType;
BInvokableSymbol invocableSymbol = (BInvokableSymbol) iExpr.symbol;
if (requiredParamsCount > iExpr.requiredArgs.size()) {
dlog.error(iExpr.pos, DiagnosticCode.NOT_ENOUGH_ARGS_FUNC_CALL, iExpr.name.value);
return actualType;
} else if (invocableSymbol.restParam == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
// Extra positional args without a rest parameter: distinguish "too many"
// from "defaultable param passed positionally" for a better message.
if (invocableSymbol.defaultableParams.isEmpty()) {
dlog.error(iExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
} else {
dlog.error(iExpr.pos, DiagnosticCode.DEFAULTABLE_ARG_PASSED_AS_REQUIRED_ARG, iExpr.name.value);
}
return actualType;
}
checkRequiredArgs(iExpr.requiredArgs, paramTypes);
checkNamedArgs(iExpr.namedArgs, invocableSymbol.defaultableParams);
checkRestArgs(iExpr.restArgs, vararg, invocableSymbol.restParam);
if (iExpr.async) {
return this.generateFutureType(invocableSymbol);
} else {
return invocableSymbol.type.getReturnType();
}
}
// Wraps the callee's return type in future<T> for async (start) invocations.
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol) {
    return new BFutureType(TypeTags.FUTURE, invocableSymbol.type.getReturnType(), null);
}
// Checks each positional argument against the parameter type at the same
// index; callers guarantee the argument list is not longer than the types list.
private void checkRequiredArgs(List<BLangExpression> requiredArgExprs, List<BType> requiredParamTypes) {
    int index = 0;
    for (BLangExpression argExpr : requiredArgExprs) {
        checkExpr(argExpr, this.env, requiredParamTypes.get(index));
        index++;
    }
}
// Matches each named argument against a defaultable parameter of the same
// name and type-checks it. On the first unknown name an error is reported and
// checking of the remaining named args stops (break, not continue).
private void checkNamedArgs(List<BLangExpression> namedArgExprs, List<BVarSymbol> defaultableParams) {
for (BLangExpression expr : namedArgExprs) {
BLangIdentifier argName = ((NamedArgNode) expr).getName();
BVarSymbol varSym = defaultableParams.stream()
.filter(param -> param.getName().value.equals(argName.value))
.findAny()
.orElse(null);
if (varSym == null) {
dlog.error(expr.pos, DiagnosticCode.UNDEFINED_PARAMETER, argName);
break;
}
checkExpr(expr, this.env, varSym.type);
}
}
// Type-checks rest arguments. A spread vararg (...arr) and individual rest
// args are mutually exclusive; a vararg is checked against the rest-param's
// array type, individual args against its element type.
private void checkRestArgs(List<BLangExpression> restArgExprs, BLangExpression vararg, BVarSymbol restParam) {
if (vararg != null && !restArgExprs.isEmpty()) {
dlog.error(vararg.pos, DiagnosticCode.INVALID_REST_ARGS);
return;
}
if (vararg != null) {
checkExpr(vararg, this.env, restParam.type);
// The vararg stands in for the whole rest-arg list from here on.
restArgExprs.add(vararg);
return;
}
for (BLangExpression arg : restArgExprs) {
checkExpr(arg, this.env, ((BArrayType) restParam.type).eType);
}
}
// Type-checks an action invocation on an endpoint (ep->action(...)). Walks a
// validation ladder — endpoint-ness, interactability, client symbol sanity —
// before resolving the action as an attached function of the client type.
private void checkActionInvocationExpr(BLangInvocation iExpr, BType conType) {
BType actualType = symTable.errType;
// The receiver must be an object-typed endpoint variable.
if (conType == symTable.errType || conType.tag != TypeTags.OBJECT
|| iExpr.expr.symbol.tag != SymTag.ENDPOINT) {
dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION);
resultType = actualType;
return;
}
final BEndpointVarSymbol epSymbol = (BEndpointVarSymbol) iExpr.expr.symbol;
if (!epSymbol.interactable) {
dlog.error(iExpr.pos, DiagnosticCode.ENDPOINT_NOT_SUPPORT_INTERACTIONS, epSymbol.name);
resultType = actualType;
return;
}
// The endpoint's client symbol carries the actions.
BSymbol conSymbol = epSymbol.clientSymbol;
if (conSymbol == null
|| conSymbol == symTable.notFoundSymbol
|| conSymbol == symTable.errSymbol
|| !(conSymbol.type.tag == TypeTags.OBJECT || conSymbol.type.tag == TypeTags.RECORD)) {
dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION);
resultType = actualType;
return;
}
Name actionName = names.fromIdNode(iExpr.name);
Name uniqueFuncName = names.fromString(
Symbols.getAttachedFuncSymbolName(conSymbol.name.value, actionName.value));
BPackageSymbol packageSymbol = (BPackageSymbol) conSymbol.owner;
BSymbol actionSym = symResolver.lookupMemberSymbol(iExpr.pos, packageSymbol.scope, this.env,
uniqueFuncName, SymTag.FUNCTION);
if (actionSym == symTable.notFoundSymbol) {
// Fall back to resolving the action as a field of the client type.
actionSym = symResolver.resolveStructField(iExpr.pos, env, uniqueFuncName, (BTypeSymbol) conSymbol);
}
if (actionSym == symTable.errSymbol || actionSym == symTable.notFoundSymbol) {
dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_ACTION, actionName, epSymbol.name, conSymbol.type);
resultType = actualType;
return;
}
iExpr.symbol = actionSym;
checkInvocationParamAndReturnType(iExpr);
}
// Type-checks one key:value pair of a record literal against the containing
// type (record, map, or json). The JSON branch handles implicit conversions
// specially so the value keeps its original type after validation.
private void checkRecLiteralKeyValue(BLangRecordKeyValue keyValuePair, BType recType) {
BType fieldType = symTable.errType;
BLangExpression valueExpr = keyValuePair.valueExpr;
switch (recType.tag) {
case TypeTags.RECORD:
fieldType = checkStructLiteralKeyExpr(keyValuePair.key, recType);
break;
case TypeTags.MAP:
fieldType = checkMapLiteralKeyExpr(keyValuePair.key.expr, recType, RecordKind.MAP);
break;
case TypeTags.JSON:
fieldType = checkJSONLiteralKeyExpr(keyValuePair.key, recType, RecordKind.JSON);
// Object/record field types are wrapped as constrained JSON.
if (fieldType.tag == TypeTags.OBJECT || fieldType.tag == TypeTags.RECORD) {
fieldType = new BJSONType(TypeTags.JSON, fieldType, symTable.jsonType.tsymbol);
}
checkExpr(valueExpr, this.env, fieldType);
if (valueExpr.impConversionExpr == null) {
types.checkTypes(valueExpr, Lists.of(valueExpr.type), Lists.of(symTable.jsonType));
} else {
// Validate the implicitly-converted type against json, but restore the
// value's own type afterwards (checkType may overwrite it).
BType valueType = valueExpr.type;
types.checkType(valueExpr, valueExpr.impConversionExpr.type, symTable.jsonType);
valueExpr.type = valueType;
}
resultType = valueExpr.type;
return;
}
checkExpr(valueExpr, this.env, fieldType);
}
// Resolves a record-literal key (which must be a simple identifier) to the
// corresponding field's type. Unknown fields are an error on sealed records;
// on open records they take the rest-field type.
private BType checkStructLiteralKeyExpr(BLangRecordKey key, BType recordType) {
Name fieldName;
BLangExpression keyExpr = key.expr;
if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
fieldName = names.fromIdNode(varRef.variableName);
} else {
// Computed or literal keys are not valid record-literal keys here.
dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY);
return symTable.errType;
}
BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env,
fieldName, recordType.tsymbol);
if (fieldSymbol == symTable.notFoundSymbol) {
if (((BRecordType) recordType).sealed) {
dlog.error(keyExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, fieldName,
recordType.tsymbol.type.getKind().typeName(), recordType.tsymbol);
return symTable.errType;
}
// Open record: unknown keys fall back to the rest-field type.
return ((BRecordType) recordType).restFieldType;
}
return fieldSymbol.type;
}
// Resolves a JSON-literal key's value type. Constrained JSON delegates to the
// constraint record's field lookup; unconstrained JSON requires a string key
// and yields json.
private BType checkJSONLiteralKeyExpr(BLangRecordKey key, BType recordType, RecordKind recKind) {
BJSONType type = (BJSONType) recordType;
if (type.constraint.tag != TypeTags.NONE && type.constraint.tag != TypeTags.ERROR) {
return checkStructLiteralKeyExpr(key, type.constraint);
}
if (checkRecLiteralKeyExpr(key.expr, recKind).tag != TypeTags.STRING) {
return symTable.errType;
}
return symTable.jsonType;
}
// Map-literal keys must be strings; a valid key yields the map's constraint
// type, anything else yields errType.
private BType checkMapLiteralKeyExpr(BLangExpression keyExpr, BType recordType, RecordKind recKind) {
    if (checkRecLiteralKeyExpr(keyExpr, recKind).tag == TypeTags.STRING) {
        return ((BMapType) recordType).constraint;
    }
    return symTable.errType;
}
// Types a record-literal key: a bare identifier is implicitly a string; any
// other key expression must type-check as a string.
private BType checkRecLiteralKeyExpr(BLangExpression keyExpr, RecordKind recKind) {
    if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        keyExpr.type = symTable.stringType;
        return keyExpr.type;
    }
    return checkExpr(keyExpr, this.env, symTable.stringType);
}
// Struct field access via index notation only allows literal string keys;
// any non-literal index is rejected with errType.
private BType checkIndexExprForStructFieldAccess(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.LITERAL) {
        return checkExpr(indexExpr, this.env, symTable.stringType);
    }
    dlog.error(indexExpr.pos, DiagnosticCode.INVALID_INDEX_EXPR_STRUCT_FIELD_ACCESS);
    indexExpr.type = symTable.errType;
    return indexExpr.type;
}
// Computes the result type of an index-based access. Reads that may miss
// (non-any/json, not a leaf LHS assignment target) become 'actualType | ()'
// to reflect the possible absence of the member.
private BType checkTypeForIndexBasedAccess(BLangIndexBasedAccess indexBasedAccessExpr, BType actualType) {
// any/json already subsume nil; no union needed.
if (actualType.tag == TypeTags.ANY || actualType.tag == TypeTags.JSON) {
return actualType;
}
// Writing to the leaf of an assignment target never yields nil.
if (indexBasedAccessExpr.leafNode && indexBasedAccessExpr.lhsVar) {
return actualType;
}
BUnionType type = new BUnionType(null, new LinkedHashSet<>(getTypesList(actualType)), true);
type.memberTypes.add(symTable.nilType);
return type;
}
/**
 * Resolves {@code fieldName} on a record or object type, records the resolved
 * symbol on the reference node, and returns the field's type. Unknown fields on
 * sealed records and objects produce an UNDEFINED_STRUCTURE_FIELD error; open
 * records fall back to their rest-field type.
 */
private BType checkStructFieldAccess(BLangVariableReference varReferExpr, Name fieldName, BType structType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, structType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        varReferExpr.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    // Not a plain field: for objects, the name may refer to an attached function.
    if (structType.tag == TypeTags.OBJECT) {
        Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(structType.tsymbol.name.value,
                fieldName.value));
        fieldSymbol = symResolver.resolveObjectField(varReferExpr.pos, env, objFuncName, structType.tsymbol);
        if (fieldSymbol == symTable.notFoundSymbol) {
            dlog.error(varReferExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, fieldName,
                    structType.tsymbol.type.getKind().typeName(), structType.tsymbol);
            return symTable.errType;
        }
        varReferExpr.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    // Records: sealed records reject unknown fields; open records fall back to
    // the rest-field type.
    if (((BRecordType) structType).sealed) {
        dlog.error(varReferExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, fieldName,
                structType.tsymbol.type.getKind().typeName(), structType.tsymbol);
        return symTable.errType;
    }
    return ((BRecordType) structType).restFieldType;
}
/**
 * Returns the member type of a tuple at a constant index, reporting
 * TUPLE_INDEX_OUT_OF_RANGE (and returning the error type) when the index
 * falls outside the tuple's arity.
 */
private BType checkTupleFieldType(BLangIndexBasedAccess indexBasedAccessExpr, BType varRefType, int indexValue) {
    List<BType> memberTypes = ((BTupleType) varRefType).tupleTypes;
    boolean inRange = indexValue >= 0 && indexValue < memberTypes.size();
    if (inRange) {
        return memberTypes.get(indexValue);
    }
    dlog.error(indexBasedAccessExpr.pos,
            DiagnosticCode.TUPLE_INDEX_OUT_OF_RANGE, indexValue, memberTypes.size());
    return symTable.errType;
}
/**
 * Validates the index of a tuple member access: the index must be a literal
 * (constant) integer so the member type can be determined at compile time.
 */
private BType checkIndexExprForTupleFieldAccess(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.LITERAL) {
        return checkExpr(indexExpr, this.env, symTable.intType);
    }
    dlog.error(indexExpr.pos, DiagnosticCode.INVALID_INDEX_EXPR_TUPLE_FIELD_ACCESS);
    indexExpr.type = symTable.errType;
    return indexExpr.type;
}
/**
 * Type-checks the start and end tag names of an XML element literal and
 * verifies that they match. When both tags are compile-time QNames they must be
 * equal; when both are dynamic expressions the match can only be checked at
 * runtime; a QName paired with a dynamic expression is reported as a mismatch.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element: nothing to match.
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    // BUG FIX: the original compared startTagName.getKind() against itself in
    // both conditions, so endTagName's kind was never inspected and a dynamic
    // start tag paired with a literal end tag was silently accepted.
    if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME
            && startTagName.equals(endTagName)) {
        return;
    }
    if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
        // Both tags are dynamic expressions; equality is verified at runtime.
        return;
    }
    dlog.error(startTagName.pos, DiagnosticCode.XML_TAGS_MISMATCH);
}
/**
 * Folds the pieces of a string template left-to-right into a chain of binary
 * '+' expressions, reporting an incompatible-types error for any piece that is
 * not string-concatenable.
 */
private BLangExpression getStringTemplateConcatExpr(List<BLangExpression> exprs) {
    BLangExpression result = null;
    for (BLangExpression piece : exprs) {
        checkExpr(piece, env);
        if (result == null) {
            // First piece seeds the fold.
            result = piece;
        } else {
            BSymbol addOp = symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.stringType, piece.type);
            if (addOp == symTable.notFoundSymbol && piece.type != symTable.errType) {
                dlog.error(piece.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.stringType, piece.type);
            }
            result = getBinaryAddExpr(result, piece, addOp);
        }
    }
    return result;
}
/**
 * Concatenate the consecutive text type nodes, and get the reduced set of children.
 *
 * @param exprs Child nodes
 * @param xmlElementEnv Symbol environment of the enclosing XML element
 * @return Reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
    List<BLangExpression> newChildren = new ArrayList<>();
    // Pending run of consecutive string-typed siblings, folded via '+'.
    BLangExpression strConcatExpr = null;
    for (BLangExpression expr : exprs) {
        BType exprType = checkExpr(expr, xmlElementEnv);
        if (exprType == symTable.xmlType) {
            // An xml child terminates the current text run: flush it first.
            if (strConcatExpr != null) {
                newChildren.add(getXMLTextLiteral(strConcatExpr));
                strConcatExpr = null;
            }
            newChildren.add(expr);
            continue;
        }
        // Non-xml children must be '+'-concatenable with string.
        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.stringType, exprType);
        if (opSymbol == symTable.notFoundSymbol && exprType != symTable.errType) {
            dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.xmlType, exprType);
        }
        if (strConcatExpr == null) {
            // Start a new text run.
            strConcatExpr = expr;
            continue;
        }
        strConcatExpr = getBinaryAddExpr(strConcatExpr, expr, opSymbol);
    }
    // Flush a trailing text run, if any.
    if (strConcatExpr != null) {
        newChildren.add(getXMLTextLiteral(strConcatExpr));
    }
    return newChildren;
}
/**
 * Synthesizes an {@code lExpr + rExpr} binary node, typed from the resolved ADD
 * operator (or error-typed when no operator was found), and checks the result
 * against the string type.
 */
private BLangExpression getBinaryAddExpr(BLangExpression lExpr, BLangExpression rExpr, BSymbol opSymbol) {
    BLangBinaryExpr addExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    addExpr.lhsExpr = lExpr;
    addExpr.rhsExpr = rExpr;
    addExpr.pos = rExpr.pos;
    addExpr.opKind = OperatorKind.ADD;
    boolean operatorFound = opSymbol != symTable.notFoundSymbol;
    if (operatorFound) {
        addExpr.opSymbol = (BOperatorSymbol) opSymbol;
        addExpr.type = opSymbol.type.getReturnType();
    } else {
        addExpr.type = symTable.errType;
    }
    types.checkType(addExpr, addExpr.type, symTable.stringType);
    return addExpr;
}
/**
 * Wraps an arbitrary string-valued expression as a synthesized XML text node.
 */
private BLangExpression getXMLTextLiteral(BLangExpression contentExpr) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = contentExpr.pos;
    textLiteral.concatExpr = contentExpr;
    return textLiteral;
}
/**
 * Type-checks the accessed expression with no expected type and returns the
 * inferred type recorded on the node.
 */
private BType getTypeOfExprInFieldAccess(BLangExpression expr) {
    checkExpr(expr, this.env, symTable.noType);
    return expr.type;
}
/**
 * Computes the final (caller-visible) type of a field/index access expression
 * by widening the actual member type with nil (when the access can miss) and
 * with error (when safe navigation is used on an error-bearing parent).
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    accessExpr.originalType = actualType;
    BUnionType unionType = new BUnionType(null, new LinkedHashSet<>(), false);
    if (actualType.tag == TypeTags.UNION) {
        // Flatten an existing union so members are not nested.
        unionType.memberTypes.addAll(((BUnionType) actualType).memberTypes);
        unionType.setNullable(actualType.isNullable());
    } else {
        unionType.memberTypes.add(actualType);
    }
    if (returnsNull(accessExpr)) {
        unionType.memberTypes.add(symTable.nilType);
        unionType.setNullable(true);
    }
    BType parentType = accessExpr.expr.type;
    // Safe navigation propagates the parent's potential error into the result.
    if (accessExpr.safeNavigate && (parentType.tag == TypeTags.ERROR || (parentType.tag == TypeTags.UNION &&
            ((BUnionType) parentType).memberTypes.contains(symTable.errStructType)))) {
        unionType.memberTypes.add(symTable.errStructType);
    }
    // Collapse a single-member union back to the member itself unless the union
    // wrapper is needed to represent nullability.
    if (unionType.memberTypes.size() == 1 &&
            (!unionType.isNullable() || unionType.memberTypes.contains(symTable.nilType))) {
        return unionType.memberTypes.toArray(new BType[0])[0];
    }
    return unionType;
}
/**
 * Decides whether an access expression can evaluate to nil: either the parent
 * type itself is nullable (json excluded — it carries its own null semantics),
 * or the access is an index lookup on a map whose constraint is neither any
 * nor json.
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType exprType = accessExpr.expr.type;
    if (exprType.isNullable() && exprType.tag != TypeTags.JSON) {
        return true;
    }
    // Beyond nullable parents, only map index-access can produce nil.
    if (exprType.tag != TypeTags.MAP) {
        return false;
    }
    if (accessExpr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BType constraintType = ((BMapType) exprType).constraint;
    return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
}
/**
 * Type-checks a field access (expr.field) against the type of the accessed
 * value and returns the resulting field type, or the error type when the
 * access is unsupported.
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    BType actualType = symTable.errType;
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            actualType = checkStructFieldAccess(fieldAccessExpr, fieldName, varRefType);
            break;
        case TypeTags.MAP:
            // Any field name is legal on a map; the type is the map's constraint.
            actualType = ((BMapType) varRefType).getConstraint();
            break;
        case TypeTags.STREAM:
            // Field access is only meaningful for record-constrained streams.
            BType streamConstraintType = ((BStreamType) varRefType).constraint;
            if (streamConstraintType.tag == TypeTags.RECORD) {
                actualType = checkStructFieldAccess(fieldAccessExpr, fieldName, streamConstraintType);
            }
            break;
        case TypeTags.JSON:
            BType constraintType = ((BJSONType) varRefType).constraint;
            if (constraintType.tag == TypeTags.OBJECT || constraintType.tag == TypeTags.RECORD) {
                BType fieldType = checkStructFieldAccess(fieldAccessExpr, fieldName, constraintType);
                // Struct-typed fields stay wrapped in a constrained JSON type.
                if (fieldType.tag == TypeTags.OBJECT || fieldType.tag == TypeTags.RECORD) {
                    actualType = new BJSONType(TypeTags.JSON, fieldType, symTable.jsonType.tsymbol);
                    break;
                }
            }
            actualType = symTable.jsonType;
            break;
        case TypeTags.XML:
            // XML sequences cannot be mutated via field access.
            if (fieldAccessExpr.lhsVar) {
                dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
                break;
            }
            actualType = symTable.xmlType;
            break;
        case TypeTags.ERROR:
            // Already reported; avoid cascading diagnostics.
            break;
        default:
            dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
                    varRefType);
    }
    return actualType;
}
/**
 * Type-checks an index-based access expression (expr[index]) against the type
 * of the indexed value and returns the member type produced by the access, or
 * the error type when the access is invalid.
 */
private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr, BType varRefType) {
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    BType actualType = symTable.errType;
    BType indexExprType;
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
            // Objects require a constant string index naming the field.
            indexExprType = checkIndexExprForStructFieldAccess(indexExpr);
            if (indexExprType.tag == TypeTags.STRING) {
                String fieldName = (String) ((BLangLiteral) indexExpr).value;
                actualType = checkStructFieldAccess(indexBasedAccessExpr, names.fromString(fieldName), varRefType);
            }
            break;
        case TypeTags.RECORD:
            // Same as objects, but the record result is additionally nil-widened.
            indexExprType = checkIndexExprForStructFieldAccess(indexExpr);
            if (indexExprType.tag == TypeTags.STRING) {
                String fieldName = (String) ((BLangLiteral) indexExpr).value;
                actualType = checkStructFieldAccess(indexBasedAccessExpr, names.fromString(fieldName), varRefType);
                actualType = checkTypeForIndexBasedAccess(indexBasedAccessExpr, actualType);
            }
            break;
        case TypeTags.MAP:
            // Maps accept any string-typed key expression; the value type is the
            // constraint, nil-widened since the key may be absent.
            indexExprType = checkExpr(indexExpr, this.env, symTable.stringType);
            if (indexExprType.tag == TypeTags.STRING) {
                actualType = ((BMapType) varRefType).getConstraint();
                actualType = checkTypeForIndexBasedAccess(indexBasedAccessExpr, actualType);
            }
            break;
        case TypeTags.JSON:
            BType constraintType = ((BJSONType) varRefType).constraint;
            if (constraintType.tag == TypeTags.OBJECT || constraintType.tag == TypeTags.RECORD) {
                // Constrained JSON: resolve the field on the constraint; struct-typed
                // fields stay wrapped in a (constrained) JSON type.
                indexExprType = checkIndexExprForStructFieldAccess(indexExpr);
                if (indexExprType.tag != TypeTags.STRING) {
                    break;
                }
                String fieldName = (String) ((BLangLiteral) indexExpr).value;
                BType fieldType =
                        checkStructFieldAccess(indexBasedAccessExpr, names.fromString(fieldName), constraintType);
                if (fieldType.tag == TypeTags.OBJECT || fieldType.tag == TypeTags.RECORD) {
                    actualType = new BJSONType(TypeTags.JSON, fieldType, symTable.jsonType.tsymbol);
                    break;
                }
            } else {
                // Unconstrained JSON: string (member) or int (array element) indexes.
                indexExprType = checkExpr(indexExpr, this.env, symTable.noType);
                if (indexExprType.tag != TypeTags.STRING && indexExprType.tag != TypeTags.INT) {
                    dlog.error(indexExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.stringType,
                            indexExprType);
                    break;
                }
            }
            actualType = symTable.jsonType;
            break;
        case TypeTags.ARRAY:
            indexExprType = checkExpr(indexExpr, this.env, symTable.intType);
            if (indexExprType.tag == TypeTags.INT) {
                actualType = ((BArrayType) varRefType).getElementType();
            }
            break;
        case TypeTags.XML:
            // XML sequences cannot be mutated through indexing.
            if (indexBasedAccessExpr.lhsVar) {
                indexExpr.type = symTable.errType;
                dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
                break;
            }
            checkExpr(indexExpr, this.env);
            actualType = symTable.xmlType;
            break;
        case TypeTags.TUPLE:
            // Tuples require a constant int index so the member type is known
            // statically.
            indexExprType = checkIndexExprForTupleFieldAccess(indexExpr);
            if (indexExprType.tag == TypeTags.INT) {
                int indexValue = ((Long) ((BLangLiteral) indexExpr).value).intValue();
                actualType = checkTupleFieldType(indexBasedAccessExpr, varRefType, indexValue);
            }
            break;
        case TypeTags.ERROR:
            // Already reported; avoid cascading diagnostics.
            indexBasedAccessExpr.indexExpr.type = symTable.errType;
            break;
        default:
            indexBasedAccessExpr.indexExpr.type = symTable.errType;
            dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING,
                    indexBasedAccessExpr.expr.type);
    }
    return actualType;
}
/**
 * Strips nil (and, for safe navigation, error) members from the type an access
 * expression navigates through, reporting SAFE_NAVIGATION_NOT_REQUIRED when
 * safe navigation is used on a type that cannot carry an error.
 */
private BType getSafeType(BType type, BLangAccessExpression accessExpr) {
    if (accessExpr.safeNavigate && type == symTable.errStructType) {
        dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
        return symTable.errType;
    }
    if (type.tag != TypeTags.UNION) {
        return type;
    }
    Set<BType> varRefMemberTypes = ((BUnionType) type).memberTypes;
    List<BType> lhsTypes;
    // NOTE(review): 'nullable' is never set to true, so the rebuilt union below
    // is always non-nullable — confirm this is intended.
    boolean nullable = false;
    if (accessExpr.safeNavigate) {
        // Safe navigation is only valid when the union can actually carry an error.
        if (!varRefMemberTypes.contains(symTable.errStructType)) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.errType;
        }
        lhsTypes = varRefMemberTypes.stream().filter(memberType -> {
            return memberType != symTable.errStructType && memberType != symTable.nilType;
        }).collect(Collectors.toList());
        if (lhsTypes.isEmpty()) {
            // Nothing but error/nil left to navigate into.
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.errType;
        }
    } else {
        lhsTypes = varRefMemberTypes.stream().filter(memberType -> {
            return memberType != symTable.nilType;
        }).collect(Collectors.toList());
    }
    if (lhsTypes.size() == 1) {
        return lhsTypes.get(0);
    }
    return new BUnionType(null, new LinkedHashSet<>(lhsTypes), nullable);
}
/**
 * Flattens a union into the list of its member types; any non-union type is
 * returned as a singleton list.
 */
private List<BType> getTypesList(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    return new ArrayList<>(((BUnionType) type).memberTypes);
}
/**
 * Collects the possible result types of a match expression: every pattern's
 * expression type, plus any input member type that no pattern variable can
 * accept (such a value flows through the match unmodified).
 */
private Set<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.type);
    Set<BType> matchExprTypes = new LinkedHashSet<>();
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternExprType = pattern.expr.type;
            matchExprTypes.addAll(getTypesList(patternExprType));
            // Any error type poisons the whole result.
            if (type.tag == TypeTags.ERROR || patternExprType.tag == TypeTags.ERROR) {
                return new HashSet<>(Lists.of(symTable.errType));
            }
            assignable = this.types.isAssignable(type, pattern.variable.type);
            if (assignable) {
                break;
            }
        }
        if (!assignable) {
            // No pattern captures this member type; it can surface unchanged.
            matchExprTypes.add(type);
        }
    }
    return matchExprTypes;
}
} |
should we remove the doFinally logic from the tests? | public void executeBulk_cancel() throws InterruptedException {
int totalRequest = 100;
this.container = createContainer(database);
List<com.azure.cosmos.models.CosmosItemOperation> cosmosItemOperations = new ArrayList<>();
for (int i = 0; i < totalRequest; i++) {
String partitionKey = UUID.randomUUID().toString();
BatchTestBase.TestDoc testDoc = this.populateTestDoc(partitionKey);
cosmosItemOperations.add(CosmosBulkOperations.getCreateItemOperation(testDoc,
new PartitionKey(partitionKey)));
partitionKey = UUID.randomUUID().toString();
BatchTestBase.EventDoc eventDoc = new BatchTestBase.EventDoc(UUID.randomUUID().toString(), 2, 4, "type1",
partitionKey);
cosmosItemOperations.add(CosmosBulkOperations.getCreateItemOperation(eventDoc,
new PartitionKey(partitionKey)));
}
com.azure.cosmos.models.CosmosItemOperation[] itemOperationsArray =
new com.azure.cosmos.models.CosmosItemOperation[cosmosItemOperations.size()];
cosmosItemOperations.toArray(itemOperationsArray);
CosmosBulkExecutionOptions cosmosBulkExecutionOptions = new CosmosBulkExecutionOptions();
Flux<CosmosItemOperation> inputFlux = Flux
.fromArray(itemOperationsArray)
.delayElements(Duration.ofMillis(100));
final BulkExecutor<BulkExecutorTest> executor = new BulkExecutor<>(
container,
inputFlux,
cosmosBulkExecutionOptions);
Flux<com.azure.cosmos.models.CosmosBulkOperationResponse<BulkExecutorTest>> bulkResponseFlux =
Flux.deferContextual(context -> executor
.execute()
.doFinally((SignalType signal) -> {
if (signal == SignalType.ON_COMPLETE) {
logger.info("BulkExecutor.execute flux completed -
executor.getItemsLeftSnapshot(),
executor.getOperationContext());
} else {
int itemsLeftSnapshot = executor.getItemsLeftSnapshot();
if (itemsLeftSnapshot > 0) {
logger.info("BulkExecutor.execute flux terminated - Signal: {} -
+ "Context: {}",
signal,
itemsLeftSnapshot,
executor.getOperationContext());
} else {
logger.info("BulkExecutor.execute flux terminated - Signal: {} -
+ "Context: {}",
signal,
itemsLeftSnapshot,
executor.getOperationContext());
}
}
executor.dispose();
}));
Disposable disposable = bulkResponseFlux.subscribe();
disposable.dispose();
int iterations = 0;
while (true) {
assertThat(iterations < 100);
if (executor.isDisposed()) {
break;
}
Thread.sleep(10);
iterations++;
}
} | .doFinally((SignalType signal) -> { | public void executeBulk_cancel() throws InterruptedException {
int totalRequest = 100;
this.container = createContainer(database);
List<CosmosItemOperation> cosmosItemOperations = new ArrayList<>();
for (int i = 0; i < totalRequest; i++) {
String partitionKey = UUID.randomUUID().toString();
BatchTestBase.TestDoc testDoc = this.populateTestDoc(partitionKey);
cosmosItemOperations.add(CosmosBulkOperations.getCreateItemOperation(testDoc,
new PartitionKey(partitionKey)));
partitionKey = UUID.randomUUID().toString();
BatchTestBase.EventDoc eventDoc = new BatchTestBase.EventDoc(UUID.randomUUID().toString(), 2, 4, "type1",
partitionKey);
cosmosItemOperations.add(CosmosBulkOperations.getCreateItemOperation(eventDoc,
new PartitionKey(partitionKey)));
}
CosmosItemOperation[] itemOperationsArray =
new CosmosItemOperation[cosmosItemOperations.size()];
cosmosItemOperations.toArray(itemOperationsArray);
CosmosBulkExecutionOptions cosmosBulkExecutionOptions = new CosmosBulkExecutionOptions();
Flux<CosmosItemOperation> inputFlux = Flux
.fromArray(itemOperationsArray)
.delayElements(Duration.ofMillis(100));
final BulkExecutor<BulkExecutorTest> executor = new BulkExecutor<>(
container,
inputFlux,
cosmosBulkExecutionOptions);
Flux<com.azure.cosmos.models.CosmosBulkOperationResponse<BulkExecutorTest>> bulkResponseFlux =
Flux.deferContextual(context -> executor.execute());
Disposable disposable = bulkResponseFlux.subscribe();
disposable.dispose();
int iterations = 0;
while (true) {
assertThat(iterations < 100);
if (executor.isDisposed()) {
break;
}
Thread.sleep(10);
iterations++;
}
} | class BulkExecutorTest extends BatchTestBase {
private CosmosAsyncClient client;
private CosmosAsyncContainer container;
private CosmosAsyncDatabase database;
private String preExistingDatabaseId = CosmosDatabaseForTest.generateId();
@Factory(dataProvider = "clientBuilders")
public BulkExecutorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@AfterClass(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
logger.info("starting ....");
safeDeleteDatabase(database);
safeCloseClient(client);
}
@AfterMethod(groups = { "emulator" })
public void afterTest() throws Exception {
if (this.container != null) {
try {
this.container.delete().block();
} catch (CosmosException error) {
if (error.getStatusCode() != 404) {
throw error;
}
}
}
}
@BeforeMethod(groups = { "emulator" })
public void beforeTest() throws Exception {
this.container = null;
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT)
public void before_CosmosContainerTest() {
client = getClientBuilder().buildAsyncClient();
database = createDatabase(client, preExistingDatabaseId);
}
static protected CosmosAsyncContainer createContainer(CosmosAsyncDatabase database) {
String collectionName = UUID.randomUUID().toString();
CosmosContainerProperties containerProperties = getCollectionDefinition(collectionName);
database.createContainer(containerProperties).block();
return database.getContainer(collectionName);
}
static protected CosmosAsyncDatabase createDatabase(CosmosAsyncClient client, String databaseId) {
CosmosDatabaseProperties databaseSettings = new CosmosDatabaseProperties(databaseId);
client.createDatabase(databaseSettings).block();
return client.getDatabase(databaseSettings.getId());
}
static protected CosmosAsyncDatabase createDatabaseIfNotExists(CosmosAsyncClient client, String databaseId) {
List<CosmosDatabaseProperties> res = client.queryDatabases(String.format("SELECT * FROM r where r.id = '%s'",
databaseId), null)
.collectList()
.block();
if (res.size() != 0) {
CosmosAsyncDatabase database = client.getDatabase(databaseId);
database.read().block();
return database;
} else {
CosmosDatabaseProperties databaseSettings = new CosmosDatabaseProperties(databaseId);
client.createDatabase(databaseSettings).block();
return client.getDatabase(databaseSettings.getId());
}
}
@Test(groups = { "emulator" }, timeOut = TIMEOUT)
@Test(groups = { "emulator" }, timeOut = TIMEOUT)
public void executeBulk_complete() throws InterruptedException {
int totalRequest = 10;
this.container = createContainer(database);
List<com.azure.cosmos.models.CosmosItemOperation> cosmosItemOperations = new ArrayList<>();
for (int i = 0; i < totalRequest; i++) {
String partitionKey = UUID.randomUUID().toString();
BatchTestBase.TestDoc testDoc = this.populateTestDoc(partitionKey);
cosmosItemOperations.add(CosmosBulkOperations.getCreateItemOperation(testDoc,
new PartitionKey(partitionKey)));
partitionKey = UUID.randomUUID().toString();
BatchTestBase.EventDoc eventDoc = new BatchTestBase.EventDoc(UUID.randomUUID().toString(), 2, 4, "type1",
partitionKey);
cosmosItemOperations.add(CosmosBulkOperations.getCreateItemOperation(eventDoc,
new PartitionKey(partitionKey)));
}
com.azure.cosmos.models.CosmosItemOperation[] itemOperationsArray =
new com.azure.cosmos.models.CosmosItemOperation[cosmosItemOperations.size()];
cosmosItemOperations.toArray(itemOperationsArray);
CosmosBulkExecutionOptions cosmosBulkExecutionOptions = new CosmosBulkExecutionOptions();
final BulkExecutor<BulkExecutorTest> executor = new BulkExecutor<>(
container,
Flux.fromArray(itemOperationsArray),
cosmosBulkExecutionOptions);
Flux<com.azure.cosmos.models.CosmosBulkOperationResponse<BulkExecutorTest>> bulkResponseFlux =
Flux.deferContextual(context -> executor
.execute()
.doFinally((SignalType signal) -> {
if (signal == SignalType.ON_COMPLETE) {
logger.debug("BulkExecutor.execute flux completed -
executor.getItemsLeftSnapshot(),
executor.getOperationContext());
} else {
int itemsLeftSnapshot = executor.getItemsLeftSnapshot();
if (itemsLeftSnapshot > 0) {
logger.info("BulkExecutor.execute flux terminated - Signal: {} -
+ "Context: {}",
signal,
itemsLeftSnapshot,
executor.getOperationContext());
} else {
logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
+ "Context: {}",
signal,
itemsLeftSnapshot,
executor.getOperationContext());
}
}
executor.dispose();
}));
Mono<List<CosmosBulkOperationResponse<BulkExecutorTest>>> convertToListMono = bulkResponseFlux
.collect(Collectors.toList());
List<CosmosBulkOperationResponse<BulkExecutorTest>> bulkResponse = convertToListMono.block();
assertThat(bulkResponse.size()).isEqualTo(totalRequest * 2);
for (com.azure.cosmos.models.CosmosBulkOperationResponse<BulkExecutorTest> cosmosBulkOperationResponse :
bulkResponse) {
com.azure.cosmos.models.CosmosBulkItemResponse cosmosBulkItemResponse =
cosmosBulkOperationResponse.getResponse();
assertThat(cosmosBulkItemResponse.getStatusCode()).isEqualTo(HttpResponseStatus.CREATED.code());
assertThat(cosmosBulkItemResponse.getRequestCharge()).isGreaterThan(0);
assertThat(cosmosBulkItemResponse.getCosmosDiagnostics().toString()).isNotNull();
assertThat(cosmosBulkItemResponse.getSessionToken()).isNotNull();
assertThat(cosmosBulkItemResponse.getActivityId()).isNotNull();
assertThat(cosmosBulkItemResponse.getRequestCharge()).isNotNull();
}
int iterations = 0;
while (true) {
assertThat(iterations < 100);
if (executor.isDisposed()) {
break;
}
Thread.sleep(10);
iterations++;
}
}
static protected void safeClose(CosmosAsyncClient client) {
if (client != null) {
try {
client.close();
} catch (Exception e) {
logger.error("failed to close client", e);
}
}
}
static protected void safeCloseAsync(CosmosAsyncClient client) {
if (client != null) {
new Thread(() -> {
try {
client.close();
} catch (Exception e) {
logger.error("failed to close client", e);
}
}).start();
}
}
static protected void safeCloseClient(CosmosAsyncClient client) {
if (client != null) {
try {
logger.info("closing client ...");
client.close();
logger.info("closing client completed");
} catch (Exception e) {
logger.error("failed to close client", e);
}
}
}
static protected void safeDeleteAllCollections(CosmosAsyncDatabase database) {
if (database != null) {
List<CosmosContainerProperties> collections = database.readAllContainers()
.collectList()
.block();
for (CosmosContainerProperties collection : collections) {
database.getContainer(collection.getId()).delete().block();
}
}
}
static protected void safeDeleteCollection(CosmosAsyncContainer collection) {
if (collection != null) {
try {
collection.delete().block();
} catch (Exception e) {
}
}
}
static protected void safeDeleteCollection(CosmosAsyncDatabase database, String collectionId) {
if (database != null && collectionId != null) {
try {
database.getContainer(collectionId).delete().block();
} catch (Exception e) {
}
}
}
static protected void safeDeleteDatabase(CosmosAsyncDatabase database) {
if (database != null) {
try {
database.delete().block();
} catch (Exception e) {
}
}
}
static protected void safeDeleteSyncDatabase(CosmosDatabase database) {
if (database != null) {
try {
logger.info("attempting to delete database ....");
database.delete();
logger.info("database deletion completed");
} catch (Exception e) {
logger.error("failed to delete sync database", e);
}
}
}
} | class BulkExecutorTest extends BatchTestBase {
private CosmosAsyncClient client;
private CosmosAsyncContainer container;
private CosmosAsyncDatabase database;
private String preExistingDatabaseId = CosmosDatabaseForTest.generateId();
@Factory(dataProvider = "clientBuilders")
public BulkExecutorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@AfterClass(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
logger.info("starting ....");
safeDeleteDatabase(database);
safeCloseAsync(client);
}
@AfterMethod(groups = { "emulator" })
public void afterTest() throws Exception {
if (this.container != null) {
try {
this.container.delete().block();
} catch (CosmosException error) {
if (error.getStatusCode() != 404) {
throw error;
}
}
}
}
@BeforeMethod(groups = { "emulator" })
public void beforeTest() throws Exception {
this.container = null;
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT)
public void before_CosmosContainerTest() {
client = getClientBuilder().buildAsyncClient();
database = createDatabase(client, preExistingDatabaseId);
}
static protected CosmosAsyncContainer createContainer(CosmosAsyncDatabase database) {
String collectionName = UUID.randomUUID().toString();
CosmosContainerProperties containerProperties = getCollectionDefinition(collectionName);
database.createContainer(containerProperties).block();
return database.getContainer(collectionName);
}
@Test(groups = { "emulator" }, timeOut = TIMEOUT)
@Test(groups = { "emulator" }, timeOut = TIMEOUT)
public void executeBulk_complete() throws InterruptedException {
int totalRequest = 10;
this.container = createContainer(database);
List<CosmosItemOperation> cosmosItemOperations = new ArrayList<>();
for (int i = 0; i < totalRequest; i++) {
String partitionKey = UUID.randomUUID().toString();
BatchTestBase.TestDoc testDoc = this.populateTestDoc(partitionKey);
cosmosItemOperations.add(CosmosBulkOperations.getCreateItemOperation(testDoc,
new PartitionKey(partitionKey)));
partitionKey = UUID.randomUUID().toString();
BatchTestBase.EventDoc eventDoc = new BatchTestBase.EventDoc(UUID.randomUUID().toString(), 2, 4, "type1",
partitionKey);
cosmosItemOperations.add(CosmosBulkOperations.getCreateItemOperation(eventDoc,
new PartitionKey(partitionKey)));
}
CosmosItemOperation[] itemOperationsArray =
new CosmosItemOperation[cosmosItemOperations.size()];
cosmosItemOperations.toArray(itemOperationsArray);
CosmosBulkExecutionOptions cosmosBulkExecutionOptions = new CosmosBulkExecutionOptions();
final BulkExecutor<BulkExecutorTest> executor = new BulkExecutor<>(
container,
Flux.fromArray(itemOperationsArray),
cosmosBulkExecutionOptions);
Flux<com.azure.cosmos.models.CosmosBulkOperationResponse<BulkExecutorTest>> bulkResponseFlux =
Flux.deferContextual(context -> executor.execute());
Mono<List<CosmosBulkOperationResponse<BulkExecutorTest>>> convertToListMono = bulkResponseFlux
.collect(Collectors.toList());
List<CosmosBulkOperationResponse<BulkExecutorTest>> bulkResponse = convertToListMono.block();
assertThat(bulkResponse.size()).isEqualTo(totalRequest * 2);
for (com.azure.cosmos.models.CosmosBulkOperationResponse<BulkExecutorTest> cosmosBulkOperationResponse :
bulkResponse) {
com.azure.cosmos.models.CosmosBulkItemResponse cosmosBulkItemResponse =
cosmosBulkOperationResponse.getResponse();
assertThat(cosmosBulkItemResponse.getStatusCode()).isEqualTo(HttpResponseStatus.CREATED.code());
assertThat(cosmosBulkItemResponse.getRequestCharge()).isGreaterThan(0);
assertThat(cosmosBulkItemResponse.getCosmosDiagnostics().toString()).isNotNull();
assertThat(cosmosBulkItemResponse.getSessionToken()).isNotNull();
assertThat(cosmosBulkItemResponse.getActivityId()).isNotNull();
assertThat(cosmosBulkItemResponse.getRequestCharge()).isNotNull();
}
int iterations = 0;
while (true) {
assertThat(iterations < 100);
if (executor.isDisposed()) {
break;
}
Thread.sleep(10);
iterations++;
}
}
} |
Any news on this one? It looks like you didn't update the PR, is it because `@JdbcTypeCode` didn't work? | public Duration getLatestLunchBreakDuration() {
return latestLunchBreakDuration;
} | return latestLunchBreakDuration; | public Duration getLatestLunchBreakDuration() {
return latestLunchBreakDuration;
} | class Person {
private long id;
private String name;
private SequencedAddress address;
private Status status;
private Duration latestLunchBreakDuration = Duration.ZERO;
public Person() {
}
public Person(long id, String name, SequencedAddress address) {
this.id = id;
this.name = name;
this.address = address;
}
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "personSeq")
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
public SequencedAddress getAddress() {
return address;
}
public void setAddress(SequencedAddress address) {
this.address = address;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
/**
* Need to explicitly set the scale (and the precision so that the scale will actually be read from the annotation).
* Postgresql would only allow maximum scale of 6 for a `interval second`.
*
* @see org.hibernate.type.descriptor.sql.internal.Scale6IntervalSecondDdlType
*/
@Column(precision = 5, scale = 5)
@JdbcType(PostgreSQLIntervalSecondJdbcType.class)
public void setLatestLunchBreakDuration(Duration duration) {
this.latestLunchBreakDuration = duration;
}
public void describeFully(StringBuilder sb) {
sb.append("Person with id=").append(id).append(", name='").append(name).append("', status='").append(status)
.append("', latestLunchBreakDuration='").append(latestLunchBreakDuration)
.append("', address { ");
getAddress().describeFully(sb);
sb.append(" }");
}
} | class Person {
private long id;
private String name;
private SequencedAddress address;
private Status status;
private Duration latestLunchBreakDuration = Duration.ZERO;
public Person() {
}
public Person(long id, String name, SequencedAddress address) {
this.id = id;
this.name = name;
this.address = address;
}
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "personSeq")
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
public SequencedAddress getAddress() {
return address;
}
public void setAddress(SequencedAddress address) {
this.address = address;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
/**
* Need to explicitly set the scale (and the precision so that the scale will actually be read from the annotation).
* Postgresql would only allow maximum scale of 6 for a `interval second`.
*
* @see org.hibernate.type.descriptor.sql.internal.Scale6IntervalSecondDdlType
*/
@Column(precision = 5, scale = 5)
@JdbcType(PostgreSQLIntervalSecondJdbcType.class)
public void setLatestLunchBreakDuration(Duration duration) {
this.latestLunchBreakDuration = duration;
}
public void describeFully(StringBuilder sb) {
sb.append("Person with id=").append(id).append(", name='").append(name).append("', status='").append(status)
.append("', latestLunchBreakDuration='").append(latestLunchBreakDuration)
.append("', address { ");
getAddress().describeFully(sb);
sb.append(" }");
}
} |
Should all the getPrivs() methods of UserPriveTable/TablePriveTable/DbPrivTable/ResourcePrivTable use getReadOnlyIteratorByUser to get the the entiryLIst? | public void getPrivs(UserIdentity currentUser, String db, PrivBitSet savedPrivs) {
List<PrivEntry> userPrivEntryList = map.get(currentUser);
if (userPrivEntryList == null) {
return;
}
DbPrivEntry matchedEntry = null;
for (PrivEntry entry : userPrivEntryList) {
DbPrivEntry dbPrivEntry = (DbPrivEntry) entry;
if (!dbPrivEntry.isAnyDb() && !dbPrivEntry.getDbPattern().match(db)) {
continue;
}
matchedEntry = dbPrivEntry;
break;
}
if (matchedEntry == null) {
return;
}
savedPrivs.or(matchedEntry.getPrivSet());
} | List<PrivEntry> userPrivEntryList = map.get(currentUser); | public void getPrivs(UserIdentity currentUser, String db, PrivBitSet savedPrivs) {
List<PrivEntry> userPrivEntryList = map.get(currentUser);
if (userPrivEntryList == null) {
return;
}
DbPrivEntry matchedEntry = null;
for (PrivEntry entry : userPrivEntryList) {
DbPrivEntry dbPrivEntry = (DbPrivEntry) entry;
if (!dbPrivEntry.isAnyDb() && !dbPrivEntry.getDbPattern().match(db)) {
continue;
}
matchedEntry = dbPrivEntry;
break;
}
if (matchedEntry == null) {
return;
}
savedPrivs.or(matchedEntry.getPrivSet());
} | class DbPrivTable extends PrivTable {
private static final Logger LOG = LogManager.getLogger(DbPrivTable.class);
/*
* Return first priv which match the user@host on db.* The returned priv will be
* saved in 'savedPrivs'.
*/
/*
* Check if current user has specified privilege on any database
*/
public boolean hasPriv(UserIdentity currentUser, PrivPredicate wanted) {
Iterator<PrivEntry> iter = getReadOnlyIteratorByUser(currentUser);
while (iter.hasNext()) {
DbPrivEntry dbPrivEntry = (DbPrivEntry) iter.next();
if (dbPrivEntry.privSet.satisfy(wanted)) {
return true;
}
}
return false;
}
public boolean hasClusterPriv(ConnectContext ctx, String clusterName) {
Iterator<PrivEntry> iter = this.getFullReadOnlyIterator();
while (iter.hasNext()) {
DbPrivEntry dbPrivEntry = (DbPrivEntry) iter.next();
if (dbPrivEntry.getOrigDb().startsWith(clusterName)) {
return true;
}
}
return false;
}
@Override
public void write(DataOutput out) throws IOException {
if (!isClassNameWrote) {
String className = DbPrivTable.class.getCanonicalName();
Text.writeString(out, className);
isClassNameWrote = true;
}
super.write(out);
}
} | class DbPrivTable extends PrivTable {
private static final Logger LOG = LogManager.getLogger(DbPrivTable.class);
/*
* Return first priv which match the user@host on db.* The returned priv will be
* saved in 'savedPrivs'.
*/
/*
* Check if current user has specified privilege on any database
*/
public boolean hasPriv(UserIdentity currentUser, PrivPredicate wanted) {
Iterator<PrivEntry> iter = getReadOnlyIteratorByUser(currentUser);
while (iter.hasNext()) {
DbPrivEntry dbPrivEntry = (DbPrivEntry) iter.next();
if (dbPrivEntry.privSet.satisfy(wanted)) {
return true;
}
}
return false;
}
public boolean hasClusterPriv(ConnectContext ctx, String clusterName) {
Iterator<PrivEntry> iter = this.getFullReadOnlyIterator();
while (iter.hasNext()) {
DbPrivEntry dbPrivEntry = (DbPrivEntry) iter.next();
if (dbPrivEntry.getOrigDb().startsWith(clusterName)) {
return true;
}
}
return false;
}
@Override
public void write(DataOutput out) throws IOException {
if (!isClassNameWrote) {
String className = DbPrivTable.class.getCanonicalName();
Text.writeString(out, className);
isClassNameWrote = true;
}
super.write(out);
}
} |
I think `IllegalArgumentException` is fine but since it a static method I'm not sure if( we should use a logger or not. I don't think we want to have static loggers. Any thoughts @srngar? | public static KeyVaultRoleScope fromUrl(String url) {
try {
return fromString(new URL(url).getPath(), KeyVaultRoleScope.class);
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
} | throw new RuntimeException(e); | public static KeyVaultRoleScope fromUrl(String url) {
try {
return fromString(new URL(url).getPath(), KeyVaultRoleScope.class);
} catch (MalformedURLException e) {
throw new IllegalArgumentException(e);
}
} | class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> {
public static final KeyVaultRoleScope GLOBAL = fromString("/");
public static final KeyVaultRoleScope KEYS = fromString("/keys");
/**
* Creates or finds a {@link KeyVaultRoleScope} from its string representation.
*
* @param name A name to look for.
* @return The corresponding {@link KeyVaultRoleScope}.
*/
public static KeyVaultRoleScope fromString(String name) {
return fromString(name, KeyVaultRoleScope.class);
}
/**
* Creates or finds a {@link KeyVaultRoleScope} from its string representation.
*
* @param url A string representing a URL containing the name of the scope to look for.
* @return The corresponding {@link KeyVaultRoleScope}.
*/
/**
* Creates or finds a {@link KeyVaultRoleScope} from its string representation.
*
* @param url A URL containing the name of the scope to look for.
* @return The corresponding {@link KeyVaultRoleScope}.
*/
public static KeyVaultRoleScope fromUrl(URL url) {
return fromString(url.getPath(), KeyVaultRoleScope.class);
}
} | class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> {
public static final KeyVaultRoleScope GLOBAL = fromString("/");
public static final KeyVaultRoleScope KEYS = fromString("/keys");
/**
* Creates or finds a {@link KeyVaultRoleScope} from its string representation.
*
* @param name A name to look for.
* @return The corresponding {@link KeyVaultRoleScope}.
*/
public static KeyVaultRoleScope fromString(String name) {
return fromString(name, KeyVaultRoleScope.class);
}
/**
* Creates or finds a {@link KeyVaultRoleScope} from its string representation.
*
* @param url A string representing a URL containing the name of the scope to look for.
* @return The corresponding {@link KeyVaultRoleScope}.
* @throws IllegalArgumentException If the given {@link String URL String} is malformed.
*/
/**
* Creates or finds a {@link KeyVaultRoleScope} from its string representation.
*
* @param url A URL containing the name of the scope to look for.
* @return The corresponding {@link KeyVaultRoleScope}.
*/
public static KeyVaultRoleScope fromUrl(URL url) {
return fromString(url.getPath(), KeyVaultRoleScope.class);
}
} |
I was thinking the service would always send US formatted datetimes, but i'm not sure | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US);
DateTimeFormatter dtf_windows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX", Locale.US);
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf_windows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US); | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX");
DateTimeFormatter dtfWindows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX");
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtfWindows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
private MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
public MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} |
Test/sample code. As `data` is of type [`object`](https://github.com/Azure/azure-rest-api-specs/blob/master/specification/resourcegraph/resource-manager/Microsoft.ResourceGraph/stable/2019-04-01/resourcegraph.json#L292-L295), it is not easy to use the result in Java. | public void testResourceGraph() {
QueryRequest queryRequest = new QueryRequest();
queryRequest.withSubscriptions(Arrays.asList(subscription));
queryRequest.withQuery("Resources | project name, type | order by name asc | limit 5");
QueryResponse queryResponse = resourceGraphManager.resourceProviders().resources(queryRequest);
Assert.assertTrue(queryResponse.count() > 0);
Assert.assertNotNull(queryResponse.data());
Assert.assertTrue(queryResponse.data() instanceof Map);
Map<String, Object> dataAsDict = (Map<String, Object>) queryResponse.data();
Assert.assertTrue(dataAsDict.containsKey("columns"));
Assert.assertTrue(dataAsDict.containsKey("rows"));
List<String> columns = (List<String>) dataAsDict.get("columns");
List<String> rows = (List<String>) dataAsDict.get("columns");
Assert.assertEquals(2, columns.size());
Assert.assertTrue(rows.size() > 0);
} | } | public void testResourceGraph() {
QueryRequest queryRequest = new QueryRequest();
queryRequest.withSubscriptions(Arrays.asList(subscription));
queryRequest.withQuery("Resources | project name, type | order by name asc | limit 5");
queryRequest.withOptions(new QueryRequestOptions().withResultFormat(ResultFormat.TABLE));
QueryResponse queryResponse = resourceGraphManager.resourceProviders().resources(queryRequest);
Assert.assertTrue(queryResponse.count() > 0);
Assert.assertNotNull(queryResponse.data());
Assert.assertTrue(queryResponse.data() instanceof Map);
Map<String, Object> dataAsDict = (Map<String, Object>) queryResponse.data();
Assert.assertTrue(dataAsDict.containsKey("columns"));
Assert.assertTrue(dataAsDict.containsKey("rows"));
List<String> columns = (List<String>) dataAsDict.get("columns");
List<String> rows = (List<String>) dataAsDict.get("columns");
Assert.assertEquals(2, columns.size());
Assert.assertTrue(rows.size() > 0);
queryRequest.withOptions(new QueryRequestOptions().withResultFormat(ResultFormat.OBJECT_ARRAY));
queryResponse = resourceGraphManager.resourceProviders().resources(queryRequest);
Assert.assertTrue(queryResponse.count() > 0);
Assert.assertTrue(queryResponse.data() instanceof List);
List<Object> dataAsList = (List<Object>) queryResponse.data();
Map<String, String> itemAsDict = (Map<String, String>) dataAsList.iterator().next();
Assert.assertTrue(itemAsDict.containsKey("name"));
Assert.assertTrue(itemAsDict.containsKey("type"));
} | class ResourceGraphTests extends TestBase {
private ResourceGraphManager resourceGraphManager;
private String subscription;
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceGraphManager = ResourceGraphManager
.authenticate(restClient);
subscription = defaultSubscription;
}
@Override
protected void cleanUpResources() {
}
@Test
@Ignore
} | class ResourceGraphTests extends TestBase {
private ResourceGraphManager resourceGraphManager;
private String subscription;
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceGraphManager = ResourceGraphManager
.authenticate(restClient);
subscription = defaultSubscription;
}
@Override
protected void cleanUpResources() {
}
@Test
@Ignore
} |
I just wanted to make sure we deserialize properly but checking the value itself should tell us if there's changes on the service-side of things. It should be `0` by default from what I've seen. | public void createKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createKeyRunner((keyToCreate) ->
StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
.assertNext(createdKey -> {
assertKeyEquals(keyToCreate, createdKey);
assertNotNull(createdKey.getProperties().getHsmPlatform());
})
.verifyComplete());
} | assertNotNull(createdKey.getProperties().getHsmPlatform()); | public void createKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createKeyRunner((keyToCreate) ->
StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
.assertNext(createdKey -> {
assertKeyEquals(keyToCreate, createdKey);
assertEquals("0", createdKey.getProperties().getHsmPlatform());
})
.verifyComplete());
} | class KeyAsyncClientTest extends KeyClientTestBase {
protected KeyAsyncClient keyAsyncClient;
@Override
protected void beforeTest() {
beforeTestSetup();
}
protected void createKeyAsyncClient(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion, null);
}
protected void createKeyAsyncClient(HttpClient httpClient, KeyServiceVersion serviceVersion, String testTenantId) {
keyAsyncClient = getKeyClientBuilder(buildAsyncAssertingClient(
interceptorManager.isPlaybackMode() ? interceptorManager.getPlaybackClient() : httpClient), testTenantId,
getEndpoint(), serviceVersion)
.buildAsyncClient();
}
/**
* Tests that a key can be created in the key vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
/**
* Tests that a key can be created in the key vault while using a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createKeyWithMultipleTenants(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion, testResourceNamer.randomUuid());
createKeyRunner((keyToCreate) ->
StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
.assertNext(response -> assertKeyEquals(keyToCreate, response))
.verifyComplete());
KeyVaultCredentialPolicy.clearCache();
createKeyRunner((keyToCreate) ->
StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
.assertNext(response -> assertKeyEquals(keyToCreate, response))
.verifyComplete());
}
/**
* Tests that a RSA key created.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createRsaKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createRsaKeyRunner((keyToCreate) ->
StepVerifier.create(keyAsyncClient.createRsaKey(keyToCreate))
.assertNext(response -> assertKeyEquals(keyToCreate, response))
.verifyComplete());
}
/**
* Tests that we cannot create a key when the key is an empty string.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createKeyEmptyName(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
final KeyType keyType;
if (runManagedHsmTest) {
keyType = KeyType.RSA_HSM;
} else {
keyType = KeyType.RSA;
}
StepVerifier.create(keyAsyncClient.createKey("", keyType))
.verifyErrorSatisfies(e ->
assertRestException(e, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST));
}
/**
* Tests that we can create keys when value is not null or an empty string.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createKeyNullType(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createKeyEmptyValueRunner((keyToCreate) ->
StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
.verifyErrorSatisfies(e ->
assertRestException(e, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST)));
}
/**
* Verifies that an exception is thrown when null key object is passed for creation.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createKeyNull(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
StepVerifier.create(keyAsyncClient.createKey(null))
.verifyError(NullPointerException.class);
}
/**
* Tests that a key is able to be updated when it exists.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void updateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
updateKeyRunner((originalKey, updatedKey) -> StepVerifier.create(keyAsyncClient.createKey(originalKey)
.flatMap(response -> {
assertKeyEquals(originalKey, response);
return keyAsyncClient.updateKeyProperties(response.getProperties()
.setExpiresOn(updatedKey.getExpiresOn()));
}))
.assertNext(response -> assertKeyEquals(updatedKey, response))
.verifyComplete());
}
/**
* Tests that a key is not able to be updated when it is disabled. 403 error is expected.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void updateDisabledKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
updateDisabledKeyRunner((originalKey, updatedKey) -> StepVerifier.create(keyAsyncClient.createKey(originalKey)
.flatMap(response -> {
assertKeyEquals(originalKey, response);
return keyAsyncClient.updateKeyProperties(response.getProperties()
.setExpiresOn(updatedKey.getExpiresOn()));
}))
.assertNext(response -> assertKeyEquals(updatedKey, response))
.verifyComplete());
}
/**
* Tests that an existing key can be retrieved.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
getKeyRunner((keyToSetAndGet) -> {
StepVerifier.create(keyAsyncClient.createKey(keyToSetAndGet))
.assertNext(createdKey -> {
assertKeyEquals(keyToSetAndGet, createdKey);
assertNotNull(createdKey.getProperties().getHsmPlatform());
})
.verifyComplete();
StepVerifier.create(keyAsyncClient.getKey(keyToSetAndGet.getName()))
.assertNext(retrievedKey -> {
assertKeyEquals(keyToSetAndGet, retrievedKey);
assertNotNull(retrievedKey.getProperties().getHsmPlatform());
})
.verifyComplete();
});
}
/**
* Tests that a specific version of the key can be retrieved.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getKeySpecificVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
getKeySpecificVersionRunner((keyWithOriginalValue, keyWithNewValue) -> {
StepVerifier.create(keyAsyncClient.createKey(keyWithOriginalValue).flatMap(keyVersionOne ->
keyAsyncClient.getKey(keyWithOriginalValue.getName(), keyVersionOne.getProperties().getVersion())))
.assertNext(response -> assertKeyEquals(keyWithOriginalValue, response))
.verifyComplete();
StepVerifier.create(keyAsyncClient.createKey(keyWithNewValue).flatMap(keyVersionTwo ->
keyAsyncClient.getKey(keyWithNewValue.getName(), keyVersionTwo.getProperties().getVersion())))
.assertNext(response -> assertKeyEquals(keyWithNewValue, response))
.verifyComplete();
});
}
/**
* Tests that an attempt to get a non-existing key throws an error.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
StepVerifier.create(keyAsyncClient.getKey("non-existing"))
.verifyErrorSatisfies(e ->
assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
* Tests that an existing key can be deleted.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void deleteKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
deleteKeyRunner((keyToDelete) -> {
StepVerifier.create(keyAsyncClient.createKey(keyToDelete))
.assertNext(keyResponse -> assertKeyEquals(keyToDelete, keyResponse)).verifyComplete();
PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
keyAsyncClient.beginDeleteKey(keyToDelete.getName()));
StepVerifier.create(poller.last().map(AsyncPollResponse::getValue))
.assertNext(deletedKeyResponse -> {
assertNotNull(deletedKeyResponse.getDeletedOn());
assertNotNull(deletedKeyResponse.getRecoveryId());
assertNotNull(deletedKeyResponse.getScheduledPurgeDate());
assertEquals(keyToDelete.getName(), deletedKeyResponse.getName());
})
.verifyComplete();
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void deleteKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
StepVerifier.create(keyAsyncClient.beginDeleteKey("non-existing"))
.verifyErrorSatisfies(e ->
assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
* Tests that an attempt to retrieve a non existing deleted key throws an error on a soft-delete enabled vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
StepVerifier.create(keyAsyncClient.getDeletedKey("non-existing"))
.verifyErrorSatisfies(e ->
assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
* Tests that a deleted key can be recovered on a soft-delete enabled vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void recoverDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
recoverDeletedKeyRunner((keyToDeleteAndRecover) -> {
StepVerifier.create(keyAsyncClient.createKey(keyToDeleteAndRecover))
.assertNext(keyResponse -> assertKeyEquals(keyToDeleteAndRecover, keyResponse))
.verifyComplete();
PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
keyAsyncClient.beginDeleteKey(keyToDeleteAndRecover.getName()));
StepVerifier.create(poller.last())
.expectNextCount(1)
.verifyComplete();
PollerFlux<KeyVaultKey, Void> recoverPoller = setPlaybackPollerFluxPollInterval(
keyAsyncClient.beginRecoverDeletedKey(keyToDeleteAndRecover.getName()));
StepVerifier.create(recoverPoller.last().map(AsyncPollResponse::getValue))
.assertNext(keyResponse -> {
assertEquals(keyToDeleteAndRecover.getName(), keyResponse.getName());
assertEquals(keyToDeleteAndRecover.getNotBefore(), keyResponse.getProperties().getNotBefore());
assertEquals(keyToDeleteAndRecover.getExpiresOn(), keyResponse.getProperties().getExpiresOn());
})
.verifyComplete();
});
}
/**
* Tests that an attempt to recover a non existing deleted key throws an error on a soft-delete enabled vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void recoverDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
StepVerifier.create(keyAsyncClient.beginRecoverDeletedKey("non-existing"))
.verifyErrorSatisfies(e ->
assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
* Tests that a key can be backed up in the key vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void backupKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
backupKeyRunner((keyToBackup) -> {
StepVerifier.create(keyAsyncClient.createKey(keyToBackup))
.assertNext(keyResponse -> assertKeyEquals(keyToBackup, keyResponse)).verifyComplete();
StepVerifier.create(keyAsyncClient.backupKey(keyToBackup.getName()))
.assertNext(response -> {
assertNotNull(response);
assertTrue(response.length > 0);
}).verifyComplete();
});
}
/**
* Tests that an attempt to backup a non existing key throws an error.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void backupKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
StepVerifier.create(keyAsyncClient.backupKey("non-existing"))
.verifyErrorSatisfies(e ->
assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
* Tests that a key can be backed up in the key vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void restoreKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
restoreKeyRunner((keyToBackupAndRestore) -> {
StepVerifier.create(keyAsyncClient.createKey(keyToBackupAndRestore))
.assertNext(keyResponse -> assertKeyEquals(keyToBackupAndRestore, keyResponse))
.verifyComplete();
byte[] backup = keyAsyncClient.backupKey(keyToBackupAndRestore.getName()).block();
PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
keyAsyncClient.beginDeleteKey(keyToBackupAndRestore.getName()));
StepVerifier.create(poller.last())
.expectNextCount(1)
.verifyComplete();
StepVerifier.create(keyAsyncClient.purgeDeletedKeyWithResponse(keyToBackupAndRestore.getName()))
.assertNext(voidResponse ->
assertEquals(HttpURLConnection.HTTP_NO_CONTENT, voidResponse.getStatusCode()))
.verifyComplete();
pollOnKeyPurge(keyToBackupAndRestore.getName());
sleepIfRunningAgainstService(60000);
StepVerifier.create(keyAsyncClient.restoreKeyBackup(backup))
.assertNext(response -> {
assertEquals(keyToBackupAndRestore.getName(), response.getName());
assertEquals(keyToBackupAndRestore.getNotBefore(), response.getProperties().getNotBefore());
assertEquals(keyToBackupAndRestore.getExpiresOn(), response.getProperties().getExpiresOn());
}).verifyComplete();
});
}
/**
* Tests that an attempt to restore a key from malformed backup bytes throws an error.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void restoreKeyFromMalformedBackup(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
byte[] keyBackupBytes = "non-existing".getBytes();
StepVerifier.create(keyAsyncClient.restoreKeyBackup(keyBackupBytes))
.verifyErrorSatisfies(e ->
assertRestException(e, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST));
}
/**
* Tests that a deleted key can be retrieved on a soft-delete enabled vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
getDeletedKeyRunner((keyToDeleteAndGet) -> {
StepVerifier.create(keyAsyncClient.createKey(keyToDeleteAndGet))
.assertNext(keyResponse -> assertKeyEquals(keyToDeleteAndGet, keyResponse))
.verifyComplete();
PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
keyAsyncClient.beginDeleteKey(keyToDeleteAndGet.getName()));
StepVerifier.create(poller.last())
.expectNextCount(1)
.verifyComplete();
StepVerifier.create(keyAsyncClient.getDeletedKey(keyToDeleteAndGet.getName()))
.assertNext(deletedKeyResponse -> {
assertNotNull(deletedKeyResponse.getDeletedOn());
assertNotNull(deletedKeyResponse.getRecoveryId());
assertNotNull(deletedKeyResponse.getScheduledPurgeDate());
assertEquals(keyToDeleteAndGet.getName(), deletedKeyResponse.getName());
}).verifyComplete();
});
}
/**
* Tests that deleted keys can be listed in the key vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void listDeletedKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
if (interceptorManager.isLiveMode()) {
return;
}
listDeletedKeysRunner((keysToList) -> {
for (CreateKeyOptions key : keysToList.values()) {
StepVerifier.create(keyAsyncClient.createKey(key))
.assertNext(keyResponse -> assertKeyEquals(key, keyResponse)).verifyComplete();
}
sleepIfRunningAgainstService(10000);
for (CreateKeyOptions key : keysToList.values()) {
PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
keyAsyncClient.beginDeleteKey(key.getName()));
StepVerifier.create(poller.last())
.expectNextCount(1)
.verifyComplete();
}
sleepIfRunningAgainstService(90000);
StepVerifier.create(keyAsyncClient.listDeletedKeys()
.doOnNext(actualKey -> {
assertNotNull(actualKey.getDeletedOn());
assertNotNull(actualKey.getRecoveryId());
}).last())
.expectNextCount(1)
.verifyComplete();
});
}
/**
* Tests that key versions can be listed in the key vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void listKeyVersions(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
listKeyVersionsRunner((keysToList) -> {
String keyName = null;
for (CreateKeyOptions key : keysToList) {
keyName = key.getName();
StepVerifier.create(keyAsyncClient.createKey(key))
.assertNext(keyResponse -> assertKeyEquals(key, keyResponse))
.verifyComplete();
}
sleepIfRunningAgainstService(30000);
StepVerifier.create(keyAsyncClient.listPropertiesOfKeyVersions(keyName).collectList())
.assertNext(actualKeys -> assertEquals(keysToList.size(), actualKeys.size()))
.verifyComplete();
});
}
/**
 * Tests that keys can be listed in the key vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void listKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    listKeysRunner((keysToList) -> {
        for (CreateKeyOptions key : keysToList.values()) {
            StepVerifier.create(keyAsyncClient.createKey(key))
                .assertNext(keyResponse -> assertKeyEquals(key, keyResponse))
                .verifyComplete();
        }

        sleepIfRunningAgainstService(10000);

        // Remove each expected key from the map as it is observed in the listing.
        StepVerifier.create(keyAsyncClient.listPropertiesOfKeys().map(actualKey -> {
            if (keysToList.containsKey(actualKey.getName())) {
                CreateKeyOptions expectedKey = keysToList.get(actualKey.getName());

                assertEquals(expectedKey.getExpiresOn(), actualKey.getExpiresOn());
                assertEquals(expectedKey.getNotBefore(), actualKey.getNotBefore());

                keysToList.remove(actualKey.getName());
            }

            return actualKey;
        }).last())
            .expectNextCount(1)
            .verifyComplete();

        // An empty map means every created key was found by the listing.
        assertEquals(0, keysToList.size());
    });
}
/**
 * Tests that an existing key can be released.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void releaseKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Key release only runs when both Managed HSM and release-key testing are enabled.
    Assumptions.assumeTrue(runManagedHsmTest && runReleaseKeyTest);

    createKeyAsyncClient(httpClient, serviceVersion);

    releaseKeyRunner((keyToRelease, attestationUrl) -> {
        StepVerifier.create(keyAsyncClient.createRsaKey(keyToRelease))
            .assertNext(keyResponse -> assertKeyEquals(keyToRelease, keyResponse))
            .verifyComplete();

        // Playback mode uses a canned token; otherwise fetch a real attestation token.
        String targetAttestationToken = "testAttestationToken";

        if (getTestMode() != TestMode.PLAYBACK) {
            if (!attestationUrl.endsWith("/")) {
                attestationUrl = attestationUrl + "/";
            }

            targetAttestationToken = getAttestationToken(attestationUrl + "generate-test-token");
        }

        StepVerifier.create(keyAsyncClient.releaseKey(keyToRelease.getName(), targetAttestationToken))
            .assertNext(releaseKeyResult -> assertNotNull(releaseKeyResult.getValue()))
            .expectComplete()
            .verify();
    });
}
/**
 * Tests that fetching the key rotation policy of a non-existent key throws.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true")
public void getKeyRotationPolicyOfNonExistentKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Rotation policy tests only run against Key Vault (skipped when HSM is enabled).
    Assumptions.assumeTrue(!isHsmEnabled);

    createKeyAsyncClient(httpClient, serviceVersion);

    StepVerifier.create(keyAsyncClient.getKeyRotationPolicy(testResourceNamer.randomName("nonExistentKey", 20)))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
 * Tests that fetching the rotation policy of a key that has no policy explicitly set returns the
 * service-side default policy (a single NOTIFY action 30 days before expiry).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true")
public void getKeyRotationPolicyWithNoPolicySet(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Rotation policy tests only run against Key Vault (skipped when HSM is enabled).
    Assumptions.assumeTrue(!isHsmEnabled);

    createKeyAsyncClient(httpClient, serviceVersion);

    String keyName = testResourceNamer.randomName("rotateKey", 20);

    StepVerifier.create(keyAsyncClient.createRsaKey(new CreateRsaKeyOptions(keyName)))
        .assertNext(Assertions::assertNotNull)
        .verifyComplete();

    StepVerifier.create(keyAsyncClient.getKeyRotationPolicy(keyName))
        .assertNext(keyRotationPolicy -> {
            // No policy was set, so identifying/timestamp fields are absent and only the
            // default lifetime action is present.
            assertNotNull(keyRotationPolicy);
            assertNull(keyRotationPolicy.getId());
            assertNull(keyRotationPolicy.getCreatedOn());
            assertNull(keyRotationPolicy.getUpdatedOn());
            assertNull(keyRotationPolicy.getExpiresIn());
            assertEquals(1, keyRotationPolicy.getLifetimeActions().size());
            assertEquals(KeyRotationPolicyAction.NOTIFY, keyRotationPolicy.getLifetimeActions().get(0).getAction());
            assertEquals("P30D", keyRotationPolicy.getLifetimeActions().get(0).getTimeBeforeExpiry());
            assertNull(keyRotationPolicy.getLifetimeActions().get(0).getTimeAfterCreate());
        }).verifyComplete();
}
/**
* Tests that fetching the key rotation policy of a non-existent key throws.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@Disabled("Disable after https:
public void updateGetKeyRotationPolicyWithMinimumProperties(HttpClient httpClient,
KeyServiceVersion serviceVersion) {
Assumptions.assumeTrue(!isHsmEnabled);
createKeyAsyncClient(httpClient, serviceVersion);
updateGetKeyRotationPolicyWithMinimumPropertiesRunner((keyName, keyRotationPolicy) -> {
StepVerifier.create(keyAsyncClient.createRsaKey(new CreateRsaKeyOptions(keyName)))
.assertNext(Assertions::assertNotNull)
.verifyComplete();
StepVerifier.create(keyAsyncClient.updateKeyRotationPolicy(keyName, keyRotationPolicy)
.flatMap(updatedKeyRotationPolicy -> Mono.zip(Mono.just(updatedKeyRotationPolicy),
keyAsyncClient.getKeyRotationPolicy(keyName))))
.assertNext(tuple -> assertKeyVaultRotationPolicyEquals(tuple.getT1(), tuple.getT2()))
.verifyComplete();
});
}
/**
 * Tests that a key rotation policy can be updated with all possible properties, then retrieves it.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true")
public void updateGetKeyRotationPolicyWithAllProperties(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Rotation policy tests only run against Key Vault (skipped when HSM is enabled).
    Assumptions.assumeTrue(!isHsmEnabled);

    createKeyAsyncClient(httpClient, serviceVersion);

    updateGetKeyRotationPolicyWithAllPropertiesRunner((keyName, keyRotationPolicy) -> {
        StepVerifier.create(keyAsyncClient.createRsaKey(new CreateRsaKeyOptions(keyName)))
            .assertNext(Assertions::assertNotNull)
            .verifyComplete();

        // Update the policy, then fetch it back and assert both copies are equivalent.
        StepVerifier.create(keyAsyncClient.updateKeyRotationPolicy(keyName, keyRotationPolicy)
            .flatMap(updatedKeyRotationPolicy -> Mono.zip(Mono.just(updatedKeyRotationPolicy),
                keyAsyncClient.getKeyRotationPolicy(keyName))))
            .assertNext(tuple -> assertKeyVaultRotationPolicyEquals(tuple.getT1(), tuple.getT2()))
            .verifyComplete();
    });
}
/**
 * Tests that a key can be rotated.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true")
public void rotateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Rotation tests only run against Key Vault (skipped when HSM is enabled).
    Assumptions.assumeTrue(!isHsmEnabled);

    createKeyAsyncClient(httpClient, serviceVersion);

    String keyName = testResourceNamer.randomName("rotateKey", 20);

    // Create the key, then rotate it and compare the original against the rotated version.
    StepVerifier.create(keyAsyncClient.createRsaKey(new CreateRsaKeyOptions(keyName))
        .flatMap(createdKey -> Mono.zip(Mono.just(createdKey),
            keyAsyncClient.rotateKey(keyName))))
        .assertNext(tuple -> {
            KeyVaultKey createdKey = tuple.getT1();
            KeyVaultKey rotatedKey = tuple.getT2();

            // Rotation produces a new version: name and tags carry over.
            assertEquals(createdKey.getName(), rotatedKey.getName());
            assertEquals(createdKey.getProperties().getTags(), rotatedKey.getProperties().getTags());
        }).verifyComplete();
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created for a given key using a {@link KeyAsyncClient}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClient(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    // Client construction is local — no service call is made, so the key need not exist.
    CryptographyAsyncClient cryptographyAsyncClient = keyAsyncClient.getCryptographyAsyncClient("myKey");

    assertNotNull(cryptographyAsyncClient);
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created for a given key using a
 * {@link KeyAsyncClient}. Also tests that cryptographic operations can be performed with said
 * cryptography client.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClientAndEncryptDecrypt(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    createKeyRunner((keyToCreate) -> {
        StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
            .assertNext(response -> assertKeyEquals(keyToCreate, response))
            .verifyComplete();

        CryptographyAsyncClient cryptographyAsyncClient =
            keyAsyncClient.getCryptographyAsyncClient(keyToCreate.getName());

        assertNotNull(cryptographyAsyncClient);

        byte[] plaintext = "myPlaintext".getBytes();

        // Round-trip: encrypt then decrypt must reproduce the original plaintext.
        StepVerifier.create(cryptographyAsyncClient.encrypt(EncryptionAlgorithm.RSA_OAEP, plaintext)
            .map(EncryptResult::getCipherText)
            .flatMap(ciphertext -> cryptographyAsyncClient.decrypt(EncryptionAlgorithm.RSA_OAEP, ciphertext)
                .map(DecryptResult::getPlainText)))
            .assertNext(decryptedText -> assertArrayEquals(plaintext, decryptedText))
            .verifyComplete();
    });
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created for a given key and version using a
 * {@link KeyAsyncClient}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClientWithKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    // Client construction is local — no service call is made, so the key/version need not exist.
    CryptographyAsyncClient cryptographyAsyncClient =
        keyAsyncClient.getCryptographyAsyncClient("myKey", "6A385B124DEF4096AF1361A85B16C204");

    assertNotNull(cryptographyAsyncClient);
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created using a {@link KeyAsyncClient} when
 * an empty string is passed as the key version.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClientWithEmptyKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    CryptographyAsyncClient cryptographyAsyncClient = keyAsyncClient.getCryptographyAsyncClient("myKey", "");

    assertNotNull(cryptographyAsyncClient);
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created using a {@link KeyAsyncClient} when
 * {@code null} is passed as the key version.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClientWithNullKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    CryptographyAsyncClient cryptographyAsyncClient = keyAsyncClient.getCryptographyAsyncClient("myKey", null);

    assertNotNull(cryptographyAsyncClient);
}
/**
 * Polls until the given deleted key can no longer be retrieved, i.e. its purge has completed.
 * Checks at most 10 times, pausing 2 seconds between attempts when running against the live
 * service; logs to stderr if the key is still present after all attempts.
 */
private void pollOnKeyPurge(String keyName) {
    for (int attempt = 0; attempt < 10; attempt++) {
        DeletedKey remaining;

        try {
            remaining = keyAsyncClient.getDeletedKey(keyName).block();
        } catch (ResourceNotFoundException ignored) {
            // Not-found means the purge already finished.
            remaining = null;
        }

        if (remaining == null) {
            return;
        }

        sleepIfRunningAgainstService(2000);
    }

    System.err.printf("Deleted Key %s was not purged \n", keyName);
}
} | class KeyAsyncClientTest extends KeyClientTestBase {
protected KeyAsyncClient keyAsyncClient;
@Override
protected void beforeTest() {
    // Delegate to the shared per-test setup defined in KeyClientTestBase.
    beforeTestSetup();
}
// Convenience overload: build the client under test without pinning a specific tenant ID.
protected void createKeyAsyncClient(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion, null);
}
// Builds the KeyAsyncClient under test, substituting the interceptor manager's playback HTTP
// client when a recorded session is being replayed instead of hitting the live service.
protected void createKeyAsyncClient(HttpClient httpClient, KeyServiceVersion serviceVersion, String testTenantId) {
    keyAsyncClient = getKeyClientBuilder(buildAsyncAssertingClient(
        interceptorManager.isPlaybackMode() ? interceptorManager.getPlaybackClient() : httpClient), testTenantId,
        getEndpoint(), serviceVersion)
        .buildAsyncClient();
}
/**
* Tests that a key can be created in the key vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
/**
 * Tests that a key can be created in the key vault while using a different tenant ID than the one
 * that will be provided in the authentication challenge.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createKeyWithMultipleTenants(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Build the client with a random (non-matching) tenant ID.
    createKeyAsyncClient(httpClient, serviceVersion, testResourceNamer.randomUuid());

    createKeyRunner((keyToCreate) ->
        StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
            .assertNext(response -> assertKeyEquals(keyToCreate, response))
            .verifyComplete());

    // Clear the cached auth challenge so the second create re-authenticates from scratch.
    KeyVaultCredentialPolicy.clearCache();

    createKeyRunner((keyToCreate) ->
        StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
            .assertNext(response -> assertKeyEquals(keyToCreate, response))
            .verifyComplete());
}
/**
 * Tests that an RSA key can be created in the key vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createRsaKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    createRsaKeyRunner((keyToCreate) ->
        StepVerifier.create(keyAsyncClient.createRsaKey(keyToCreate))
            .assertNext(response -> assertKeyEquals(keyToCreate, response))
            .verifyComplete());
}
/**
 * Tests that creating a key with an empty name fails with an HTTP 400 (Bad Request).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createKeyEmptyName(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    // Managed HSM test runs exercise the HSM-backed key type.
    final KeyType keyType = runManagedHsmTest ? KeyType.RSA_HSM : KeyType.RSA;

    StepVerifier.create(keyAsyncClient.createKey("", keyType))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST));
}
/**
 * Tests that creating a key fails with an HTTP 400 (Bad Request) when the key type is missing.
 *
 * <p>NOTE(review): the original comment claimed keys "can be created when value is not null or an
 * empty string", but the test body asserts a {@code ResourceModifiedException} — the corrected
 * description above matches the assertions; confirm against the runner's intent.</p>
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createKeyNullType(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    createKeyEmptyValueRunner((keyToCreate) ->
        StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
            .verifyErrorSatisfies(e ->
                assertRestException(e, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST)));
}
/**
 * Verifies that an exception is thrown when a null key object is passed for creation.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createKeyNull(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    // A null options object is rejected client-side before any request is sent.
    StepVerifier.create(keyAsyncClient.createKey(null))
        .verifyError(NullPointerException.class);
}
/**
 * Tests that a key is able to be updated when it exists.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void updateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    // Create the key, then update its expiry and assert the updated state is returned.
    updateKeyRunner((originalKey, updatedKey) -> StepVerifier.create(keyAsyncClient.createKey(originalKey)
        .flatMap(response -> {
            assertKeyEquals(originalKey, response);

            return keyAsyncClient.updateKeyProperties(response.getProperties()
                .setExpiresOn(updatedKey.getExpiresOn()));
        }))
        .assertNext(response -> assertKeyEquals(updatedKey, response))
        .verifyComplete());
}
/**
 * Tests updating a disabled key.
 *
 * <p>NOTE(review): the original comment stated the update should fail with a 403, but the test
 * body asserts the update completes successfully — confirm which behavior is intended.</p>
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void updateDisabledKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    updateDisabledKeyRunner((originalKey, updatedKey) -> StepVerifier.create(keyAsyncClient.createKey(originalKey)
        .flatMap(response -> {
            assertKeyEquals(originalKey, response);

            return keyAsyncClient.updateKeyProperties(response.getProperties()
                .setExpiresOn(updatedKey.getExpiresOn()));
        }))
        .assertNext(response -> assertKeyEquals(updatedKey, response))
        .verifyComplete());
}
/**
 * Tests that an existing key can be retrieved.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    getKeyRunner((keyToSetAndGet) -> {
        StepVerifier.create(keyAsyncClient.createKey(keyToSetAndGet))
            .assertNext(createdKey -> {
                assertKeyEquals(keyToSetAndGet, createdKey);
                // "0" indicates a non-HSM (software) platform.
                assertEquals("0", createdKey.getProperties().getHsmPlatform());
            })
            .verifyComplete();

        // Fetching by name must return the same key with the same HSM platform marker.
        StepVerifier.create(keyAsyncClient.getKey(keyToSetAndGet.getName()))
            .assertNext(retrievedKey -> {
                assertKeyEquals(keyToSetAndGet, retrievedKey);
                assertEquals("0", retrievedKey.getProperties().getHsmPlatform());
            })
            .verifyComplete();
    });
}
/**
 * Tests that a specific version of the key can be retrieved.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getKeySpecificVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    getKeySpecificVersionRunner((keyWithOriginalValue, keyWithNewValue) -> {
        // Create two versions, then fetch each by its own version identifier.
        StepVerifier.create(keyAsyncClient.createKey(keyWithOriginalValue).flatMap(keyVersionOne ->
            keyAsyncClient.getKey(keyWithOriginalValue.getName(), keyVersionOne.getProperties().getVersion())))
            .assertNext(response -> assertKeyEquals(keyWithOriginalValue, response))
            .verifyComplete();

        StepVerifier.create(keyAsyncClient.createKey(keyWithNewValue).flatMap(keyVersionTwo ->
            keyAsyncClient.getKey(keyWithNewValue.getName(), keyVersionTwo.getProperties().getVersion())))
            .assertNext(response -> assertKeyEquals(keyWithNewValue, response))
            .verifyComplete();
    });
}
/**
 * Tests that an attempt to get a non-existing key throws an error.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    StepVerifier.create(keyAsyncClient.getKey("non-existing"))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
 * Tests that an existing key can be deleted.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void deleteKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    deleteKeyRunner((keyToDelete) -> {
        StepVerifier.create(keyAsyncClient.createKey(keyToDelete))
            .assertNext(keyResponse -> assertKeyEquals(keyToDelete, keyResponse)).verifyComplete();

        PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
            keyAsyncClient.beginDeleteKey(keyToDelete.getName()));

        // The final poll result must carry the deletion metadata for the right key.
        StepVerifier.create(poller.last().map(AsyncPollResponse::getValue))
            .assertNext(deletedKeyResponse -> {
                assertNotNull(deletedKeyResponse.getDeletedOn());
                assertNotNull(deletedKeyResponse.getRecoveryId());
                assertNotNull(deletedKeyResponse.getScheduledPurgeDate());
                assertEquals(keyToDelete.getName(), deletedKeyResponse.getName());
            })
            .verifyComplete();
    });
}
/**
 * Tests that an attempt to delete a non-existing key throws an error.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void deleteKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    StepVerifier.create(keyAsyncClient.beginDeleteKey("non-existing"))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
 * Tests that an attempt to retrieve a non existing deleted key throws an error on a soft-delete
 * enabled vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    StepVerifier.create(keyAsyncClient.getDeletedKey("non-existing"))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
 * Tests that a deleted key can be recovered on a soft-delete enabled vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void recoverDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    recoverDeletedKeyRunner((keyToDeleteAndRecover) -> {
        StepVerifier.create(keyAsyncClient.createKey(keyToDeleteAndRecover))
            .assertNext(keyResponse -> assertKeyEquals(keyToDeleteAndRecover, keyResponse))
            .verifyComplete();

        // Delete the key and wait for the deletion to finish.
        PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
            keyAsyncClient.beginDeleteKey(keyToDeleteAndRecover.getName()));

        StepVerifier.create(poller.last())
            .expectNextCount(1)
            .verifyComplete();

        // Recover the key and verify its original attributes survived the round trip.
        PollerFlux<KeyVaultKey, Void> recoverPoller = setPlaybackPollerFluxPollInterval(
            keyAsyncClient.beginRecoverDeletedKey(keyToDeleteAndRecover.getName()));

        StepVerifier.create(recoverPoller.last().map(AsyncPollResponse::getValue))
            .assertNext(keyResponse -> {
                assertEquals(keyToDeleteAndRecover.getName(), keyResponse.getName());
                assertEquals(keyToDeleteAndRecover.getNotBefore(), keyResponse.getProperties().getNotBefore());
                assertEquals(keyToDeleteAndRecover.getExpiresOn(), keyResponse.getProperties().getExpiresOn());
            })
            .verifyComplete();
    });
}
/**
 * Tests that an attempt to recover a non existing deleted key throws an error on a soft-delete
 * enabled vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void recoverDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    StepVerifier.create(keyAsyncClient.beginRecoverDeletedKey("non-existing"))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
 * Tests that a key can be backed up in the key vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void backupKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    backupKeyRunner((keyToBackup) -> {
        StepVerifier.create(keyAsyncClient.createKey(keyToBackup))
            .assertNext(keyResponse -> assertKeyEquals(keyToBackup, keyResponse)).verifyComplete();

        // The backup blob is opaque; only assert it is present and non-empty.
        StepVerifier.create(keyAsyncClient.backupKey(keyToBackup.getName()))
            .assertNext(response -> {
                assertNotNull(response);
                assertTrue(response.length > 0);
            }).verifyComplete();
    });
}
/**
 * Tests that an attempt to backup a non existing key throws an error.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void backupKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    StepVerifier.create(keyAsyncClient.backupKey("non-existing"))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
 * Tests that a key can be restored from its backup after being deleted and purged.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void restoreKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    restoreKeyRunner((keyToBackupAndRestore) -> {
        StepVerifier.create(keyAsyncClient.createKey(keyToBackupAndRestore))
            .assertNext(keyResponse -> assertKeyEquals(keyToBackupAndRestore, keyResponse))
            .verifyComplete();

        // Capture the backup blob before deleting the key.
        byte[] backup = keyAsyncClient.backupKey(keyToBackupAndRestore.getName()).block();

        PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
            keyAsyncClient.beginDeleteKey(keyToBackupAndRestore.getName()));

        StepVerifier.create(poller.last())
            .expectNextCount(1)
            .verifyComplete();

        // Purge the deleted key — restore only succeeds once the name is fully released.
        StepVerifier.create(keyAsyncClient.purgeDeletedKeyWithResponse(keyToBackupAndRestore.getName()))
            .assertNext(voidResponse ->
                assertEquals(HttpURLConnection.HTTP_NO_CONTENT, voidResponse.getStatusCode()))
            .verifyComplete();

        pollOnKeyPurge(keyToBackupAndRestore.getName());

        sleepIfRunningAgainstService(60000);

        // Restoring from the backup must reproduce the original key's attributes.
        StepVerifier.create(keyAsyncClient.restoreKeyBackup(backup))
            .assertNext(response -> {
                assertEquals(keyToBackupAndRestore.getName(), response.getName());
                assertEquals(keyToBackupAndRestore.getNotBefore(), response.getProperties().getNotBefore());
                assertEquals(keyToBackupAndRestore.getExpiresOn(), response.getProperties().getExpiresOn());
            }).verifyComplete();
    });
}
/**
 * Tests that an attempt to restore a key from malformed backup bytes throws an error.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void restoreKeyFromMalformedBackup(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    // Arbitrary bytes that are not a valid backup blob.
    byte[] keyBackupBytes = "non-existing".getBytes();

    StepVerifier.create(keyAsyncClient.restoreKeyBackup(keyBackupBytes))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST));
}
/**
 * Tests that a deleted key can be retrieved on a soft-delete enabled vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    getDeletedKeyRunner((keyToDeleteAndGet) -> {
        StepVerifier.create(keyAsyncClient.createKey(keyToDeleteAndGet))
            .assertNext(keyResponse -> assertKeyEquals(keyToDeleteAndGet, keyResponse))
            .verifyComplete();

        // Delete the key and wait for the deletion to finish.
        PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
            keyAsyncClient.beginDeleteKey(keyToDeleteAndGet.getName()));

        StepVerifier.create(poller.last())
            .expectNextCount(1)
            .verifyComplete();

        // The deleted key must be retrievable and carry deletion metadata.
        StepVerifier.create(keyAsyncClient.getDeletedKey(keyToDeleteAndGet.getName()))
            .assertNext(deletedKeyResponse -> {
                assertNotNull(deletedKeyResponse.getDeletedOn());
                assertNotNull(deletedKeyResponse.getRecoveryId());
                assertNotNull(deletedKeyResponse.getScheduledPurgeDate());
                assertEquals(keyToDeleteAndGet.getName(), deletedKeyResponse.getName());
            }).verifyComplete();
    });
}
/**
 * Tests that deleted keys can be listed in the key vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void listDeletedKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    // NOTE(review): skipped in live mode — presumably because deleting and listing many keys
    // against the real service is slow/flaky; confirm before changing.
    if (interceptorManager.isLiveMode()) {
        return;
    }

    listDeletedKeysRunner((keysToList) -> {
        // Create every key that will later be deleted and listed.
        for (CreateKeyOptions key : keysToList.values()) {
            StepVerifier.create(keyAsyncClient.createKey(key))
                .assertNext(keyResponse -> assertKeyEquals(key, keyResponse)).verifyComplete();
        }

        sleepIfRunningAgainstService(10000);

        // Delete each key, waiting for every delete poller to finish.
        for (CreateKeyOptions key : keysToList.values()) {
            PollerFlux<DeletedKey, Void> poller = setPlaybackPollerFluxPollInterval(
                keyAsyncClient.beginDeleteKey(key.getName()));

            StepVerifier.create(poller.last())
                .expectNextCount(1)
                .verifyComplete();
        }

        // Allow the service time to reflect the deletions before listing.
        sleepIfRunningAgainstService(90000);

        // Every listed deleted key must carry deletion metadata.
        StepVerifier.create(keyAsyncClient.listDeletedKeys()
            .doOnNext(actualKey -> {
                assertNotNull(actualKey.getDeletedOn());
                assertNotNull(actualKey.getRecoveryId());
            }).last())
            .expectNextCount(1)
            .verifyComplete();
    });
}
/**
 * Tests that key versions can be listed in the key vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void listKeyVersions(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    listKeyVersionsRunner((keysToList) -> {
        String keyName = null;

        // Creating a key with the same name repeatedly produces one new version per create.
        for (CreateKeyOptions key : keysToList) {
            keyName = key.getName();

            StepVerifier.create(keyAsyncClient.createKey(key))
                .assertNext(keyResponse -> assertKeyEquals(key, keyResponse))
                .verifyComplete();
        }

        sleepIfRunningAgainstService(30000);

        // The number of listed versions must match the number of creates performed.
        StepVerifier.create(keyAsyncClient.listPropertiesOfKeyVersions(keyName).collectList())
            .assertNext(actualKeys -> assertEquals(keysToList.size(), actualKeys.size()))
            .verifyComplete();
    });
}
/**
 * Tests that keys can be listed in the key vault.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void listKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);

    listKeysRunner((keysToList) -> {
        for (CreateKeyOptions key : keysToList.values()) {
            StepVerifier.create(keyAsyncClient.createKey(key))
                .assertNext(keyResponse -> assertKeyEquals(key, keyResponse))
                .verifyComplete();
        }

        sleepIfRunningAgainstService(10000);

        // Remove each expected key from the map as it is observed in the listing.
        StepVerifier.create(keyAsyncClient.listPropertiesOfKeys().map(actualKey -> {
            if (keysToList.containsKey(actualKey.getName())) {
                CreateKeyOptions expectedKey = keysToList.get(actualKey.getName());

                assertEquals(expectedKey.getExpiresOn(), actualKey.getExpiresOn());
                assertEquals(expectedKey.getNotBefore(), actualKey.getNotBefore());

                keysToList.remove(actualKey.getName());
            }

            return actualKey;
        }).last())
            .expectNextCount(1)
            .verifyComplete();

        // An empty map means every created key was found by the listing.
        assertEquals(0, keysToList.size());
    });
}
/**
 * Tests that an existing key can be released.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void releaseKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Key release only runs when both Managed HSM and release-key testing are enabled.
    Assumptions.assumeTrue(runManagedHsmTest && runReleaseKeyTest);

    createKeyAsyncClient(httpClient, serviceVersion);

    releaseKeyRunner((keyToRelease, attestationUrl) -> {
        StepVerifier.create(keyAsyncClient.createRsaKey(keyToRelease))
            .assertNext(keyResponse -> assertKeyEquals(keyToRelease, keyResponse))
            .verifyComplete();

        // Playback mode uses a canned token; otherwise fetch a real attestation token.
        String targetAttestationToken = "testAttestationToken";

        if (getTestMode() != TestMode.PLAYBACK) {
            if (!attestationUrl.endsWith("/")) {
                attestationUrl = attestationUrl + "/";
            }

            targetAttestationToken = getAttestationToken(attestationUrl + "generate-test-token");
        }

        StepVerifier.create(keyAsyncClient.releaseKey(keyToRelease.getName(), targetAttestationToken))
            .assertNext(releaseKeyResult -> assertNotNull(releaseKeyResult.getValue()))
            .expectComplete()
            .verify();
    });
}
/**
 * Tests that fetching the key rotation policy of a non-existent key throws.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true")
public void getKeyRotationPolicyOfNonExistentKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Rotation policy tests only run against Key Vault (skipped when HSM is enabled).
    Assumptions.assumeTrue(!isHsmEnabled);

    createKeyAsyncClient(httpClient, serviceVersion);

    StepVerifier.create(keyAsyncClient.getKeyRotationPolicy(testResourceNamer.randomName("nonExistentKey", 20)))
        .verifyErrorSatisfies(e ->
            assertRestException(e, ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND));
}
/**
 * Tests that fetching the rotation policy of a key that has no policy explicitly set returns the
 * service-side default policy (a single NOTIFY action 30 days before expiry).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true")
public void getKeyRotationPolicyWithNoPolicySet(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    // Rotation policy tests only run against Key Vault (skipped when HSM is enabled).
    Assumptions.assumeTrue(!isHsmEnabled);

    createKeyAsyncClient(httpClient, serviceVersion);

    String keyName = testResourceNamer.randomName("rotateKey", 20);

    StepVerifier.create(keyAsyncClient.createRsaKey(new CreateRsaKeyOptions(keyName)))
        .assertNext(Assertions::assertNotNull)
        .verifyComplete();

    StepVerifier.create(keyAsyncClient.getKeyRotationPolicy(keyName))
        .assertNext(keyRotationPolicy -> {
            // No policy was set, so identifying/timestamp fields are absent and only the
            // default lifetime action is present.
            assertNotNull(keyRotationPolicy);
            assertNull(keyRotationPolicy.getId());
            assertNull(keyRotationPolicy.getCreatedOn());
            assertNull(keyRotationPolicy.getUpdatedOn());
            assertNull(keyRotationPolicy.getExpiresIn());
            assertEquals(1, keyRotationPolicy.getLifetimeActions().size());
            assertEquals(KeyRotationPolicyAction.NOTIFY, keyRotationPolicy.getLifetimeActions().get(0).getAction());
            assertEquals("P30D", keyRotationPolicy.getLifetimeActions().get(0).getTimeBeforeExpiry());
            assertNull(keyRotationPolicy.getLifetimeActions().get(0).getTimeAfterCreate());
        }).verifyComplete();
}
/**
 * Tests that a key rotation policy can be updated with the minimum required properties, then retrieved and
 * verified against the update result.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
// NOTE(review): the @Disabled reason below appears truncated (the issue URL is cut off) — confirm against VCS.
@Disabled("Disable after https:
public void updateGetKeyRotationPolicyWithMinimumProperties(HttpClient httpClient,
    KeyServiceVersion serviceVersion) {
    Assumptions.assumeTrue(!isHsmEnabled);
    createKeyAsyncClient(httpClient, serviceVersion);
    updateGetKeyRotationPolicyWithMinimumPropertiesRunner((keyName, keyRotationPolicy) -> {
        StepVerifier.create(keyAsyncClient.createRsaKey(new CreateRsaKeyOptions(keyName)))
            .assertNext(Assertions::assertNotNull)
            .verifyComplete();
        // Update the policy, then fetch it again and verify the round-tripped policy equals the update result.
        StepVerifier.create(keyAsyncClient.updateKeyRotationPolicy(keyName, keyRotationPolicy)
            .flatMap(updatedKeyRotationPolicy -> Mono.zip(Mono.just(updatedKeyRotationPolicy),
                keyAsyncClient.getKeyRotationPolicy(keyName))))
            .assertNext(tuple -> assertKeyVaultRotationPolicyEquals(tuple.getT1(), tuple.getT2()))
            .verifyComplete();
    });
}
/**
 * Tests that a key rotation policy can be updated with all possible properties, then retrieves it.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true")
public void updateGetKeyRotationPolicyWithAllProperties(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    Assumptions.assumeTrue(!isHsmEnabled);
    createKeyAsyncClient(httpClient, serviceVersion);
    updateGetKeyRotationPolicyWithAllPropertiesRunner((keyName, keyRotationPolicy) -> {
        StepVerifier.create(keyAsyncClient.createRsaKey(new CreateRsaKeyOptions(keyName)))
            .assertNext(Assertions::assertNotNull)
            .verifyComplete();
        // Update the policy, then fetch it again and verify the round-tripped policy equals the update result.
        StepVerifier.create(keyAsyncClient.updateKeyRotationPolicy(keyName, keyRotationPolicy)
            .flatMap(updatedKeyRotationPolicy -> Mono.zip(Mono.just(updatedKeyRotationPolicy),
                keyAsyncClient.getKeyRotationPolicy(keyName))))
            .assertNext(tuple -> assertKeyVaultRotationPolicyEquals(tuple.getT1(), tuple.getT2()))
            .verifyComplete();
    });
}
/**
 * Tests that a key can be rotated.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
@DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true")
public void rotateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    Assumptions.assumeTrue(!isHsmEnabled);
    createKeyAsyncClient(httpClient, serviceVersion);
    String keyName = testResourceNamer.randomName("rotateKey", 20);
    // Create a key and immediately rotate it, pairing created and rotated keys for comparison.
    StepVerifier.create(keyAsyncClient.createRsaKey(new CreateRsaKeyOptions(keyName))
        .flatMap(createdKey -> Mono.zip(Mono.just(createdKey),
            keyAsyncClient.rotateKey(keyName))))
        .assertNext(tuple -> {
            KeyVaultKey createdKey = tuple.getT1();
            KeyVaultKey rotatedKey = tuple.getT2();
            // Name and tags should carry over from the original key to the rotated key.
            assertEquals(createdKey.getName(), rotatedKey.getName());
            assertEquals(createdKey.getProperties().getTags(), rotatedKey.getProperties().getTags());
        }).verifyComplete();
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created for a given key using a {@link KeyAsyncClient}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClient(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);
    // Only client construction is verified here; no service call is made or asserted.
    CryptographyAsyncClient cryptographyAsyncClient = keyAsyncClient.getCryptographyAsyncClient("myKey");
    assertNotNull(cryptographyAsyncClient);
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created for a given key using a {@link KeyAsyncClient}.
 * Also tests that cryptographic operations can be performed with said cryptography client.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClientAndEncryptDecrypt(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);
    createKeyRunner((keyToCreate) -> {
        StepVerifier.create(keyAsyncClient.createKey(keyToCreate))
            .assertNext(response -> assertKeyEquals(keyToCreate, response))
            .verifyComplete();
        CryptographyAsyncClient cryptographyAsyncClient =
            keyAsyncClient.getCryptographyAsyncClient(keyToCreate.getName());
        assertNotNull(cryptographyAsyncClient);
        byte[] plaintext = "myPlaintext".getBytes();
        // Round-trip: encrypt with RSA-OAEP, decrypt the resulting ciphertext, and expect the original bytes.
        StepVerifier.create(cryptographyAsyncClient.encrypt(EncryptionAlgorithm.RSA_OAEP, plaintext)
            .map(EncryptResult::getCipherText)
            .flatMap(ciphertext -> cryptographyAsyncClient.decrypt(EncryptionAlgorithm.RSA_OAEP, ciphertext)
                .map(DecryptResult::getPlainText)))
            .assertNext(decryptedText -> assertArrayEquals(plaintext, decryptedText))
            .verifyComplete();
    });
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created for a given key and version using a
 * {@link KeyAsyncClient}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClientWithKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);
    // Only client construction is verified here; no service call is made or asserted.
    CryptographyAsyncClient cryptographyAsyncClient =
        keyAsyncClient.getCryptographyAsyncClient("myKey", "6A385B124DEF4096AF1361A85B16C204");
    assertNotNull(cryptographyAsyncClient);
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created for a given key using a {@link KeyAsyncClient},
 * even when an empty string is supplied as the key version.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClientWithEmptyKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);
    // An empty version string must not prevent client construction.
    CryptographyAsyncClient cryptographyAsyncClient = keyAsyncClient.getCryptographyAsyncClient("myKey", "");
    assertNotNull(cryptographyAsyncClient);
}
/**
 * Tests that a {@link CryptographyAsyncClient} can be created for a given key using a {@link KeyAsyncClient},
 * even when {@code null} is supplied as the key version.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getCryptographyAsyncClientWithNullKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) {
    createKeyAsyncClient(httpClient, serviceVersion);
    // A null version must not prevent client construction.
    CryptographyAsyncClient cryptographyAsyncClient = keyAsyncClient.getCryptographyAsyncClient("myKey", null);
    assertNotNull(cryptographyAsyncClient);
}
/**
 * Polls for the deleted key and returns once it can no longer be retrieved (presumably meaning the purge
 * has completed — confirm against service semantics). Gives up after 10 attempts, sleeping 2 seconds
 * between attempts when running against the live service, and logs a warning to stderr on timeout.
 */
private void pollOnKeyPurge(String keyName) {
    for (int attempt = 0; attempt < 10; attempt++) {
        DeletedKey deletedKey = null;
        try {
            deletedKey = keyAsyncClient.getDeletedKey(keyName).block();
        } catch (ResourceNotFoundException ignored) {
            // The deleted key is gone; deletedKey stays null and we return below.
        }
        if (deletedKey == null) {
            return;
        }
        sleepIfRunningAgainstService(2000);
    }
    System.err.printf("Deleted Key %s was not purged \n", keyName);
}
} |
[SpotBugs-P2] Possible null pointer dereference of encodedPair in com.microsoft.azure.servicebus.primitives.RequestResponseLink$InternalSender.runSendLoop() on exception path A reference value which is null on some exception control path is dereferenced here. This may lead to a NullPointerException when the code is executed. Note that because SpotBugs currently does not prune infeasible exception paths, this may be a false warning. Also note that SpotBugs considers the default case of a switch statement to be an exception path, since the default case is often infeasible. | private void runSendLoop() {
// Ensure only one send loop runs at a time; claim the running flag under the pending-sends lock.
synchronized (this.pendingSendsSyncLock) {
    if (this.isSendLoopRunning) {
        return;
    } else {
        this.isSendLoopRunning = true;
    }
}
TRACE_LOGGER.debug("Starting requestResponseLink {} internal sender send loop", this.parent.linkPath);
try {
    // Keep sending while the link is active on both ends and link credit remains.
    while (this.sendLink != null && this.sendLink.getLocalState() == EndpointState.ACTIVE && this.sendLink.getRemoteState() == EndpointState.ACTIVE && this.availableCredit.get() > 0) {
        String requestIdToBeSent;
        synchronized (pendingSendsSyncLock) {
            // Retries take priority over fresh sends; end the loop once both queues are drained.
            requestIdToBeSent = this.pendingRetrySends.poll();
            if (requestIdToBeSent == null) {
                requestIdToBeSent = this.pendingFreshSends.poll();
                if (requestIdToBeSent == null) {
                    this.isSendLoopRunning = false;
                    TRACE_LOGGER.debug("RequestResponseLink {} internal sender send loop ending as there are no more requests enqueued.", this.parent.linkPath);
                    break;
                }
            }
        }
        RequestResponseWorkItem requestToBeSent = this.parent.pendingRequests.get(requestIdToBeSent);
        if (requestToBeSent != null) {
            Delivery delivery = this.sendLink.delivery(UUID.randomUUID().toString().getBytes(UTF_8));
            delivery.setMessageFormat(DeliveryImpl.DEFAULT_MESSAGE_FORMAT);
            // Attach a transactional disposition when the request is part of a transaction.
            TransactionContext transaction = requestToBeSent.getTransaction();
            if (transaction != TransactionContext.NULL_TXN) {
                TransactionalState transactionalState = new TransactionalState();
                transactionalState.setTxnId(new Binary(transaction.getTransactionId().array()));
                delivery.disposition(transactionalState);
            }
            Pair<byte[], Integer> encodedPair = null;
            try {
                encodedPair = Util.encodeMessageToOptimalSizeArray(requestToBeSent.getRequest(), this.maxMessageSize);
            } catch (PayloadSizeExceededException exception) {
                // Oversized payload: the request is failed here; encodedPair stays null so the send is skipped.
                this.parent.exceptionallyCompleteRequest((String)requestToBeSent.getRequest().getMessageId(), new PayloadSizeExceededException(String.format("Size of the payload exceeded Maximum message size: %s kb", this.maxMessageSize / 1024), exception), false);
            }
            if (encodedPair != null) {
                try {
                    int sentMsgSize = this.sendLink.send(encodedPair.getFirstItem(), 0, encodedPair.getSecondItem());
                    assert sentMsgSize == encodedPair.getSecondItem() : "Contract of the ProtonJ library for Sender.Send API changed";
                    delivery.settle();
                    // One unit of link credit is consumed per sent request.
                    this.availableCredit.decrementAndGet();
                    if (TRACE_LOGGER.isDebugEnabled()) {
                        TRACE_LOGGER.debug("RequestResponseLink {} internal sender sent a request. available credit :{}", this.parent.linkPath, this.availableCredit.get());
                    }
                } catch (Exception e) {
                    if (TRACE_LOGGER.isErrorEnabled()) {
                        TRACE_LOGGER.error("RequestResponseLink {} failed to send request with request id:{}.", this.parent.linkPath, requestIdToBeSent, e);
                    }
                    this.parent.exceptionallyCompleteRequest(requestIdToBeSent, e, false);
                }
            } else {
                // Encoding failed above; the request was already exceptionally completed.
                if (TRACE_LOGGER.isErrorEnabled()) {
                    TRACE_LOGGER.error("NULL_POINTER exception: encodedPair in RequestResponseLink");
                }
            }
        } else {
            // The request may have been completed/removed while queued; nothing to send.
            if (TRACE_LOGGER.isWarnEnabled()) {
                TRACE_LOGGER.warn("Request with id:{} not found in the requestresponse link.", requestIdToBeSent);
            }
        }
    }
} finally {
    // Always clear the running flag so a later sendRequest/onFlow can restart the loop.
    synchronized (this.pendingSendsSyncLock) {
        if (this.isSendLoopRunning) {
            this.isSendLoopRunning = false;
        }
    }
    TRACE_LOGGER.debug("RequestResponseLink {} internal sender send loop stopped.", this.parent.linkPath);
}
// Ensure only one send loop runs at a time; claim the running flag under the pending-sends lock.
synchronized (this.pendingSendsSyncLock) {
    if (this.isSendLoopRunning) {
        return;
    } else {
        this.isSendLoopRunning = true;
    }
}
TRACE_LOGGER.debug("Starting requestResponseLink {} internal sender send loop", this.parent.linkPath);
try {
    // Keep sending while the link is active on both ends and link credit remains.
    while (this.sendLink != null && this.sendLink.getLocalState() == EndpointState.ACTIVE && this.sendLink.getRemoteState() == EndpointState.ACTIVE && this.availableCredit.get() > 0) {
        String requestIdToBeSent;
        synchronized (pendingSendsSyncLock) {
            // Retries take priority over fresh sends; end the loop once both queues are drained.
            requestIdToBeSent = this.pendingRetrySends.poll();
            if (requestIdToBeSent == null) {
                requestIdToBeSent = this.pendingFreshSends.poll();
                if (requestIdToBeSent == null) {
                    this.isSendLoopRunning = false;
                    TRACE_LOGGER.debug("RequestResponseLink {} internal sender send loop ending as there are no more requests enqueued.", this.parent.linkPath);
                    break;
                }
            }
        }
        RequestResponseWorkItem requestToBeSent = this.parent.pendingRequests.get(requestIdToBeSent);
        if (requestToBeSent != null) {
            Delivery delivery = this.sendLink.delivery(UUID.randomUUID().toString().getBytes(UTF_8));
            delivery.setMessageFormat(DeliveryImpl.DEFAULT_MESSAGE_FORMAT);
            // Attach a transactional disposition when the request is part of a transaction.
            TransactionContext transaction = requestToBeSent.getTransaction();
            if (transaction != TransactionContext.NULL_TXN) {
                TransactionalState transactionalState = new TransactionalState();
                transactionalState.setTxnId(new Binary(transaction.getTransactionId().array()));
                delivery.disposition(transactionalState);
            }
            Pair<byte[], Integer> encodedPair = null;
            try {
                encodedPair = Util.encodeMessageToOptimalSizeArray(requestToBeSent.getRequest(), this.maxMessageSize);
            } catch (PayloadSizeExceededException exception) {
                // Oversized payload: the request is failed here and encodedPair remains null.
                this.parent.exceptionallyCompleteRequest((String)requestToBeSent.getRequest().getMessageId(), new PayloadSizeExceededException(String.format("Size of the payload exceeded Maximum message size: %s kb", this.maxMessageSize / 1024), exception), false);
            }
            // Guard against dereferencing a null encodedPair when encoding failed above (SpotBugs:
            // possible null pointer dereference on the exception path). The request was already
            // exceptionally completed in the catch block, so there is nothing left to send.
            if (encodedPair != null) {
                try {
                    int sentMsgSize = this.sendLink.send(encodedPair.getFirstItem(), 0, encodedPair.getSecondItem());
                    assert sentMsgSize == encodedPair.getSecondItem() : "Contract of the ProtonJ library for Sender.Send API changed";
                    delivery.settle();
                    // One unit of link credit is consumed per sent request.
                    this.availableCredit.decrementAndGet();
                    TRACE_LOGGER.debug("RequestResponseLink {} internal sender sent a request. available credit :{}", this.parent.linkPath, this.availableCredit.get());
                } catch (Exception e) {
                    TRACE_LOGGER.error("RequestResponseLink {} failed to send request with request id:{}.", this.parent.linkPath, requestIdToBeSent, e);
                    this.parent.exceptionallyCompleteRequest(requestIdToBeSent, e, false);
                }
            }
        } else {
            // The request may have been completed/removed while queued; nothing to send.
            TRACE_LOGGER.warn("Request with id:{} not found in the requestresponse link.", requestIdToBeSent);
        }
    }
} finally {
    // Always clear the running flag so a later sendRequest/onFlow can restart the loop.
    synchronized (this.pendingSendsSyncLock) {
        if (this.isSendLoopRunning) {
            this.isSendLoopRunning = false;
        }
    }
    TRACE_LOGGER.debug("RequestResponseLink {} internal sender send loop stopped.", this.parent.linkPath);
}
private Sender sendLink;
private Receiver matchingReceiveLink;
private RequestResponseLink parent;
private CompletableFuture<Void> openFuture;
private CompletableFuture<Void> closeFuture;
private AtomicInteger availableCredit;
private LinkedList<String> pendingFreshSends;
private LinkedList<String> pendingRetrySends;
private Object pendingSendsSyncLock;
private boolean isSendLoopRunning;
private int maxMessageSize;
private int linkGeneration;
protected InternalSender(String clientId, RequestResponseLink parent, InternalSender senderToBeCopied) {
super(clientId);
this.parent = parent;
this.linkGeneration = parent.internalLinkGeneration;
this.availableCredit = new AtomicInteger(0);
this.pendingSendsSyncLock = new Object();
this.isSendLoopRunning = false;
this.openFuture = new CompletableFuture<Void>();
this.closeFuture = new CompletableFuture<Void>();
if (senderToBeCopied == null) {
this.pendingFreshSends = new LinkedList<>();
this.pendingRetrySends = new LinkedList<>();
} else {
this.pendingFreshSends = senderToBeCopied.pendingFreshSends;
this.pendingRetrySends = senderToBeCopied.pendingRetrySends;
}
}
@Override
protected CompletableFuture<Void> onClose() {
this.closeInternals(true);
return this.closeFuture;
}
void closeInternals(boolean waitForCloseCompletion) {
if (!this.getIsClosed()) {
if (this.sendLink != null && this.sendLink.getLocalState() != EndpointState.CLOSED) {
try {
this.parent.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() {
@Override
public void onEvent() {
if (InternalSender.this.sendLink != null && InternalSender.this.sendLink.getLocalState() != EndpointState.CLOSED) {
TRACE_LOGGER.debug("Closing internal send link of requestresponselink to {}", RequestResponseLink.this.linkPath);
InternalSender.this.sendLink.close();
InternalSender.this.parent.underlyingFactory.deregisterForConnectionError(InternalSender.this.sendLink);
if (waitForCloseCompletion) {
RequestResponseLink.scheduleLinkCloseTimeout(InternalSender.this.closeFuture, InternalSender.this.parent.underlyingFactory.getOperationTimeout(), InternalSender.this.sendLink.getName());
} else {
AsyncUtil.completeFuture(InternalSender.this.closeFuture, null);
}
}
}
});
} catch (IOException e) {
AsyncUtil.completeFutureExceptionally(this.closeFuture, e);
}
} else {
AsyncUtil.completeFuture(this.closeFuture, null);
}
}
}
@Override
public void onOpenComplete(Exception completionException) {
if (completionException == null) {
TRACE_LOGGER.debug("Opened internal send link of requestresponselink to {}", parent.linkPath);
this.maxMessageSize = Util.getMaxMessageSizeFromLink(this.sendLink);
AsyncUtil.completeFuture(this.openFuture, null);
this.runSendLoop();
} else {
TRACE_LOGGER.error("Opening internal send link '{}' of requestresponselink to {} failed.", this.sendLink.getName(), this.parent.linkPath, completionException);
this.setClosed();
AsyncUtil.completeFuture(this.closeFuture, null);
AsyncUtil.completeFutureExceptionally(this.openFuture, completionException);
}
}
@Override
public void onError(Exception exception) {
if (!this.openFuture.isDone()) {
this.onOpenComplete(exception);
}
if (this.getIsClosingOrClosed()) {
if (!this.closeFuture.isDone()) {
TRACE_LOGGER.error("Closing internal send link '{}' of requestresponselink to {} failed.", this.sendLink.getName(), this.parent.linkPath, exception);
AsyncUtil.completeFutureExceptionally(this.closeFuture, exception);
}
} else {
TRACE_LOGGER.warn("Internal send link '{}' of requestresponselink to '{}' encountered error.", this.sendLink.getName(), this.parent.linkPath, exception);
this.parent.underlyingFactory.deregisterForConnectionError(this.sendLink);
this.matchingReceiveLink.close();
this.parent.underlyingFactory.deregisterForConnectionError(this.matchingReceiveLink);
this.parent.onInnerLinksClosed(this.linkGeneration, exception);
}
}
@Override
public void onClose(ErrorCondition condition) {
if (condition == null || condition.getCondition() == null) {
if (!this.closeFuture.isDone() && !this.closeFuture.isDone()) {
TRACE_LOGGER.info("Closed internal send link of requestresponselink to {}", this.parent.linkPath);
AsyncUtil.completeFuture(this.closeFuture, null);
}
} else {
Exception exception = ExceptionUtil.toException(condition);
this.onError(exception);
}
}
public void sendRequest(String requestId, boolean isRetry) {
synchronized (this.pendingSendsSyncLock) {
if (isRetry) {
this.pendingRetrySends.add(requestId);
} else {
this.pendingFreshSends.add(requestId);
}
if (this.isSendLoopRunning) {
return;
}
}
try {
this.parent.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() {
@Override
public void onEvent() {
InternalSender.this.runSendLoop();
}
});
} catch (IOException e) {
this.parent.exceptionallyCompleteRequest(requestId, e, true);
}
}
public void removeEnqueuedRequest(String requestId, boolean isRetry) {
synchronized (this.pendingSendsSyncLock) {
if (isRetry) {
this.pendingRetrySends.remove(requestId);
} else {
this.pendingFreshSends.remove(requestId);
}
}
}
@Override
public void onFlow(int creditIssued) {
TRACE_LOGGER.debug("RequestResonseLink {} internal sender received credit :{}", this.parent.linkPath, creditIssued);
this.availableCredit.addAndGet(creditIssued);
TRACE_LOGGER.debug("RequestResonseLink {} internal sender available credit :{}", this.parent.linkPath, this.availableCredit.get());
this.runSendLoop();
}
@Override
public void onSendComplete(Delivery delivery) {
}
public void setLinks(Sender sendLink, Receiver receiveLink) {
this.sendLink = sendLink;
this.matchingReceiveLink = receiveLink;
this.availableCredit = new AtomicInteger(0);
}
} | class InternalSender extends ClientEntity implements IAmqpSender {
private Sender sendLink;
private Receiver matchingReceiveLink;
private RequestResponseLink parent;
private CompletableFuture<Void> openFuture;
private CompletableFuture<Void> closeFuture;
private AtomicInteger availableCredit;
private LinkedList<String> pendingFreshSends;
private LinkedList<String> pendingRetrySends;
private Object pendingSendsSyncLock;
private boolean isSendLoopRunning;
private int maxMessageSize;
private int linkGeneration;
protected InternalSender(String clientId, RequestResponseLink parent, InternalSender senderToBeCopied) {
super(clientId);
this.parent = parent;
this.linkGeneration = parent.internalLinkGeneration;
this.availableCredit = new AtomicInteger(0);
this.pendingSendsSyncLock = new Object();
this.isSendLoopRunning = false;
this.openFuture = new CompletableFuture<Void>();
this.closeFuture = new CompletableFuture<Void>();
if (senderToBeCopied == null) {
this.pendingFreshSends = new LinkedList<>();
this.pendingRetrySends = new LinkedList<>();
} else {
this.pendingFreshSends = senderToBeCopied.pendingFreshSends;
this.pendingRetrySends = senderToBeCopied.pendingRetrySends;
}
}
@Override
protected CompletableFuture<Void> onClose() {
this.closeInternals(true);
return this.closeFuture;
}
void closeInternals(boolean waitForCloseCompletion) {
if (!this.getIsClosed()) {
if (this.sendLink != null && this.sendLink.getLocalState() != EndpointState.CLOSED) {
try {
this.parent.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() {
@Override
public void onEvent() {
if (InternalSender.this.sendLink != null && InternalSender.this.sendLink.getLocalState() != EndpointState.CLOSED) {
TRACE_LOGGER.debug("Closing internal send link of requestresponselink to {}", RequestResponseLink.this.linkPath);
InternalSender.this.sendLink.close();
InternalSender.this.parent.underlyingFactory.deregisterForConnectionError(InternalSender.this.sendLink);
if (waitForCloseCompletion) {
RequestResponseLink.scheduleLinkCloseTimeout(InternalSender.this.closeFuture, InternalSender.this.parent.underlyingFactory.getOperationTimeout(), InternalSender.this.sendLink.getName());
} else {
AsyncUtil.completeFuture(InternalSender.this.closeFuture, null);
}
}
}
});
} catch (IOException e) {
AsyncUtil.completeFutureExceptionally(this.closeFuture, e);
}
} else {
AsyncUtil.completeFuture(this.closeFuture, null);
}
}
}
@Override
public void onOpenComplete(Exception completionException) {
if (completionException == null) {
TRACE_LOGGER.debug("Opened internal send link of requestresponselink to {}", parent.linkPath);
this.maxMessageSize = Util.getMaxMessageSizeFromLink(this.sendLink);
AsyncUtil.completeFuture(this.openFuture, null);
this.runSendLoop();
} else {
TRACE_LOGGER.error("Opening internal send link '{}' of requestresponselink to {} failed.", this.sendLink.getName(), this.parent.linkPath, completionException);
this.setClosed();
AsyncUtil.completeFuture(this.closeFuture, null);
AsyncUtil.completeFutureExceptionally(this.openFuture, completionException);
}
}
@Override
public void onError(Exception exception) {
if (!this.openFuture.isDone()) {
this.onOpenComplete(exception);
}
if (this.getIsClosingOrClosed()) {
if (!this.closeFuture.isDone()) {
TRACE_LOGGER.error("Closing internal send link '{}' of requestresponselink to {} failed.", this.sendLink.getName(), this.parent.linkPath, exception);
AsyncUtil.completeFutureExceptionally(this.closeFuture, exception);
}
} else {
TRACE_LOGGER.warn("Internal send link '{}' of requestresponselink to '{}' encountered error.", this.sendLink.getName(), this.parent.linkPath, exception);
this.parent.underlyingFactory.deregisterForConnectionError(this.sendLink);
this.matchingReceiveLink.close();
this.parent.underlyingFactory.deregisterForConnectionError(this.matchingReceiveLink);
this.parent.onInnerLinksClosed(this.linkGeneration, exception);
}
}
@Override
public void onClose(ErrorCondition condition) {
if (condition == null || condition.getCondition() == null) {
if (!this.closeFuture.isDone() && !this.closeFuture.isDone()) {
TRACE_LOGGER.info("Closed internal send link of requestresponselink to {}", this.parent.linkPath);
AsyncUtil.completeFuture(this.closeFuture, null);
}
} else {
Exception exception = ExceptionUtil.toException(condition);
this.onError(exception);
}
}
public void sendRequest(String requestId, boolean isRetry) {
synchronized (this.pendingSendsSyncLock) {
if (isRetry) {
this.pendingRetrySends.add(requestId);
} else {
this.pendingFreshSends.add(requestId);
}
if (this.isSendLoopRunning) {
return;
}
}
try {
this.parent.underlyingFactory.scheduleOnReactorThread(new DispatchHandler() {
@Override
public void onEvent() {
InternalSender.this.runSendLoop();
}
});
} catch (IOException e) {
this.parent.exceptionallyCompleteRequest(requestId, e, true);
}
}
public void removeEnqueuedRequest(String requestId, boolean isRetry) {
synchronized (this.pendingSendsSyncLock) {
if (isRetry) {
this.pendingRetrySends.remove(requestId);
} else {
this.pendingFreshSends.remove(requestId);
}
}
}
@Override
public void onFlow(int creditIssued) {
TRACE_LOGGER.debug("RequestResonseLink {} internal sender received credit :{}", this.parent.linkPath, creditIssued);
this.availableCredit.addAndGet(creditIssued);
TRACE_LOGGER.debug("RequestResonseLink {} internal sender available credit :{}", this.parent.linkPath, this.availableCredit.get());
this.runSendLoop();
}
@Override
public void onSendComplete(Delivery delivery) {
}
public void setLinks(Sender sendLink, Receiver receiveLink) {
this.sendLink = sendLink;
this.matchingReceiveLink = receiveLink;
this.availableCredit = new AtomicInteger(0);
}
} |
use `thenApply(ctx -> {ctx.statementContext})` for better performance | public List<Rule> buildRules() {
return ImmutableList.of(
logicalJoin(any(), any())
.then(join -> {
if (hasEmptyRelationChild(join) && canReplaceJoinByEmptyRelation(join)
|| bothChildrenEmpty(join)) {
return new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
join.getOutput());
}
return null;
}
)
.toRule(RuleType.ELIMINATE_JOIN_ON_EMPTYRELATION),
logicalFilter(logicalEmptyRelation())
.then(filter -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
filter.getOutput())
).toRule(RuleType.ELIMINATE_FILTER_ON_EMPTYRELATION),
logicalAggregate(logicalEmptyRelation())
.when(agg -> !agg.getGroupByExpressions().isEmpty())
.then(agg -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
agg.getOutput())
).toRule(RuleType.ELIMINATE_AGG_ON_EMPTYRELATION),
logicalProject(logicalEmptyRelation())
.then(project ->
new LogicalEmptyRelation(ConnectContext.get().getStatementContext().getNextRelationId(),
project.getOutputs()
))
.toRule(RuleType.ELIMINATE_AGG_ON_EMPTYRELATION),
logicalUnion(multi()).then(union -> {
if (union.children().isEmpty()) {
return null;
}
ImmutableList.Builder<Plan> nonEmptyChildrenBuilder = ImmutableList.builder();
ImmutableList.Builder<List<SlotReference>> nonEmptyOutputsBuilder = ImmutableList.builder();
for (int i = 0; i < union.arity(); i++) {
if (!(union.child(i) instanceof EmptyRelation)) {
nonEmptyChildrenBuilder.add(union.child(i));
nonEmptyOutputsBuilder.add(union.getRegularChildOutput(i));
}
}
List<Plan> nonEmptyChildren = nonEmptyChildrenBuilder.build();
if (nonEmptyChildren.isEmpty()) {
if (union.getConstantExprsList().isEmpty()) {
return new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
union.getOutput());
} else {
return union.withChildrenAndTheirOutputs(ImmutableList.of(), ImmutableList.of());
}
} else if (nonEmptyChildren.size() == 1) {
if (union.getConstantExprsList().isEmpty()) {
Plan child = nonEmptyChildren.get(0);
List<Slot> unionOutput = union.getOutput();
List<Slot> childOutput = child.getOutput();
List<NamedExpression> projects = Lists.newArrayList();
for (int i = 0; i < unionOutput.size(); i++) {
ExprId id = unionOutput.get(i).getExprId();
Alias alias = new Alias(id, childOutput.get(i), unionOutput.get(i).getName());
projects.add(alias);
}
return new LogicalProject<>(projects, child);
}
}
if (union.children().size() != nonEmptyChildren.size()) {
return union.withChildrenAndTheirOutputs(nonEmptyChildren, nonEmptyOutputsBuilder.build());
} else {
return null;
}
}).toRule(RuleType.ELIMINATE_UNION_ON_EMPTYRELATION),
logicalTopN(logicalEmptyRelation())
.then(topn -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
topn.getOutput()))
.toRule(RuleType.ELIMINATE_TOPN_ON_EMPTYRELATION),
logicalSort(logicalEmptyRelation())
.then(sort -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
sort.getOutput()))
.toRule(RuleType.ELIMINATE_SORT_ON_EMPTYRELATION),
logicalIntersect(multi()).then(intersect -> {
List<Plan> emptyChildren = intersect.children().stream()
.filter(EmptyRelation.class::isInstance)
.collect(Collectors.toList());
if (emptyChildren.isEmpty()) {
return null;
} else {
return new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
intersect.getOutput());
}
}).toRule(RuleType.ELIMINATE_INTERSECTION_ON_EMPTYRELATION),
logicalExcept(multi()).then(except -> {
Plan first = except.child(0);
if (first instanceof EmptyRelation) {
return new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
except.getOutput());
} else {
ImmutableList.Builder<Plan> nonEmptyChildrenBuilder = ImmutableList.builder();
ImmutableList.Builder<List<SlotReference>> nonEmptyOutputsBuilder = ImmutableList.builder();
for (int i = 0; i < except.arity(); i++) {
if (!(except.child(i) instanceof EmptyRelation)) {
nonEmptyChildrenBuilder.add(except.child(i));
nonEmptyOutputsBuilder.add(except.getRegularChildOutput(i));
}
}
List<Plan> nonEmptyChildren = nonEmptyChildrenBuilder.build();
if (nonEmptyChildren.size() == 1) {
Plan projectChild;
if (except.getQualifier() == SetOperation.Qualifier.DISTINCT) {
List<NamedExpression> firstOutputNamedExpressions = first.getOutput()
.stream().map(slot -> (NamedExpression) slot)
.collect(ImmutableList.toImmutableList());
projectChild = new LogicalAggregate<>(ImmutableList.copyOf(firstOutputNamedExpressions),
firstOutputNamedExpressions, true, Optional.empty(), first);
} else {
projectChild = first;
}
List<Slot> exceptOutput = except.getOutput();
List<Slot> projectInputSlots = projectChild.getOutput();
List<NamedExpression> projects = Lists.newArrayList();
for (int i = 0; i < exceptOutput.size(); i++) {
ExprId id = exceptOutput.get(i).getExprId();
Alias alias = new Alias(id, projectInputSlots.get(i), exceptOutput.get(i).getName());
projects.add(alias);
}
return new LogicalProject<>(projects, projectChild);
} else if (nonEmptyChildren.size() == except.children().size()) {
return null;
} else {
return except.withChildrenAndTheirOutputs(nonEmptyChildren, nonEmptyOutputsBuilder.build());
}
}
}).toRule(RuleType.ELIMINATE_EXCEPT_ON_EMPTYRELATION)
);
} | .then(project -> | public List<Rule> buildRules() {
return ImmutableList.of(
logicalJoin(any(), any())
.when(join -> hasEmptyRelationChild(join) && canReplaceJoinByEmptyRelation(join)
|| bothChildrenEmpty(join))
.then(join -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
join.getOutput())
)
.toRule(RuleType.ELIMINATE_JOIN_ON_EMPTYRELATION),
logicalFilter(logicalEmptyRelation())
.then(filter -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
filter.getOutput())
).toRule(RuleType.ELIMINATE_FILTER_ON_EMPTYRELATION),
logicalAggregate(logicalEmptyRelation())
.when(agg -> !agg.getGroupByExpressions().isEmpty())
.then(agg -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
agg.getOutput())
).toRule(RuleType.ELIMINATE_AGG_ON_EMPTYRELATION),
logicalProject(logicalEmptyRelation())
.thenApply(ctx -> {
LogicalProject<? extends Plan> project = ctx.root;
return new LogicalEmptyRelation(ConnectContext.get().getStatementContext().getNextRelationId(),
project.getOutputs());
}).toRule(RuleType.ELIMINATE_AGG_ON_EMPTYRELATION),
logicalUnion(multi()).then(union -> {
if (union.children().isEmpty()) {
return null;
}
ImmutableList.Builder<Plan> nonEmptyChildrenBuilder = ImmutableList.builder();
ImmutableList.Builder<List<SlotReference>> nonEmptyOutputsBuilder = ImmutableList.builder();
for (int i = 0; i < union.arity(); i++) {
if (!(union.child(i) instanceof EmptyRelation)) {
nonEmptyChildrenBuilder.add(union.child(i));
nonEmptyOutputsBuilder.add(union.getRegularChildOutput(i));
}
}
List<Plan> nonEmptyChildren = nonEmptyChildrenBuilder.build();
if (nonEmptyChildren.isEmpty()) {
if (union.getConstantExprsList().isEmpty()) {
return new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
union.getOutput());
} else {
return union.withChildrenAndTheirOutputs(ImmutableList.of(), ImmutableList.of());
}
} else if (nonEmptyChildren.size() == 1) {
if (union.getConstantExprsList().isEmpty()) {
Plan child = nonEmptyChildren.get(0);
List<Slot> unionOutput = union.getOutput();
List<Slot> childOutput = child.getOutput();
List<NamedExpression> projects = Lists.newArrayList();
for (int i = 0; i < unionOutput.size(); i++) {
ExprId id = unionOutput.get(i).getExprId();
Alias alias = new Alias(id, childOutput.get(i), unionOutput.get(i).getName());
projects.add(alias);
}
return new LogicalProject<>(projects, child);
}
}
if (union.children().size() != nonEmptyChildren.size()) {
return union.withChildrenAndTheirOutputs(nonEmptyChildren, nonEmptyOutputsBuilder.build());
} else {
return null;
}
}).toRule(RuleType.ELIMINATE_UNION_ON_EMPTYRELATION),
logicalTopN(logicalEmptyRelation())
.then(topn -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
topn.getOutput()))
.toRule(RuleType.ELIMINATE_TOPN_ON_EMPTYRELATION),
logicalSort(logicalEmptyRelation())
.then(sort -> new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
sort.getOutput()))
.toRule(RuleType.ELIMINATE_SORT_ON_EMPTYRELATION),
logicalIntersect(multi()).then(intersect -> {
List<Plan> emptyChildren = intersect.children().stream()
.filter(EmptyRelation.class::isInstance)
.collect(Collectors.toList());
if (emptyChildren.isEmpty()) {
return null;
} else {
return new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
intersect.getOutput());
}
}).toRule(RuleType.ELIMINATE_INTERSECTION_ON_EMPTYRELATION),
logicalLimit(logicalEmptyRelation())
.then(UnaryNode::child)
.toRule(RuleType.ELIMINATE_LIMIT_ON_EMPTY_RELATION),
logicalExcept(multi()).then(except -> {
Plan first = except.child(0);
if (first instanceof EmptyRelation) {
return new LogicalEmptyRelation(
ConnectContext.get().getStatementContext().getNextRelationId(),
except.getOutput());
} else {
ImmutableList.Builder<Plan> nonEmptyChildrenBuilder = ImmutableList.builder();
ImmutableList.Builder<List<SlotReference>> nonEmptyOutputsBuilder = ImmutableList.builder();
for (int i = 0; i < except.arity(); i++) {
if (!(except.child(i) instanceof EmptyRelation)) {
nonEmptyChildrenBuilder.add(except.child(i));
nonEmptyOutputsBuilder.add(except.getRegularChildOutput(i));
}
}
List<Plan> nonEmptyChildren = nonEmptyChildrenBuilder.build();
if (nonEmptyChildren.size() == 1) {
Plan projectChild;
if (except.getQualifier() == SetOperation.Qualifier.DISTINCT) {
List<NamedExpression> firstOutputNamedExpressions = first.getOutput()
.stream().map(slot -> (NamedExpression) slot)
.collect(ImmutableList.toImmutableList());
projectChild = new LogicalAggregate<>(ImmutableList.copyOf(firstOutputNamedExpressions),
firstOutputNamedExpressions, true, Optional.empty(), first);
} else {
projectChild = first;
}
List<Slot> exceptOutput = except.getOutput();
List<Slot> projectInputSlots = projectChild.getOutput();
List<NamedExpression> projects = Lists.newArrayList();
for (int i = 0; i < exceptOutput.size(); i++) {
ExprId id = exceptOutput.get(i).getExprId();
Alias alias = new Alias(id, projectInputSlots.get(i), exceptOutput.get(i).getName());
projects.add(alias);
}
return new LogicalProject<>(projects, projectChild);
} else if (nonEmptyChildren.size() == except.children().size()) {
return null;
} else {
return except.withChildrenAndTheirOutputs(nonEmptyChildren, nonEmptyOutputsBuilder.build());
}
}
}).toRule(RuleType.ELIMINATE_EXCEPT_ON_EMPTYRELATION)
);
} | class EliminateEmptyRelation implements RewriteRuleFactory {
@Override
private boolean hasEmptyRelationChild(LogicalJoin<?, ?> join) {
return join.left() instanceof EmptyRelation || join.right() instanceof EmptyRelation;
}
private boolean bothChildrenEmpty(LogicalJoin<?, ?> join) {
return join.left() instanceof EmptyRelation && join.right() instanceof EmptyRelation;
}
private boolean canReplaceJoinByEmptyRelation(LogicalJoin<?, ?> join) {
return !join.isMarkJoin() && ((join.getJoinType() == JoinType.INNER_JOIN
|| join.getJoinType() == JoinType.LEFT_SEMI_JOIN
|| join.getJoinType() == JoinType.RIGHT_SEMI_JOIN
|| join.getJoinType() == JoinType.CROSS_JOIN)
|| (join.getJoinType() == JoinType.LEFT_OUTER_JOIN && join.left() instanceof EmptyRelation)
|| (join.getJoinType() == JoinType.RIGHT_OUTER_JOIN && join.right() instanceof EmptyRelation));
}
} | class EliminateEmptyRelation implements RewriteRuleFactory {
@Override
private boolean hasEmptyRelationChild(LogicalJoin<?, ?> join) {
return join.left() instanceof EmptyRelation || join.right() instanceof EmptyRelation;
}
private boolean bothChildrenEmpty(LogicalJoin<?, ?> join) {
return join.left() instanceof EmptyRelation && join.right() instanceof EmptyRelation;
}
private boolean canReplaceJoinByEmptyRelation(LogicalJoin<?, ?> join) {
return !join.isMarkJoin() && ((join.getJoinType() == JoinType.INNER_JOIN
|| join.getJoinType() == JoinType.LEFT_SEMI_JOIN
|| join.getJoinType() == JoinType.RIGHT_SEMI_JOIN
|| join.getJoinType() == JoinType.CROSS_JOIN)
|| (join.getJoinType() == JoinType.LEFT_OUTER_JOIN && join.left() instanceof EmptyRelation)
|| (join.getJoinType() == JoinType.RIGHT_OUTER_JOIN && join.right() instanceof EmptyRelation));
}
} |
Not a stupid question at all 😃 While it is unknown how often an `arrayOffset` and/or `position` to be set in a passed `ByteBuffer` using them ensures we're only accessing what is still considered readable by the `ByteBuffer`. For example, I create a `ByteBuffer` with a 4096 byte array, and I know the array has special metadata in the first 128 bytes. So, I'll create a wrapping `ByteBuffer` offset by 128 bytes, so its effective reading range are the bytes [128, 4096). At this point the `toString` logic highlighted will be based on that range, `new String(byte[], 128, 4096-128, UTF_8)` (pardon any off-by-one errors as I'm free-handing this). But, let's say next that I know the range [128, 1024) is either junk data or something unrelated to the string, I'll make the new position 1024-128 as there is an offset of 128. Now, the `toString` becomes `new String(byte[], 128+896, 4096, UTF_8)`. For the second question, this is more difficult on whether the `ByteBuffer`'s position should be mutated, in this case I'd say no as `toString` is more a representation and may result in broken runtime behavior if `transactionId` is fully consumed by it. Also, when debugging, many IDEs will call `toString` when inspecting the object in the debug window, likely breaking debugging. | public String toString() {
if (transactionId.hasArray()) {
return new String(transactionId.array(), transactionId.arrayOffset() + transactionId.position(),
transactionId.remaining(), StandardCharsets.UTF_8);
} else {
return new String(FluxUtil.byteBufferToArray(transactionId.duplicate()), StandardCharsets.UTF_8);
}
} | transactionId.remaining(), StandardCharsets.UTF_8); | public String toString() {
if (transactionId.hasArray()) {
return new String(transactionId.array(), transactionId.arrayOffset() + transactionId.position(),
transactionId.remaining(), StandardCharsets.UTF_8);
} else {
return new String(FluxUtil.byteBufferToArray(transactionId.duplicate()), StandardCharsets.UTF_8);
}
} | class AmqpTransaction {
private final ByteBuffer transactionId;
/**
* Creates {@link AmqpTransaction} given {@code transactionId}.
*
* @param transactionId The id for this transaction.
*
* @throws NullPointerException if {@code transactionId} is null.
*/
public AmqpTransaction(ByteBuffer transactionId) {
this.transactionId = Objects.requireNonNull(transactionId, "'transactionId' cannot be null.");
}
/**
* Gets the id for this transaction.
*
* @return The id for this transaction.
*/
public ByteBuffer getTransactionId() {
return transactionId;
}
/**
* String representation of the transaction id.
*
* @return string representation of the transaction id.
*/
} | class AmqpTransaction {
private final ByteBuffer transactionId;
/**
* Creates {@link AmqpTransaction} given {@code transactionId}.
*
* @param transactionId The id for this transaction.
*
* @throws NullPointerException if {@code transactionId} is null.
*/
public AmqpTransaction(ByteBuffer transactionId) {
this.transactionId = Objects.requireNonNull(transactionId, "'transactionId' cannot be null.");
}
/**
* Gets the id for this transaction.
*
* @return The id for this transaction.
*/
public ByteBuffer getTransactionId() {
return transactionId;
}
/**
* String representation of the transaction id.
*
* @return string representation of the transaction id.
*/
} |
Yes - there is a race. Multiple threads might call setxxxAccessor concurrently - only one thread wil "win" (enter the else statement) - this "winner" will have set the accessor and also the the xxxClassLoaded flag. So threads calling into getxxxAccessor will only not call into the xxx.initialize (with calls setxxxAcccessor) after we are sure the accessor has been set. So, yes - there is a race - and yes - it is thread-safe now. | public static void setCosmosClientBuilderAccessor(final CosmosClientBuilderAccessor newAccessor) {
if (!accessor.compareAndSet(null, newAccessor)) {
logger.debug("CosmosClientBuilderAccessor already initialized!");
} else {
logger.info("Setting CosmosClientBuilderAccessor...");
cosmosClientBuilderClassLoaded.set(true);
}
} | cosmosClientBuilderClassLoaded.set(true); | public static void setCosmosClientBuilderAccessor(final CosmosClientBuilderAccessor newAccessor) {
if (!accessor.compareAndSet(null, newAccessor)) {
logger.debug("CosmosClientBuilderAccessor already initialized!");
} else {
logger.info("Setting CosmosClientBuilderAccessor...");
cosmosClientBuilderClassLoaded.set(true);
}
} | class CosmosClientBuilderHelper {
private static final AtomicReference<CosmosClientBuilderAccessor> accessor = new AtomicReference<>();
private static final AtomicBoolean cosmosClientBuilderClassLoaded = new AtomicBoolean(false);
private CosmosClientBuilderHelper() {}
public static CosmosClientBuilderAccessor getCosmosClientBuilderAccessor() {
if (!cosmosClientBuilderClassLoaded.get()) {
logger.debug("Initializing CosmosClientBuilderAccessor...");
CosmosClientBuilder.doNothingButEnsureLoadingClass();
}
CosmosClientBuilderAccessor snapshot = accessor.get();
if (snapshot == null) {
logger.error("CosmosClientBuilderAccessor is not initialized yet!");
System.exit(9700);
}
return snapshot;
}
public interface CosmosClientBuilderAccessor {
void setCosmosClientMetadataCachesSnapshot(CosmosClientBuilder builder,
CosmosClientMetadataCachesSnapshot metadataCache);
CosmosClientMetadataCachesSnapshot getCosmosClientMetadataCachesSnapshot(CosmosClientBuilder builder);
void setCosmosClientApiType(CosmosClientBuilder builder, ApiType apiType);
ApiType getCosmosClientApiType(CosmosClientBuilder builder);
ConnectionPolicy getConnectionPolicy(CosmosClientBuilder builder);
Configs getConfigs(CosmosClientBuilder builder);
ConsistencyLevel getConsistencyLevel(CosmosClientBuilder builder);
}
} | class CosmosClientBuilderHelper {
private static final AtomicReference<CosmosClientBuilderAccessor> accessor = new AtomicReference<>();
private static final AtomicBoolean cosmosClientBuilderClassLoaded = new AtomicBoolean(false);
private CosmosClientBuilderHelper() {}
public static CosmosClientBuilderAccessor getCosmosClientBuilderAccessor() {
if (!cosmosClientBuilderClassLoaded.get()) {
logger.debug("Initializing CosmosClientBuilderAccessor...");
CosmosClientBuilder.doNothingButEnsureLoadingClass();
}
CosmosClientBuilderAccessor snapshot = accessor.get();
if (snapshot == null) {
logger.error("CosmosClientBuilderAccessor is not initialized yet!");
System.exit(9700);
}
return snapshot;
}
public interface CosmosClientBuilderAccessor {
void setCosmosClientMetadataCachesSnapshot(CosmosClientBuilder builder,
CosmosClientMetadataCachesSnapshot metadataCache);
CosmosClientMetadataCachesSnapshot getCosmosClientMetadataCachesSnapshot(CosmosClientBuilder builder);
void setCosmosClientApiType(CosmosClientBuilder builder, ApiType apiType);
ApiType getCosmosClientApiType(CosmosClientBuilder builder);
ConnectionPolicy getConnectionPolicy(CosmosClientBuilder builder);
Configs getConfigs(CosmosClientBuilder builder);
ConsistencyLevel getConsistencyLevel(CosmosClientBuilder builder);
}
} |
If try-with-resources is used and `/dev/null` feeds the input as well as output, then the `finally` block is not needed anymore. | private static String getLinuxID(String option) {
Process process = null;
BufferedReader reader = null;
try {
StringBuilder responseBuilder = new StringBuilder();
String line;
ProcessBuilder idPB = new ProcessBuilder().command("id", option);
idPB.redirectError(new File("/dev/null"));
try {
process = idPB.start();
process.getOutputStream().close();
reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
while ((line = reader.readLine()) != null) {
responseBuilder.append(line);
}
return responseBuilder.toString();
} finally {
if (process != null) {
process.getInputStream().close();
process.getErrorStream().close();
}
if (reader != null) {
reader.close();
}
}
} catch (Exception e) {
return null;
}
} | process.getInputStream().close(); | private static String getLinuxID(String option) {
Process process;
try {
StringBuilder responseBuilder = new StringBuilder();
String line;
ProcessBuilder idPB = new ProcessBuilder().command("id", option);
idPB.redirectError(new File("/dev/null"));
idPB.redirectOutput(new File("/dev/null"));
process = idPB.start();
try (InputStream inputStream = process.getInputStream()) {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) {
while ((line = reader.readLine()) != null) {
responseBuilder.append(line);
}
safeWaitFor(process);
return responseBuilder.toString();
}
} catch (Throwable t) {
safeWaitFor(process);
throw t;
}
} catch (IOException e) {
return null;
}
} | class NativeImagePhase implements AppCreationPhase<NativeImagePhase>, NativeImageOutcome {
private static final Logger log = Logger.getLogger(NativeImagePhase.class);
private static final String GRAALVM_HOME = "GRAALVM_HOME";
private static final String QUARKUS_PREFIX = "quarkus.";
private static final boolean IS_LINUX = System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("linux");
private Path outputDir;
private boolean reportErrorsAtRuntime;
private boolean debugSymbols;
private boolean debugBuildProcess;
private boolean cleanupServer;
private boolean enableHttpUrlHandler;
private boolean enableHttpsUrlHandler;
private boolean enableAllSecurityServices;
private boolean enableRetainedHeapReporting;
private boolean enableCodeSizeReporting;
private boolean enableIsolates;
private String graalvmHome;
private boolean enableServer;
private boolean enableJni;
private boolean autoServiceLoaderRegistration;
private boolean dumpProxies;
private String nativeImageXmx;
private String builderImage = "quay.io/quarkus/centos-quarkus-native-image:graalvm-1.0.0-rc14";
private String containerRuntime = "";
private List<String> containerRuntimeOptions = new ArrayList<>();
private boolean enableVMInspection;
private boolean fullStackTraces;
private boolean disableReports;
private List<String> additionalBuildArgs;
private boolean addAllCharsets;
public NativeImagePhase setAddAllCharsets(boolean addAllCharsets) {
this.addAllCharsets = addAllCharsets;
return this;
}
public NativeImagePhase setOutputDir(Path outputDir) {
this.outputDir = outputDir;
return this;
}
public NativeImagePhase setReportErrorsAtRuntime(boolean reportErrorsAtRuntime) {
this.reportErrorsAtRuntime = reportErrorsAtRuntime;
return this;
}
public NativeImagePhase setDebugSymbols(boolean debugSymbols) {
this.debugSymbols = debugSymbols;
return this;
}
public NativeImagePhase setDebugBuildProcess(boolean debugBuildProcess) {
this.debugBuildProcess = debugBuildProcess;
return this;
}
public NativeImagePhase setCleanupServer(boolean cleanupServer) {
this.cleanupServer = cleanupServer;
return this;
}
public NativeImagePhase setEnableHttpUrlHandler(boolean enableHttpUrlHandler) {
this.enableHttpUrlHandler = enableHttpUrlHandler;
return this;
}
public NativeImagePhase setEnableHttpsUrlHandler(boolean enableHttpsUrlHandler) {
this.enableHttpsUrlHandler = enableHttpsUrlHandler;
return this;
}
public NativeImagePhase setEnableAllSecurityServices(boolean enableAllSecurityServices) {
this.enableAllSecurityServices = enableAllSecurityServices;
return this;
}
public NativeImagePhase setEnableRetainedHeapReporting(boolean enableRetainedHeapReporting) {
this.enableRetainedHeapReporting = enableRetainedHeapReporting;
return this;
}
public NativeImagePhase setEnableCodeSizeReporting(boolean enableCodeSizeReporting) {
this.enableCodeSizeReporting = enableCodeSizeReporting;
return this;
}
public NativeImagePhase setEnableIsolates(boolean enableIsolates) {
this.enableIsolates = enableIsolates;
return this;
}
public NativeImagePhase setGraalvmHome(String graalvmHome) {
this.graalvmHome = graalvmHome;
return this;
}
public NativeImagePhase setEnableServer(boolean enableServer) {
this.enableServer = enableServer;
return this;
}
public NativeImagePhase setEnableJni(boolean enableJni) {
this.enableJni = enableJni;
return this;
}
public NativeImagePhase setAutoServiceLoaderRegistration(boolean autoServiceLoaderRegistration) {
this.autoServiceLoaderRegistration = autoServiceLoaderRegistration;
return this;
}
public NativeImagePhase setDumpProxies(boolean dumpProxies) {
this.dumpProxies = dumpProxies;
return this;
}
public NativeImagePhase setNativeImageXmx(String nativeImageXmx) {
this.nativeImageXmx = nativeImageXmx;
return this;
}
public NativeImagePhase setDockerBuild(String dockerBuild) {
if (dockerBuild == null) {
return this;
}
if ("false".equals(dockerBuild.toLowerCase())) {
this.containerRuntime = "";
} else {
this.containerRuntime = "docker";
if (!"true".equals(dockerBuild.toLowerCase())) {
this.builderImage = dockerBuild;
}
}
return this;
}
public NativeImagePhase setContainerRuntime(String containerRuntime) {
if (containerRuntime == null) {
return this;
}
if ("podman".equals(containerRuntime) || "docker".equals(containerRuntime)) {
this.containerRuntime = containerRuntime;
} else {
log.warn("container runtime is not docker or podman. fallback to docker");
this.containerRuntime = "docker";
}
return this;
}
public NativeImagePhase setContainerRuntimeOptions(String containerRuntimeOptions) {
if (containerRuntimeOptions != null) {
this.containerRuntimeOptions = Arrays.asList(containerRuntimeOptions.split(","));
}
return this;
}
public NativeImagePhase setEnableVMInspection(boolean enableVMInspection) {
this.enableVMInspection = enableVMInspection;
return this;
}
public NativeImagePhase setFullStackTraces(boolean fullStackTraces) {
this.fullStackTraces = fullStackTraces;
return this;
}
public NativeImagePhase setDisableReports(boolean disableReports) {
this.disableReports = disableReports;
return this;
}
public NativeImagePhase setAdditionalBuildArgs(List<String> additionalBuildArgs) {
this.additionalBuildArgs = additionalBuildArgs;
return this;
}
@Override
public void register(OutcomeProviderRegistration registration) throws AppCreatorException {
registration.provides(NativeImageOutcome.class);
}
@Override
public void provideOutcome(AppCreator ctx) throws AppCreatorException {
outputDir = outputDir == null ? ctx.getWorkPath() : IoUtils.mkdirs(outputDir);
final RunnerJarOutcome runnerJarOutcome = ctx.resolveOutcome(RunnerJarOutcome.class);
Path runnerJar = runnerJarOutcome.getRunnerJar();
boolean runnerJarCopied = false;
if (!runnerJar.getParent().equals(outputDir)) {
try {
runnerJar = IoUtils.copy(runnerJar, outputDir.resolve(runnerJar.getFileName()));
} catch (IOException e) {
throw new AppCreatorException("Failed to copy the runnable jar to the output dir", e);
}
runnerJarCopied = true;
}
final String runnerJarName = runnerJar.getFileName().toString();
Path outputLibDir = outputDir.resolve(runnerJarOutcome.getLibDir().getFileName());
boolean outputLibDirCopied = false;
if (Files.exists(outputLibDir)) {
outputLibDir = null;
} else {
try {
IoUtils.copy(runnerJarOutcome.getLibDir(), outputLibDir);
} catch (IOException e) {
throw new AppCreatorException("Failed to copy the runnable jar and the lib to the docker project dir", e);
}
outputLibDirCopied = true;
}
final Config config = SmallRyeConfigProviderResolver.instance().getConfig();
boolean vmVersionOutOfDate = isThisGraalVMRCObsolete();
HashMap<String, String> env = new HashMap<>(System.getenv());
List<String> nativeImage;
String noPIE = "";
if (!"".equals(containerRuntime)) {
nativeImage = new ArrayList<>();
Collections.addAll(nativeImage, containerRuntime, "run", "-v", outputDir.toAbsolutePath() + ":/project:z", "--rm");
if (IS_LINUX & "docker".equals(containerRuntime)) {
String uid = getLinuxID("-ur");
String gid = getLinuxID("-gr");
if (uid != null & gid != null & !"".equals(uid) & !"".equals(gid)) {
Collections.addAll(nativeImage, "--user", uid.concat(":").concat(gid));
}
}
nativeImage.addAll(containerRuntimeOptions);
nativeImage.add(this.builderImage);
} else {
if (IS_LINUX) {
noPIE = detectNoPIE();
}
String graalvmHome = this.graalvmHome;
if (graalvmHome != null) {
env.put(GRAALVM_HOME, graalvmHome);
} else {
graalvmHome = env.get(GRAALVM_HOME);
if (graalvmHome == null) {
throw new AppCreatorException("GRAALVM_HOME was not set");
}
}
nativeImage = Collections.singletonList(graalvmHome + File.separator + "bin" + File.separator + "native-image");
}
try {
List<String> command = new ArrayList<>();
command.addAll(nativeImage);
if (cleanupServer) {
List<String> cleanup = new ArrayList<>(nativeImage);
cleanup.add("--server-shutdown");
ProcessBuilder pb = new ProcessBuilder(cleanup.toArray(new String[0]));
pb.directory(outputDir.toFile());
pb.redirectInput(ProcessBuilder.Redirect.INHERIT);
pb.redirectOutput(ProcessBuilder.Redirect.INHERIT);
pb.redirectError(ProcessBuilder.Redirect.INHERIT);
Process process = pb.start();
process.waitFor();
}
final Path propsFile = ctx.resolveOutcome(AugmentOutcome.class).getAppClassesDir()
.resolve("native-image.properties");
boolean enableSslNative = false;
if (Files.exists(propsFile)) {
final Properties properties = new Properties();
try (BufferedReader reader = Files.newBufferedReader(propsFile, StandardCharsets.UTF_8)) {
properties.load(reader);
}
for (String propertyName : properties.stringPropertyNames()) {
if (propertyName.startsWith(QUARKUS_PREFIX)) {
continue;
}
final String propertyValue = properties.getProperty(propertyName);
if (propertyValue == null) {
command.add("-J-D" + propertyName);
} else {
command.add("-J-D" + propertyName + "=" + propertyValue);
}
}
enableSslNative = properties.getProperty("quarkus.ssl.native") != null
? Boolean.parseBoolean(properties.getProperty("quarkus.ssl.native"))
: false;
}
if (enableSslNative) {
enableHttpsUrlHandler = true;
enableJni = true;
enableAllSecurityServices = true;
}
if (additionalBuildArgs != null) {
additionalBuildArgs.forEach(command::add);
}
command.add("-H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime");
command.add("-jar");
command.add(runnerJarName);
command.add("-J-Djava.util.concurrent.ForkJoinPool.common.parallelism=1");
if (reportErrorsAtRuntime) {
command.add("-H:+ReportUnsupportedElementsAtRuntime");
}
if (debugSymbols) {
command.add("-g");
}
if (debugBuildProcess) {
command.add("-J-Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=y");
}
if (!disableReports) {
command.add("-H:+PrintAnalysisCallTree");
}
if (dumpProxies) {
command.add("-Dsun.misc.ProxyGenerator.saveGeneratedFiles=true");
if (enableServer) {
log.warn(
"Options dumpProxies and enableServer are both enabled: this will get the proxies dumped in an unknown external working directory");
}
}
if (nativeImageXmx != null) {
command.add("-J-Xmx" + nativeImageXmx);
}
List<String> protocols = new ArrayList<>(2);
if (enableHttpUrlHandler) {
protocols.add("http");
}
if (enableHttpsUrlHandler) {
protocols.add("https");
}
if (addAllCharsets) {
command.add("-H:+AddAllCharsets");
} else {
command.add("-H:-AddAllCharsets");
}
if (!protocols.isEmpty()) {
command.add("-H:EnableURLProtocols=" + String.join(",", protocols));
}
if (enableAllSecurityServices) {
command.add("--enable-all-security-services");
}
if (!noPIE.isEmpty()) {
command.add("-H:NativeLinkerOption=" + noPIE);
}
if (enableRetainedHeapReporting) {
command.add("-H:+PrintRetainedHeapHistogram");
}
if (enableCodeSizeReporting) {
command.add("-H:+PrintCodeSizeReport");
}
if (!enableIsolates) {
command.add("-H:-SpawnIsolates");
}
if (enableJni) {
command.add("-H:+JNI");
} else {
command.add("-H:-JNI");
}
if (!enableServer) {
command.add("--no-server");
}
if (enableVMInspection) {
command.add("-H:+AllowVMInspection");
}
if (autoServiceLoaderRegistration) {
command.add("-H:+UseServiceLoaderFeature");
command.add("-H:+TraceServiceLoaderFeature");
} else {
command.add("-H:-UseServiceLoaderFeature");
}
if (fullStackTraces) {
command.add("-H:+StackTrace");
} else {
command.add("-H:-StackTrace");
}
log.info(command.stream().collect(Collectors.joining(" ")));
CountDownLatch errorReportLatch = new CountDownLatch(1);
ProcessBuilder pb = new ProcessBuilder(command.toArray(new String[0]));
pb.directory(outputDir.toFile());
pb.redirectInput(ProcessBuilder.Redirect.INHERIT);
pb.redirectOutput(ProcessBuilder.Redirect.INHERIT);
Process process = pb.start();
new Thread(new ErrorReplacingProcessReader(process.getErrorStream(), outputDir.resolve("reports").toFile(),
errorReportLatch)).start();
errorReportLatch.await();
if (process.waitFor() != 0) {
throw new RuntimeException("Image generation failed");
}
System.setProperty("native.image.path", runnerJarName.substring(0, runnerJarName.lastIndexOf('.')));
ctx.pushOutcome(NativeImageOutcome.class, this);
} catch (Exception e) {
throw new AppCreatorException("Failed to build native image", e);
} finally {
if (runnerJarCopied) {
IoUtils.recursiveDelete(runnerJar);
}
if (outputLibDirCopied) {
IoUtils.recursiveDelete(outputLibDir);
}
}
}
private boolean isThisGraalVMRCObsolete() {
final String vmName = System.getProperty("java.vm.name");
log.info("Running Quarkus native-image plugin on " + vmName);
final List<String> obsoleteGraalVmVersions = Arrays.asList("-rc9", "-rc10", "-rc11", "-rc12", "-rc13");
final boolean vmVersionIsObsolete = obsoleteGraalVmVersions.stream().anyMatch(vmName::contains);
if (vmVersionIsObsolete) {
log.error("Out of date RC build of GraalVM detected! Please upgrade to RC14");
return true;
}
return false;
}
private static String detectNoPIE() {
String argument = testGCCArgument("-no-pie");
return argument.length() == 0 ? testGCCArgument("-nopie") : argument;
}
private static String testGCCArgument(String argument) {
try {
Process gcc = new ProcessBuilder("cc", "-v", "-E", argument, "-").start();
gcc.getOutputStream().close();
if (gcc.waitFor() == 0) {
return argument;
}
} catch (IOException | InterruptedException e) {
}
return "";
}
@Override
public String getConfigPropertyName() {
return "native-image";
}
@Override
public PropertiesHandler<NativeImagePhase> getPropertiesHandler() {
return new PropertiesHandler<NativeImagePhase>() {
@Override
public NativeImagePhase getTarget() {
return NativeImagePhase.this;
}
@Override
public boolean set(NativeImagePhase t, PropertyContext ctx) {
final String value = ctx.getValue();
switch (ctx.getRelativeName()) {
case "output":
t.setOutputDir(Paths.get(value));
break;
case "report-errors-at-runtime":
t.setReportErrorsAtRuntime(Boolean.parseBoolean(value));
break;
case "debug-symbols":
t.setDebugSymbols(Boolean.parseBoolean(value));
break;
case "debug-build-process":
t.setDebugBuildProcess(Boolean.parseBoolean(value));
break;
case "cleanup-server":
t.setCleanupServer(Boolean.parseBoolean(value));
break;
case "enable-http-url-handler":
t.setEnableHttpUrlHandler(Boolean.parseBoolean(value));
break;
case "enable-https-url-handler":
t.setEnableHttpsUrlHandler(Boolean.parseBoolean(value));
break;
case "enable-all-security-services":
t.setEnableAllSecurityServices(Boolean.parseBoolean(value));
break;
case "enable-retained-heap-reporting":
t.setEnableRetainedHeapReporting(Boolean.parseBoolean(value));
break;
case "enable-code-size-reporting":
t.setEnableCodeSizeReporting(Boolean.parseBoolean(value));
break;
case "enable-isolates":
t.setEnableIsolates(Boolean.parseBoolean(value));
break;
case "graalvm-home":
t.setGraalvmHome(value);
break;
case "enable-server":
t.setEnableServer(Boolean.parseBoolean(value));
break;
case "enable-jni":
t.setEnableJni(Boolean.parseBoolean(value));
break;
case "auto-service-loader-registration":
t.setAutoServiceLoaderRegistration(Boolean.parseBoolean(value));
break;
case "dump-proxies":
t.setDumpProxies(Boolean.parseBoolean(value));
break;
case "native-image-xmx":
t.setNativeImageXmx(value);
break;
case "docker-build":
t.setDockerBuild(value);
break;
case "enable-vm-inspection":
t.setEnableVMInspection(Boolean.parseBoolean(value));
break;
case "full-stack-traces":
t.setFullStackTraces(Boolean.parseBoolean(value));
break;
case "disable-reports":
t.setDisableReports(Boolean.parseBoolean(value));
break;
case "additional-build-args":
t.setAdditionalBuildArgs(Arrays.asList(value.split(",")));
break;
default:
return false;
}
return true;
}
};
}
} | class NativeImagePhase implements AppCreationPhase<NativeImagePhase>, NativeImageOutcome {
private static final Logger log = Logger.getLogger(NativeImagePhase.class);
private static final String GRAALVM_HOME = "GRAALVM_HOME";
private static final String QUARKUS_PREFIX = "quarkus.";
private static final boolean IS_LINUX = System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("linux");
private Path outputDir;
private boolean reportErrorsAtRuntime;
private boolean debugSymbols;
private boolean debugBuildProcess;
private boolean cleanupServer;
private boolean enableHttpUrlHandler;
private boolean enableHttpsUrlHandler;
private boolean enableAllSecurityServices;
private boolean enableRetainedHeapReporting;
private boolean enableCodeSizeReporting;
private boolean enableIsolates;
private String graalvmHome;
private boolean enableServer;
private boolean enableJni;
private boolean autoServiceLoaderRegistration;
private boolean dumpProxies;
private String nativeImageXmx;
private String builderImage = "quay.io/quarkus/centos-quarkus-native-image:graalvm-1.0.0-rc14";
private String containerRuntime = "";
private List<String> containerRuntimeOptions = new ArrayList<>();
private boolean enableVMInspection;
private boolean fullStackTraces;
private boolean disableReports;
private List<String> additionalBuildArgs;
private boolean addAllCharsets;
public NativeImagePhase setAddAllCharsets(boolean addAllCharsets) {
this.addAllCharsets = addAllCharsets;
return this;
}
public NativeImagePhase setOutputDir(Path outputDir) {
this.outputDir = outputDir;
return this;
}
public NativeImagePhase setReportErrorsAtRuntime(boolean reportErrorsAtRuntime) {
this.reportErrorsAtRuntime = reportErrorsAtRuntime;
return this;
}
public NativeImagePhase setDebugSymbols(boolean debugSymbols) {
this.debugSymbols = debugSymbols;
return this;
}
public NativeImagePhase setDebugBuildProcess(boolean debugBuildProcess) {
this.debugBuildProcess = debugBuildProcess;
return this;
}
public NativeImagePhase setCleanupServer(boolean cleanupServer) {
this.cleanupServer = cleanupServer;
return this;
}
public NativeImagePhase setEnableHttpUrlHandler(boolean enableHttpUrlHandler) {
this.enableHttpUrlHandler = enableHttpUrlHandler;
return this;
}
public NativeImagePhase setEnableHttpsUrlHandler(boolean enableHttpsUrlHandler) {
this.enableHttpsUrlHandler = enableHttpsUrlHandler;
return this;
}
public NativeImagePhase setEnableAllSecurityServices(boolean enableAllSecurityServices) {
this.enableAllSecurityServices = enableAllSecurityServices;
return this;
}
public NativeImagePhase setEnableRetainedHeapReporting(boolean enableRetainedHeapReporting) {
this.enableRetainedHeapReporting = enableRetainedHeapReporting;
return this;
}
public NativeImagePhase setEnableCodeSizeReporting(boolean enableCodeSizeReporting) {
this.enableCodeSizeReporting = enableCodeSizeReporting;
return this;
}
public NativeImagePhase setEnableIsolates(boolean enableIsolates) {
this.enableIsolates = enableIsolates;
return this;
}
public NativeImagePhase setGraalvmHome(String graalvmHome) {
this.graalvmHome = graalvmHome;
return this;
}
public NativeImagePhase setEnableServer(boolean enableServer) {
this.enableServer = enableServer;
return this;
}
public NativeImagePhase setEnableJni(boolean enableJni) {
this.enableJni = enableJni;
return this;
}
public NativeImagePhase setAutoServiceLoaderRegistration(boolean autoServiceLoaderRegistration) {
this.autoServiceLoaderRegistration = autoServiceLoaderRegistration;
return this;
}
public NativeImagePhase setDumpProxies(boolean dumpProxies) {
this.dumpProxies = dumpProxies;
return this;
}
public NativeImagePhase setNativeImageXmx(String nativeImageXmx) {
this.nativeImageXmx = nativeImageXmx;
return this;
}
public NativeImagePhase setDockerBuild(String dockerBuild) {
if (dockerBuild == null) {
return this;
}
if ("false".equals(dockerBuild.toLowerCase())) {
this.containerRuntime = "";
} else {
this.containerRuntime = "docker";
if (!"true".equals(dockerBuild.toLowerCase())) {
this.builderImage = dockerBuild;
}
}
return this;
}
public NativeImagePhase setContainerRuntime(String containerRuntime) {
if (containerRuntime == null) {
return this;
}
if ("podman".equals(containerRuntime) || "docker".equals(containerRuntime)) {
this.containerRuntime = containerRuntime;
} else {
log.warn("container runtime is not docker or podman. fallback to docker");
this.containerRuntime = "docker";
}
return this;
}
public NativeImagePhase setContainerRuntimeOptions(String containerRuntimeOptions) {
if (containerRuntimeOptions != null) {
this.containerRuntimeOptions = Arrays.asList(containerRuntimeOptions.split(","));
}
return this;
}
public NativeImagePhase setEnableVMInspection(boolean enableVMInspection) {
this.enableVMInspection = enableVMInspection;
return this;
}
public NativeImagePhase setFullStackTraces(boolean fullStackTraces) {
this.fullStackTraces = fullStackTraces;
return this;
}
public NativeImagePhase setDisableReports(boolean disableReports) {
this.disableReports = disableReports;
return this;
}
public NativeImagePhase setAdditionalBuildArgs(List<String> additionalBuildArgs) {
this.additionalBuildArgs = additionalBuildArgs;
return this;
}
@Override
public void register(OutcomeProviderRegistration registration) throws AppCreatorException {
registration.provides(NativeImageOutcome.class);
}
/**
 * Builds the native image from the runner jar produced by the previous phase.
 * <p>
 * The runner jar and its lib directory are copied into the output directory if not already
 * present, the {@code native-image} command line is assembled (either for a local GraalVM
 * installation or for a container runtime), and the build process is executed. Temporary
 * copies are removed again in the {@code finally} block.
 *
 * @param ctx the app creator context supplying the previous outcomes and work paths
 * @throws AppCreatorException when copying artifacts or running the image build fails
 */
@Override
public void provideOutcome(AppCreator ctx) throws AppCreatorException {
    outputDir = outputDir == null ? ctx.getWorkPath() : IoUtils.mkdirs(outputDir);
    final RunnerJarOutcome runnerJarOutcome = ctx.resolveOutcome(RunnerJarOutcome.class);
    Path runnerJar = runnerJarOutcome.getRunnerJar();
    // Track whether we copied the jar/lib dir so the finally block only deletes our own copies.
    boolean runnerJarCopied = false;
    if (!runnerJar.getParent().equals(outputDir)) {
        try {
            runnerJar = IoUtils.copy(runnerJar, outputDir.resolve(runnerJar.getFileName()));
        } catch (IOException e) {
            throw new AppCreatorException("Failed to copy the runnable jar to the output dir", e);
        }
        runnerJarCopied = true;
    }
    final String runnerJarName = runnerJar.getFileName().toString();
    Path outputLibDir = outputDir.resolve(runnerJarOutcome.getLibDir().getFileName());
    boolean outputLibDirCopied = false;
    if (Files.exists(outputLibDir)) {
        outputLibDir = null;
    } else {
        try {
            IoUtils.copy(runnerJarOutcome.getLibDir(), outputLibDir);
        } catch (IOException e) {
            throw new AppCreatorException("Failed to copy the runnable jar and the lib to the docker project dir", e);
        }
        outputLibDirCopied = true;
    }
    // Both calls are retained for their side effects (config initialization and VM version
    // logging); their results were previously bound to unused locals.
    SmallRyeConfigProviderResolver.instance().getConfig();
    isThisGraalVMRCObsolete();
    HashMap<String, String> env = new HashMap<>(System.getenv());
    List<String> nativeImage;
    String noPIE = "";
    if (!"".equals(containerRuntime)) {
        // Containerized build: mount the output dir into /project; on Linux under docker,
        // run as the current uid:gid so generated files are not owned by root.
        nativeImage = new ArrayList<>();
        Collections.addAll(nativeImage, containerRuntime, "run", "-v", outputDir.toAbsolutePath() + ":/project:z", "--rm");
        if (IS_LINUX && "docker".equals(containerRuntime)) { // && replaces accidental bitwise &
            String uid = getLinuxID("-ur");
            String gid = getLinuxID("-gr");
            if (uid != null && gid != null && !"".equals(uid) && !"".equals(gid)) {
                Collections.addAll(nativeImage, "--user", uid.concat(":").concat(gid));
            }
        }
        nativeImage.addAll(containerRuntimeOptions);
        nativeImage.add(this.builderImage);
    } else {
        if (IS_LINUX) {
            noPIE = detectNoPIE();
        }
        String graalvmHome = this.graalvmHome;
        if (graalvmHome != null) {
            env.put(GRAALVM_HOME, graalvmHome);
        } else {
            graalvmHome = env.get(GRAALVM_HOME);
            if (graalvmHome == null) {
                throw new AppCreatorException("GRAALVM_HOME was not set");
            }
        }
        nativeImage = Collections.singletonList(graalvmHome + File.separator + "bin" + File.separator + "native-image");
    }
    try {
        List<String> command = new ArrayList<>(nativeImage);
        if (cleanupServer) {
            // Shut down any lingering native-image build server before starting a fresh build.
            List<String> cleanup = new ArrayList<>(nativeImage);
            cleanup.add("--server-shutdown");
            ProcessBuilder pb = new ProcessBuilder(cleanup.toArray(new String[0]));
            pb.directory(outputDir.toFile());
            pb.redirectInput(ProcessBuilder.Redirect.INHERIT);
            pb.redirectOutput(ProcessBuilder.Redirect.INHERIT);
            pb.redirectError(ProcessBuilder.Redirect.INHERIT);
            Process process = pb.start();
            process.waitFor();
        }
        final Path propsFile = ctx.resolveOutcome(AugmentOutcome.class).getAppClassesDir()
                .resolve("native-image.properties");
        boolean enableSslNative = false;
        if (Files.exists(propsFile)) {
            // Forward non-quarkus system properties collected during augmentation to the build JVM.
            final Properties properties = new Properties();
            try (BufferedReader reader = Files.newBufferedReader(propsFile, StandardCharsets.UTF_8)) {
                properties.load(reader);
            }
            for (String propertyName : properties.stringPropertyNames()) {
                if (propertyName.startsWith(QUARKUS_PREFIX)) {
                    continue;
                }
                final String propertyValue = properties.getProperty(propertyName);
                if (propertyValue == null) {
                    command.add("-J-D" + propertyName);
                } else {
                    command.add("-J-D" + propertyName + "=" + propertyValue);
                }
            }
            // parseBoolean(null) is false, so the previous null-check ternary is redundant.
            enableSslNative = Boolean.parseBoolean(properties.getProperty("quarkus.ssl.native"));
        }
        if (enableSslNative) {
            // Native SSL support pulls in https, JNI and all security services.
            enableHttpsUrlHandler = true;
            enableJni = true;
            enableAllSecurityServices = true;
        }
        if (additionalBuildArgs != null) {
            command.addAll(additionalBuildArgs);
        }
        command.add("-H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime");
        command.add("-jar");
        command.add(runnerJarName);
        command.add("-J-Djava.util.concurrent.ForkJoinPool.common.parallelism=1");
        if (reportErrorsAtRuntime) {
            command.add("-H:+ReportUnsupportedElementsAtRuntime");
        }
        if (debugSymbols) {
            command.add("-g");
        }
        if (debugBuildProcess) {
            command.add("-J-Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=y");
        }
        if (!disableReports) {
            command.add("-H:+PrintAnalysisCallTree");
        }
        if (dumpProxies) {
            command.add("-Dsun.misc.ProxyGenerator.saveGeneratedFiles=true");
            if (enableServer) {
                log.warn(
                        "Options dumpProxies and enableServer are both enabled: this will get the proxies dumped in an unknown external working directory");
            }
        }
        if (nativeImageXmx != null) {
            command.add("-J-Xmx" + nativeImageXmx);
        }
        List<String> protocols = new ArrayList<>(2);
        if (enableHttpUrlHandler) {
            protocols.add("http");
        }
        if (enableHttpsUrlHandler) {
            protocols.add("https");
        }
        if (addAllCharsets) {
            command.add("-H:+AddAllCharsets");
        } else {
            command.add("-H:-AddAllCharsets");
        }
        if (!protocols.isEmpty()) {
            command.add("-H:EnableURLProtocols=" + String.join(",", protocols));
        }
        if (enableAllSecurityServices) {
            command.add("--enable-all-security-services");
        }
        if (!noPIE.isEmpty()) {
            command.add("-H:NativeLinkerOption=" + noPIE);
        }
        if (enableRetainedHeapReporting) {
            command.add("-H:+PrintRetainedHeapHistogram");
        }
        if (enableCodeSizeReporting) {
            command.add("-H:+PrintCodeSizeReport");
        }
        if (!enableIsolates) {
            command.add("-H:-SpawnIsolates");
        }
        if (enableJni) {
            command.add("-H:+JNI");
        } else {
            command.add("-H:-JNI");
        }
        if (!enableServer) {
            command.add("--no-server");
        }
        if (enableVMInspection) {
            command.add("-H:+AllowVMInspection");
        }
        if (autoServiceLoaderRegistration) {
            command.add("-H:+UseServiceLoaderFeature");
            command.add("-H:+TraceServiceLoaderFeature");
        } else {
            command.add("-H:-UseServiceLoaderFeature");
        }
        if (fullStackTraces) {
            command.add("-H:+StackTrace");
        } else {
            command.add("-H:-StackTrace");
        }
        log.info(String.join(" ", command));
        CountDownLatch errorReportLatch = new CountDownLatch(1);
        ProcessBuilder pb = new ProcessBuilder(command.toArray(new String[0]));
        pb.directory(outputDir.toFile());
        pb.redirectInput(ProcessBuilder.Redirect.INHERIT);
        pb.redirectOutput(ProcessBuilder.Redirect.INHERIT);
        Process process = pb.start();
        // stderr is filtered for report paths on a dedicated thread; wait until it has drained
        // before checking the exit code, so reports are complete on failure.
        new Thread(new ErrorReplacingProcessReader(process.getErrorStream(), outputDir.resolve("reports").toFile(),
                errorReportLatch)).start();
        errorReportLatch.await();
        if (process.waitFor() != 0) {
            throw new RuntimeException("Image generation failed");
        }
        System.setProperty("native.image.path", runnerJarName.substring(0, runnerJarName.lastIndexOf('.')));
        ctx.pushOutcome(NativeImageOutcome.class, this);
    } catch (Exception e) {
        throw new AppCreatorException("Failed to build native image", e);
    } finally {
        if (runnerJarCopied) {
            IoUtils.recursiveDelete(runnerJar);
        }
        if (outputLibDirCopied) {
            IoUtils.recursiveDelete(outputLibDir);
        }
    }
}
/**
 * Logs the VM this plugin runs on and reports whether it is one of the known-obsolete
 * GraalVM RC builds.
 *
 * @return true when the VM name matches an obsolete RC version, false otherwise
 */
private boolean isThisGraalVMRCObsolete() {
    String vmName = System.getProperty("java.vm.name");
    log.info("Running Quarkus native-image plugin on " + vmName);
    for (String obsoleteVersion : new String[] { "-rc9", "-rc10", "-rc11", "-rc12", "-rc13" }) {
        if (vmName.contains(obsoleteVersion)) {
            log.error("Out of date RC build of GraalVM detected! Please upgrade to RC14");
            return true;
        }
    }
    return false;
}
/**
 * Waits for the process to exit, retrying through interrupts; if an interrupt occurred
 * while waiting, the thread's interrupt status is restored before returning.
 */
static void safeWaitFor(Process process) {
    boolean interrupted = false;
    try {
        while (true) {
            try {
                process.waitFor();
                return;
            } catch (InterruptedException ex) {
                interrupted = true;
            }
        }
    } finally {
        if (interrupted) {
            Thread.currentThread().interrupt();
        }
    }
}
/** Probes the system C compiler for the no-PIE linker flag, trying "-no-pie" first, then "-nopie". */
private static String detectNoPIE() {
    String argument = testGCCArgument("-no-pie");
    if (!argument.isEmpty()) {
        return argument;
    }
    return testGCCArgument("-nopie");
}
/**
 * Checks whether the local C compiler accepts the given command-line argument.
 *
 * @param argument the flag to probe (e.g. "-no-pie")
 * @return the argument itself when "cc" accepts it, or an empty string when the flag is
 *         rejected or the compiler cannot be executed at all
 */
private static String testGCCArgument(String argument) {
    try {
        Process gcc = new ProcessBuilder("cc", "-v", "-E", argument, "-").start();
        gcc.getOutputStream().close();
        if (gcc.waitFor() == 0) {
            return argument;
        }
    } catch (IOException e) {
        // "cc" is missing or failed to launch; treat the flag as unsupported.
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of silently swallowing it, so callers
        // further up the stack can still observe the cancellation request.
        Thread.currentThread().interrupt();
    }
    return "";
}
/** @return the configuration namespace of this phase ("native-image") */
@Override
public String getConfigPropertyName() {
    return "native-image";
}
/**
 * Maps the flat "native-image.*" configuration properties onto this phase's setters.
 * Unknown property names are reported by returning {@code false} from {@code set}.
 */
@Override
public PropertiesHandler<NativeImagePhase> getPropertiesHandler() {
    return new PropertiesHandler<NativeImagePhase>() {
        @Override
        public PropertiesHandler<NativeImagePhase> getTarget() {
            return NativeImagePhase.this;
        }

        @Override
        public boolean set(NativeImagePhase t, PropertyContext ctx) {
            final String value = ctx.getValue();
            // One case per supported property; boolean-valued options go through Boolean.parseBoolean.
            switch (ctx.getRelativeName()) {
                case "output":
                    t.setOutputDir(Paths.get(value));
                    break;
                case "report-errors-at-runtime":
                    t.setReportErrorsAtRuntime(Boolean.parseBoolean(value));
                    break;
                case "debug-symbols":
                    t.setDebugSymbols(Boolean.parseBoolean(value));
                    break;
                case "debug-build-process":
                    t.setDebugBuildProcess(Boolean.parseBoolean(value));
                    break;
                case "cleanup-server":
                    t.setCleanupServer(Boolean.parseBoolean(value));
                    break;
                case "enable-http-url-handler":
                    t.setEnableHttpUrlHandler(Boolean.parseBoolean(value));
                    break;
                case "enable-https-url-handler":
                    t.setEnableHttpsUrlHandler(Boolean.parseBoolean(value));
                    break;
                case "enable-all-security-services":
                    t.setEnableAllSecurityServices(Boolean.parseBoolean(value));
                    break;
                case "enable-retained-heap-reporting":
                    t.setEnableRetainedHeapReporting(Boolean.parseBoolean(value));
                    break;
                case "enable-code-size-reporting":
                    t.setEnableCodeSizeReporting(Boolean.parseBoolean(value));
                    break;
                case "enable-isolates":
                    t.setEnableIsolates(Boolean.parseBoolean(value));
                    break;
                case "graalvm-home":
                    t.setGraalvmHome(value);
                    break;
                case "enable-server":
                    t.setEnableServer(Boolean.parseBoolean(value));
                    break;
                case "enable-jni":
                    t.setEnableJni(Boolean.parseBoolean(value));
                    break;
                case "auto-service-loader-registration":
                    t.setAutoServiceLoaderRegistration(Boolean.parseBoolean(value));
                    break;
                case "dump-proxies":
                    t.setDumpProxies(Boolean.parseBoolean(value));
                    break;
                case "native-image-xmx":
                    t.setNativeImageXmx(value);
                    break;
                case "docker-build":
                    t.setDockerBuild(value);
                    break;
                case "enable-vm-inspection":
                    t.setEnableVMInspection(Boolean.parseBoolean(value));
                    break;
                case "full-stack-traces":
                    t.setFullStackTraces(Boolean.parseBoolean(value));
                    break;
                case "disable-reports":
                    t.setDisableReports(Boolean.parseBoolean(value));
                    break;
                case "additional-build-args":
                    // Comma-separated list of extra native-image arguments.
                    t.setAdditionalBuildArgs(Arrays.asList(value.split(",")));
                    break;
                default:
                    // Unknown property under the native-image namespace.
                    return false;
            }
            return true;
        }
    };
}
} |
Good catch — the lower-casing is redundant here, since the name has already been validated to contain only lower-case characters. | public CloudEvent addExtensionAttribute(String name, Object value) {
if (Objects.isNull(name)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'name' cannot be null."));
}
if (Objects.isNull(value)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'value' cannot be null."));
}
if (!validateAttributeName(name)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"'name' must have only small-case alphanumeric characters and not be one of the CloudEvent reserved "
+ "attribute names"));
}
if (this.extensionAttributes == null) {
this.extensionAttributes = new HashMap<>();
}
this.extensionAttributes.put(name.toLowerCase(Locale.ENGLISH), value);
return this;
} | this.extensionAttributes.put(name.toLowerCase(Locale.ENGLISH), value); | public CloudEvent addExtensionAttribute(String name, Object value) {
Objects.requireNonNull(name, "'name' cannot be null.");
Objects.requireNonNull(value, "'value' cannot be null.");
if (!validateAttributeName(name)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"Extension attribute 'name' must have only lower-case alphanumeric characters and not be one of the "
+ "CloudEvent reserved attribute names: " + String.join(",", RESERVED_ATTRIBUTE_NAMES)));
}
if (this.extensionAttributes == null) {
this.extensionAttributes = new HashMap<>();
}
this.extensionAttributes.put(name, value);
return this;
} | class CloudEvent {
private static final String SPEC_VERSION = "1.0";
private static final JsonSerializer SERIALIZER;
static {
    // Prefer a JsonSerializer discovered on the classpath; fall back to the bundled
    // Jackson-based implementation when no provider is registered.
    JsonSerializer tmp;
    try {
        tmp = JsonSerializerProviders.createInstance();
    } catch (IllegalStateException e) {
        tmp = new JacksonSerializer();
    }
    SERIALIZER = tmp;
}
private static final ClientLogger LOGGER = new ClientLogger(CloudEvent.class);
private static final Set<String> RESERVED_ATTRIBUTE_NAMES = new HashSet<>(Arrays.asList(
"specversion",
"id",
"source",
"type",
"datacontenttype",
"dataschema",
"subject",
"time",
"data"
));
/*
* An identifier for the event. The combination of id and source must be
* unique for each distinct event.
*/
@JsonProperty(value = "id", required = true)
private String id;
/*
* Identifies the context in which an event happened. The combination of id
* and source must be unique for each distinct event.
*/
@JsonProperty(value = "source", required = true)
private String source;
/*
* Event data specific to the event type.
*/
@JsonProperty(value = "data")
private Object data;
/*
* Event data specific to the event type, encoded as a base64 string.
*/
@JsonProperty(value = "data_base64")
private String dataBase64;
/*
* Type of event related to the originating occurrence.
*/
@JsonProperty(value = "type", required = true)
private String type;
/*
* The time (in UTC) the event was generated, in RFC3339 format.
*/
@JsonProperty(value = "time")
private OffsetDateTime time;
/*
* The version of the CloudEvents specification which the event uses.
*/
@JsonProperty(value = "specversion", required = true)
private String specVersion;
/*
* Identifies the schema that data adheres to.
*/
@JsonProperty(value = "dataschema")
private String dataSchema;
/*
* Content type of data value.
*/
@JsonProperty(value = "datacontenttype")
private String dataContentType;
/*
* This describes the subject of the event in the context of the event
* producer (identified by source).
*/
@JsonProperty(value = "subject")
private String subject;
@JsonIgnore
private Map<String, Object> extensionAttributes;
/*
* Cache serialized data for getData()
*/
@JsonIgnore
private BinaryData binaryData;
/**
*
* @param source Identifies the context in which an event happened. The combination of id and source must be unique
* for each distinct event.
* @param type Type of event related to the originating occurrence.
* @param data A {@link BinaryData} that wraps the original data, which can be a String, byte[], or model class.
* @param format Set to {@link CloudEventDataFormat
* {@link CloudEventDataFormat
* @param dataContentType The content type of the data. It has no impact on how the data is serialized but tells
* the event subscriber how to use the data. Typically the value is of MIME types such as
* "application/json", "text/plain", "text/xml", "application/+avro", etc. It can be null.
* @throws NullPointerException if source, type, data, or format is null.
*/
public CloudEvent(String source, String type, BinaryData data, CloudEventDataFormat format, String dataContentType) {
    // 'source', 'type', 'data' and 'format' are mandatory; 'dataContentType' may be null.
    if (Objects.isNull(source)) {
        throw LOGGER.logExceptionAsError(new NullPointerException("'source' cannot be null."));
    }
    if (Objects.isNull(type)) {
        throw LOGGER.logExceptionAsError(new NullPointerException("'type' cannot be null."));
    }
    if (Objects.isNull(data)) {
        throw LOGGER.logExceptionAsError(new NullPointerException("'data' cannot be null."));
    }
    if (Objects.isNull(format)) {
        throw LOGGER.logExceptionAsError(new NullPointerException("'format' cannot be null."));
    }
    this.source = source;
    this.type = type;
    if (CloudEventDataFormat.BYTES == format) {
        // Binary payloads travel in the 'data_base64' field, base64-encoded.
        this.dataBase64 = Base64.getEncoder().encodeToString(data.toBytes());
    } else {
        // JSON payloads are stored as their string form in 'data'.
        this.data = data.toString();
    }
    this.dataContentType = dataContentType;
    // A random id by default; callers may override it via setId.
    this.id = UUID.randomUUID().toString();
    this.specVersion = CloudEvent.SPEC_VERSION;
}
private CloudEvent() {
    // No-args constructor reserved for deserialization; fields are populated afterwards.
}
/**
* Deserialize a list of {@link CloudEvent CloudEvents} from a JSON string and validate whether any CloudEvents have
* null id', 'source', or 'type'. If you want to skip this validation, use {@link
* @param cloudEventsJson the JSON payload containing one or more events.
*
* @return all of the events in the payload deserialized as {@link CloudEvent CloudEvents}.
* @throws NullPointerException if cloudEventsJson is null.
* @throws IllegalArgumentException if the input parameter isn't a correct JSON string for a cloud event
* or an array of it, or any deserialized CloudEvents have null 'id', 'source', or 'type'.
*/
public static List<CloudEvent> fromString(String cloudEventsJson) {
    // Delegates with validation enabled: 'id', 'source' and 'type' must be present.
    return fromString(cloudEventsJson, false);
}
/**
* Deserialize a list of {@link CloudEvent CloudEvents} from a JSON string.
* @param cloudEventsJson the JSON payload containing one or more events.
* @param skipValidation set to true if you'd like to skip the validation for the deserialized CloudEvents. A valid
* CloudEvent should have 'id', 'source' and 'type' not null.
*
* @return all of the events in the payload deserialized as {@link CloudEvent CloudEvents}.
* @throws NullPointerException if cloudEventsJson is null.
* @throws IllegalArgumentException if the input parameter isn't a JSON string for a cloud event or an array of it,
* or skipValidation is false and any CloudEvents have null id', 'source', or 'type'.
*/
public static List<CloudEvent> fromString(String cloudEventsJson, boolean skipValidation) {
    if (cloudEventsJson == null) {
        throw LOGGER.logExceptionAsError(new NullPointerException("'cloudEventsJson' cannot be null"));
    }
    try {
        // The payload may be a single event or an array; the serializer handles both via CloudEvent[].
        List<CloudEvent> events = Arrays.asList(SERIALIZER.deserialize(
                new ByteArrayInputStream(cloudEventsJson.getBytes(StandardCharsets.UTF_8)),
                TypeReference.createInstance(CloudEvent[].class)));
        if (!skipValidation) {
            // A valid CloudEvent must carry non-null 'id', 'source' and 'type'.
            for (CloudEvent event : events) {
                if (event.getId() == null || event.getSource() == null || event.getType() == null) {
                    throw LOGGER.logExceptionAsError(new IllegalArgumentException(
                            "'id', 'source' and 'type' are mandatory attributes for a CloudEvent. "
                                    + "Check if the input param is a JSON string for a CloudEvent or an array of it."));
                }
            }
        }
        return events;
    } catch (UncheckedIOException uncheckedIOException) {
        // The serializer wraps JSON parse failures in UncheckedIOException; surface the cause.
        throw LOGGER.logExceptionAsError(new IllegalArgumentException("The input parameter isn't a JSON string.",
                uncheckedIOException.getCause()));
    }
}
/**
* Get the id of the cloud event.
* @return the id.
*/
public String getId() {
    // Defaults to the random UUID assigned in the constructor unless overridden via setId.
    return this.id;
}
/**
* Set a custom id. Note that a random id is already set by default.
* @param id the id to set.
*
* @return the cloud event itself.
* @throws NullPointerException if id is null.
* @throws IllegalArgumentException if id is empty.
*/
/**
 * Replaces the default random id with a caller-supplied value.
 *
 * @param id the new id; must be non-null and non-empty
 * @return this CloudEvent
 */
public CloudEvent setId(String id) {
    if (id == null) {
        throw LOGGER.logExceptionAsError(new NullPointerException("id cannot be null"));
    }
    if (id.isEmpty()) {
        throw LOGGER.logExceptionAsError(new IllegalArgumentException("id cannot be empty"));
    }
    this.id = id;
    return this;
}
/**
* Get the URI source of the event.
* @return the source.
*/
public String getSource() {
    // Set once in the constructor; identifies the context in which the event happened.
    return this.source;
}
/**
* Get the data associated with this event as a {@link BinaryData}, which has API to deserialize the data into
* a String, an Object, or a byte[].
* @return A {@link BinaryData} that wraps the this event's data payload.
*/
public BinaryData getData() {
    // Lazily build and cache the BinaryData view of the payload.
    if (this.binaryData == null) {
        if (this.data != null) {
            if (this.data instanceof String) {
                this.binaryData = BinaryData.fromString((String) this.data);
            } else if (this.data instanceof byte[]) {
                this.binaryData = BinaryData.fromBytes((byte[]) this.data);
            } else {
                // Model objects are serialized with the configured JSON serializer.
                this.binaryData = BinaryData.fromObject(this.data, SERIALIZER);
            }
        } else if (this.dataBase64 != null) {
            // NOTE(review): this wraps the base64 text itself, not the decoded bytes —
            // confirm that callers are expected to decode it themselves.
            this.binaryData = BinaryData.fromString(this.dataBase64);
        }
    }
    return this.binaryData;
}
/**
* Get the type of event, e.g. "Contoso.Items.ItemReceived".
* @return the type of the event.
*/
public String getType() {
    return this.type; // mandatory attribute, set in the constructor
}
/**
* Get the time associated with the occurrence of the event.
* @return the event time, or null if the time is not set.
*/
public OffsetDateTime getTime() {
    return this.time; // null until explicitly set via setTime
}
/**
* Set the time associated with the occurrence of the event.
* @param time the time to set.
*
* @return the cloud event itself.
*/
public CloudEvent setTime(OffsetDateTime time) {
    this.time = time; // optional attribute; no validation performed
    return this;
}
/**
* Get the content MIME type that the data is in. A null value indicates that the data is either nonexistent or in the
* "application/json" type. Note that "application/json" is still a possible value for this field.
* @return the content type the data is in, or null if the data is nonexistent or in "application/json" format.
*/
public String getDataContentType() {
    return this.dataContentType; // may be null, as documented above
}
/**
* Get the schema that the data adheres to.
* @return a URI of the data schema, or null if it is not set.
*/
public String getDataSchema() {
    return this.dataSchema; // null when no schema was set
}
/**
* Set the schema that the data adheres to.
* @param dataSchema a URI identifying the schema of the data.
*
* @return the cloud event itself.
*/
public CloudEvent setDataSchema(String dataSchema) {
    this.dataSchema = dataSchema; // optional attribute; stored as-is
    return this;
}
/**
* Get the subject associated with this event.
* @return the subject, or null if the subject was not set.
*/
public String getSubject() {
    return this.subject; // null when no subject was set
}
/**
* Set the subject of the event.
* @param subject the subject to set.
*
* @return the cloud event itself.
*/
public CloudEvent setSubject(String subject) {
    this.subject = subject; // optional attribute; stored as-is
    return this;
}
/**
* Get a map of the additional user-defined attributes associated with this event.
* @return the extension attributes as an unmodifiable map.
*/
@JsonAnyGetter
public Map<String, Object> getExtensionAttributes() {
    // NOTE(review): this returns the live internal map (possibly null), not an unmodifiable
    // view as the Javadoc above suggests — confirm whether callers rely on mutating it.
    return this.extensionAttributes;
}
/**
* Add/Overwrite a single extension attribute to the cloud event.
* @param name the name of the attribute. It must contains only alphanumeric characters and not be be any
* CloudEvent reserved attribute names.
* @param value the value to associate with the name.
*
* @return the cloud event itself.
* @throws IllegalArgumentException if name format isn't correct.
*/
@JsonAnySetter
/**
* Get the spec version. Users don't need to access it because it's always 1.0.
* Make it package level to test deserialization.
* @return The spec version.
*/
String getSpecVersion() {
    return this.specVersion; // normally "1.0"; package-private for deserialization tests
}
/**
* Set the spec version. Users don't need to access it because it's always 1.0.
* Make it package level to test serialization.
* @return the cloud event itself.
*/
CloudEvent setSpecVersion(String specVersion) {
    this.specVersion = specVersion; // package-private, intended for serialization tests
    return this;
}
/**
 * An extension attribute name is valid when it is not a reserved CloudEvent attribute
 * name and consists solely of lower-case ASCII letters and digits.
 */
private static boolean validateAttributeName(String name) {
    if (RESERVED_ATTRIBUTE_NAMES.contains(name)) {
        return false;
    }
    return name.chars().allMatch(c -> (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9'));
}
/**
 * Fallback {@link JsonSerializer} backed by the core {@code JacksonAdapter}, used when no
 * serializer provider is discovered on the classpath.
 */
static class JacksonSerializer implements JsonSerializer {
    private final JacksonAdapter jacksonAdapter = new JacksonAdapter();

    @Override
    public <T> T deserialize(InputStream stream, TypeReference<T> typeReference) {
        try {
            return jacksonAdapter.deserialize(stream, typeReference.getJavaType(), SerializerEncoding.JSON);
        } catch (IOException e) {
            // Surface I/O problems as unchecked; LOGGER records them before rethrowing.
            throw LOGGER.logExceptionAsError(new RuntimeException(e));
        }
    }

    @Override
    public <T> Mono<T> deserializeAsync(InputStream stream, TypeReference<T> typeReference) {
        // Defer so the blocking deserialization runs only on subscription.
        return Mono.defer(() -> Mono.just(deserialize(stream, typeReference)));
    }

    @Override
    public void serialize(OutputStream stream, Object value) {
        try {
            jacksonAdapter.serialize(value, SerializerEncoding.JSON, stream);
        } catch (IOException e) {
            throw LOGGER.logExceptionAsError(new RuntimeException(e));
        }
    }

    @Override
    public Mono<Void> serializeAsync(OutputStream stream, Object value) {
        return Mono.fromRunnable(() -> serialize(stream, value));
    }

    JacksonAdapter getJacksonAdapter() {
        return jacksonAdapter; // package-private accessor — presumably for tests; confirm
    }
}
} | class accepts any String for compatibility with legacy systems.
* @param type Type of event related to the originating occurrence.
* @param data A {@link BinaryData} |
This generated expression is used only for filtering files; the expression sent to the BE follows a separate code path. Because this struct-type-based reduction has not been fully tested, and the existing Delta Lake implementation does not support it, keep this logic unchanged for now. | private static DeltaDataType getColumnType(String qualifiedName, DeltaLakeContext context) {
StructType structType = context.getSchema();
StructField field = structType.get(qualifiedName);
if (field != null) {
DeltaDataType type = DeltaDataType.instanceFrom(field.getDataType().getClass());
if (type == DeltaDataType.TIMESTAMP && context.isPartitionColumn(qualifiedName)) {
return DeltaDataType.TIMESTAMP_NTZ;
} else {
return type;
}
} else {
return DeltaDataType.OTHER;
}
} | private static DeltaDataType getColumnType(String qualifiedName, DeltaLakeContext context) {
StructType structType = context.getSchema();
StructField field = structType.get(qualifiedName);
if (field != null) {
DeltaDataType type = DeltaDataType.instanceFrom(field.getDataType().getClass());
if (type == DeltaDataType.TIMESTAMP && context.isPartitionColumn(qualifiedName)) {
return DeltaDataType.TIMESTAMP_NTZ;
} else {
return type;
}
} else {
return DeltaDataType.OTHER;
}
} | class DeltaLakeExprVisitor extends ScalarOperatorVisitor<Predicate, DeltaLakeContext> {
// Resolves the Delta data type of the named column; requires a non-null context.
private static DeltaDataType getResultType(String columnName, DeltaLakeContext context) {
    Preconditions.checkNotNull(context);
    return getColumnType(columnName, context);
}
/**
 * Translates an AND/OR/NOT compound predicate. Returns null when any operand cannot be
 * translated, so the caller skips the whole predicate rather than filtering partially.
 */
@Override
public Predicate visitCompoundPredicate(CompoundPredicateOperator operator, DeltaLakeContext context) {
    CompoundPredicateOperator.CompoundType op = operator.getCompoundType();
    if (op == CompoundPredicateOperator.CompoundType.NOT) {
        // NOT over LIKE is not convertible; give up on this predicate.
        if (operator.getChild(0) instanceof LikePredicateOperator) {
            return null;
        }
        Predicate predicate = operator.getChild(0).accept(this, context);
        if (predicate != null) {
            return new Predicate("NOT", predicate);
        }
    } else {
        Predicate left = operator.getChild(0).accept(this, context);
        Predicate right = operator.getChild(1).accept(this, context);
        // Both sides must translate; a one-sided AND/OR would change filter semantics.
        if (left != null && right != null) {
            return (op == CompoundPredicateOperator.CompoundType.OR) ?
                    new Or(left, right) : new And(left, right);
        }
    }
    return null;
}
/**
 * Translates IS NULL / IS NOT NULL; returns null when the child is not a plain column
 * reference (getColumnName yields null).
 */
@Override
public Predicate visitIsNullPredicate(IsNullPredicateOperator operator, DeltaLakeContext context) {
    String columnName = getColumnName(operator.getChild(0));
    if (columnName == null) {
        return null;
    }
    Column column = context.getColumn(columnName);
    if (operator.isNotNull()) {
        return new Predicate("IS_NOT_NULL", column);
    } else {
        return new Predicate("IS_NULL", column);
    }
}
/**
 * Translates column-vs-literal comparisons (<, <=, >, >=, =, !=). Returns null when the
 * left side is not a column, the literal cannot be converted, or the operator kind has no
 * mapping; NE is expressed as NOT(=).
 */
@Override
public Predicate visitBinaryPredicate(BinaryPredicateOperator operator, DeltaLakeContext context) {
    String columnName = getColumnName(operator.getChild(0));
    if (columnName == null) {
        return null;
    }
    Column column = context.getColumn(columnName);
    DeltaDataType resultType = getResultType(columnName, context);
    Literal literal = getLiteral(operator.getChild(1), resultType);
    if (literal == null) {
        return null;
    }
    switch (operator.getBinaryType()) {
        case LT:
            return new Predicate("<", column, literal);
        case LE:
            return new Predicate("<=", column, literal);
        case GT:
            return new Predicate(">", column, literal);
        case GE:
            return new Predicate(">=", column, literal);
        case EQ:
            return new Predicate("=", column, literal);
        case NE:
            return new Predicate("NOT", new Predicate("=", column, literal));
        default:
            // Remaining binary kinds are not pushed down.
            return null;
    }
}
// Fallback for operator kinds without a translation: returning null skips the push-down.
@Override
public Predicate visit(ScalarOperator scalarOperator, DeltaLakeContext context) {
    return null;
}
// Converts a scalar operand to a Literal of the target Delta type; ExtractLiteralValue
// decides convertibility and may return null.
private static Literal getLiteral(ScalarOperator operator, DeltaDataType deltaDataType) {
    if (operator == null) {
        return null;
    }
    return operator.accept(new ExtractLiteralValue(), deltaDataType);
}
} | class DeltaLakeExprVisitor extends ScalarOperatorVisitor<Predicate, DeltaLakeContext> {
// Resolves the Delta data type of the named column; requires a non-null context.
private static DeltaDataType getResultType(String columnName, DeltaLakeContext context) {
    Preconditions.checkNotNull(context);
    return getColumnType(columnName, context);
}
/**
 * Translates an AND/OR/NOT compound predicate. Returns null when any operand cannot be
 * translated, so the caller skips the whole predicate rather than filtering partially.
 */
@Override
public Predicate visitCompoundPredicate(CompoundPredicateOperator operator, DeltaLakeContext context) {
    CompoundPredicateOperator.CompoundType op = operator.getCompoundType();
    if (op == CompoundPredicateOperator.CompoundType.NOT) {
        // NOT over LIKE is not convertible; give up on this predicate.
        if (operator.getChild(0) instanceof LikePredicateOperator) {
            return null;
        }
        Predicate predicate = operator.getChild(0).accept(this, context);
        if (predicate != null) {
            return new Predicate("NOT", predicate);
        }
    } else {
        Predicate left = operator.getChild(0).accept(this, context);
        Predicate right = operator.getChild(1).accept(this, context);
        // Both sides must translate; a one-sided AND/OR would change filter semantics.
        if (left != null && right != null) {
            return (op == CompoundPredicateOperator.CompoundType.OR) ?
                    new Or(left, right) : new And(left, right);
        }
    }
    return null;
}
/**
 * Translates IS NULL / IS NOT NULL; returns null when the child is not a plain column
 * reference (getColumnName yields null).
 */
@Override
public Predicate visitIsNullPredicate(IsNullPredicateOperator operator, DeltaLakeContext context) {
    String columnName = getColumnName(operator.getChild(0));
    if (columnName == null) {
        return null;
    }
    Column column = context.getColumn(columnName);
    if (operator.isNotNull()) {
        return new Predicate("IS_NOT_NULL", column);
    } else {
        return new Predicate("IS_NULL", column);
    }
}
/**
 * Translates column-vs-literal comparisons (<, <=, >, >=, =, !=). Returns null when the
 * left side is not a column, the literal cannot be converted, or the operator kind has no
 * mapping; NE is expressed as NOT(=).
 */
@Override
public Predicate visitBinaryPredicate(BinaryPredicateOperator operator, DeltaLakeContext context) {
    String columnName = getColumnName(operator.getChild(0));
    if (columnName == null) {
        return null;
    }
    Column column = context.getColumn(columnName);
    DeltaDataType resultType = getResultType(columnName, context);
    Literal literal = getLiteral(operator.getChild(1), resultType);
    if (literal == null) {
        return null;
    }
    switch (operator.getBinaryType()) {
        case LT:
            return new Predicate("<", column, literal);
        case LE:
            return new Predicate("<=", column, literal);
        case GT:
            return new Predicate(">", column, literal);
        case GE:
            return new Predicate(">=", column, literal);
        case EQ:
            return new Predicate("=", column, literal);
        case NE:
            return new Predicate("NOT", new Predicate("=", column, literal));
        default:
            // Remaining binary kinds are not pushed down.
            return null;
    }
}
// Fallback for operator kinds without a translation: returning null skips the push-down.
@Override
public Predicate visit(ScalarOperator scalarOperator, DeltaLakeContext context) {
    return null;
}
// Converts a scalar operand to a Literal of the target Delta type; ExtractLiteralValue
// decides convertibility and may return null.
private static Literal getLiteral(ScalarOperator operator, DeltaDataType deltaDataType) {
    if (operator == null) {
        return null;
    }
    return operator.accept(new ExtractLiteralValue(), deltaDataType);
}
} |
|
I will remove the wrapper and let the CI run to verify the change. | public Consumer<Route> start() {
List<Handler<RoutingContext>> handlers = new ArrayList<>();
if (hotDeploymentResourcePaths != null && !hotDeploymentResourcePaths.isEmpty()) {
for (Path resourcePath : hotDeploymentResourcePaths) {
String root = resourcePath.toAbsolutePath().toString();
ThreadLocalHandler staticHandler = new ThreadLocalHandler(new Supplier<Handler<RoutingContext>>() {
@Override
public Handler<RoutingContext> get() {
StaticHandler staticHandler = StaticHandler.create();
staticHandler.setCachingEnabled(false);
staticHandler.setAllowRootFileSystemAccess(true);
staticHandler.setWebRoot(root);
staticHandler.setDefaultContentEncoding("UTF-8");
return staticHandler;
}
});
handlers.add(event -> {
try {
staticHandler.handle(event);
} catch (Exception e) {
event.next();
}
});
}
}
if (!knownPaths.isEmpty()) {
ThreadLocalHandler staticHandler = new ThreadLocalHandler(new Supplier<Handler<RoutingContext>>() {
@Override
public Handler<RoutingContext> get() {
return StaticHandler.create(META_INF_RESOURCES)
.setDefaultContentEncoding("UTF-8");
}
});
handlers.add(ctx -> {
String rel = ctx.mountPoint() == null ? ctx.normalisedPath()
: ctx.normalisedPath().substring(ctx.mountPoint().length());
if (knownPaths.contains(rel)) {
staticHandler.handle(ctx);
} else {
ctx.next();
}
});
}
return new Consumer<Route>() {
@Override
public void accept(Route route) {
for (Handler<RoutingContext> i : handlers) {
route.handler(i);
}
}
};
} | ThreadLocalHandler staticHandler = new ThreadLocalHandler(new Supplier<Handler<RoutingContext>>() { | public Consumer<Route> start() {
List<Handler<RoutingContext>> handlers = new ArrayList<>();
if (hotDeploymentResourcePaths != null && !hotDeploymentResourcePaths.isEmpty()) {
for (Path resourcePath : hotDeploymentResourcePaths) {
String root = resourcePath.toAbsolutePath().toString();
StaticHandler staticHandler = StaticHandler.create();
staticHandler.setCachingEnabled(false);
staticHandler.setAllowRootFileSystemAccess(true);
staticHandler.setWebRoot(root);
staticHandler.setDefaultContentEncoding("UTF-8");
handlers.add(event -> {
try {
staticHandler.handle(event);
} catch (Exception e) {
event.next();
}
});
}
}
if (!knownPaths.isEmpty()) {
StaticHandler staticHandler = StaticHandler.create(META_INF_RESOURCES).setDefaultContentEncoding("UTF-8");
handlers.add(ctx -> {
String rel = ctx.mountPoint() == null ? ctx.normalisedPath()
: ctx.normalisedPath().substring(ctx.mountPoint().length());
if (knownPaths.contains(rel)) {
staticHandler.handle(ctx);
} else {
ctx.next();
}
});
}
return new Consumer<Route>() {
@Override
public void accept(Route route) {
for (Handler<RoutingContext> i : handlers) {
route.handler(i);
}
}
};
} | class StaticResourcesRecorder {
public static final String META_INF_RESOURCES = "META-INF/resources";
private static volatile Set<String> knownPaths;
private static volatile List<Path> hotDeploymentResourcePaths;
public static void setHotDeploymentResources(List<Path> resources) {
hotDeploymentResourcePaths = resources;
}
public void staticInit(Set<String> knownPaths) {
StaticResourcesRecorder.knownPaths = knownPaths;
}
} | class StaticResourcesRecorder {
public static final String META_INF_RESOURCES = "META-INF/resources";
private static volatile Set<String> knownPaths;
private static volatile List<Path> hotDeploymentResourcePaths;
public static void setHotDeploymentResources(List<Path> resources) {
hotDeploymentResourcePaths = resources;
}
public void staticInit(Set<String> knownPaths) {
StaticResourcesRecorder.knownPaths = knownPaths;
}
} |
Was using it as a reference when debugging. Should be deleted now. Thanks! | public void testReadFromBigQueryIO() throws Exception {
fakeDatasetService.createDataset("foo.com:project", "dataset", "", "", null);
TableReference tableRef = BigQueryHelpers.parseTableSpec("foo.com:project:dataset.table");
Table table =
new Table().setTableReference(tableRef).setNumBytes(10L).setSchema(new TableSchema());
fakeDatasetService.createTable(table);
CreateReadSessionRequest expectedCreateReadSessionRequest =
CreateReadSessionRequest.newBuilder()
.setParent("projects/project-id")
.setTableReference(BigQueryHelpers.toTableRefProto(tableRef))
.setRequestedStreams(10)
.setReadOptions(
TableReadOptions.newBuilder().addSelectedFields("name").addSelectedFields("number"))
.setShardingStrategy(ShardingStrategy.BALANCED)
.build();
ReadSession readSession =
ReadSession.newBuilder()
.setName("readSessionName")
.setAvroSchema(AvroSchema.newBuilder().setSchema(AVRO_SCHEMA_STRING))
.addStreams(Stream.newBuilder().setName("streamName"))
.build();
ReadRowsRequest expectedReadRowsRequest =
ReadRowsRequest.newBuilder()
.setReadPosition(
StreamPosition.newBuilder().setStream(Stream.newBuilder().setName("streamName")))
.build();
/*
"{\"namespace\": \"example.avro\",\n"
+ " \"type\": \"record\",\n"
+ " \"name\": \"RowRecord\",\n"
+ " \"fields\": [\n"
+ " {\"name\": \"name\", \"type\": \"string\"},\n"
+ " {\"name\": \"number\", \"type\": \"long\"}\n"
+ " ]\n"
+ "}";
*/
List<GenericRecord> records =
Lists.newArrayList(
createRecord("A", 1, AVRO_SCHEMA),
createRecord("B", 2, AVRO_SCHEMA),
createRecord("C", 3, AVRO_SCHEMA),
createRecord("D", 4, AVRO_SCHEMA));
List<ReadRowsResponse> readRowsResponses =
Lists.newArrayList(
createResponse(AVRO_SCHEMA, records.subList(0, 2), 0.50),
createResponse(AVRO_SCHEMA, records.subList(2, 4), 0.75));
StorageClient fakeStorageClient = mock(StorageClient.class, withSettings().serializable());
when(fakeStorageClient.createReadSession(expectedCreateReadSessionRequest))
.thenReturn(readSession);
when(fakeStorageClient.readRows(expectedReadRowsRequest))
.thenReturn(new FakeBigQueryServerStream<>(readRowsResponses));
PCollection<KV<String, Long>> output =
p.apply(
BigQueryIO.read(new ParseKeyValue())
.from("foo.com:project:dataset.table")
.withMethod(Method.DIRECT_READ)
.withSelectedFields(p.newProvider(Lists.newArrayList("name", "number")))
.withTestServices(
new FakeBigQueryServices()
.withDatasetService(fakeDatasetService)
.withStorageClient(fakeStorageClient)));
PAssert.that(output)
.containsInAnyOrder(
ImmutableList.of(KV.of("A", 1L), KV.of("B", 2L), KV.of("C", 3L), KV.of("D", 4L)));
p.run();
} | /* | public void testReadFromBigQueryIO() throws Exception {
fakeDatasetService.createDataset("foo.com:project", "dataset", "", "", null);
TableReference tableRef = BigQueryHelpers.parseTableSpec("foo.com:project:dataset.table");
Table table =
new Table().setTableReference(tableRef).setNumBytes(10L).setSchema(new TableSchema());
fakeDatasetService.createTable(table);
CreateReadSessionRequest expectedCreateReadSessionRequest =
CreateReadSessionRequest.newBuilder()
.setParent("projects/project-id")
.setTableReference(BigQueryHelpers.toTableRefProto(tableRef))
.setRequestedStreams(10)
.setReadOptions(
TableReadOptions.newBuilder().addSelectedFields("name").addSelectedFields("number"))
.setShardingStrategy(ShardingStrategy.BALANCED)
.build();
ReadSession readSession =
ReadSession.newBuilder()
.setName("readSessionName")
.setAvroSchema(AvroSchema.newBuilder().setSchema(AVRO_SCHEMA_STRING))
.addStreams(Stream.newBuilder().setName("streamName"))
.build();
ReadRowsRequest expectedReadRowsRequest =
ReadRowsRequest.newBuilder()
.setReadPosition(
StreamPosition.newBuilder().setStream(Stream.newBuilder().setName("streamName")))
.build();
List<GenericRecord> records =
Lists.newArrayList(
createRecord("A", 1, AVRO_SCHEMA),
createRecord("B", 2, AVRO_SCHEMA),
createRecord("C", 3, AVRO_SCHEMA),
createRecord("D", 4, AVRO_SCHEMA));
List<ReadRowsResponse> readRowsResponses =
Lists.newArrayList(
createResponse(AVRO_SCHEMA, records.subList(0, 2), 0.50),
createResponse(AVRO_SCHEMA, records.subList(2, 4), 0.75));
StorageClient fakeStorageClient = mock(StorageClient.class, withSettings().serializable());
when(fakeStorageClient.createReadSession(expectedCreateReadSessionRequest))
.thenReturn(readSession);
when(fakeStorageClient.readRows(expectedReadRowsRequest))
.thenReturn(new FakeBigQueryServerStream<>(readRowsResponses));
PCollection<KV<String, Long>> output =
p.apply(
BigQueryIO.read(new ParseKeyValue())
.from("foo.com:project:dataset.table")
.withMethod(Method.DIRECT_READ)
.withSelectedFields(p.newProvider(Lists.newArrayList("name", "number")))
.withTestServices(
new FakeBigQueryServices()
.withDatasetService(fakeDatasetService)
.withStorageClient(fakeStorageClient)));
PAssert.that(output)
.containsInAnyOrder(
ImmutableList.of(KV.of("A", 1L), KV.of("B", 2L), KV.of("C", 3L), KV.of("D", 4L)));
p.run();
} | class ParseKeyValue
implements SerializableFunction<SchemaAndRecord, KV<String, Long>> {
@Override
// Projects a BigQuery Avro record into a KV of its "name" (stringified)
// and "number" (assumed to be a Long — TODO confirm against the table schema).
public KV<String, Long> apply(SchemaAndRecord input) {
return KV.of(
input.getRecord().get("name").toString(), (Long) input.getRecord().get("number"));
}
} | class ParseKeyValue
implements SerializableFunction<SchemaAndRecord, KV<String, Long>> {
@Override
// Extracts ("name", "number") from the Avro record as a key/value pair.
// NOTE(review): NPEs if "name" is absent; the cast assumes "number" is a Long.
public KV<String, Long> apply(SchemaAndRecord input) {
return KV.of(
input.getRecord().get("name").toString(), (Long) input.getRecord().get("number"));
}
} |
We need to keep this since it has been used to keep track of function call depths. | public static void autoClose(BString[] channelIds) {
Strand currentStrand = Scheduler.getStrand();
Strand channelHoldingStrand = Objects.requireNonNullElse(currentStrand.parent, currentStrand);
for (BString channelId : channelIds) {
String channelName = getMatchingChannelName(channelId.getValue(), currentStrand);
WorkerDataChannel workerDataChannel = channelHoldingStrand.wdChannels.getWorkerDataChannel(channelName);
workerDataChannel.autoClose();
}
} | for (BString channelId : channelIds) { | public static void autoClose(BString[] channelIds) {
Strand currentStrand = Scheduler.getStrand();
Strand channelHoldingStrand = Objects.requireNonNullElse(currentStrand.parent, currentStrand);
for (BString channelId : channelIds) {
String channelName = getMatchingChannelName(channelId.getValue(), currentStrand);
WorkerDataChannel workerDataChannel = channelHoldingStrand.wdChannels.getWorkerDataChannel(channelName);
workerDataChannel.autoClose();
}
} | class WorkerChannels {
/**
* Auto-closes the specified worker channels if they exist; otherwise, closes them upon creation.
*
* @param channelIds channel IDs of the channels to be closed
*/
// Returns the name of the first channel on the strand whose name contains the
// given channel id (substring match), or null when no channel matches.
private static String getMatchingChannelName(String channelId, Strand currentStrand) {
    for (ChannelDetails channelDetail : currentStrand.channelDetails) {
        if (channelDetail.name.contains(channelId)) {
            return channelDetail.name;
        }
    }
    return null;
}
} | class WorkerChannels {
/**
* Auto-closes the specified worker channels if they exist; otherwise, closes them upon creation.
*
* @param channelIds channel IDs of the channels to be closed
*/
// Finds the first channel registered on the strand whose name contains the
// given channel id; returns null when none matches.
private static String getMatchingChannelName(String channelId, Strand currentStrand) {
String channelName = null;
// Substring match: channel names embed the id alongside other qualifiers.
for (ChannelDetails channelDetail : currentStrand.channelDetails) {
if (channelDetail.name.contains(channelId)) {
channelName = channelDetail.name;
break;
}
}
return channelName;
}
} |
Any reason to choose `System.nanoTime()` vs `System.currentTimeMillis()`? | private void executeBatch() throws SQLException, IOException, InterruptedException {
if (records.isEmpty()) {
return;
}
Long startTimeNs = System.nanoTime();
if (connection == null) {
connection = dataSource.getConnection();
connection.setAutoCommit(false);
preparedStatement = connection.prepareStatement(spec.getStatement().get());
}
Sleeper sleeper = Sleeper.DEFAULT;
BackOff backoff = retryBackOff.backoff();
while (true) {
try (PreparedStatement preparedStatement =
connection.prepareStatement(spec.getStatement().get())) {
try {
for (T record : records) {
processRecord(record, preparedStatement);
}
preparedStatement.executeBatch();
connection.commit();
RECORDS_PER_BATCH.update(records.size());
MS_PER_BATCH.update((System.nanoTime() - startTimeNs) / 1000);
break;
} catch (SQLException exception) {
if (!spec.getRetryStrategy().apply(exception)) {
throw exception;
}
LOG.warn("Deadlock detected, retrying", exception);
preparedStatement.clearBatch();
connection.rollback();
if (!BackOffUtils.next(sleeper, backoff)) {
throw exception;
}
}
}
}
records.clear();
} | Long startTimeNs = System.nanoTime(); | private void executeBatch() throws SQLException, IOException, InterruptedException {
if (records.isEmpty()) {
return;
}
Long startTimeNs = System.nanoTime();
if (connection == null) {
connection = dataSource.getConnection();
connection.setAutoCommit(false);
preparedStatement = connection.prepareStatement(spec.getStatement().get());
}
Sleeper sleeper = Sleeper.DEFAULT;
BackOff backoff = retryBackOff.backoff();
while (true) {
try (PreparedStatement preparedStatement =
connection.prepareStatement(spec.getStatement().get())) {
try {
for (T record : records) {
processRecord(record, preparedStatement);
}
preparedStatement.executeBatch();
connection.commit();
RECORDS_PER_BATCH.update(records.size());
MS_PER_BATCH.update(TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNs));
break;
} catch (SQLException exception) {
if (!spec.getRetryStrategy().apply(exception)) {
throw exception;
}
LOG.warn("Deadlock detected, retrying", exception);
preparedStatement.clearBatch();
connection.rollback();
if (!BackOffUtils.next(sleeper, backoff)) {
throw exception;
}
}
}
}
records.clear();
} | class WriteFn<T> extends DoFn<T, Void> {
private static final Distribution RECORDS_PER_BATCH =
Metrics.distribution(WriteFn.class, "records_per_jdbc_batch");
private static final Distribution MS_PER_BATCH =
Metrics.distribution(WriteFn.class, "milliseconds_per_batch");
private final WriteVoid<T> spec;
private DataSource dataSource;
private Connection connection;
private PreparedStatement preparedStatement;
private final List<T> records = new ArrayList<>();
private static FluentBackoff retryBackOff;
// Captures the write configuration; all connection state is created lazily
// in setup()/executeBatch().
public WriteFn(WriteVoid<T> spec) {
this.spec = spec;
}
@Setup
// DoFn lifecycle hook: resolves the DataSource and builds the retry backoff
// policy from the user-supplied RetryConfiguration.
// NOTE(review): retryBackOff is a static field assigned from instance setup,
// so it is shared (and overwritten) across all WriteFn instances in the
// JVM — confirm this is intended.
public void setup() {
dataSource = spec.getDataSourceProviderFn().apply(null);
RetryConfiguration retryConfiguration = spec.getRetryConfiguration();
retryBackOff =
FluentBackoff.DEFAULT
.withInitialBackoff(retryConfiguration.getInitialDuration())
.withMaxCumulativeBackoff(retryConfiguration.getMaxDuration())
.withMaxRetries(retryConfiguration.getMaxAttempts());
}
@ProcessElement
// Buffers the incoming element and flushes the buffer once it reaches the
// configured batch size.
public void processElement(ProcessContext context) throws Exception {
    records.add(context.element());
    if (records.size() >= spec.getBatchSize()) {
        executeBatch();
    }
}
// Binds one record's parameters onto the statement and stages it in the
// current JDBC batch. Checked SQLExceptions are wrapped unchecked because
// callers iterate records inside a batch loop.
private void processRecord(T record, PreparedStatement preparedStatement) {
try {
preparedStatement.clearParameters();
spec.getPreparedStatementSetter().setParameters(record, preparedStatement);
preparedStatement.addBatch();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@FinishBundle
// Flushes any records still buffered at bundle end, then releases the
// statement and connection so nothing leaks between bundles.
public void finishBundle() throws Exception {
executeBatch();
cleanUpStatementAndConnection();
}
@Override
// Last-resort cleanup in case finishBundle() never ran; finalizers are not
// guaranteed to execute, so this is only a safety net.
protected void finalize() throws Throwable {
    try {
        cleanUpStatementAndConnection();
    } finally {
        // A finalizer that overrides Object.finalize must chain to
        // super.finalize() (the original omitted this).
        super.finalize();
    }
}
// Closes the prepared statement and then the connection. The nested
// try/finally blocks guarantee that (a) the connection is closed even if
// closing the statement throws, and (b) both fields are nulled even if
// close() itself throws, so a later retry cannot reuse a dead handle.
private void cleanUpStatementAndConnection() throws Exception {
try {
if (preparedStatement != null) {
try {
preparedStatement.close();
} finally {
preparedStatement = null;
}
}
} finally {
if (connection != null) {
try {
connection.close();
} finally {
connection = null;
}
}
}
}
} | class WriteFn<T> extends DoFn<T, Void> {
private static final Distribution RECORDS_PER_BATCH =
Metrics.distribution(WriteFn.class, "records_per_jdbc_batch");
private static final Distribution MS_PER_BATCH =
Metrics.distribution(WriteFn.class, "milliseconds_per_batch");
private final WriteVoid<T> spec;
private DataSource dataSource;
private Connection connection;
private PreparedStatement preparedStatement;
private final List<T> records = new ArrayList<>();
private static FluentBackoff retryBackOff;
// Stores the write spec; connection/statement are created lazily later.
public WriteFn(WriteVoid<T> spec) {
this.spec = spec;
}
@Setup
// Resolves the DataSource and configures the FluentBackoff retry policy.
// NOTE(review): assigning the static retryBackOff from instance setup shares
// it across all WriteFn instances — verify that is intentional.
public void setup() {
dataSource = spec.getDataSourceProviderFn().apply(null);
RetryConfiguration retryConfiguration = spec.getRetryConfiguration();
retryBackOff =
FluentBackoff.DEFAULT
.withInitialBackoff(retryConfiguration.getInitialDuration())
.withMaxCumulativeBackoff(retryConfiguration.getMaxDuration())
.withMaxRetries(retryConfiguration.getMaxAttempts());
}
@ProcessElement
// Accumulates the element into the batch buffer; triggers a flush when the
// buffer reaches the configured batch size.
public void processElement(ProcessContext context) throws Exception {
    records.add(context.element());
    if (records.size() >= spec.getBatchSize()) {
        executeBatch();
    }
}
// Sets the record's parameters on the statement and adds it to the JDBC
// batch, wrapping checked failures in an unchecked exception.
private void processRecord(T record, PreparedStatement preparedStatement) {
try {
preparedStatement.clearParameters();
spec.getPreparedStatementSetter().setParameters(record, preparedStatement);
preparedStatement.addBatch();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@FinishBundle
// Flushes remaining buffered records, then releases JDBC resources.
public void finishBundle() throws Exception {
executeBatch();
cleanUpStatementAndConnection();
}
@Override
// Safety-net cleanup for the case where finishBundle() never ran; finalizer
// execution is not guaranteed, so this only backs up the normal path.
protected void finalize() throws Throwable {
    try {
        cleanUpStatementAndConnection();
    } finally {
        // Chain to super.finalize(), which the original omitted.
        super.finalize();
    }
}
// Closes statement then connection; the nested try/finally structure ensures
// the connection is closed even when the statement close throws, and both
// fields are nulled regardless of close() outcomes.
private void cleanUpStatementAndConnection() throws Exception {
try {
if (preparedStatement != null) {
try {
preparedStatement.close();
} finally {
preparedStatement = null;
}
}
} finally {
if (connection != null) {
try {
connection.close();
} finally {
connection = null;
}
}
}
}
} |
The `castTo` function will only return IntLiteral or LargeIntLiteral, so the only possibility of data overflow is when a LargeInt is converted to an Int. Obviously, the checkValueValid function has already been called when a LargeInt is cast to an Int, so there is no need to check again here. | public Expr apply(Expr expr, Analyzer analyzer, ClauseType clauseType) throws AnalysisException {
if (!(expr instanceof InPredicate)) {
return expr;
}
InPredicate inPredicate = (InPredicate) expr;
if (inPredicate.contains(Subquery.class) || !inPredicate.isLiteralChildren() || inPredicate.isNotIn()
|| !(inPredicate.getChild(0).unwrapExpr(false) instanceof SlotRef)) {
return expr;
}
SlotRef slotRef = inPredicate.getChild(0).getSrcSlotRef();
Type columnType = slotRef.getColumn().getType();
if (!columnType.isFixedPointType()) {
return expr;
}
InPredicate newInPredicate = inPredicate.clone();
boolean isCast = false;
List<Expr> invalidChildren = Lists.newArrayList();
for (int i = 1; i < newInPredicate.getChildren().size(); ++i) {
LiteralExpr childExpr = (LiteralExpr) newInPredicate.getChild(i);
if (childExpr.getType().getPrimitiveType().equals(columnType.getPrimitiveType())) {
continue;
}
if (childExpr.getType().getPrimitiveType().isCharFamily() || childExpr.getType().isFloatingPointType()) {
try {
childExpr = (LiteralExpr) childExpr.castTo(Type.DECIMALV2);
} catch (Exception e) {
newInPredicate.setChild(i, childExpr);
invalidChildren.add(childExpr);
continue;
}
}
if (childExpr.getType().isNumericType()) {
try {
LiteralExpr newExpr = (LiteralExpr) childExpr.castTo(columnType);
newExpr.checkValueValid();
if (childExpr.compareLiteral(newExpr) == 0) {
newInPredicate.setChild(i, newExpr);
isCast = true;
} else {
throw new AnalysisException("Converting the type will result in a loss of accuracy.");
}
} catch (Exception e) {
newInPredicate.setChild(i, childExpr);
invalidChildren.add(childExpr);
}
} else {
return expr;
}
}
if (invalidChildren.size() == newInPredicate.getChildren().size() - 1) {
return new BoolLiteral(false);
}
if (newInPredicate.getChild(0).getType().getPrimitiveType() != columnType.getPrimitiveType()) {
newInPredicate.castChild(columnType, 0);
isCast = true;
}
newInPredicate.getChildren().removeAll(invalidChildren);
return !isCast && invalidChildren.isEmpty() ? expr : newInPredicate;
} | newExpr.checkValueValid(); | public Expr apply(Expr expr, Analyzer analyzer, ClauseType clauseType) throws AnalysisException {
if (!(expr instanceof InPredicate)) {
return expr;
}
InPredicate inPredicate = (InPredicate) expr;
SlotRef slotRef;
if (inPredicate.contains(Subquery.class) || !inPredicate.isLiteralChildren() || inPredicate.isNotIn()
|| !(inPredicate.getChild(0).unwrapExpr(false) instanceof SlotRef)
|| (slotRef = inPredicate.getChild(0).getSrcSlotRef()) == null || slotRef.getColumn() == null) {
return expr;
}
Type columnType = slotRef.getColumn().getType();
if (!columnType.isFixedPointType()) {
return expr;
}
Expr newColumnExpr = expr.getChild(0).getType().getPrimitiveType() == columnType.getPrimitiveType()
? expr.getChild(0) : expr.getChild(0).castTo(columnType);
List<Expr> newInList = Lists.newArrayList();
boolean isCast = false;
for (int i = 1; i < inPredicate.getChildren().size(); ++i) {
LiteralExpr childExpr = (LiteralExpr) inPredicate.getChild(i);
if (!(childExpr.getType().isNumericType() || childExpr.getType().getPrimitiveType().isCharFamily())) {
return expr;
}
if (childExpr.getType().getPrimitiveType().equals(columnType.getPrimitiveType())) {
newInList.add(childExpr);
continue;
}
if (childExpr.getType().getPrimitiveType().isCharFamily() || childExpr.getType().isFloatingPointType()) {
try {
childExpr = (LiteralExpr) childExpr.castTo(Type.DECIMALV2);
} catch (AnalysisException e) {
continue;
}
}
try {
LiteralExpr newExpr = (LiteralExpr) childExpr.castTo(columnType);
if (childExpr.compareLiteral(newExpr) == 0) {
isCast = true;
newInList.add(newExpr);
}
} catch (AnalysisException ignored) {
}
}
if (newInList.isEmpty()) {
return new BoolLiteral(false);
}
return newInList.size() + 1 < expr.getChildren().size() || isCast
? new InPredicate(newColumnExpr, newInList, false) : expr;
} | class RewriteInPredicateRule implements ExprRewriteRule {
public static ExprRewriteRule INSTANCE = new RewriteInPredicateRule();
@Override
} | class RewriteInPredicateRule implements ExprRewriteRule {
public static ExprRewriteRule INSTANCE = new RewriteInPredicateRule();
@Override
} |
```suggestion boolean methodCallOnVarRef = expr != null; ``` Why do we need this (`expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF`) check? Here, the kind of `expr` is always `SIMPLE_VARIABLE_REF`, right? | private boolean isGlobalVarsInitialized(Location pos, BLangInvocation invocation) {
if (env.isModuleInit) {
boolean isFirstUninitializedField = true;
StringBuilder uninitializedFields = new StringBuilder();
BLangExpression expr = invocation.expr;
boolean methodCallOnVarRef = expr != null && expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF;
for (BSymbol symbol : this.uninitializedVars.keySet()) {
if (symbol.owner.getKind() != SymbolKind.PACKAGE || symbol == invocation.symbol ||
(methodCallOnVarRef && ((BLangSimpleVarRef) expr).symbol == symbol)) {
continue;
}
if (isFirstUninitializedField) {
uninitializedFields = new StringBuilder(symbol.getName().value);
isFirstUninitializedField = false;
} else {
uninitializedFields.append(", ").append(symbol.getName().value);
}
}
if (uninitializedFields.length() != 0) {
this.dlog.error(pos, DiagnosticErrorCode.INVALID_FUNCTION_CALL_WITH_UNINITIALIZED_VARIABLES,
uninitializedFields.toString());
return false;
}
}
return true;
} | boolean methodCallOnVarRef = expr != null && expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF; | private boolean isGlobalVarsInitialized(Location pos, BLangInvocation invocation) {
if (env.isModuleInit) {
boolean isFirstUninitializedField = true;
StringBuilder uninitializedFields = new StringBuilder();
BLangExpression expr = invocation.expr;
boolean methodCallOnVarRef = expr != null && expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF;
for (BSymbol symbol : this.uninitializedVars.keySet()) {
if (symbol.owner.getKind() != SymbolKind.PACKAGE || symbol == invocation.symbol ||
(methodCallOnVarRef && ((BLangSimpleVarRef) expr).symbol == symbol)) {
continue;
}
if (isFirstUninitializedField) {
uninitializedFields = new StringBuilder(symbol.getName().value);
isFirstUninitializedField = false;
} else {
uninitializedFields.append(", ").append(symbol.getName().value);
}
}
if (uninitializedFields.length() != 0) {
this.dlog.error(pos, DiagnosticErrorCode.INVALID_FUNCTION_CALL_WITH_UNINITIALIZED_VARIABLES,
uninitializedFields.toString());
return false;
}
}
return true;
} | class DataflowAnalyzer extends BLangNodeVisitor {
private final SymbolResolver symResolver;
private final Names names;
private SymbolEnv env;
private SymbolTable symTable;
private BLangDiagnosticLog dlog;
private Types types;
private Map<BSymbol, InitStatus> uninitializedVars;
private Map<BSymbol, Location> unusedErrorVarsDeclaredWithVar;
private Map<BSymbol, Location> unusedLocalVariables;
private Map<BSymbol, Set<BSymbol>> globalNodeDependsOn;
private Map<BSymbol, Set<BSymbol>> functionToDependency;
private boolean flowTerminated = false;
private static final CompilerContext.Key<DataflowAnalyzer> DATAFLOW_ANALYZER_KEY = new CompilerContext.Key<>();
private Deque<BSymbol> currDependentSymbolDeque;
private final GlobalVariableRefAnalyzer globalVariableRefAnalyzer;
// Private constructor: registers this analyzer in the compiler context and
// resolves the other compiler services it depends on. Instances are obtained
// via getInstance(context).
private DataflowAnalyzer(CompilerContext context) {
context.put(DATAFLOW_ANALYZER_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.types = Types.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.names = Names.getInstance(context);
this.currDependentSymbolDeque = new ArrayDeque<>();
this.globalVariableRefAnalyzer = GlobalVariableRefAnalyzer.getInstance(context);
this.unusedLocalVariables = new HashMap<>();
}
// Returns the analyzer cached in the compiler context, creating and
// registering one on first use (the private constructor stores the new
// instance under DATAFLOW_ANALYZER_KEY).
public static DataflowAnalyzer getInstance(CompilerContext context) {
    DataflowAnalyzer cached = context.get(DATAFLOW_ANALYZER_KEY);
    return cached != null ? cached : new DataflowAnalyzer(context);
}
/**
* Perform data-flow analysis on a package.
* Resets the per-package bookkeeping (uninitialized variables and global
* dependency maps) before walking the package in its environment.
*
* @param pkgNode Package to perform data-flow analysis.
* @return Data-flow analyzed package
*/
public BLangPackage analyze(BLangPackage pkgNode) {
this.uninitializedVars = new LinkedHashMap<>();
this.globalNodeDependsOn = new LinkedHashMap<>();
this.functionToDependency = new HashMap<>();
this.dlog.setCurrentPackageId(pkgNode.packageID);
SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);
analyzeNode(pkgNode, pkgEnv);
return pkgNode;
}
@Override
// Entry point for package-level analysis. Top-level nodes are analyzed in a
// specific order: global variables first, then the module init function, then
// everything else. The unused-variable maps are saved/restored around the
// package so nested (testable) packages do not clobber the enclosing state.
public void visit(BLangPackage pkgNode) {
// Guard against re-running the phase on an already analyzed package.
if (pkgNode.completedPhases.contains(CompilerPhase.DATAFLOW_ANALYZE)) {
return;
}
Map<BSymbol, Location> prevUnusedErrorVarsDeclaredWithVar = this.unusedErrorVarsDeclaredWithVar;
this.unusedErrorVarsDeclaredWithVar = new HashMap<>();
Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables;
this.unusedLocalVariables = new HashMap<>();
List<TopLevelNode> sortedListOfNodes = new ArrayList<>(pkgNode.globalVars);
addModuleInitToSortedNodeList(pkgNode, sortedListOfNodes);
addNodesToSortedNodeList(pkgNode, sortedListOfNodes);
for (TopLevelNode topLevelNode : sortedListOfNodes) {
if (isModuleInitFunction((BLangNode) topLevelNode)) {
analyzeModuleInitFunc((BLangFunction) topLevelNode);
} else {
// Object-constructor classes are analyzed where they occur, not here.
if (topLevelNode.getKind() == NodeKind.CLASS_DEFN) {
BLangClassDefinition classDef = (BLangClassDefinition) topLevelNode;
if (classDef.flagSet.contains(Flag.OBJECT_CTOR)) {
continue;
}
}
analyzeNode((BLangNode) topLevelNode, env);
}
}
checkForUninitializedGlobalVars(pkgNode.globalVars);
pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));
// Reorder global variables based on the dependency graph collected above.
this.globalVariableRefAnalyzer.analyzeAndReOrder(pkgNode, this.globalNodeDependsOn);
this.globalVariableRefAnalyzer.populateFunctionDependencies(this.functionToDependency, pkgNode.globalVars);
pkgNode.globalVariableDependencies = globalVariableRefAnalyzer.getGlobalVariablesDependsOn();
checkUnusedImports(pkgNode.imports);
emitUnusedVariableWarnings(this.unusedLocalVariables);
this.unusedLocalVariables = prevUnusedLocalVariables;
checkUnusedErrorVarsDeclaredWithVar();
this.unusedErrorVarsDeclaredWithVar = prevUnusedErrorVarsDeclaredWithVar;
pkgNode.completedPhases.add(CompilerPhase.DATAFLOW_ANALYZE);
}
// Appends the module init function (there is at most one per package) to the
// sorted node list, right after the global variables already added.
private void addModuleInitToSortedNodeList(BLangPackage pkgNode, List<TopLevelNode> sortedListOfNodes) {
    pkgNode.topLevelNodes.stream()
            .filter(node -> isModuleInitFunction((BLangNode) node))
            .findFirst()
            .ifPresent(sortedListOfNodes::add);
}
// Appends every top-level node not already present (globals and the module
// init function were inserted earlier), preserving declaration order.
private void addNodesToSortedNodeList(BLangPackage pkgNode, List<TopLevelNode> sortedListOfNodes) {
    for (TopLevelNode topLevelNode : pkgNode.topLevelNodes) {
        if (!sortedListOfNodes.contains(topLevelNode)) {
            sortedListOfNodes.add(topLevelNode);
        }
    }
}
// True when the node is a function named with the user-defined init suffix,
// i.e. the module's init function.
private boolean isModuleInitFunction(BLangNode node) {
    if (node.getKind() != NodeKind.FUNCTION) {
        return false;
    }
    return Names.USER_DEFINED_INIT_SUFFIX.value.equals(((BLangFunction) node).name.value);
}
// Analyzes the module init function in a dedicated module-init environment.
// The unused-local-variable map is swapped out for the duration so warnings
// emitted here are scoped to this function, then restored afterwards.
private void analyzeModuleInitFunc(BLangFunction funcNode) {
Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables;
this.unusedLocalVariables = new HashMap<>();
this.currDependentSymbolDeque.push(funcNode.symbol);
SymbolEnv moduleInitFuncEnv = SymbolEnv.createModuleInitFunctionEnv(funcNode, funcNode.symbol.scope, env);
// Annotation expressions are analyzed in the enclosing env, not the init env.
for (BLangAnnotationAttachment bLangAnnotationAttachment : funcNode.annAttachments) {
analyzeNode(bLangAnnotationAttachment.expr, env);
}
analyzeNode(funcNode.body, moduleInitFuncEnv);
this.currDependentSymbolDeque.pop();
emitUnusedVariableWarnings(this.unusedLocalVariables);
this.unusedLocalVariables = prevUnusedLocalVariables;
}
// Reports an error for every simple global variable that is still marked
// uninitialized once the whole package has been analyzed.
private void checkForUninitializedGlobalVars(List<BLangVariable> globalVars) {
    for (BLangVariable globalVar : globalVars) {
        if (globalVar.getKind() != NodeKind.VARIABLE) {
            continue;
        }
        if (this.uninitializedVars.containsKey(globalVar.symbol)) {
            this.dlog.error(globalVar.pos, DiagnosticErrorCode.UNINITIALIZED_VARIABLE, globalVar.symbol);
        }
    }
}
@Override
// Resource functions share the regular function analysis; delegate upcast.
public void visit(BLangResourceFunction funcNode) {
visit((BLangFunction) funcNode);
}
@Override
// Analyzes a function: annotations and parameters first, then the body.
// Object-constructor functions get a dynamic environment so captured state
// from the enclosing scope is visible; other bodies are analyzed as a branch.
public void visit(BLangFunction funcNode) {
SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
// Scope the unused-local map to this function; restore on exit.
Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables;
this.unusedLocalVariables = new HashMap<>();
this.currDependentSymbolDeque.push(funcNode.symbol);
funcNode.annAttachments.forEach(bLangAnnotationAttachment -> analyzeNode(bLangAnnotationAttachment.expr, env));
funcNode.requiredParams.forEach(param -> analyzeNode(param, funcEnv));
analyzeNode(funcNode.restParam, funcEnv);
if (funcNode.flagSet.contains(Flag.OBJECT_CTOR)) {
visitFunctionBodyWithDynamicEnv(funcNode, funcEnv);
} else {
analyzeBranch(funcNode.body, funcEnv);
}
this.currDependentSymbolDeque.pop();
emitUnusedVariableWarnings(this.unusedLocalVariables);
this.unusedLocalVariables = prevUnusedLocalVariables;
}
// Analyzes a function body that can see the enclosing scope's state (e.g. an
// object-constructor function). Both the unused-local and uninitialized-var
// maps are copied in, mutated during analysis, then carefully reconciled so
// only variables still unused in BOTH scopes stay flagged.
private void visitFunctionBodyWithDynamicEnv(BLangFunction funcNode, SymbolEnv funcEnv) {
Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables;
this.unusedLocalVariables = new HashMap<>();
this.unusedLocalVariables.putAll(prevUnusedLocalVariables);
Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars;
// Work on a copy so the outer scope's init-state is untouched.
this.uninitializedVars = copyUninitializedVars();
this.flowTerminated = false;
analyzeNode(funcNode.body, funcEnv);
this.uninitializedVars = prevUninitializedVars;
// Drop outer-scope entries that this body used, then keep only the
// body-local leftovers for warning emission.
prevUnusedLocalVariables.keySet().removeIf(bSymbol -> !this.unusedLocalVariables.containsKey(bSymbol));
this.unusedLocalVariables.keySet().removeAll(prevUnusedLocalVariables.keySet());
emitUnusedVariableWarnings(this.unusedLocalVariables);
this.unusedLocalVariables = prevUnusedLocalVariables;
}
@Override
// Statements in a block function body share one child environment; the
// module-init flag is propagated so nested checks behave consistently.
public void visit(BLangBlockFunctionBody body) {
    SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
    bodyEnv.isModuleInit = env.isModuleInit;
    body.stmts.forEach(statement -> analyzeNode(statement, bodyEnv));
}
@Override
// An expression-bodied function has a single expression to analyze, in a
// fresh function-body environment.
public void visit(BLangExprFunctionBody body) {
    analyzeNode(body.expr, SymbolEnv.createFuncBodyEnv(body, env));
}
@Override
// External (native) function bodies have no Ballerina statements to analyze.
public void visit(BLangExternalFunctionBody body) {
}
@Override
// Analyzes each statement of a block in a shared block-level environment.
public void visit(BLangBlockStmt blockNode) {
    SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env);
    for (BLangStatement statement : blockNode.stmts) {
        analyzeNode(statement, blockEnv);
    }
}
@Override
// Let expression: analyze each let-variable definition first, then the
// result expression, all in the let expression's own environment.
public void visit(BLangLetExpression letExpression) {
    letExpression.letVarDeclarations.forEach(
            letVarDeclaration -> analyzeNode((BLangNode) letVarDeclaration.definitionNode, letExpression.env));
    analyzeNode(letExpression.expr, letExpression.env);
}
@Override
// Compilation units carry no dataflow state of their own; analysis is
// driven from the package level instead.
public void visit(BLangCompilationUnit compUnit) {
}
@Override
// XML namespace declarations need no dataflow analysis.
public void visit(BLangXMLNS xmlnsNode) {
}
@Override
// A service is analyzed via its backing service class, its attached
// expressions, and its annotation expressions, with the service class symbol
// pushed as the current dependent for dependency tracking.
public void visit(BLangService service) {
    this.currDependentSymbolDeque.push(service.serviceClass.symbol);
    visit(service.serviceClass);
    service.attachedExprs.forEach(attachedExpr -> analyzeNode(attachedExpr, env));
    for (BLangAnnotationAttachment attachment : service.annAttachments) {
        analyzeNode(attachment.expr, env);
    }
    this.currDependentSymbolDeque.pop();
}
@Override
// Analyzes a type definition's type node in a type environment. For TYPE_DEF
// symbols the dependency tracking uses the underlying type symbol so
// references resolve to the actual type, not the definition wrapper.
public void visit(BLangTypeDefinition typeDefinition) {
SymbolEnv typeDefEnv;
BSymbol symbol = typeDefinition.symbol;
if (typeDefinition.symbol.kind == SymbolKind.TYPE_DEF) {
symbol = symbol.type.tsymbol;
}
typeDefEnv = SymbolEnv.createTypeEnv(typeDefinition.typeNode, symbol.scope, env);
this.currDependentSymbolDeque.push(symbol);
analyzeNode(typeDefinition.typeNode, typeDefEnv);
this.currDependentSymbolDeque.pop();
}
@Override
public void visit(BLangClassDefinition classDef) {
    // NOTE: the local `env` deliberately shadows the field so that an
    // object-constructor-expression (OCE) class can be analyzed against the
    // closure environment it captured rather than the current one.
    SymbolEnv preEnv = env;
    SymbolEnv env = this.env;
    Map<BSymbol, Location> prevUnusedLocalVariables = null;
    Map<BSymbol, InitStatus> prevUninitializedVars = null;
    boolean visitedOCE = false;
    if (classDef.flagSet.contains(Flag.OBJECT_CTOR) && classDef.oceEnvData.capturedClosureEnv != null &&
            classDef.oceEnvData.capturedClosureEnv.enclEnv != null) {
        env = classDef.oceEnvData.capturedClosureEnv.enclEnv;
        // Snapshot the tracking maps; they are restored after the OCE class is done.
        prevUnusedLocalVariables = this.unusedLocalVariables;
        prevUninitializedVars = this.uninitializedVars;
        this.unusedLocalVariables = new HashMap<>();
        this.unusedLocalVariables.putAll(prevUnusedLocalVariables);
        this.uninitializedVars = copyUninitializedVars();
        this.flowTerminated = false;
        visitedOCE = true;
    }
    SymbolEnv objectEnv = SymbolEnv.createClassEnv(classDef, classDef.symbol.scope, env);
    this.currDependentSymbolDeque.push(classDef.symbol);
    for (BLangAnnotationAttachment bLangAnnotationAttachment : classDef.annAttachments) {
        analyzeNode(bLangAnnotationAttachment.expr, env);
    }
    classDef.fields.forEach(field -> analyzeNode(field, objectEnv));
    classDef.referencedFields.forEach(field -> analyzeNode(field, objectEnv));
    if (classDef.initFunction != null) {
        if (classDef.initFunction.body == null) {
            // The init body may live as a module-level function of the same name; use it if found.
            Optional<BLangFunction> outerFuncDef =
                    objectEnv.enclPkg.functions.stream()
                            .filter(f -> f.symbol.name.equals((classDef.initFunction).symbol.name))
                            .findFirst();
            outerFuncDef.ifPresent(bLangFunction -> classDef.initFunction = bLangFunction);
        }
        if (classDef.initFunction.body != null) {
            // The init body gets its own unused-variable scope so that warnings
            // are emitted only for variables local to the init function.
            Map<BSymbol, Location> prevUnusedLocalVars = this.unusedLocalVariables;
            this.unusedLocalVariables = new HashMap<>();
            if (classDef.initFunction.body.getKind() == NodeKind.BLOCK_FUNCTION_BODY) {
                for (BLangStatement statement :
                        ((BLangBlockFunctionBody) classDef.initFunction.body).stmts) {
                    analyzeNode(statement, objectEnv);
                }
            } else if (classDef.initFunction.body.getKind() == NodeKind.EXPR_FUNCTION_BODY) {
                analyzeNode(((BLangExprFunctionBody) classDef.initFunction.body).expr, objectEnv);
            }
            emitUnusedVariableWarnings(this.unusedLocalVariables);
            this.unusedLocalVariables = prevUnusedLocalVars;
        }
    }
    // Non-private fields must be initialized once init analysis is done; report the rest.
    Stream.concat(classDef.fields.stream(), classDef.referencedFields.stream())
            .map(field -> {
                addTypeDependency(classDef.symbol, field.getBType(), new HashSet<>());
                return field; })
            .filter(field -> !Symbols.isPrivate(field.symbol))
            .forEach(field -> {
                if (this.uninitializedVars.containsKey(field.symbol)) {
                    this.dlog.error(field.pos, DiagnosticErrorCode.OBJECT_UNINITIALIZED_FIELD, field.symbol);
                }
            });
    for (BLangFunction function : classDef.functions) {
        analyzeNode(function, env);
    }
    for (BLangType type : classDef.typeRefs) {
        analyzeNode(type, env);
    }
    this.env = preEnv;
    if (visitedOCE) {
        // Restore the outer maps; entries consumed inside the OCE stay removed.
        this.uninitializedVars = prevUninitializedVars;
        prevUnusedLocalVariables.keySet().removeIf(bSymbol -> !this.unusedLocalVariables.containsKey(bSymbol));
        this.unusedLocalVariables = prevUnusedLocalVariables;
    }
    this.currDependentSymbolDeque.pop();
}
@Override
public void visit(BLangObjectConstructorExpression objectConstructorExpression) {
    BLangClassDefinition classDef = objectConstructorExpression.classNode;
    if (classDef.flagSet.contains(Flag.OBJECT_CTOR)) {
        // Symbols captured as closures by the object constructor are, by definition, used.
        OCEDynamicEnvironmentData oceData = classDef.oceEnvData;
        for (BSymbol symbol : oceData.closureFuncSymbols) {
            this.unusedLocalVariables.remove(symbol);
        }
        for (BSymbol symbol : oceData.closureBlockSymbols) {
            this.unusedLocalVariables.remove(symbol);
        }
    }
    visit(objectConstructorExpression.classNode);
    visit(objectConstructorExpression.typeInit);
    // The expression's type depends on the anonymous class backing it.
    addDependency(objectConstructorExpression.getBType().tsymbol, objectConstructorExpression.classNode.symbol);
}
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
    BLangSimpleVariable var = varDefNode.var;
    if (var.expr == null) {
        // No initializer: record the variable as uninitialized and, when it is a
        // local bound by a real (non-wildcard) pattern, as currently unused.
        addUninitializedVar(var);
        analyzeNode(var.typeNode, env);
        BVarSymbol symbol = var.symbol;
        if (var.getKind() == NodeKind.VARIABLE && isLocalVariableDefinedWithNonWildCardBindingPattern(var)) {
            this.unusedLocalVariables.put(symbol, var.pos);
        }
        return;
    }
    analyzeNode(var, env);
}
@Override
public void visit(BLangSimpleVariable variable) {
    BVarSymbol symbol = variable.symbol;
    analyzeNode(variable.typeNode, env);
    if (symbol == null) {
        // Unresolved variable: still analyze the initializer for its own effects.
        if (variable.expr != null) {
            analyzeNode(variable.expr, env);
        }
        return;
    }
    // Everything below may register dependencies against this variable's symbol.
    this.currDependentSymbolDeque.push(symbol);
    if (variable.typeNode != null && variable.typeNode.getBType() != null) {
        BType type = variable.typeNode.getBType();
        recordGlobalVariableReferenceRelationship(Types.getReferredType(type).tsymbol);
    }
    // A let-variable inside a module-level variable also makes that module var a dependent.
    boolean withInModuleVarLetExpr = symbol.owner.tag == SymTag.LET && isGlobalVarSymbol(env.enclVarSym);
    if (withInModuleVarLetExpr) {
        BVarSymbol dependentVar = env.enclVarSym;
        this.currDependentSymbolDeque.push(dependentVar);
    }
    try {
        boolean varWithInferredTypeIncludingError = false;
        if (variable.isDeclaredWithVar) {
            varWithInferredTypeIncludingError = addVarIfInferredTypeIncludesError(variable);
        }
        // Track non-wildcard local bindings as unused, except worker declarations.
        if (!varWithInferredTypeIncludingError &&
                isLocalVariableDefinedWithNonWildCardBindingPattern(variable) &&
                !isVariableDeclaredForWorkerDeclaration(variable)) {
            this.unusedLocalVariables.put(symbol, variable.pos);
        }
        if (variable.expr != null) {
            // Initialized at declaration.
            analyzeNode(variable.expr, env);
            this.uninitializedVars.remove(symbol);
            return;
        }
        long varFlags = symbol.flags;
        if (Symbols.isFlagOn(varFlags, Flags.CONFIGURABLE) && Symbols.isFlagOn(varFlags, Flags.REQUIRED)) {
            // Required configurable variables are supplied externally; never "uninitialized".
            return;
        }
        BSymbol owner = symbol.owner;
        if (owner.tag != SymTag.PACKAGE && owner.tag != SymTag.OBJECT) {
            // Only module-level and object-field variables are tracked when uninitialized here.
            return;
        }
        addUninitializedVar(variable);
    } finally {
        // Pops mirror the pushes above, in reverse order.
        if (withInModuleVarLetExpr) {
            this.currDependentSymbolDeque.pop();
        }
        this.currDependentSymbolDeque.pop();
    }
}
/**
 * Returns true when this variable backs a worker declaration: either its
 * symbol carries the WORKER flag, or its initializer is a lambda whose
 * function is flagged as a worker.
 */
private boolean isVariableDeclaredForWorkerDeclaration(BLangSimpleVariable variable) {
    BLangExpression initExpr = variable.expr;
    if (initExpr == null) {
        return false;
    }
    return Symbols.isFlagOn(variable.symbol.flags, Flags.WORKER)
            || (initExpr.getKind() == NodeKind.LAMBDA
                    && ((BLangLambdaFunction) initExpr).function.flagSet.contains(Flag.WORKER));
}
@Override
public void visit(BLangAssignment assignment) {
    // RHS first (it may read variables), then mark the LHS as assigned.
    analyzeNode(assignment.expr, env);
    checkAssignment(assignment.varRef);
}
@Override
public void visit(BLangCompoundAssignment compoundAssignNode) {
    // `a op= b` both reads and writes `a`: analyze both sides, validate the
    // assignment, then consider the target initialized.
    analyzeNode(compoundAssignNode.expr, env);
    analyzeNode(compoundAssignNode.varRef, env);
    checkAssignment(compoundAssignNode.varRef);
    this.uninitializedVars.remove(compoundAssignNode.varRef.symbol);
}
@Override
public void visit(BLangBreak breakNode) {
    // Control never falls through a break; mark the current flow as terminated.
    terminateFlow();
}
@Override
public void visit(BLangReturn returnNode) {
    // Analyze the returned expression; the flow ends here.
    analyzeNode(returnNode.expr, env);
    terminateFlow();
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmt) {
    // Delegate to the wrapped namespace declaration.
    analyzeNode(xmlnsStmt.xmlnsDecl, env);
}
@Override
public void visit(BLangIf ifNode) {
    analyzeNode(ifNode.expr, env);
    // Each branch is analyzed against its own copy of the uninitialized-var state.
    BranchResult ifResult = analyzeBranch(ifNode.body, env);
    BranchResult elseResult = analyzeBranch(ifNode.elseStmt, env);
    // If one branch cannot complete normally, only the other contributes state.
    if (ifResult.flowTerminated) {
        this.uninitializedVars = elseResult.uninitializedVars;
        return;
    }
    // Same when the condition is statically true: only the then-branch is reachable.
    if (elseResult.flowTerminated ||
            ConditionResolver.checkConstCondition(types, symTable, ifNode.expr) == symTable.trueType) {
        this.uninitializedVars = ifResult.uninitializedVars;
        return;
    }
    // Otherwise a variable stays "maybe uninitialized" unless both branches initialize it.
    this.uninitializedVars = mergeUninitializedVars(ifResult.uninitializedVars, elseResult.uninitializedVars);
}
@Override
public void visit(BLangMatchStatement matchStatement) {
    analyzeNode(matchStatement.expr, env);
    if (matchStatement.onFailClause != null) {
        analyzeNode(matchStatement.onFailClause, env);
    }
    Map<BSymbol, InitStatus> uninitVars = new HashMap<>();
    BranchResult lastPatternResult = null;
    for (int i = 0; i < matchStatement.getMatchClauses().size(); i++) {
        BLangMatchClause matchClause = matchStatement.getMatchClauses().get(i);
        if (isLastPatternContainsIn(matchClause)) {
            // A catch-all clause makes the match exhaustive; remember its result separately.
            lastPatternResult = analyzeBranch(matchClause, env);
        } else {
            BranchResult result = analyzeBranch(matchClause, env);
            if (result.flowTerminated) {
                // A clause that never completes cannot leave variables uninitialized afterwards.
                continue;
            }
            uninitVars = mergeUninitializedVars(uninitVars, result.uninitializedVars);
        }
    }
    if (lastPatternResult != null) {
        // Exhaustive match: the merged clause states fully describe what follows.
        uninitVars = mergeUninitializedVars(uninitVars, lastPatternResult.uninitializedVars);
        this.uninitializedVars = uninitVars;
        return;
    }
    // Non-exhaustive match: the pre-match state may also flow past the statement.
    uninitVars = mergeUninitializedVars(new HashMap<>(), this.uninitializedVars);
    this.uninitializedVars = uninitVars;
}
@Override
public void visit(BLangMatchClause matchClause) {
    // Register every non-wildcard binding-pattern variable of this clause as
    // unused until a later reference removes it.
    Location pos = matchClause.pos;
    matchClause.declaredVars.values().stream()
            .filter(symbol -> !isWildCardBindingPattern(symbol))
            .forEach(symbol -> this.unusedLocalVariables.put(symbol, pos));
    analyzeNode(matchClause.matchGuard, env);
    analyzeNode(matchClause.blockStmt, env);
}
@Override
public void visit(BLangMatchGuard matchGuard) {
    // The guard is an ordinary boolean expression; analyze its variable usage.
    analyzeNode(matchGuard.expr, env);
}
/** True when any pattern of the clause is flagged as the final (catch-all) pattern. */
private boolean isLastPatternContainsIn(BLangMatchClause matchClause) {
    return matchClause.matchPatterns.stream().anyMatch(pattern -> pattern.isLastPattern);
}
@Override
public void visit(BLangMatch match) {
    // Legacy match statement: same merge strategy as visit(BLangMatchStatement).
    analyzeNode(match.expr, env);
    if (match.onFailClause != null) {
        analyzeNode(match.onFailClause, env);
    }
    Map<BSymbol, InitStatus> uninitVars = new HashMap<>();
    BranchResult lastPatternResult = null;
    for (BLangMatch.BLangMatchBindingPatternClause patternClause : match.patternClauses) {
        if (patternClause.isLastPattern) {
            // Catch-all clause: the match is exhaustive.
            lastPatternResult = analyzeBranch(patternClause, env);
        } else {
            BranchResult result = analyzeBranch(patternClause, env);
            if (result.flowTerminated) {
                // A clause that never completes contributes no post-match state.
                continue;
            }
            uninitVars = mergeUninitializedVars(uninitVars, result.uninitializedVars);
        }
    }
    if (lastPatternResult != null) {
        // Exhaustive: merged clause states describe everything after the match.
        uninitVars = mergeUninitializedVars(uninitVars, lastPatternResult.uninitializedVars);
        this.uninitializedVars = uninitVars;
        return;
    }
    // Non-exhaustive: the pre-match state may also flow past the statement.
    uninitVars = mergeUninitializedVars(new HashMap<>(), this.uninitializedVars);
    this.uninitializedVars = uninitVars;
}
@Override
public void visit(BLangForeach foreach) {
    BLangExpression collection = foreach.collection;
    // Binding-pattern members are tracked as unused only when the collection
    // is not a range expression.
    if (isNotRangeExpr(collection)) {
        populateUnusedVariableMapForMembers(this.unusedLocalVariables,
                (BLangVariable) foreach.variableDefinitionNode.getVariable());
    }
    analyzeNode(collection, env);
    analyzeNode(foreach.body, env);
    if (foreach.onFailClause != null) {
        analyzeNode(foreach.onFailClause, env);
    }
}
@Override
public void visit(BLangQueryAction queryAction) {
    // Analyze every clause of the query action in source order.
    queryAction.getQueryClauses().forEach(clause -> analyzeNode(clause, env));
}
@Override
public void visit(BLangWhile whileNode) {
    Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars;
    analyzeNode(whileNode.expr, env);
    BranchResult whileResult = analyzeBranch(whileNode.body, env);
    if (whileNode.onFailClause != null) {
        analyzeNode(whileNode.onFailClause, env);
    }
    BType constCondition = ConditionResolver.checkConstCondition(types, symTable, whileNode.expr);
    if (constCondition == symTable.falseType) {
        // `while false`: the body never runs; keep the pre-loop state.
        this.uninitializedVars = prevUninitializedVars;
        return;
    }
    if (whileResult.flowTerminated || constCondition == symTable.trueType) {
        // `while true`, or a body that never completes: only the body's state escapes.
        this.uninitializedVars = whileResult.uninitializedVars;
        return;
    }
    // Body may run zero or more times: merge pre-loop and body states.
    this.uninitializedVars = mergeUninitializedVars(this.uninitializedVars, whileResult.uninitializedVars);
}
@Override
public void visit(BLangDo doNode) {
    // Analyze the do-block, then its on-fail clause if present.
    analyzeNode(doNode.body, env);
    if (doNode.onFailClause != null) {
        analyzeNode(doNode.onFailClause, env);
    }
}
/**
 * The failing expression may read variables; analyze it for init/use tracking.
 */
@Override // added for consistency: every sibling visit(...) override carries it
public void visit(BLangFail failNode) {
    analyzeNode(failNode.expr, env);
}
@Override
public void visit(BLangLock lockNode) {
    // Analyze the locked block, then its on-fail clause if present.
    analyzeNode(lockNode.body, this.env);
    if (lockNode.onFailClause != null) {
        analyzeNode(lockNode.onFailClause, env);
    }
}
@Override
public void visit(BLangTransaction transactionNode) {
    analyzeNode(transactionNode.transactionBody, env);
    if (transactionNode.onFailClause != null) {
        analyzeNode(transactionNode.onFailClause, env);
    }
    // Resolve the internal transaction package prefix for this compilation unit.
    // NOTE(review): presumably required so later phases can reference the prefix — confirm.
    Name transactionPkgName = names.fromString(Names.DOT.value + Names.TRANSACTION_PACKAGE.value);
    Name compUnitName = names.fromString(transactionNode.pos.lineRange().filePath());
    this.symResolver.resolvePrefixSymbol(env, transactionPkgName, compUnitName);
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    // No-op: a transactional check reads no variables.
}
@Override
public void visit(BLangCommitExpr commitExpr) {
    // No-op: commit has no sub-expressions to analyze.
}
@Override
public void visit(BLangRollback rollbackNode) {
    // The optional rollback expression may read variables.
    analyzeNode(rollbackNode.expr, env);
}
@Override
public void visit(BLangTupleDestructure stmt) {
    // RHS first, then mark the tuple targets as assigned.
    analyzeNode(stmt.expr, env);
    checkAssignment(stmt.varRef);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    /* ignore */
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // The value being sent to a worker is a read.
    analyzeNode(workerSendNode.expr, env);
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // The value being sync-sent to a worker is a read.
    analyzeNode(syncSendExpr.expr, env);
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // No-op: a worker receive has no sub-expressions to analyze here.
}
@Override
public void visit(BLangLiteral literalExpr) {
    // No-op: literals reference no variables.
}
@Override
public void visit(BLangConstRef constRef) {
    // No-op: constant references carry their value; nothing to track.
}
@Override
public void visit(BLangListConstructorExpr listConstructorExpr) {
    // Spread members (`...expr`) are unwrapped so the spread expression itself is analyzed.
    for (BLangExpression member : listConstructorExpr.exprs) {
        BLangExpression target = member.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP
                ? ((BLangListConstructorExpr.BLangListConstructorSpreadOpExpr) member).expr
                : member;
        analyzeNode(target, env);
    }
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    // Analyze each row literal, then reject rows that repeat the same key values.
    for (BLangRecordLiteral row : tableConstructorExpr.recordLiteralList) {
        analyzeNode(row, env);
    }
    checkForDuplicateKeys(tableConstructorExpr);
}
/**
 * Reports an error for each table row whose key fields repeat an earlier
 * row's key values. No-op when the table has no key fields.
 */
private void checkForDuplicateKeys(BLangTableConstructorExpr tableConstructorExpr) {
    Set<Integer> keyHashSet = new HashSet<>();
    List<String> fieldNames = getFieldNames(tableConstructorExpr);
    HashMap<Integer, List<BLangExpression>> keyValues = new HashMap<>();
    if (!fieldNames.isEmpty()) {
        for (BLangRecordLiteral literal : tableConstructorExpr.recordLiteralList) {
            List<BLangExpression> keyArray = createKeyArray(literal, fieldNames);
            int hashInt = generateHash(keyArray);
            // A repeated hash is only a duplicate when the key expressions also
            // compare equal structurally (guards against hash collisions).
            if (!keyHashSet.add(hashInt) && checkForKeyEquality(keyValues, keyArray, hashInt)) {
                String fields = String.join(", ", fieldNames);
                String values = keyArray.stream().map(Object::toString).collect(Collectors.joining(", "));
                dlog.error(literal.pos, DiagnosticErrorCode.DUPLICATE_KEY_IN_TABLE_LITERAL, fields, values);
            }
            keyValues.put(hashInt, keyArray);
        }
    }
}
/**
 * Confirms a hash collision by comparing the new key expressions with the
 * previously recorded key array for the same hash, field by field.
 */
private boolean checkForKeyEquality(HashMap<Integer, List<BLangExpression>> keyValues,
                                    List<BLangExpression> keyArray, int hash) {
    List<BLangExpression> existing = keyValues.get(hash);
    if (existing.size() != keyArray.size()) {
        return false;
    }
    for (int i = 0; i < keyArray.size(); i++) {
        if (!equality(keyArray.get(i), existing.get(i))) {
            return false;
        }
    }
    return true;
}
/** Folds the member hashes with the conventional 31-based polynomial combination. */
private int generateHash(List<BLangExpression> keyArray) {
    int combined = 0;
    for (BLangExpression keyExpr : keyArray) {
        combined = 31 * combined + hash(keyExpr);
    }
    return combined;
}
/**
 * Structural equality of two constant AST nodes, used to confirm duplicate
 * table keys after a hash collision. Nodes of different kinds are never
 * equal; kinds not listed below are conservatively treated as unequal.
 *
 * NOTE(review): the list-valued cases iterate over nodeA's children and
 * assume nodeB has at least as many — presumably guaranteed upstream; confirm.
 */
public boolean equality(Node nodeA, Node nodeB) {
    if (nodeA == null || nodeB == null) {
        return nodeA == nodeB;
    }
    if (nodeA.getKind() != nodeB.getKind()) {
        return false;
    }
    boolean isEqual = true;
    switch (nodeA.getKind()) {
        case RECORD_LITERAL_EXPR:
            BLangRecordLiteral recordLiteralA = (BLangRecordLiteral) nodeA;
            BLangRecordLiteral recordLiteralB = (BLangRecordLiteral) nodeB;
            for (int i = 0; isEqual && i < recordLiteralA.fields.size(); i++) {
                RecordLiteralNode.RecordField exprA = recordLiteralA.fields.get(i);
                RecordLiteralNode.RecordField exprB = recordLiteralB.fields.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case RECORD_LITERAL_KEY_VALUE:
            BLangRecordLiteral.BLangRecordKeyValueField fieldA =
                    (BLangRecordLiteral.BLangRecordKeyValueField) nodeA;
            BLangRecordLiteral.BLangRecordKeyValueField fieldB =
                    (BLangRecordLiteral.BLangRecordKeyValueField) nodeB;
            return equality(fieldA.valueExpr, fieldB.valueExpr);
        case LITERAL:
        case NUMERIC_LITERAL:
            BLangLiteral literalA = (BLangLiteral) nodeA;
            BLangLiteral literalB = (BLangLiteral) nodeB;
            return Objects.equals(literalA.value, literalB.value);
        case XML_TEXT_LITERAL:
            BLangXMLTextLiteral textLiteralA = (BLangXMLTextLiteral) nodeA;
            BLangXMLTextLiteral textLiteralB = (BLangXMLTextLiteral) nodeB;
            isEqual = equality(textLiteralA.concatExpr, textLiteralB.concatExpr);
            for (int i = 0; isEqual && i < textLiteralA.textFragments.size(); i++) {
                BLangExpression exprA = textLiteralA.textFragments.get(i);
                BLangExpression exprB = textLiteralB.textFragments.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case XML_ATTRIBUTE:
            BLangXMLAttribute attributeA = (BLangXMLAttribute) nodeA;
            BLangXMLAttribute attributeB = (BLangXMLAttribute) nodeB;
            return equality(attributeA.name, attributeB.name) && equality(attributeA.value, attributeB.value);
        case XML_QNAME:
            BLangXMLQName xmlqNameA = (BLangXMLQName) nodeA;
            // Bug fix: previously cast nodeA here too, so a QName always compared equal to itself.
            BLangXMLQName xmlqNameB = (BLangXMLQName) nodeB;
            return equality(xmlqNameA.localname, xmlqNameB.localname)
                    && equality(xmlqNameA.prefix, xmlqNameB.prefix);
        case XML_ELEMENT_LITERAL:
            BLangXMLElementLiteral eleLiteralA = (BLangXMLElementLiteral) nodeA;
            BLangXMLElementLiteral eleLiteralB = (BLangXMLElementLiteral) nodeB;
            isEqual = equality(eleLiteralA.startTagName, eleLiteralB.startTagName)
                    && equality(eleLiteralA.endTagName, eleLiteralB.endTagName);
            for (int i = 0; isEqual && i < eleLiteralA.attributes.size(); i++) {
                BLangExpression exprA = eleLiteralA.attributes.get(i);
                BLangExpression exprB = eleLiteralB.attributes.get(i);
                isEqual = equality(exprA, exprB);
            }
            for (int i = 0; isEqual && i < eleLiteralA.children.size(); i++) {
                BLangExpression exprA = eleLiteralA.children.get(i);
                BLangExpression exprB = eleLiteralB.children.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case XML_COMMENT_LITERAL:
            BLangXMLCommentLiteral commentliteralA = (BLangXMLCommentLiteral) nodeA;
            BLangXMLCommentLiteral commentliteralB = (BLangXMLCommentLiteral) nodeB;
            isEqual = equality(commentliteralA.concatExpr, commentliteralB.concatExpr);
            for (int i = 0; isEqual && i < commentliteralA.textFragments.size(); i++) {
                BLangExpression exprA = commentliteralA.textFragments.get(i);
                BLangExpression exprB = commentliteralB.textFragments.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case XML_QUOTED_STRING:
            BLangXMLQuotedString quotedLiteralA = (BLangXMLQuotedString) nodeA;
            BLangXMLQuotedString quotedLiteralB = (BLangXMLQuotedString) nodeB;
            isEqual = equality(quotedLiteralA.concatExpr, quotedLiteralB.concatExpr);
            for (int i = 0; isEqual && i < quotedLiteralA.textFragments.size(); i++) {
                BLangExpression exprA = quotedLiteralA.textFragments.get(i);
                BLangExpression exprB = quotedLiteralB.textFragments.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case XMLNS:
            BLangXMLNS xmlnsA = (BLangXMLNS) nodeA;
            BLangXMLNS xmlnsB = (BLangXMLNS) nodeB;
            return equality(xmlnsA.prefix, xmlnsB.prefix) && equality(xmlnsA.namespaceURI, xmlnsB.namespaceURI);
        case XML_PI_LITERAL:
            BLangXMLProcInsLiteral insLiteralA = (BLangXMLProcInsLiteral) nodeA;
            BLangXMLProcInsLiteral insLiteralB = (BLangXMLProcInsLiteral) nodeB;
            isEqual = equality(insLiteralA.target, insLiteralB.target)
                    && equality(insLiteralA.dataConcatExpr, insLiteralB.dataConcatExpr);
            for (int i = 0; isEqual && i < insLiteralA.dataFragments.size(); i++) {
                BLangExpression exprA = insLiteralA.dataFragments.get(i);
                BLangExpression exprB = insLiteralB.dataFragments.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case IDENTIFIER:
            BLangIdentifier identifierA = (BLangIdentifier) nodeA;
            BLangIdentifier identifierB = (BLangIdentifier) nodeB;
            return identifierA.value.equals(identifierB.value);
        case SIMPLE_VARIABLE_REF:
            BLangSimpleVarRef simpleVarRefA = (BLangSimpleVarRef) nodeA;
            BLangSimpleVarRef simpleVarRefB = (BLangSimpleVarRef) nodeB;
            BSymbol symbolA = simpleVarRefA.symbol;
            BSymbol symbolB = simpleVarRefB.symbol;
            // Two constant references are equal when their resolved constant values are.
            if (symbolA != null && symbolB != null
                    && (Symbols.isFlagOn(symbolA.flags, Flags.CONSTANT)
                    && Symbols.isFlagOn(symbolB.flags, Flags.CONSTANT))) {
                return (((BConstantSymbol) symbolA).value).value
                        .equals((((BConstantSymbol) symbolB).value).value);
            } else {
                return simpleVarRefA.variableName.equals(simpleVarRefB.variableName);
            }
        case STRING_TEMPLATE_LITERAL:
            BLangStringTemplateLiteral stringTemplateLiteralA = (BLangStringTemplateLiteral) nodeA;
            BLangStringTemplateLiteral stringTemplateLiteralB = (BLangStringTemplateLiteral) nodeB;
            for (int i = 0; isEqual && i < stringTemplateLiteralA.exprs.size(); i++) {
                BLangExpression exprA = stringTemplateLiteralA.exprs.get(i);
                BLangExpression exprB = stringTemplateLiteralB.exprs.get(i);
                isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB);
            }
            return isEqual;
        case LIST_CONSTRUCTOR_EXPR:
            BLangListConstructorExpr listConstructorExprA = (BLangListConstructorExpr) nodeA;
            BLangListConstructorExpr listConstructorExprB = (BLangListConstructorExpr) nodeB;
            for (int i = 0; isEqual && i < listConstructorExprA.exprs.size(); i++) {
                BLangExpression exprA = listConstructorExprA.exprs.get(i);
                BLangExpression exprB = listConstructorExprB.exprs.get(i);
                isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB);
            }
            return isEqual;
        case TABLE_CONSTRUCTOR_EXPR:
            BLangTableConstructorExpr tableConstructorExprA = (BLangTableConstructorExpr) nodeA;
            BLangTableConstructorExpr tableConstructorExprB = (BLangTableConstructorExpr) nodeB;
            for (int i = 0; isEqual && i < tableConstructorExprA.recordLiteralList.size(); i++) {
                BLangExpression exprA = tableConstructorExprA.recordLiteralList.get(i);
                BLangExpression exprB = tableConstructorExprB.recordLiteralList.get(i);
                isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB);
            }
            return isEqual;
        case TYPE_CONVERSION_EXPR:
            BLangTypeConversionExpr typeConversionExprA = (BLangTypeConversionExpr) nodeA;
            BLangTypeConversionExpr typeConversionExprB = (BLangTypeConversionExpr) nodeB;
            return equality(typeConversionExprA.expr, typeConversionExprB.expr);
        case BINARY_EXPR:
            BLangBinaryExpr binaryExprA = (BLangBinaryExpr) nodeA;
            BLangBinaryExpr binaryExprB = (BLangBinaryExpr) nodeB;
            return equality(binaryExprA.lhsExpr, binaryExprB.lhsExpr)
                    && equality(binaryExprA.rhsExpr, binaryExprB.rhsExpr);
        case UNARY_EXPR:
            BLangUnaryExpr unaryExprA = (BLangUnaryExpr) nodeA;
            BLangUnaryExpr unaryExprB = (BLangUnaryExpr) nodeB;
            return equality(unaryExprA.expr, unaryExprB.expr);
        case TYPE_TEST_EXPR:
            BLangTypeTestExpr typeTestExprA = (BLangTypeTestExpr) nodeA;
            BLangTypeTestExpr typeTestExprB = (BLangTypeTestExpr) nodeB;
            return equality(typeTestExprA.expr, typeTestExprB.expr);
        case TERNARY_EXPR:
            BLangTernaryExpr ternaryExprA = (BLangTernaryExpr) nodeA;
            BLangTernaryExpr ternaryExprB = (BLangTernaryExpr) nodeB;
            return equality(ternaryExprA.expr, ternaryExprB.expr)
                    && equality(ternaryExprA.thenExpr, ternaryExprB.thenExpr)
                    && equality(ternaryExprA.elseExpr, ternaryExprB.elseExpr);
        case GROUP_EXPR:
            BLangGroupExpr groupExprA = (BLangGroupExpr) nodeA;
            // Bug fix: previously cast nodeA here too, so a group expression always compared equal to itself.
            BLangGroupExpr groupExprB = (BLangGroupExpr) nodeB;
            return equality(groupExprA.expression, groupExprB.expression);
        default:
            return false;
    }
}
/**
 * Structural hash of a constant AST node, consistent with {@code equality}:
 * equal nodes produce equal hashes. Emits an error for node kinds that are
 * not constant expressions.
 */
public Integer hash(Node node) {
    int result = 0;
    if (node == null) {
        return result;
    }
    if (node.getKind() == NodeKind.RECORD_LITERAL_EXPR) {
        BLangRecordLiteral recordLiteral = (BLangRecordLiteral) node;
        for (RecordLiteralNode.RecordField entry : recordLiteral.fields) {
            result = 31 * result + hash(entry);
        }
    } else if (node.getKind() == NodeKind.RECORD_LITERAL_KEY_VALUE) {
        BLangRecordLiteral.BLangRecordKeyValueField field = (BLangRecordLiteral.BLangRecordKeyValueField) node;
        result = 31 * result + hash(field.key.expr) + hash(field.valueExpr);
    } else if (node.getKind() == NodeKind.ARRAY_LITERAL_EXPR) {
        BLangListConstructorExpr.BLangArrayLiteral arrayLiteral =
                (BLangListConstructorExpr.BLangArrayLiteral) node;
        for (BLangExpression expr : arrayLiteral.exprs) {
            result = 31 * result + hash(expr);
        }
    } else if (node.getKind() == NodeKind.LITERAL || node.getKind() == NodeKind.NUMERIC_LITERAL) {
        // Fixed: use short-circuit `||` instead of bitwise `|` for the boolean disjunction.
        BLangLiteral literal = (BLangLiteral) node;
        result = Objects.hash(literal.value);
    } else if (node.getKind() == NodeKind.XML_TEXT_LITERAL) {
        BLangXMLTextLiteral literal = (BLangXMLTextLiteral) node;
        result = 31 * result + hash(literal.concatExpr);
        for (BLangExpression expr : literal.textFragments) {
            result = result * 31 + hash(expr);
        }
    } else if (node.getKind() == NodeKind.XML_ATTRIBUTE) {
        BLangXMLAttribute attribute = (BLangXMLAttribute) node;
        result = 31 * result + hash(attribute.name) + hash(attribute.value);
    } else if (node.getKind() == NodeKind.XML_QNAME) {
        BLangXMLQName xmlqName = (BLangXMLQName) node;
        result = 31 * result + hash(xmlqName.localname) + hash(xmlqName.prefix);
    } else if (node.getKind() == NodeKind.XML_COMMENT_LITERAL) {
        BLangXMLCommentLiteral literal = (BLangXMLCommentLiteral) node;
        result = 31 * result + hash(literal.concatExpr);
        for (BLangExpression expr : literal.textFragments) {
            result = result * 31 + hash(expr);
        }
    } else if (node.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
        BLangXMLElementLiteral literal = (BLangXMLElementLiteral) node;
        result = 31 * result + hash(literal.startTagName) + hash(literal.endTagName);
        for (BLangExpression expr : literal.attributes) {
            result = 31 * result + hash(expr);
        }
        for (BLangExpression expr : literal.children) {
            result = 31 * result + hash(expr);
        }
    } else if (node.getKind() == NodeKind.XML_QUOTED_STRING) {
        BLangXMLQuotedString literal = (BLangXMLQuotedString) node;
        result = 31 * result + hash(literal.concatExpr);
        for (BLangExpression expr : literal.textFragments) {
            result = result * 31 + hash(expr);
        }
    } else if (node.getKind() == NodeKind.XMLNS) {
        BLangXMLNS xmlns = (BLangXMLNS) node;
        result = result * 31 + hash(xmlns.prefix) + hash(xmlns.namespaceURI);
    } else if (node.getKind() == NodeKind.XML_PI_LITERAL) {
        BLangXMLProcInsLiteral literal = (BLangXMLProcInsLiteral) node;
        result = 31 * result + hash(literal.target) + hash(literal.dataConcatExpr);
        for (BLangExpression expr : literal.dataFragments) {
            result = result * 31 + hash(expr);
        }
    } else if (node.getKind() == NodeKind.IDENTIFIER) {
        BLangIdentifier identifier = (BLangIdentifier) node;
        result = identifier.value.hashCode();
    } else if (node.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) node;
        BSymbol symbol = simpleVarRef.symbol;
        // A constant reference hashes by its resolved value, matching equality().
        if (symbol != null && Symbols.isFlagOn(symbol.flags, Flags.CONSTANT)) {
            BConstantSymbol constantSymbol = (BConstantSymbol) symbol;
            result = Objects.hash(constantSymbol.value.value);
        } else {
            result = simpleVarRef.variableName.hashCode();
        }
    } else if (node.getKind() == NodeKind.STRING_TEMPLATE_LITERAL) {
        BLangStringTemplateLiteral stringTemplateLiteral = (BLangStringTemplateLiteral) node;
        for (BLangExpression expr : stringTemplateLiteral.exprs) {
            result = result * 31 + getTypeHash(stringTemplateLiteral.getBType()) + hash(expr);
        }
    } else if (node.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) {
        BLangListConstructorExpr listConstructorExpr = (BLangListConstructorExpr) node;
        for (BLangExpression expr : listConstructorExpr.exprs) {
            result = result * 31 + getTypeHash(listConstructorExpr.getBType()) + hash(expr);
        }
    } else if (node.getKind() == NodeKind.TABLE_CONSTRUCTOR_EXPR) {
        BLangTableConstructorExpr tableConstructorExpr = (BLangTableConstructorExpr) node;
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            result = result * 31 + getTypeHash(tableConstructorExpr.getBType()) + hash(recordLiteral);
        }
    } else if (node.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
        BLangTypeConversionExpr typeConversionExpr = (BLangTypeConversionExpr) node;
        result = 31 * result + hash(typeConversionExpr.expr);
    } else if (node.getKind() == NodeKind.BINARY_EXPR) {
        BLangBinaryExpr binaryExpr = (BLangBinaryExpr) node;
        result = 31 * result + hash(binaryExpr.lhsExpr) + hash(binaryExpr.rhsExpr);
    } else if (node.getKind() == NodeKind.UNARY_EXPR) {
        BLangUnaryExpr unaryExpr = (BLangUnaryExpr) node;
        result = 31 * result + hash(unaryExpr.expr);
    } else if (node.getKind() == NodeKind.TYPE_TEST_EXPR) {
        BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) node;
        result = 31 * result + hash(typeTestExpr.expr);
    } else if (node.getKind() == NodeKind.TERNARY_EXPR) {
        BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) node;
        result = 31 * result + hash(ternaryExpr.expr) + hash(ternaryExpr.thenExpr) + hash(ternaryExpr.elseExpr);
    } else if (node.getKind() == NodeKind.GROUP_EXPR) {
        BLangGroupExpr groupExpr = (BLangGroupExpr) node;
        result = 31 * result + hash(groupExpr.expression);
    } else {
        // Any other node kind is not a constant expression and cannot be a table key.
        dlog.error(((BLangExpression) node).pos, DiagnosticErrorCode.EXPRESSION_IS_NOT_A_CONSTANT_EXPRESSION);
    }
    return result;
}
// Coarse hash over a type's tag and name, used to mix element types into expression hashes.
private Integer getTypeHash(BType type) {
    return Objects.hash(type.tag, type.name);
}
// Two types are considered "equal" for key comparison when either is assignable to the other.
private boolean getTypeEquality(BType typeA, BType typeB) {
    return types.isAssignable(typeA, typeB) || types.isAssignable(typeB, typeA);
}
/**
 * Projects the record literal onto the key fields, in {@code fieldNames} order.
 * Key fields absent from the literal yield null entries in the result.
 */
private List<BLangExpression> createKeyArray(BLangRecordLiteral literal, List<String> fieldNames) {
    Map<String, BLangExpression> fieldMap = new HashMap<>();
    for (RecordLiteralNode.RecordField recordField : literal.fields) {
        if (recordField.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyVal =
                    (BLangRecordLiteral.BLangRecordKeyValueField) recordField;
            fieldMap.put(keyVal.key.expr.toString(), keyVal.valueExpr);
        } else if (recordField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field `{x}`: the var-name field acts as both key and value.
            BLangRecordLiteral.BLangRecordVarNameField recordVarNameField =
                    (BLangRecordLiteral.BLangRecordVarNameField) recordField;
            fieldMap.put(recordVarNameField.getVariableName().value, recordVarNameField);
        }
    }
    return fieldNames.stream().map(fieldMap::get).collect(Collectors.toList());
}
/**
 * Resolves the key-field names of a table constructor: prefers the list
 * carried by the table type itself, then the explicit {@code key(...)}
 * specifier, and finally returns an empty list (no keys to check).
 */
private List<String> getFieldNames(BLangTableConstructorExpr constructorExpr) {
    // Hoisted: the referred type was previously computed twice.
    BType referredType = Types.getReferredType(constructorExpr.getBType());
    if (referredType.tag == TypeTags.TABLE) {
        List<String> fieldNames = ((BTableType) referredType).fieldNameList;
        if (fieldNames != null) {
            return fieldNames;
        }
    }
    BLangTableKeySpecifier keySpecifier = constructorExpr.tableKeySpecifier;
    if (keySpecifier != null && !keySpecifier.fieldNameIdentifierList.isEmpty()) {
        return keySpecifier.fieldNameIdentifierList.stream()
                .map(identifier -> ((BLangIdentifier) identifier).value)
                .collect(Collectors.toList());
    }
    return new ArrayList<>();
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValuePair =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            // Computed keys (`[expr]: value`) contain an expression of their own.
            if (keyValuePair.key.computedKey) {
                analyzeNode(keyValuePair.key.expr, env);
            }
            analyzeNode(keyValuePair.valueExpr, env);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field `{x}` is itself a variable reference.
            analyzeNode((BLangRecordLiteral.BLangRecordVarNameField) field, env);
        } else {
            // Spread field `{...expr}`.
            analyzeNode(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env);
        }
    }
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    // A reference counts as a use for the unused-variable bookkeeping,
    // unless it is a plain assignment target (not a read).
    this.unusedErrorVarsDeclaredWithVar.remove(varRefExpr.symbol);
    if (isNotVariableReferenceLVExpr(varRefExpr)) {
        this.unusedLocalVariables.remove(varRefExpr.symbol);
    }
    checkVarRef(varRefExpr.symbol, varRefExpr.pos);
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Reading a field through `self` counts as a use of the object field's symbol.
    if (!fieldAccessExpr.isLValue && isObjectMemberAccessWithSelf(fieldAccessExpr)) {
        checkVarRef(fieldAccessExpr.symbol, fieldAccessExpr.pos);
    }
    analyzeNode(fieldAccessExpr.expr, env);
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    // Same rule as plain field access: a `self` read counts as a field use.
    if (!nsPrefixedFieldBasedAccess.isLValue && isObjectMemberAccessWithSelf(nsPrefixedFieldBasedAccess)) {
        checkVarRef(nsPrefixedFieldBasedAccess.symbol, nsPrefixedFieldBasedAccess.pos);
    }
    analyzeNode(nsPrefixedFieldBasedAccess.expr, env);
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Both the container expression and the index expression are reads.
    analyzeNode(indexAccessExpr.expr, env);
    analyzeNode(indexAccessExpr.indexExpr, env);
}
@Override
public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
    // Every component of a multi-key index is an ordinary read.
    for (BLangExpression keyExpr : tableMultiKeyExpr.multiKeyIndexExprs) {
        analyzeNode(keyExpr, env);
    }
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // Only the navigated expression needs analysis.
    analyzeNode(xmlElementAccess.expr, env);
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
analyzeNode(xmlNavigation.expr, env);
if (xmlNavigation.childIndex == null) {
analyzeNode(xmlNavigation.childIndex, env);
}
}
    @Override
    public void visit(BLangInvocation invocationExpr) {
        analyzeNode(invocationExpr.expr, env);
        BSymbol symbol = invocationExpr.symbol;
        // Invoking a function stored in a local counts as using it.
        this.unusedLocalVariables.remove(symbol);
        // A module-local call made before all globals are initialized: just
        // record the reference and stop.
        if (isFunctionOrMethodDefinedInCurrentModule(symbol.owner, env) &&
                !isGlobalVarsInitialized(invocationExpr.pos, invocationExpr)) {
            checkVarRef(symbol, invocationExpr.pos);
            return;
        }
        // Calling through `self`, or passing `self` as an argument, requires
        // every object field to be initialized already.
        if (!isFieldsInitializedForSelfArgument(invocationExpr)) {
            return;
        }
        if (!isFieldsInitializedForSelfInvocation(invocationExpr.requiredArgs, invocationExpr.pos)) {
            return;
        }
        if (!isFieldsInitializedForSelfInvocation(invocationExpr.restArgs, invocationExpr.pos)) {
            return;
        }
        checkVarRef(symbol, invocationExpr.pos);
        invocationExpr.requiredArgs.forEach(expr -> analyzeNode(expr, env));
        invocationExpr.restArgs.forEach(expr -> analyzeNode(expr, env));
        // Record dependency edges used for global initialization ordering.
        BSymbol owner = this.env.scope.owner;
        if (owner.kind == SymbolKind.FUNCTION) {
            BInvokableSymbol invokableOwnerSymbol = (BInvokableSymbol) owner;
            Name name = names.fromIdNode(invocationExpr.name);
            BSymbol dependsOnFunctionSym = symResolver.lookupSymbolInMainSpace(this.env, name);
            if (symTable.notFoundSymbol != dependsOnFunctionSym) {
                addDependency(invokableOwnerSymbol, dependsOnFunctionSym);
            }
        } else if (symbol != null && symbol.kind == SymbolKind.FUNCTION) {
            // Invocation from a global-variable initializer: the global
            // depends on the invoked function.
            BInvokableSymbol invokableProviderSymbol = (BInvokableSymbol) symbol;
            BSymbol curDependent = this.currDependentSymbolDeque.peek();
            if (curDependent != null && isGlobalVarSymbol(curDependent)) {
                addDependency(curDependent, invokableProviderSymbol);
            }
        }
    }
    @Override
    public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
        // Analyze positional then named arguments of error(...).
        for (BLangExpression positionalArg : errorConstructorExpr.positionalArgs) {
            analyzeNode(positionalArg, env);
        }
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            analyzeNode(namedArg, env);
        }
    }

    @Override
    public void visit(BLangActionInvocation actionInvocation) {
        // Actions share the dataflow rules of ordinary invocations.
        this.visit((BLangInvocation) actionInvocation);
    }
    @Override
    public void visit(BLangQueryExpr queryExpr) {
        // Clauses are analyzed in their syntactic order.
        for (BLangNode clause : queryExpr.getQueryClauses()) {
            analyzeNode(clause, env);
        }
    }

    @Override
    public void visit(BLangFromClause fromClause) {
        BLangExpression collection = fromClause.collection;
        if (isNotRangeExpr(collection)) {
            // Register the bound variables as unused; range expressions
            // (a ... b) are excluded from this bookkeeping.
            populateUnusedVariableMapForMembers(this.unusedLocalVariables,
                    (BLangVariable) fromClause.variableDefinitionNode.getVariable());
        }
        analyzeNode(collection, env);
    }

    @Override
    public void visit(BLangJoinClause joinClause) {
        // Join variables are tracked for unused-variable reporting.
        populateUnusedVariableMapForMembers(this.unusedLocalVariables,
                (BLangVariable) joinClause.variableDefinitionNode.getVariable());
        analyzeNode(joinClause.collection, env);
        if (joinClause.onClause != null) {
            analyzeNode((BLangNode) joinClause.onClause, env);
        }
    }

    @Override
    public void visit(BLangLetClause letClause) {
        for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
            analyzeNode((BLangNode) letVariable.definitionNode, env);
        }
    }
    @Override
    public void visit(BLangWhereClause whereClause) {
        // Filter condition.
        analyzeNode(whereClause.expression, env);
    }

    @Override
    public void visit(BLangOnClause onClause) {
        // Both sides of the join condition.
        analyzeNode(onClause.lhsExpr, env);
        analyzeNode(onClause.rhsExpr, env);
    }

    @Override
    public void visit(BLangOrderKey orderKeyClause) {
        analyzeNode(orderKeyClause.expression, env);
    }

    @Override
    public void visit(BLangOrderByClause orderByClause) {
        orderByClause.orderByKeyList.forEach(value -> analyzeNode((BLangNode) value, env));
    }

    @Override
    public void visit(BLangSelectClause selectClause) {
        analyzeNode(selectClause.expression, env);
    }

    @Override
    public void visit(BLangOnConflictClause onConflictClause) {
        analyzeNode(onConflictClause.expression, env);
    }

    @Override
    public void visit(BLangLimitClause limitClause) {
        analyzeNode(limitClause.expression, env);
    }

    @Override
    public void visit(BLangDoClause doClause) {
        analyzeNode(doClause.body, env);
    }

    @Override
    public void visit(BLangOnFailClause onFailClause) {
        // Error binding variable first, then the handler body.
        analyzeNode((BLangVariable) onFailClause.variableDefinitionNode.getVariable(), env);
        analyzeNode(onFailClause.body, env);
    }
    /**
     * Check that all fields of the receiver object are initialized when the
     * invocation target expression is the {@code self} keyword.
     * Emits CONTAINS_UNINITIALIZED_FIELDS and returns false otherwise.
     */
    private boolean isFieldsInitializedForSelfArgument(BLangInvocation invocationExpr) {
        if (invocationExpr.expr == null || !isSelfKeyWordExpr(invocationExpr.expr)) {
            // Not a `self.m(...)` style call; nothing to verify.
            return true;
        }
        StringBuilder uninitializedFields =
                getUninitializedFieldsForSelfKeyword((BObjectType) ((BLangSimpleVarRef)
                        invocationExpr.expr).symbol.type);
        if (uninitializedFields.length() != 0) {
            this.dlog.error(invocationExpr.pos, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_FIELDS,
                    uninitializedFields.toString());
            return false;
        }
        return true;
    }
    /**
     * Check that all object fields are initialized whenever {@code self} is
     * passed among the given argument expressions. Reports an error at
     * {@code location} and returns false on the first violation.
     */
    private boolean isFieldsInitializedForSelfInvocation(List<BLangExpression> argExpressions,
                                                         Location location) {
        for (BLangExpression expr : argExpressions) {
            if (isSelfKeyWordExpr(expr)) {
                StringBuilder uninitializedFields =
                        getUninitializedFieldsForSelfKeyword((BObjectType) ((BLangSimpleVarRef) expr).symbol.type);
                if (uninitializedFields.length() != 0) {
                    this.dlog.error(location, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_FIELDS,
                            uninitializedFields.toString());
                    return false;
                }
            }
        }
        return true;
    }
private boolean isSelfKeyWordExpr(BLangExpression expr) {
return expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
Names.SELF.value.equals(((BLangSimpleVarRef) expr).getVariableName().getValue());
}
private StringBuilder getUninitializedFieldsForSelfKeyword(BObjectType objType) {
boolean isFirstUninitializedField = true;
StringBuilder uninitializedFields = new StringBuilder();
for (BField field : objType.fields.values()) {
if (this.uninitializedVars.containsKey(field.symbol)) {
if (isFirstUninitializedField) {
uninitializedFields = new StringBuilder(field.symbol.getName().value);
isFirstUninitializedField = false;
} else {
uninitializedFields.append(", ").append(field.symbol.getName().value);
}
}
}
return uninitializedFields;
}
private boolean isGlobalVarSymbol(BSymbol symbol) {
if (symbol == null) {
return false;
} else if (symbol.owner == null) {
return false;
} else if (symbol.owner.tag != SymTag.PACKAGE) {
return false;
}
return isVariableOrConstant(symbol);
}
private boolean isVariableOrConstant(BSymbol symbol) {
if (symbol == null) {
return false;
}
return ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) ||
((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT);
}
    /**
     * Register dependent symbol to the provider symbol.
     * Let global int a = b, a depend on b.
     * Let func foo() { returns b + 1; }, where b is a global var, then foo depends on b.
     *
     * @param dependent dependent.
     * @param provider object which provides a value.
     */
    private void addDependency(BSymbol dependent, BSymbol provider) {
        // Cross-package edges are ignored. NOTE(review): `!=` compares pkgID
        // by reference — presumably PackageID instances are interned; confirm.
        if (provider == null || dependent == null || dependent.pkgID != provider.pkgID) {
            return;
        }
        // LinkedHashSet keeps provider insertion order deterministic.
        Set<BSymbol> providers = globalNodeDependsOn.computeIfAbsent(dependent, s -> new LinkedHashSet<>());
        providers.add(provider);
        addFunctionToGlobalVarDependency(dependent, provider);
    }
    /**
     * Record a function-to-global-variable dependency edge, skipping pairs
     * where the dependent is neither a function nor a global, or the provider
     * is a non-global variable/constant.
     */
    private void addFunctionToGlobalVarDependency(BSymbol dependent, BSymbol provider) {
        if (dependent.kind != SymbolKind.FUNCTION && !isGlobalVarSymbol(dependent)) {
            return;
        }
        if (isVariableOrConstant(provider) && !isGlobalVarSymbol(provider)) {
            return;
        }
        Set<BSymbol> providers = this.functionToDependency.computeIfAbsent(dependent, s -> new HashSet<>());
        providers.add(provider);
    }
    @Override
    public void visit(BLangTypeInit typeInitExpr) {
        typeInitExpr.argsExpr.forEach(argExpr -> analyzeNode(argExpr, env));
        if (this.currDependentSymbolDeque.peek() != null) {
            // The constructing context depends on the instantiated type.
            addDependency(this.currDependentSymbolDeque.peek(),
                    Types.getReferredType(typeInitExpr.getBType()).tsymbol);
        }
    }

    @Override
    public void visit(BLangTernaryExpr ternaryExpr) {
        // Condition, then both branches.
        analyzeNode(ternaryExpr.expr, env);
        analyzeNode(ternaryExpr.thenExpr, env);
        analyzeNode(ternaryExpr.elseExpr, env);
    }
    @Override
    public void visit(BLangWaitExpr waitExpr) {
        analyzeNode(waitExpr.getExpression(), env);
    }

    @Override
    public void visit(BLangWorkerFlushExpr workerFlushExpr) {
        // No sub-expressions to analyze.
    }

    @Override
    public void visit(BLangWaitForAllExpr waitForAllExpr) {
        waitForAllExpr.keyValuePairs.forEach(keyValue -> {
            // A pair without an explicit value uses its key as the expression.
            BLangExpression expr = keyValue.valueExpr != null ? keyValue.valueExpr : keyValue.keyExpr;
            analyzeNode(expr, env);
        });
    }

    @Override
    public void visit(BLangBinaryExpr binaryExpr) {
        analyzeNode(binaryExpr.lhsExpr, env);
        analyzeNode(binaryExpr.rhsExpr, env);
    }

    @Override
    public void visit(BLangElvisExpr elvisExpr) {
        analyzeNode(elvisExpr.lhsExpr, env);
        analyzeNode(elvisExpr.rhsExpr, env);
    }

    @Override
    public void visit(BLangGroupExpr groupExpr) {
        analyzeNode(groupExpr.expression, env);
    }

    @Override
    public void visit(BLangUnaryExpr unaryExpr) {
        analyzeNode(unaryExpr.expr, env);
    }

    @Override
    public void visit(BLangTypeConversionExpr conversionExpr) {
        analyzeNode(conversionExpr.expr, env);
    }

    @Override
    public void visit(BLangXMLAttribute xmlAttribute) {
        analyzeNode(xmlAttribute.value, env);
    }

    @Override
    public void visit(BLangXMLElementLiteral xmlElementLiteral) {
        // Children, attributes and inline namespaces all carry expressions.
        xmlElementLiteral.children.forEach(expr -> analyzeNode(expr, env));
        xmlElementLiteral.attributes.forEach(expr -> analyzeNode(expr, env));
        xmlElementLiteral.inlineNamespaces.forEach(expr -> analyzeNode(expr, env));
    }

    @Override
    public void visit(BLangXMLTextLiteral xmlTextLiteral) {
        xmlTextLiteral.textFragments.forEach(expr -> analyzeNode(expr, env));
    }

    @Override
    public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
        xmlCommentLiteral.textFragments.forEach(expr -> analyzeNode(expr, env));
    }

    @Override
    public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
        xmlProcInsLiteral.dataFragments.forEach(expr -> analyzeNode(expr, env));
    }

    @Override
    public void visit(BLangXMLQuotedString xmlQuotedString) {
        xmlQuotedString.textFragments.forEach(expr -> analyzeNode(expr, env));
    }

    @Override
    public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
        stringTemplateLiteral.exprs.forEach(expr -> analyzeNode(expr, env));
    }

    @Override
    public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
        // String fragments first, then the interpolated insertions.
        for (BLangLiteral string : rawTemplateLiteral.strings) {
            analyzeNode(string, env);
        }
        for (BLangExpression expr : rawTemplateLiteral.insertions) {
            analyzeNode(expr, env);
        }
    }
    @Override
    public void visit(BLangLambdaFunction bLangLambdaFunction) {
        // Analyze the lambda body in its own function environment.
        BLangFunction funcNode = bLangLambdaFunction.function;
        SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
        visitFunctionBodyWithDynamicEnv(funcNode, funcEnv);
    }
    @Override
    public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
        analyzeNode(bLangVarArgsExpression.expr, env);
    }

    @Override
    public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
        analyzeNode(bLangNamedArgsExpression.expr, env);
    }

    @Override
    public void visit(BLangIsAssignableExpr assignableExpr) {
        // No sub-expressions to analyze.
    }

    @Override
    public void visit(BLangMatchExpression matchExpression) {
        // Matched expression first, then each pattern clause.
        analyzeNode(matchExpression.expr, env);
        matchExpression.patternClauses.forEach(pattern -> analyzeNode(pattern, env));
    }

    @Override
    public void visit(BLangMatchExprPatternClause matchExprPatternClause) {
        analyzeNode(matchExprPatternClause.expr, env);
    }

    @Override
    public void visit(BLangCheckedExpr checkedExpr) {
        analyzeNode(checkedExpr.expr, env);
    }

    @Override
    public void visit(BLangCheckPanickedExpr checkPanicExpr) {
        analyzeNode(checkPanicExpr.expr, env);
    }
    @Override
    public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
        bLangXMLSequenceLiteral.xmlItems.forEach(xml -> analyzeNode(xml, env));
    }

    @Override
    public void visit(BLangExpressionStmt exprStmtNode) {
        analyzeNode(exprStmtNode.expr, env);
    }

    @Override
    public void visit(BLangAnnotation annotationNode) {
        // Annotations carry no dataflow-relevant expressions here.
    }

    @Override
    public void visit(BLangAnnotationAttachment annAttachmentNode) {
    }

    @Override
    public void visit(BLangRetry retryNode) {
        analyzeNode(retryNode.retryBody, env);
        if (retryNode.onFailClause != null) {
            analyzeNode(retryNode.onFailClause, env);
        }
    }

    @Override
    public void visit(BLangRetryTransaction retryTransaction) {
        analyzeNode(retryTransaction.transaction, env);
    }

    @Override
    public void visit(BLangContinue continueNode) {
        // `continue` ends the current branch of the flow.
        terminateFlow();
    }
    @Override
    public void visit(BLangTypedescExpr accessExpr) {
        // No sub-expressions to analyze.
    }

    @Override
    public void visit(BLangXMLQName xmlQName) {
    }

    @Override
    public void visit(BLangArrowFunction bLangArrowFunction) {
        // Closure-captured variables must already be initialized; capturing
        // also counts as a use for unused-variable tracking.
        for (ClosureVarSymbol closureVarSymbol : bLangArrowFunction.closureVarSymbols) {
            BSymbol symbol = closureVarSymbol.bSymbol;
            if (this.uninitializedVars.containsKey(symbol)) {
                this.dlog.error(closureVarSymbol.diagnosticLocation,
                        DiagnosticErrorCode.USAGE_OF_UNINITIALIZED_VARIABLE, symbol);
            }
            this.unusedErrorVarsDeclaredWithVar.remove(symbol);
            this.unusedLocalVariables.remove(symbol);
        }
    }

    @Override
    public void visit(BLangValueType valueType) {
    }
@Override
public void visit(BLangConstant constant) {
boolean validVariable = constant.symbol != null;
if (validVariable) {
this.currDependentSymbolDeque.push(constant.symbol);
}
try {
analyzeNode(constant.expr, env);
} finally {
if (validVariable) {
this.currDependentSymbolDeque.pop();
}
}
}
    @Override
    public void visit(BLangArrayType arrayType) {
        analyzeNode(arrayType.getElementType(), env);
    }

    @Override
    public void visit(BLangBuiltInRefTypeNode builtInRefType) {
        // Built-in reference types have no analyzable parts.
    }

    @Override
    public void visit(BLangConstrainedType constrainedType) {
        analyzeNode(constrainedType.constraint, env);
    }

    @Override
    public void visit(BLangStreamType streamType) {
        // Both the value constraint and the completion/error type.
        analyzeNode(streamType.constraint, env);
        analyzeNode(streamType.error, env);
    }

    @Override
    public void visit(BLangTableTypeNode tableType) {
        analyzeNode(tableType.constraint, env);
        if (tableType.tableKeyTypeConstraint != null) {
            analyzeNode(tableType.tableKeyTypeConstraint.keyType, env);
        }
    }
    @Override
    public void visit(BLangUserDefinedType userDefinedType) {
        // Only record type references while something is being analyzed as
        // a dependent; skip unresolved (semantic-error) types.
        if (this.currDependentSymbolDeque.isEmpty()) {
            return;
        }
        BType resolvedType = Types.getReferredType(userDefinedType.getBType());
        if (resolvedType == symTable.semanticError) {
            return;
        }
        BTypeSymbol tsymbol = resolvedType.tsymbol;
        recordGlobalVariableReferenceRelationship(tsymbol);
    }
    @Override
    public void visit(BLangFunctionTypeNode functionTypeNode) {
        // `function` (any-function type) has no params/return to analyze.
        if (functionTypeNode.flagSet.contains(Flag.ANY_FUNCTION)) {
            return;
        }
        functionTypeNode.params.forEach(param -> analyzeNode(param.typeNode, env));
        analyzeNode(functionTypeNode.returnTypeNode, env);
    }
    @Override
    public void visit(BLangUnionTypeNode unionTypeNode) {
        unionTypeNode.memberTypeNodes.forEach(typeNode -> analyzeNode(typeNode, env));
    }

    @Override
    public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
        for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) {
            analyzeNode(constituentTypeNode, env);
        }
    }

    @Override
    public void visit(BLangObjectTypeNode objectTypeNode) {
        // Object type bodies are analyzed elsewhere; nothing to do here.
    }
    @Override
    public void visit(BLangRecordTypeNode recordTypeNode) {
        BTypeSymbol tsymbol = Types.getReferredType(recordTypeNode.getBType()).tsymbol;
        // Included (referenced) record types create type-level dependencies.
        for (TypeNode type : recordTypeNode.getTypeReferences()) {
            BLangType bLangType = (BLangType) type;
            analyzeNode(bLangType, env);
            recordGlobalVariableReferenceRelationship(
                    Types.getReferredType(bLangType.getBType()).tsymbol);
        }
        // Each field's type makes this record depend on that type.
        for (BLangSimpleVariable field : recordTypeNode.fields) {
            addTypeDependency(tsymbol, Types.getReferredType(field.getBType()), new HashSet<>());
            analyzeNode(field, env);
            recordGlobalVariableReferenceRelationship(field.symbol);
        }
    }
    /**
     * Recursively record that {@code dependentTypeSymbol} depends on
     * {@code providerType}, unwrapping unions, arrays, maps and type
     * references. {@code unresolvedTypes} guards against cyclic types.
     */
    private void addTypeDependency(BTypeSymbol dependentTypeSymbol, BType providerType, Set<BType> unresolvedTypes) {
        if (unresolvedTypes.contains(providerType)) {
            // Already being processed — break the cycle.
            return;
        }
        unresolvedTypes.add(providerType);
        switch (providerType.tag) {
            case TypeTags.UNION:
                for (BType memberType : ((BUnionType) providerType).getMemberTypes()) {
                    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(memberType);
                    addTypeDependency(dependentTypeSymbol, effectiveType, unresolvedTypes);
                }
                break;
            case TypeTags.ARRAY:
                addTypeDependency(dependentTypeSymbol,
                        types.getTypeWithEffectiveIntersectionTypes(((BArrayType) providerType).getElementType()),
                        unresolvedTypes);
                break;
            case TypeTags.MAP:
                addTypeDependency(dependentTypeSymbol,
                        types.getTypeWithEffectiveIntersectionTypes(((BMapType) providerType).getConstraint()),
                        unresolvedTypes);
                break;
            case TypeTags.TYPEREFDESC:
                addTypeDependency(dependentTypeSymbol, Types.getReferredType(providerType),
                        unresolvedTypes);
                break;
            default:
                // Leaf type: record the direct dependency edge.
                addDependency(dependentTypeSymbol, providerType.tsymbol);
        }
    }
    @Override
    public void visit(BLangFiniteTypeNode finiteTypeNode) {
        finiteTypeNode.valueSpace.forEach(value -> analyzeNode(value, env));
    }

    @Override
    public void visit(BLangTupleTypeNode tupleTypeNode) {
        tupleTypeNode.memberTypeNodes.forEach(type -> analyzeNode(type, env));
    }

    // Documentation and package nodes carry no dataflow information.
    @Override
    public void visit(BLangMarkdownDocumentationLine bLangMarkdownDocumentationLine) {
    }

    @Override
    public void visit(BLangMarkdownParameterDocumentation bLangDocumentationParameter) {
    }

    @Override
    public void visit(BLangMarkdownReturnParameterDocumentation bLangMarkdownReturnParameterDocumentation) {
    }

    @Override
    public void visit(BLangMarkdownDocumentation bLangMarkdownDocumentation) {
    }

    @Override
    public void visit(BLangTestablePackage testablePkgNode) {
    }

    @Override
    public void visit(BLangImportPackage importPkgNode) {
    }

    @Override
    public void visit(BLangIdentifier identifierNode) {
    }
    @Override
    public void visit(BLangPanic panicNode) {
        analyzeNode(panicNode.expr, env);
        // panic ends the current branch of the flow.
        terminateFlow();
    }

    @Override
    public void visit(BLangTrapExpr trapExpr) {
        analyzeNode(trapExpr.expr, env);
    }
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
if (this.currDependentSymbolDeque.peek() != null) {
addDependency(this.currDependentSymbolDeque.peek(),
Types.getReferredType(serviceConstructorExpr.getBType()).tsymbol);
}
addDependency(Types.getReferredType(serviceConstructorExpr.getBType()).tsymbol,
serviceConstructorExpr.serviceNode.symbol);
analyzeNode(serviceConstructorExpr.serviceNode, env);
}
    @Override
    public void visit(BLangTypeTestExpr typeTestExpr) {
        // Both the tested expression and the tested-against type node.
        analyzeNode(typeTestExpr.expr, env);
        analyzeNode(typeTestExpr.typeNode, env);
    }

    @Override
    public void visit(BLangAnnotAccessExpr annotAccessExpr) {
        analyzeNode(annotAccessExpr.expr, env);
    }

    @Override
    public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
        // Nothing to analyze.
    }

    @Override
    public void visit(BLangErrorType errorType) {
    }
    @Override
    public void visit(BLangRecordDestructure recordDestructure) {
        // RHS first, then mark the destructured targets as assigned.
        analyzeNode(recordDestructure.expr, env);
        checkAssignment(recordDestructure.varRef);
    }

    @Override
    public void visit(BLangErrorDestructure errorDestructure) {
        analyzeNode(errorDestructure.expr, env);
        checkAssignment(errorDestructure.varRef);
    }

    @Override
    public void visit(BLangTupleVarRef tupleVarRefExpr) {
        tupleVarRefExpr.expressions.forEach(expr -> analyzeNode(expr, env));
    }

    @Override
    public void visit(BLangRecordVarRef varRefExpr) {
        varRefExpr.recordRefFields.forEach(expr -> analyzeNode(expr.variableReference, env));
    }

    @Override
    public void visit(BLangErrorVarRef varRefExpr) {
        // message, optional cause, named detail entries, then the rest var.
        analyzeNode(varRefExpr.message, env);
        if (varRefExpr.cause != null) {
            analyzeNode(varRefExpr.cause, env);
        }
        for (BLangNamedArgsExpression args : varRefExpr.detail) {
            analyzeNode(args.expr, env);
        }
        analyzeNode(varRefExpr.restVar, env);
    }
@Override
public void visit(BLangTupleVariable bLangTupleVariable) {
analyzeNode(bLangTupleVariable.typeNode, env);
populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangTupleVariable);
this.currDependentSymbolDeque.push(bLangTupleVariable.symbol);
analyzeNode(bLangTupleVariable.expr, env);
this.currDependentSymbolDeque.pop();
}
    @Override
    public void visit(BLangTupleVariableDef bLangTupleVariableDef) {
        // Delegate to the variable node's own analysis.
        analyzeNode(bLangTupleVariableDef.var, env);
    }
@Override
public void visit(BLangRecordVariable bLangRecordVariable) {
analyzeNode(bLangRecordVariable.typeNode, env);
populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangRecordVariable);
this.currDependentSymbolDeque.push(bLangRecordVariable.symbol);
analyzeNode(bLangRecordVariable.expr, env);
this.currDependentSymbolDeque.pop();
}
    @Override
    public void visit(BLangRecordVariableDef bLangRecordVariableDef) {
        // Delegate to the variable node's own analysis.
        analyzeNode(bLangRecordVariableDef.var, env);
    }
@Override
public void visit(BLangErrorVariable bLangErrorVariable) {
analyzeNode(bLangErrorVariable.typeNode, env);
populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangErrorVariable);
this.currDependentSymbolDeque.push(bLangErrorVariable.symbol);
analyzeNode(bLangErrorVariable.expr, env);
this.currDependentSymbolDeque.pop();
}
    @Override
    public void visit(BLangErrorVariableDef bLangErrorVariableDef) {
        // Delegate to the error variable node's own analysis.
        analyzeNode(bLangErrorVariableDef.errorVariable, env);
    }
    @Override
    public void visit(BLangMatchStaticBindingPatternClause bLangMatchStaticBindingPatternClause) {
        analyzeNode(bLangMatchStaticBindingPatternClause.body, env);
    }

    @Override
    public void visit(BLangMatchStructuredBindingPatternClause bLangMatchStructuredBindingPatternClause) {
        analyzeNode(bLangMatchStructuredBindingPatternClause.body, env);
    }
private void addUninitializedVar(BLangVariable variable) {
if (!this.uninitializedVars.containsKey(variable.symbol)) {
this.uninitializedVars.put(variable.symbol, InitStatus.UN_INIT);
}
}
    /**
     * Analyze a branch and returns the set of uninitialized variables for that branch.
     * This method will not update the current uninitialized variables set.
     *
     * @param node Branch node to be analyzed
     * @param env Symbol environment
     * @return Result of the branch.
     */
    private BranchResult analyzeBranch(BLangNode node, SymbolEnv env) {
        // Save the outer state, analyze against a copy, then restore — the
        // caller decides how to merge the branch's result.
        Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars;
        boolean prevFlowTerminated = this.flowTerminated;
        this.uninitializedVars = copyUninitializedVars();
        this.flowTerminated = false;
        analyzeNode(node, env);
        BranchResult brachResult = new BranchResult(this.uninitializedVars, this.flowTerminated);
        this.uninitializedVars = prevUninitializedVars;
        this.flowTerminated = prevFlowTerminated;
        return brachResult;
    }
    // Shallow snapshot of the current uninitialized-variable map.
    private Map<BSymbol, InitStatus> copyUninitializedVars() {
        return new HashMap<>(this.uninitializedVars);
    }
    /**
     * Visit {@code node} with {@code env} as the active environment,
     * restoring the previous environment afterwards. Null nodes are ignored.
     */
    private void analyzeNode(BLangNode node, SymbolEnv env) {
        SymbolEnv prevEnv = this.env;
        this.env = env;
        if (node != null) {
            node.accept(this);
        }
        this.env = prevEnv;
    }
private Map<BSymbol, InitStatus> mergeUninitializedVars(Map<BSymbol, InitStatus> firstUninitVars,
Map<BSymbol, InitStatus> secondUninitVars) {
List<BSymbol> intersection = new ArrayList<>(firstUninitVars.keySet());
intersection.retainAll(secondUninitVars.keySet());
return Stream.concat(firstUninitVars.entrySet().stream(), secondUninitVars.entrySet().stream())
.collect(Collectors.toMap(entry -> entry.getKey(),
entry -> intersection.contains(entry.getKey()) ? entry.getValue() : InitStatus.PARTIAL_INIT,
(a, b) -> {
if (a == InitStatus.PARTIAL_INIT || b == InitStatus.PARTIAL_INIT) {
return InitStatus.PARTIAL_INIT;
}
return InitStatus.UN_INIT;
}));
}
    /**
     * Record the reference for dependency tracking and report an error when
     * the referenced symbol is (fully or partially) uninitialized.
     */
    private void checkVarRef(BSymbol symbol, Location pos) {
        recordGlobalVariableReferenceRelationship(symbol);
        InitStatus initStatus = this.uninitializedVars.get(symbol);
        if (initStatus == null) {
            // Symbol is fully initialized — nothing to report.
            return;
        }
        if (initStatus == InitStatus.UN_INIT) {
            this.dlog.error(pos, DiagnosticErrorCode.USAGE_OF_UNINITIALIZED_VARIABLE, symbol);
            return;
        }
        this.dlog.error(pos, DiagnosticErrorCode.PARTIALLY_INITIALIZED_VARIABLE, symbol);
    }
    /**
     * Add a dependency edge from the current context (package-level
     * dependent, let, function, object or record owner) to the referenced
     * global variable or type symbol.
     */
    private void recordGlobalVariableReferenceRelationship(BSymbol symbol) {
        if (this.env.scope == null) {
            return;
        }
        boolean globalVarSymbol = isGlobalVarSymbol(symbol);
        BSymbol ownerSymbol = this.env.scope.owner;
        boolean isInPkgLevel = ownerSymbol.getKind() == SymbolKind.PACKAGE;
        // Package-level references (to globals or types) and let-scoped
        // references attach to the current dependent on the deque.
        if (isInPkgLevel && (globalVarSymbol || symbol instanceof BTypeSymbol)
                || (ownerSymbol.tag == SymTag.LET && globalVarSymbol)) {
            BSymbol dependent = this.currDependentSymbolDeque.peek();
            addDependency(dependent, symbol);
        } else if (ownerSymbol.kind == SymbolKind.FUNCTION && globalVarSymbol) {
            // Function body referencing a global: the function depends on it.
            BInvokableSymbol invokableOwnerSymbol = (BInvokableSymbol) ownerSymbol;
            addDependency(invokableOwnerSymbol, symbol);
        } else if (ownerSymbol.kind == SymbolKind.OBJECT && globalVarSymbol) {
            addDependency(ownerSymbol, symbol);
        } else if (ownerSymbol.kind == SymbolKind.RECORD && globalVarSymbol) {
            addDependency(ownerSymbol, symbol);
        }
    }
private boolean isObjectMemberAccessWithSelf(BLangAccessExpression fieldAccessExpr) {
if (fieldAccessExpr.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
return false;
}
return Names.SELF.value.equals(((BLangSimpleVarRef) fieldAccessExpr.expr).variableName.value);
}
    /**
     * Process an assignment target: recurse through structured refs
     * (record/tuple/error), handle member accesses (including `self.field`
     * initialization and final-field checks), and finally mark simple
     * variable refs as initialized.
     */
    private void checkAssignment(BLangExpression varRef) {
        NodeKind kind = varRef.getKind();
        switch (kind) {
            case RECORD_VARIABLE_REF:
                // Recurse into each field target and the rest binding.
                BLangRecordVarRef recordVarRef = (BLangRecordVarRef) varRef;
                recordVarRef.recordRefFields.forEach(field -> checkAssignment(field.variableReference));
                if (recordVarRef.restParam != null) {
                    checkAssignment((BLangExpression) recordVarRef.restParam);
                }
                return;
            case TUPLE_VARIABLE_REF:
                BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) varRef;
                tupleVarRef.expressions.forEach(this::checkAssignment);
                if (tupleVarRef.restParam != null) {
                    checkAssignment((BLangExpression) tupleVarRef.restParam);
                }
                return;
            case ERROR_VARIABLE_REF:
                // message, cause, each detail entry, and the rest var.
                BLangErrorVarRef errorVarRef = (BLangErrorVarRef) varRef;
                if (errorVarRef.message != null) {
                    checkAssignment(errorVarRef.message);
                }
                if (errorVarRef.cause != null) {
                    checkAssignment(errorVarRef.cause);
                }
                for (BLangNamedArgsExpression expression : errorVarRef.detail) {
                    checkAssignment(expression);
                    this.uninitializedVars.remove(((BLangVariableReference) expression.expr).symbol);
                }
                if (errorVarRef.restVar != null) {
                    checkAssignment(errorVarRef.restVar);
                }
                return;
            case INDEX_BASED_ACCESS_EXPR:
            case FIELD_BASED_ACCESS_EXPR:
                BLangAccessExpression accessExpr = (BLangAccessExpression) varRef;
                BLangExpression expr = accessExpr.expr;
                BType type = Types.getReferredType(expr.getBType());
                if (isObjectMemberAccessWithSelf(accessExpr)) {
                    BObjectType objectType = (BObjectType) type;
                    BSymbol symbol = accessExpr.symbol;
                    if (this.uninitializedVars.containsKey(symbol)) {
                        // First write to `self.field` initializes it.
                        this.uninitializedVars.remove(symbol);
                        return;
                    }
                    // Already initialized: a re-assignment must respect `final`.
                    String fieldName = ((BLangFieldBasedAccess) varRef).field.value;
                    checkFinalEntityUpdate(varRef.pos, fieldName, objectType.fields.get(fieldName).symbol);
                    return;
                }
                if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
                    checkFinalObjectFieldUpdate((BLangFieldBasedAccess) accessExpr);
                }
                analyzeNode(expr, env);
                if (kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
                    analyzeNode(((BLangIndexBasedAccess) varRef).indexExpr, env);
                }
                return;
            default:
                break;
        }
        if (kind != NodeKind.SIMPLE_VARIABLE_REF && kind != NodeKind.XML_ATTRIBUTE_ACCESS_EXPR) {
            return;
        }
        if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
            BSymbol symbol = ((BLangSimpleVarRef) varRef).symbol;
            checkFinalEntityUpdate(varRef.pos, varRef, symbol);
            BSymbol owner = this.currDependentSymbolDeque.peek();
            addFunctionToGlobalVarDependency(owner, ((BLangSimpleVarRef) varRef).symbol);
        }
        // The assignment initializes the target symbol.
        this.uninitializedVars.remove(((BLangVariableReference) varRef).symbol);
    }
    /**
     * Report an error when assigning to a field that is `final` in every
     * object type the receiver expression may have.
     */
    private void checkFinalObjectFieldUpdate(BLangFieldBasedAccess fieldAccess) {
        BLangExpression expr = fieldAccess.expr;
        BType exprType = Types.getReferredType(expr.getBType());
        if (types.isSubTypeOfBaseType(exprType, TypeTags.OBJECT) &&
                isFinalFieldInAllObjects(fieldAccess.pos, exprType, fieldAccess.field.value)) {
            dlog.error(fieldAccess.pos, DiagnosticErrorCode.CANNOT_UPDATE_FINAL_OBJECT_FIELD, fieldAccess.symbol);
        }
    }
    /**
     * True when {@code fieldName} is a final field in the object type, or in
     * every member of a union of object types. A name that resolves to an
     * attached method (not a field) also counts as non-assignable.
     */
    private boolean isFinalFieldInAllObjects(Location pos, BType btype, String fieldName) {
        BType type = Types.getReferredType(btype);
        if (type.tag == TypeTags.OBJECT) {
            BField field = ((BObjectType) type).fields.get(fieldName);
            if (field != null) {
                return Symbols.isFlagOn(field.symbol.flags, Flags.FINAL);
            }
            // Not a field: check whether the name is an attached method.
            BObjectTypeSymbol objTypeSymbol = (BObjectTypeSymbol) type.tsymbol;
            Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objTypeSymbol.name.value, fieldName));
            BSymbol funcSymbol = symResolver.resolveObjectMethod(pos, env, funcName, objTypeSymbol);
            return funcSymbol != null;
        }
        // Union: the field must be final in every member type.
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isFinalFieldInAllObjects(pos, memberType, fieldName)) {
                return false;
            }
        }
        return true;
    }
    /**
     * Report assignments to `final` entities: re-assignment after full
     * initialization, or assignment when the entity may already be
     * initialized on some path (PARTIAL_INIT).
     */
    private void checkFinalEntityUpdate(Location pos, Object field, BSymbol symbol) {
        if (symbol == null || !Symbols.isFlagOn(symbol.flags, Flags.FINAL)) {
            return;
        }
        if (!this.uninitializedVars.containsKey(symbol)) {
            // Already fully initialized — a second assignment is an error.
            dlog.error(pos, DiagnosticErrorCode.CANNOT_ASSIGN_VALUE_FINAL, symbol);
            return;
        }
        InitStatus initStatus = this.uninitializedVars.get(symbol);
        if (initStatus == InitStatus.PARTIAL_INIT) {
            dlog.error(pos, DiagnosticErrorCode.CANNOT_ASSIGN_VALUE_TO_POTENTIALLY_INITIALIZED_FINAL, symbol);
        }
    }
    // Marks the current branch's flow as terminated (used by panic/continue).
    private void terminateFlow() {
        this.flowTerminated = true;
    }
private void checkUnusedImports(List<BLangImportPackage> imports) {
for (BLangImportPackage importStmt : imports) {
if (importStmt.symbol == null || importStmt.symbol.isUsed ||
Names.IGNORE.value.equals(importStmt.alias.value)) {
continue;
}
dlog.error(importStmt.alias.pos, DiagnosticErrorCode.UNUSED_MODULE_PREFIX, importStmt.alias.value);
}
}
    // Reports every error-typed `var` declaration that was never used.
    private void checkUnusedErrorVarsDeclaredWithVar() {
        for (Map.Entry<BSymbol, Location> entry : this.unusedErrorVarsDeclaredWithVar.entrySet()) {
            this.dlog.error(entry.getValue(), DiagnosticErrorCode.UNUSED_VARIABLE_WITH_INFERRED_TYPE_INCLUDING_ERROR,
                    entry.getKey().name);
        }
    }

    // Emits a warning (not an error) for each remaining unused local.
    private void emitUnusedVariableWarnings(Map<BSymbol, Location> unusedLocalVariables) {
        for (Map.Entry<BSymbol, Location> entry : unusedLocalVariables.entrySet()) {
            this.dlog.warning(entry.getValue(), DiagnosticWarningCode.UNUSED_LOCAL_VARIABLE, entry.getKey().name);
        }
    }
    /**
     * Track the variable for the unused-error-var check when its inferred
     * type intersects with `error`. Returns true when tracked.
     */
    private boolean addVarIfInferredTypeIncludesError(BLangSimpleVariable variable) {
        BType typeIntersection =
                types.getTypeIntersection(Types.IntersectionContext.compilerInternalIntersectionContext(),
                        variable.getBType(), symTable.errorType, env);
        if (typeIntersection != null &&
                typeIntersection != symTable.semanticError && typeIntersection != symTable.noType) {
            unusedErrorVarsDeclaredWithVar.put(variable.symbol, variable.pos);
            return true;
        }
        return false;
    }
    // A local declared with a real name (not the `_` wildcard).
    private boolean isLocalVariableDefinedWithNonWildCardBindingPattern(BLangSimpleVariable variable) {
        if (isWildCardBindingPattern(variable)) {
            return false;
        }
        return isLocalVariable(variable.symbol);
    }

    // Wildcard check on the AST variable's declared name.
    private boolean isWildCardBindingPattern(BLangSimpleVariable variable) {
        return Names.IGNORE.value.equals(variable.name.value);
    }

    // Wildcard check on the symbol (interned Name, so identity compare).
    private boolean isWildCardBindingPattern(BVarSymbol symbol) {
        return Names.IGNORE == symbol.name;
    }
private boolean isLocalVariable(BVarSymbol symbol) {
if (symbol == null) {
return false;
}
BSymbol owner = symbol.owner;
if (owner == null || owner.tag == SymTag.PACKAGE) {
return false;
}
if (owner.tag == SymTag.LET) {
return true;
}
if (owner.tag != SymTag.FUNCTION) {
return false;
}
long flags = symbol.flags;
SymbolKind kind = symbol.kind;
if (kind == SymbolKind.PATH_PARAMETER || kind == SymbolKind.PATH_REST_PARAMETER) {
return false;
}
return !Symbols.isFlagOn(flags, Flags.REQUIRED_PARAM)
&& !Symbols.isFlagOn(flags, Flags.DEFAULTABLE_PARAM)
&& !Symbols.isFlagOn(flags, Flags.INCLUDED)
&& !Symbols.isFlagOn(flags, Flags.REST_PARAM);
}
    /**
     * Register the members of a structured binding pattern for unused
     * tracking, but only when the pattern declares locals.
     */
    private void populateUnusedVariableMapForNonSimpleBindingPatternVariables(
            Map<BSymbol, Location> unusedLocalVariables, BLangVariable variable) {
        if (!isLocalVariable(variable.symbol)) {
            return;
        }
        populateUnusedVariableMapForMembers(unusedLocalVariables, variable);
    }
private void populateUnusedVariableMapForMembers(Map<BSymbol, Location> unusedLocalVariables,
BLangVariable variable) {
if (variable == null) {
return;
}
switch (variable.getKind()) {
case VARIABLE:
BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable;
if (!isWildCardBindingPattern(simpleVariable)) {
unusedLocalVariables.put(simpleVariable.symbol, simpleVariable.pos);
}
break;
case RECORD_VARIABLE:
BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
for (BLangRecordVariable.BLangRecordVariableKeyValue member : recordVariable.variableList) {
populateUnusedVariableMapForMembers(unusedLocalVariables, member.valueBindingPattern);
}
populateUnusedVariableMapForMembers(unusedLocalVariables, (BLangVariable) recordVariable.restParam);
break;
case TUPLE_VARIABLE:
BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
for (BLangVariable memberVariable : tupleVariable.memberVariables) {
populateUnusedVariableMapForMembers(unusedLocalVariables, memberVariable);
}
populateUnusedVariableMapForMembers(unusedLocalVariables, tupleVariable.restVariable);
break;
case ERROR_VARIABLE:
BLangErrorVariable errorVariable = (BLangErrorVariable) variable;
populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.message);
populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.cause);
for (BLangErrorVariable.BLangErrorDetailEntry member : errorVariable.detail) {
populateUnusedVariableMapForMembers(unusedLocalVariables, member.valueBindingPattern);
}
populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.restDetail);
break;
}
}
// A var-ref counts as a "use" unless it is purely the target of a plain assignment.
private boolean isNotVariableReferenceLVExpr(BLangSimpleVarRef varRefExpr) {
    if (!varRefExpr.isLValue) {
        return true;
    }
    BLangNode parent = varRefExpr.parent;
    return parent != null && parent.getKind() != NodeKind.ASSIGNMENT;
}

// True unless the collection is a `a ..< b` / `a ... b` range binary expression.
private boolean isNotRangeExpr(BLangExpression collection) {
    if (collection.getKind() != NodeKind.BINARY_EXPR) {
        return true;
    }
    OperatorKind opKind = ((BLangBinaryExpr) collection).opKind;
    return opKind != OperatorKind.HALF_OPEN_RANGE && opKind != OperatorKind.CLOSED_RANGE;
}
// For class methods compare the class's owner; otherwise the function's owner
// must be the enclosing package symbol itself.
private boolean isFunctionOrMethodDefinedInCurrentModule(BSymbol owner, SymbolEnv env) {
    if (Symbols.isFlagOn(owner.flags, Flags.CLASS)) {
        return owner.owner == getEnclPkgSymbol(env);
    }
    return owner == getEnclPkgSymbol(env);
}

/**
 * Walks up the environment chain until a package is found.
 *
 * @return the enclosing package symbol, or null if no environment carries one
 */
private BPackageSymbol getEnclPkgSymbol(SymbolEnv env) {
    BLangPackage enclPkg = env.enclPkg;
    if (enclPkg != null) {
        return enclPkg.symbol;
    }
    SymbolEnv enclEnv = env.enclEnv;
    if (enclEnv == null) {
        return null;
    }
    return getEnclPkgSymbol(enclEnv);
}
// Initialization state of a variable: never initialized, or initialized on
// only some branches (PARTIAL_INIT).
private enum InitStatus {
    UN_INIT, PARTIAL_INIT
}

// Result of analyzing one branch: the uninitialized-variable set it produced
// and whether control flow terminated inside it (return/break/panic/...).
private class BranchResult {

    Map<BSymbol, InitStatus> uninitializedVars;
    boolean flowTerminated;

    BranchResult(Map<BSymbol, InitStatus> uninitializedVars, boolean flowTerminated) {
        this.uninitializedVars = uninitializedVars;
        this.flowTerminated = flowTerminated;
    }
}
} | class DataflowAnalyzer extends BLangNodeVisitor {
// Collaborators resolved from the CompilerContext.
private final SymbolResolver symResolver;
private final Names names;
// Current symbol environment; updated as the visitor descends into scopes.
private SymbolEnv env;
private SymbolTable symTable;
private BLangDiagnosticLog dlog;
private Types types;
// Variables not yet definitely assigned, with their initialization status.
private Map<BSymbol, InitStatus> uninitializedVars;
// error-typed vars declared with `var` that have not been observed as used.
private Map<BSymbol, Location> unusedErrorVarsDeclaredWithVar;
// Local variables not yet observed to be read; drained into warnings per scope.
private Map<BSymbol, Location> unusedLocalVariables;
// Global dependency graph: symbol -> symbols its initialization depends on.
private Map<BSymbol, Set<BSymbol>> globalNodeDependsOn;
private Map<BSymbol, Set<BSymbol>> functionToDependency;
// True once the current flow is known to terminate (return/break/etc.).
private boolean flowTerminated = false;
private static final CompilerContext.Key<DataflowAnalyzer> DATAFLOW_ANALYZER_KEY = new CompilerContext.Key<>();
// Stack of symbols whose definitions are currently being analyzed; the top is
// the dependent recorded when a global reference is encountered.
private Deque<BSymbol> currDependentSymbolDeque;
private final GlobalVariableRefAnalyzer globalVariableRefAnalyzer;
// Private constructor: registers this instance in the context and pulls the
// singleton collaborators out of it.
private DataflowAnalyzer(CompilerContext context) {
    context.put(DATAFLOW_ANALYZER_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.types = Types.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.names = Names.getInstance(context);
    this.currDependentSymbolDeque = new ArrayDeque<>();
    this.globalVariableRefAnalyzer = GlobalVariableRefAnalyzer.getInstance(context);
    this.unusedLocalVariables = new HashMap<>();
}

// Standard per-CompilerContext singleton accessor.
public static DataflowAnalyzer getInstance(CompilerContext context) {
    DataflowAnalyzer dataflowAnalyzer = context.get(DATAFLOW_ANALYZER_KEY);
    if (dataflowAnalyzer == null) {
        dataflowAnalyzer = new DataflowAnalyzer(context);
    }
    return dataflowAnalyzer;
}
/**
 * Perform data-flow analysis on a package.
 *
 * @param pkgNode Package to perform data-flow analysis.
 * @return Data-flow analyzed package
 */
public BLangPackage analyze(BLangPackage pkgNode) {
    // Fresh per-package state; LinkedHashMap keeps diagnostics in source order.
    this.uninitializedVars = new LinkedHashMap<>();
    this.globalNodeDependsOn = new LinkedHashMap<>();
    this.functionToDependency = new HashMap<>();
    this.dlog.setCurrentPackageId(pkgNode.packageID);
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);
    analyzeNode(pkgNode, pkgEnv);
    return pkgNode;
}
/**
 * Analyzes all top-level nodes of a package: global variables first (in their
 * declared order), then the module init function, then everything else.
 * Afterwards it reports uninitialized globals, reorders globals by their
 * dependency graph, and emits unused-import/variable diagnostics.
 */
@Override
public void visit(BLangPackage pkgNode) {
    if (pkgNode.completedPhases.contains(CompilerPhase.DATAFLOW_ANALYZE)) {
        return;
    }
    // Save and reset the unused-variable maps; restored after this package
    // (testable packages are visited recursively below).
    Map<BSymbol, Location> prevUnusedErrorVarsDeclaredWithVar = this.unusedErrorVarsDeclaredWithVar;
    this.unusedErrorVarsDeclaredWithVar = new HashMap<>();
    Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables;
    this.unusedLocalVariables = new HashMap<>();
    List<TopLevelNode> sortedListOfNodes = new ArrayList<>(pkgNode.globalVars);
    addModuleInitToSortedNodeList(pkgNode, sortedListOfNodes);
    addNodesToSortedNodeList(pkgNode, sortedListOfNodes);
    for (TopLevelNode topLevelNode : sortedListOfNodes) {
        if (isModuleInitFunction((BLangNode) topLevelNode)) {
            analyzeModuleInitFunc((BLangFunction) topLevelNode);
        } else {
            if (topLevelNode.getKind() == NodeKind.CLASS_DEFN) {
                BLangClassDefinition classDef = (BLangClassDefinition) topLevelNode;
                // Object-constructor-expression classes are analyzed at their
                // use site with the captured closure environment; skip here.
                if (classDef.flagSet.contains(Flag.OBJECT_CTOR)) {
                    continue;
                }
            }
            analyzeNode((BLangNode) topLevelNode, env);
        }
    }
    checkForUninitializedGlobalVars(pkgNode.globalVars);
    pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));
    this.globalVariableRefAnalyzer.analyzeAndReOrder(pkgNode, this.globalNodeDependsOn);
    this.globalVariableRefAnalyzer.populateFunctionDependencies(this.functionToDependency, pkgNode.globalVars);
    pkgNode.globalVariableDependencies = globalVariableRefAnalyzer.getGlobalVariablesDependsOn();
    checkUnusedImports(pkgNode.imports);
    emitUnusedVariableWarnings(this.unusedLocalVariables);
    this.unusedLocalVariables = prevUnusedLocalVariables;
    checkUnusedErrorVarsDeclaredWithVar();
    this.unusedErrorVarsDeclaredWithVar = prevUnusedErrorVarsDeclaredWithVar;
    pkgNode.completedPhases.add(CompilerPhase.DATAFLOW_ANALYZE);
}
// Appends the (single) user-defined module init function, if present, so it is
// analyzed right after the global variables.
private void addModuleInitToSortedNodeList(BLangPackage pkgNode, List<TopLevelNode> sortedListOfNodes) {
    for (TopLevelNode node : pkgNode.topLevelNodes) {
        if (isModuleInitFunction((BLangNode) node)) {
            sortedListOfNodes.add(node);
            break;
        }
    }
}

// Appends every remaining top-level node that was not already placed
// (globals and the init function were added first).
private void addNodesToSortedNodeList(BLangPackage pkgNode, List<TopLevelNode> sortedListOfNodes) {
    pkgNode.topLevelNodes.forEach(topLevelNode -> {
        if (!sortedListOfNodes.contains(topLevelNode)) {
            sortedListOfNodes.add(topLevelNode);
        }
    });
}

// A function named with the user-defined init suffix is the module initializer.
private boolean isModuleInitFunction(BLangNode node) {
    return node.getKind() == NodeKind.FUNCTION &&
            Names.USER_DEFINED_INIT_SUFFIX.value.equals(((BLangFunction) node).name.value);
}
// Analyzes the module init function under a dedicated environment, scoping the
// unused-local-variable tracking to its body.
private void analyzeModuleInitFunc(BLangFunction funcNode) {
    Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables;
    this.unusedLocalVariables = new HashMap<>();
    this.currDependentSymbolDeque.push(funcNode.symbol);
    SymbolEnv moduleInitFuncEnv = SymbolEnv.createModuleInitFunctionEnv(funcNode, funcNode.symbol.scope, env);
    for (BLangAnnotationAttachment bLangAnnotationAttachment : funcNode.annAttachments) {
        analyzeNode(bLangAnnotationAttachment.expr, env);
    }
    analyzeNode(funcNode.body, moduleInitFuncEnv);
    this.currDependentSymbolDeque.pop();
    emitUnusedVariableWarnings(this.unusedLocalVariables);
    this.unusedLocalVariables = prevUnusedLocalVariables;
}

// Reports every global variable that is still uninitialized after analysis.
private void checkForUninitializedGlobalVars(List<BLangVariable> globalVars) {
    for (BLangVariable globalVar : globalVars) {
        if (globalVar.getKind() == NodeKind.VARIABLE && this.uninitializedVars.containsKey(globalVar.symbol)) {
            this.dlog.error(globalVar.pos, DiagnosticErrorCode.UNINITIALIZED_VARIABLE, globalVar.symbol);
        }
    }
}
// Resource functions are analyzed exactly like ordinary functions.
@Override
public void visit(BLangResourceFunction funcNode) {
    visit((BLangFunction) funcNode);
}

/**
 * Analyzes a function: annotations, parameters, then the body. The body of an
 * object-constructor function needs the dynamic (captured closure) environment;
 * all other bodies are analyzed as an independent branch.
 */
@Override
public void visit(BLangFunction funcNode) {
    SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    // Unused-variable tracking is scoped per function.
    Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables;
    this.unusedLocalVariables = new HashMap<>();
    this.currDependentSymbolDeque.push(funcNode.symbol);
    funcNode.annAttachments.forEach(bLangAnnotationAttachment -> analyzeNode(bLangAnnotationAttachment.expr, env));
    funcNode.requiredParams.forEach(param -> analyzeNode(param, funcEnv));
    analyzeNode(funcNode.restParam, funcEnv);
    if (funcNode.flagSet.contains(Flag.OBJECT_CTOR)) {
        visitFunctionBodyWithDynamicEnv(funcNode, funcEnv);
    } else {
        analyzeBranch(funcNode.body, funcEnv);
    }
    this.currDependentSymbolDeque.pop();
    emitUnusedVariableWarnings(this.unusedLocalVariables);
    this.unusedLocalVariables = prevUnusedLocalVariables;
}
/**
 * Analyzes an object-constructor function body with access to the enclosing
 * scope's unused/uninitialized variable state (closures may use outer locals).
 * Warnings are emitted only for variables introduced inside this body; outer
 * entries that got used are removed from the caller's map.
 */
private void visitFunctionBodyWithDynamicEnv(BLangFunction funcNode, SymbolEnv funcEnv) {
    Map<BSymbol, Location> prevUnusedLocalVariables = this.unusedLocalVariables;
    this.unusedLocalVariables = new HashMap<>();
    // Seed with the outer scope's candidates so closure reads mark them used.
    this.unusedLocalVariables.putAll(prevUnusedLocalVariables);
    Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars;
    this.uninitializedVars = copyUninitializedVars();
    this.flowTerminated = false;
    analyzeNode(funcNode.body, funcEnv);
    this.uninitializedVars = prevUninitializedVars;
    // Drop outer entries that this body used, then warn only about the rest
    // that belong to this body.
    prevUnusedLocalVariables.keySet().removeIf(bSymbol -> !this.unusedLocalVariables.containsKey(bSymbol));
    this.unusedLocalVariables.keySet().removeAll(prevUnusedLocalVariables.keySet());
    emitUnusedVariableWarnings(this.unusedLocalVariables);
    this.unusedLocalVariables = prevUnusedLocalVariables;
}
// Analyzes each statement of a block function body in a body-level environment.
@Override
public void visit(BLangBlockFunctionBody body) {
    SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
    bodyEnv.isModuleInit = env.isModuleInit;
    for (BLangStatement statement : body.stmts) {
        analyzeNode(statement, bodyEnv);
    }
}

// Expression-bodied function: analyze the single expression.
@Override
public void visit(BLangExprFunctionBody body) {
    SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
    analyzeNode(body.expr, bodyEnv);
}

// External (native) bodies have nothing to analyze.
@Override
public void visit(BLangExternalFunctionBody body) {
}

// Analyzes each statement of a block statement in a fresh block environment.
@Override
public void visit(BLangBlockStmt blockNode) {
    SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env);
    blockNode.stmts.forEach(statement -> analyzeNode(statement, blockEnv));
}

// Let expression: analyze each binding, then the result expression, in the
// let expression's own environment.
@Override
public void visit(BLangLetExpression letExpression) {
    for (BLangLetVariable letVarDeclaration : letExpression.letVarDeclarations) {
        analyzeNode((BLangNode) letVarDeclaration.definitionNode, letExpression.env);
    }
    analyzeNode(letExpression.expr, letExpression.env);
}

// No data flow to analyze for a compilation unit node itself.
@Override
public void visit(BLangCompilationUnit compUnit) {
}

// XMLNS declarations carry no data flow of interest.
@Override
public void visit(BLangXMLNS xmlnsNode) {
}
// A service is analyzed via its generated service class, its attach
// expressions, and its annotations; dependencies are attributed to the class.
@Override
public void visit(BLangService service) {
    this.currDependentSymbolDeque.push(service.serviceClass.symbol);
    visit(service.serviceClass);
    for (BLangExpression attachedExpr : service.attachedExprs) {
        analyzeNode(attachedExpr, env);
    }
    service.annAttachments.forEach(bLangAnnotationAttachment -> analyzeNode(bLangAnnotationAttachment.expr, env));
    this.currDependentSymbolDeque.pop();
}

// Type definitions: analyze the type node with the type's symbol as the
// dependent, so references inside it are recorded against the type.
@Override
public void visit(BLangTypeDefinition typeDefinition) {
    SymbolEnv typeDefEnv;
    BSymbol symbol = typeDefinition.symbol;
    // For type-def symbols, dependencies belong to the underlying type symbol.
    if (typeDefinition.symbol.kind == SymbolKind.TYPE_DEF) {
        symbol = symbol.type.tsymbol;
    }
    typeDefEnv = SymbolEnv.createTypeEnv(typeDefinition.typeNode, symbol.scope, env);
    this.currDependentSymbolDeque.push(symbol);
    analyzeNode(typeDefinition.typeNode, typeDefEnv);
    this.currDependentSymbolDeque.pop();
}
/**
 * Analyzes a class definition: annotations, fields, the init function body
 * (resolved from the package if detached), then member functions and type
 * references. Public/protected fields left uninitialized by init are reported.
 * Object-constructor-expression (OCE) classes are analyzed under their captured
 * closure environment with saved-and-restored variable state.
 */
@Override
public void visit(BLangClassDefinition classDef) {
    SymbolEnv preEnv = env;
    // Local shadow of the field; may be swapped to the captured closure env.
    SymbolEnv env = this.env;
    Map<BSymbol, Location> prevUnusedLocalVariables = null;
    Map<BSymbol, InitStatus> prevUninitializedVars = null;
    boolean visitedOCE = false;
    if (classDef.flagSet.contains(Flag.OBJECT_CTOR) && classDef.oceEnvData.capturedClosureEnv != null &&
            classDef.oceEnvData.capturedClosureEnv.enclEnv != null) {
        env = classDef.oceEnvData.capturedClosureEnv.enclEnv;
        prevUnusedLocalVariables = this.unusedLocalVariables;
        prevUninitializedVars = this.uninitializedVars;
        this.unusedLocalVariables = new HashMap<>();
        this.unusedLocalVariables.putAll(prevUnusedLocalVariables);
        this.uninitializedVars = copyUninitializedVars();
        this.flowTerminated = false;
        visitedOCE = true;
    }
    SymbolEnv objectEnv = SymbolEnv.createClassEnv(classDef, classDef.symbol.scope, env);
    this.currDependentSymbolDeque.push(classDef.symbol);
    for (BLangAnnotationAttachment bLangAnnotationAttachment : classDef.annAttachments) {
        analyzeNode(bLangAnnotationAttachment.expr, env);
    }
    classDef.fields.forEach(field -> analyzeNode(field, objectEnv));
    classDef.referencedFields.forEach(field -> analyzeNode(field, objectEnv));
    if (classDef.initFunction != null) {
        if (classDef.initFunction.body == null) {
            // The init body may live as a module-level function; reattach it.
            Optional<BLangFunction> outerFuncDef =
                    objectEnv.enclPkg.functions.stream()
                            .filter(f -> f.symbol.name.equals((classDef.initFunction).symbol.name))
                            .findFirst();
            outerFuncDef.ifPresent(bLangFunction -> classDef.initFunction = bLangFunction);
        }
        if (classDef.initFunction.body != null) {
            Map<BSymbol, Location> prevUnusedLocalVars = this.unusedLocalVariables;
            this.unusedLocalVariables = new HashMap<>();
            if (classDef.initFunction.body.getKind() == NodeKind.BLOCK_FUNCTION_BODY) {
                for (BLangStatement statement :
                        ((BLangBlockFunctionBody) classDef.initFunction.body).stmts) {
                    analyzeNode(statement, objectEnv);
                }
            } else if (classDef.initFunction.body.getKind() == NodeKind.EXPR_FUNCTION_BODY) {
                analyzeNode(((BLangExprFunctionBody) classDef.initFunction.body).expr, objectEnv);
            }
            emitUnusedVariableWarnings(this.unusedLocalVariables);
            this.unusedLocalVariables = prevUnusedLocalVars;
        }
    }
    // Record type dependencies for every field, then flag non-private fields
    // that init left uninitialized.
    Stream.concat(classDef.fields.stream(), classDef.referencedFields.stream())
            .map(field -> {
                addTypeDependency(classDef.symbol, field.getBType(), new HashSet<>());
                return field; })
            .filter(field -> !Symbols.isPrivate(field.symbol))
            .forEach(field -> {
                if (this.uninitializedVars.containsKey(field.symbol)) {
                    this.dlog.error(field.pos, DiagnosticErrorCode.OBJECT_UNINITIALIZED_FIELD, field.symbol);
                }
            });
    for (BLangFunction function : classDef.functions) {
        analyzeNode(function, env);
    }
    for (BLangType type : classDef.typeRefs) {
        analyzeNode(type, env);
    }
    this.env = preEnv;
    if (visitedOCE) {
        this.uninitializedVars = prevUninitializedVars;
        prevUnusedLocalVariables.keySet().removeIf(bSymbol -> !this.unusedLocalVariables.containsKey(bSymbol));
        this.unusedLocalVariables = prevUnusedLocalVariables;
    }
    this.currDependentSymbolDeque.pop();
}
// Object constructor expression: symbols captured by the closure count as
// used, so they are removed from the unused map before the class and the
// type-init expression are analyzed.
@Override
public void visit(BLangObjectConstructorExpression objectConstructorExpression) {
    BLangClassDefinition classDef = objectConstructorExpression.classNode;
    if (classDef.flagSet.contains(Flag.OBJECT_CTOR)) {
        OCEDynamicEnvironmentData oceData = classDef.oceEnvData;
        for (BSymbol symbol : oceData.closureFuncSymbols) {
            this.unusedLocalVariables.remove(symbol);
        }
        for (BSymbol symbol : oceData.closureBlockSymbols) {
            this.unusedLocalVariables.remove(symbol);
        }
    }
    visit(objectConstructorExpression.classNode);
    visit(objectConstructorExpression.typeInit);
    addDependency(objectConstructorExpression.getBType().tsymbol, objectConstructorExpression.classNode.symbol);
}
// Variable definition statement. A declaration without an initializer is
// recorded as uninitialized (and as unused if it is a named local); otherwise
// the variable node itself is analyzed.
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
    BLangSimpleVariable var = varDefNode.var;
    if (var.expr == null) {
        addUninitializedVar(var);
        analyzeNode(var.typeNode, env);
        BVarSymbol symbol = var.symbol;
        if (var.getKind() == NodeKind.VARIABLE && isLocalVariableDefinedWithNonWildCardBindingPattern(var)) {
            this.unusedLocalVariables.put(symbol, var.pos);
        }
        return;
    }
    analyzeNode(var, env);
}
/**
 * Analyzes a simple variable: records dependency relationships for its type,
 * tracks it as unused when applicable, and either clears or records its
 * uninitialized status depending on whether it has an initializer.
 */
@Override
public void visit(BLangSimpleVariable variable) {
    BVarSymbol symbol = variable.symbol;
    analyzeNode(variable.typeNode, env);
    if (symbol == null) {
        if (variable.expr != null) {
            analyzeNode(variable.expr, env);
        }
        return;
    }
    this.currDependentSymbolDeque.push(symbol);
    if (variable.typeNode != null && variable.typeNode.getBType() != null) {
        BType type = variable.typeNode.getBType();
        recordGlobalVariableReferenceRelationship(Types.getReferredType(type).tsymbol);
    }
    // Inside a module-level let expression, references should be attributed to
    // the enclosing module variable as well.
    boolean withInModuleVarLetExpr = symbol.owner.tag == SymTag.LET && isGlobalVarSymbol(env.enclVarSym);
    if (withInModuleVarLetExpr) {
        BVarSymbol dependentVar = env.enclVarSym;
        this.currDependentSymbolDeque.push(dependentVar);
    }
    try {
        boolean varWithInferredTypeIncludingError = false;
        if (variable.isDeclaredWithVar) {
            varWithInferredTypeIncludingError = addVarIfInferredTypeIncludesError(variable);
        }
        // Worker declarations look like variables but are not "unused locals".
        if (!varWithInferredTypeIncludingError &&
                isLocalVariableDefinedWithNonWildCardBindingPattern(variable) &&
                !isVariableDeclaredForWorkerDeclaration(variable)) {
            this.unusedLocalVariables.put(symbol, variable.pos);
        }
        if (variable.expr != null) {
            analyzeNode(variable.expr, env);
            this.uninitializedVars.remove(symbol);
            return;
        }
        long varFlags = symbol.flags;
        // Required configurables must be supplied externally; not "uninitialized".
        if (Symbols.isFlagOn(varFlags, Flags.CONFIGURABLE) && Symbols.isFlagOn(varFlags, Flags.REQUIRED)) {
            return;
        }
        BSymbol owner = symbol.owner;
        // Only module-level and object-field variables are tracked here;
        // uninitialized locals are handled at the variable-def statement.
        if (owner.tag != SymTag.PACKAGE && owner.tag != SymTag.OBJECT) {
            return;
        }
        addUninitializedVar(variable);
    } finally {
        if (withInModuleVarLetExpr) {
            this.currDependentSymbolDeque.pop();
        }
        this.currDependentSymbolDeque.pop();
    }
}
// True when the variable is the desugared form of a `worker` declaration:
// either its symbol carries the WORKER flag or its initializer is a lambda
// whose function is flagged as a worker.
private boolean isVariableDeclaredForWorkerDeclaration(BLangSimpleVariable variable) {
    BLangExpression expr = variable.expr;
    if (expr == null) {
        return false;
    }
    if (Symbols.isFlagOn(variable.symbol.flags, Flags.WORKER)) {
        return true;
    }
    return expr.getKind() == NodeKind.LAMBDA && ((BLangLambdaFunction) expr).function.flagSet.contains(Flag.WORKER);
}
// Assignment: analyze the RHS, then mark the target initialized.
@Override
public void visit(BLangAssignment assignment) {
    analyzeNode(assignment.expr, env);
    checkAssignment(assignment.varRef);
}

// Compound assignment (`x += e`) both reads and writes the target, so the
// var-ref is analyzed as a use and then cleared from uninitializedVars.
@Override
public void visit(BLangCompoundAssignment compoundAssignNode) {
    analyzeNode(compoundAssignNode.expr, env);
    analyzeNode(compoundAssignNode.varRef, env);
    checkAssignment(compoundAssignNode.varRef);
    this.uninitializedVars.remove(compoundAssignNode.varRef.symbol);
}

// `break` ends the current flow.
@Override
public void visit(BLangBreak breakNode) {
    terminateFlow();
}

// `return` analyzes its expression, then ends the current flow.
@Override
public void visit(BLangReturn returnNode) {
    analyzeNode(returnNode.expr, env);
    terminateFlow();
}

@Override
public void visit(BLangXMLNSStatement xmlnsStmt) {
    analyzeNode(xmlnsStmt.xmlnsDecl, env);
}
/**
 * If statement: both branches are analyzed independently. If one branch
 * terminates, the other branch's state carries forward; if the condition is a
 * constant true, the then-branch's state wins; otherwise the two branch
 * states are merged (a variable set in only one branch becomes PARTIAL_INIT).
 */
@Override
public void visit(BLangIf ifNode) {
    analyzeNode(ifNode.expr, env);
    BranchResult ifResult = analyzeBranch(ifNode.body, env);
    BranchResult elseResult = analyzeBranch(ifNode.elseStmt, env);
    if (ifResult.flowTerminated) {
        this.uninitializedVars = elseResult.uninitializedVars;
        return;
    }
    if (elseResult.flowTerminated ||
            ConditionResolver.checkConstCondition(types, symTable, ifNode.expr) == symTable.trueType) {
        this.uninitializedVars = ifResult.uninitializedVars;
        return;
    }
    this.uninitializedVars = mergeUninitializedVars(ifResult.uninitializedVars, elseResult.uninitializedVars);
}
/**
 * Match statement: each clause is analyzed as a branch; terminated branches
 * are excluded from the merge. If a clause contains an always-matching (last)
 * pattern the match is exhaustive, so only clause results are merged;
 * otherwise the pre-match state is merged in too (no clause may run).
 */
@Override
public void visit(BLangMatchStatement matchStatement) {
    analyzeNode(matchStatement.expr, env);
    if (matchStatement.onFailClause != null) {
        analyzeNode(matchStatement.onFailClause, env);
    }
    Map<BSymbol, InitStatus> uninitVars = new HashMap<>();
    BranchResult lastPatternResult = null;
    for (int i = 0; i < matchStatement.getMatchClauses().size(); i++) {
        BLangMatchClause matchClause = matchStatement.getMatchClauses().get(i);
        if (isLastPatternContainsIn(matchClause)) {
            lastPatternResult = analyzeBranch(matchClause, env);
        } else {
            BranchResult result = analyzeBranch(matchClause, env);
            if (result.flowTerminated) {
                continue;
            }
            uninitVars = mergeUninitializedVars(uninitVars, result.uninitializedVars);
        }
    }
    if (lastPatternResult != null) {
        // Exhaustive match: some clause always runs.
        uninitVars = mergeUninitializedVars(uninitVars, lastPatternResult.uninitializedVars);
        this.uninitializedVars = uninitVars;
        return;
    }
    // Non-exhaustive: the statement may fall through untouched.
    uninitVars = mergeUninitializedVars(new HashMap<>(), this.uninitializedVars);
    this.uninitializedVars = uninitVars;
}
// Match clause: every named variable bound by the patterns starts as unused,
// then the guard and the clause body are analyzed.
@Override
public void visit(BLangMatchClause matchClause) {
    Location pos = matchClause.pos;
    for (BVarSymbol symbol : matchClause.declaredVars.values()) {
        if (!isWildCardBindingPattern(symbol)) {
            this.unusedLocalVariables.put(symbol, pos);
        }
    }
    analyzeNode(matchClause.matchGuard, env);
    analyzeNode(matchClause.blockStmt, env);
}

@Override
public void visit(BLangMatchGuard matchGuard) {
    analyzeNode(matchGuard.expr, env);
}

// True if any pattern in the clause always matches (e.g. `var x`, `_`).
private boolean isLastPatternContainsIn(BLangMatchClause matchClause) {
    for (BLangMatchPattern pattern : matchClause.matchPatterns) {
        if (pattern.isLastPattern) {
            return true;
        }
    }
    return false;
}
/**
 * Legacy (deprecated) match construct: same branch-merging strategy as
 * {@link #visit(BLangMatchStatement)} — terminated clauses are skipped, an
 * always-matching last pattern makes the match exhaustive, otherwise the
 * pre-match state is merged in as a possible fall-through.
 */
@Override
public void visit(BLangMatch match) {
    analyzeNode(match.expr, env);
    if (match.onFailClause != null) {
        analyzeNode(match.onFailClause, env);
    }
    Map<BSymbol, InitStatus> uninitVars = new HashMap<>();
    BranchResult lastPatternResult = null;
    for (BLangMatch.BLangMatchBindingPatternClause patternClause : match.patternClauses) {
        if (patternClause.isLastPattern) {
            lastPatternResult = analyzeBranch(patternClause, env);
        } else {
            BranchResult result = analyzeBranch(patternClause, env);
            if (result.flowTerminated) {
                continue;
            }
            uninitVars = mergeUninitializedVars(uninitVars, result.uninitializedVars);
        }
    }
    if (lastPatternResult != null) {
        uninitVars = mergeUninitializedVars(uninitVars, lastPatternResult.uninitializedVars);
        this.uninitializedVars = uninitVars;
        return;
    }
    uninitVars = mergeUninitializedVars(new HashMap<>(), this.uninitializedVars);
    this.uninitializedVars = uninitVars;
}
// Foreach: the loop variables start as unused unless the collection is a
// range expression (range loop variables are conventionally allowed unused).
@Override
public void visit(BLangForeach foreach) {
    BLangExpression collection = foreach.collection;
    if (isNotRangeExpr(collection)) {
        populateUnusedVariableMapForMembers(this.unusedLocalVariables,
                (BLangVariable) foreach.variableDefinitionNode.getVariable());
    }
    analyzeNode(collection, env);
    analyzeNode(foreach.body, env);
    if (foreach.onFailClause != null) {
        analyzeNode(foreach.onFailClause, env);
    }
}

// Query action: analyze every clause (from/where/let/do/...) in order.
@Override
public void visit(BLangQueryAction queryAction) {
    for (BLangNode clause : queryAction.getQueryClauses()) {
        analyzeNode(clause, env);
    }
}
/**
 * While loop: the body is analyzed as a branch because it may run zero times.
 * A constant-false condition discards the body's effects entirely; a
 * terminated body or constant-true condition keeps only the body's state;
 * otherwise pre-loop and body states are merged.
 */
@Override
public void visit(BLangWhile whileNode) {
    Map<BSymbol, InitStatus> prevUninitializedVars = this.uninitializedVars;
    analyzeNode(whileNode.expr, env);
    BranchResult whileResult = analyzeBranch(whileNode.body, env);
    if (whileNode.onFailClause != null) {
        analyzeNode(whileNode.onFailClause, env);
    }
    BType constCondition = ConditionResolver.checkConstCondition(types, symTable, whileNode.expr);
    if (constCondition == symTable.falseType) {
        this.uninitializedVars = prevUninitializedVars;
        return;
    }
    if (whileResult.flowTerminated || constCondition == symTable.trueType) {
        this.uninitializedVars = whileResult.uninitializedVars;
        return;
    }
    this.uninitializedVars = mergeUninitializedVars(this.uninitializedVars, whileResult.uninitializedVars);
}
// Do statement: analyze the body, then the optional on-fail clause.
@Override
public void visit(BLangDo doNode) {
    analyzeNode(doNode.body, env);
    if (doNode.onFailClause != null) {
        analyzeNode(doNode.onFailClause, env);
    }
}

// Fail statement: the failing expression is a use of its operands.
// Fix: added the missing @Override — every other visit method in this visitor
// carries it, and it guards against signature drift in BLangNodeVisitor.
@Override
public void visit(BLangFail failNode) {
    analyzeNode(failNode.expr, env);
}

// Lock statement: analyze the body, then the optional on-fail clause.
@Override
public void visit(BLangLock lockNode) {
    analyzeNode(lockNode.body, this.env);
    if (lockNode.onFailClause != null) {
        analyzeNode(lockNode.onFailClause, env);
    }
}
// Transaction: analyze body and optional on-fail clause, then resolve the
// internal transaction package prefix so it is registered as used/imported
// for this compilation unit.
@Override
public void visit(BLangTransaction transactionNode) {
    analyzeNode(transactionNode.transactionBody, env);
    if (transactionNode.onFailClause != null) {
        analyzeNode(transactionNode.onFailClause, env);
    }
    Name transactionPkgName = names.fromString(Names.DOT.value + Names.TRANSACTION_PACKAGE.value);
    Name compUnitName = names.fromString(transactionNode.pos.lineRange().filePath());
    this.symResolver.resolvePrefixSymbol(env, transactionPkgName, compUnitName);
}
// The following nodes either carry no data flow of interest (no-op visits)
// or simply delegate to their single sub-expression.

@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
}

@Override
public void visit(BLangCommitExpr commitExpr) {
}

@Override
public void visit(BLangRollback rollbackNode) {
    analyzeNode(rollbackNode.expr, env);
}

// Tuple destructure: RHS is a use, LHS refs become initialized.
@Override
public void visit(BLangTupleDestructure stmt) {
    analyzeNode(stmt.expr, env);
    checkAssignment(stmt.varRef);
}

@Override
public void visit(BLangForkJoin forkJoin) {
    /* ignore */
}

@Override
public void visit(BLangWorkerSend workerSendNode) {
    analyzeNode(workerSendNode.expr, env);
}

@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    analyzeNode(syncSendExpr.expr, env);
}

@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
}

@Override
public void visit(BLangLiteral literalExpr) {
}

@Override
public void visit(BLangConstRef constRef) {
}
// List constructor: analyze each member; for a spread member (`...e`),
// analyze the spread's inner expression.
@Override
public void visit(BLangListConstructorExpr listConstructorExpr) {
    for (BLangExpression expr : listConstructorExpr.exprs) {
        if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            expr = ((BLangListConstructorExpr.BLangListConstructorSpreadOpExpr) expr).expr;
        }
        analyzeNode(expr, env);
    }
}

// Table constructor: analyze each row literal, then reject duplicate keys.
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    tableConstructorExpr.recordLiteralList.forEach(expr -> analyzeNode(expr, env));
    checkForDuplicateKeys(tableConstructorExpr);
}
/**
 * Detects duplicate key values among table-constructor rows. Each row's key
 * expressions are hashed; on a hash collision, structural equality confirms a
 * real duplicate before the error is reported.
 */
private void checkForDuplicateKeys(BLangTableConstructorExpr tableConstructorExpr) {
    Set<Integer> keyHashSet = new HashSet<>();
    List<String> fieldNames = getFieldNames(tableConstructorExpr);
    HashMap<Integer, List<BLangExpression>> keyValues = new HashMap<>();
    if (!fieldNames.isEmpty()) {
        for (BLangRecordLiteral literal : tableConstructorExpr.recordLiteralList) {
            List<BLangExpression> keyArray = createKeyArray(literal, fieldNames);
            int hashInt = generateHash(keyArray);
            // add() returns false on a hash collision -> candidate duplicate.
            if (!keyHashSet.add(hashInt) && checkForKeyEquality(keyValues, keyArray, hashInt)) {
                String fields = String.join(", ", fieldNames);
                String values = keyArray.stream().map(Object::toString).collect(Collectors.joining(", "));
                dlog.error(literal.pos, DiagnosticErrorCode.DUPLICATE_KEY_IN_TABLE_LITERAL, fields, values);
            }
            keyValues.put(hashInt, keyArray);
        }
    }
}
// Confirms a hash collision is a genuine duplicate: the previously seen key
// list for this hash must match the new one element-by-element.
// NOTE(review): only called when `hash` is already present in keyValues, so
// the get() cannot return null here.
private boolean checkForKeyEquality(HashMap<Integer, List<BLangExpression>> keyValues,
                                    List<BLangExpression> keyArray, int hash) {
    List<BLangExpression> existingExpList = keyValues.get(hash);
    boolean isEqual = false;
    if (existingExpList.size() == keyArray.size()) {
        isEqual = true;
        for (int i = 0; i < keyArray.size(); i++) {
            isEqual = isEqual && equality(keyArray.get(i), existingExpList.get(i));
        }
    }
    return isEqual;
}

// Order-sensitive hash over the key expressions (31-based polynomial fold).
private int generateHash(List<BLangExpression> keyArray) {
    int result = 0;
    for (BLangExpression expr : keyArray) {
        result = 31 * result + hash(expr);
    }
    return result;
}
/**
 * Structural equality of two AST nodes, used to confirm duplicate keys in
 * table constructor literals. Nodes are equal when they have the same kind
 * and their relevant children are recursively equal; unknown kinds compare
 * unequal.
 *
 * Fixes: the XML_QNAME and GROUP_EXPR cases both cast {@code nodeA} twice
 * (copy-paste), making the "B" side an alias of the "A" side and the
 * comparison trivially true — both now cast {@code nodeB}.
 *
 * NOTE(review): list-valued children (fields, textFragments, attributes,
 * children, exprs) are iterated by A's size while indexing into B's list; this
 * assumes equal child counts for same-kind candidates — confirm before
 * reusing this method outside duplicate-key checking.
 *
 * @param nodeA first node (may be null)
 * @param nodeB second node (may be null)
 * @return true if both are null or structurally equal
 */
public boolean equality(Node nodeA, Node nodeB) {
    if (nodeA == null || nodeB == null) {
        return nodeA == nodeB;
    }
    if (nodeA.getKind() != nodeB.getKind()) {
        return false;
    }
    boolean isEqual = true;
    switch (nodeA.getKind()) {
        case RECORD_LITERAL_EXPR:
            BLangRecordLiteral recordLiteralA = (BLangRecordLiteral) nodeA;
            BLangRecordLiteral recordLiteralB = (BLangRecordLiteral) nodeB;
            for (int i = 0; isEqual && i < recordLiteralA.fields.size(); i++) {
                RecordLiteralNode.RecordField exprA = recordLiteralA.fields.get(i);
                RecordLiteralNode.RecordField exprB = recordLiteralB.fields.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case RECORD_LITERAL_KEY_VALUE:
            BLangRecordLiteral.BLangRecordKeyValueField fieldA =
                    (BLangRecordLiteral.BLangRecordKeyValueField) nodeA;
            BLangRecordLiteral.BLangRecordKeyValueField fieldB =
                    (BLangRecordLiteral.BLangRecordKeyValueField) nodeB;
            return equality(fieldA.valueExpr, fieldB.valueExpr);
        case LITERAL:
        case NUMERIC_LITERAL:
            BLangLiteral literalA = (BLangLiteral) nodeA;
            BLangLiteral literalB = (BLangLiteral) nodeB;
            return Objects.equals(literalA.value, literalB.value);
        case XML_TEXT_LITERAL:
            BLangXMLTextLiteral textLiteralA = (BLangXMLTextLiteral) nodeA;
            BLangXMLTextLiteral textLiteralB = (BLangXMLTextLiteral) nodeB;
            isEqual = equality(textLiteralA.concatExpr, textLiteralB.concatExpr);
            for (int i = 0; isEqual && i < textLiteralA.textFragments.size(); i++) {
                BLangExpression exprA = textLiteralA.textFragments.get(i);
                BLangExpression exprB = textLiteralB.textFragments.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case XML_ATTRIBUTE:
            BLangXMLAttribute attributeA = (BLangXMLAttribute) nodeA;
            BLangXMLAttribute attributeB = (BLangXMLAttribute) nodeB;
            return equality(attributeA.name, attributeB.name) && equality(attributeA.value, attributeB.value);
        case XML_QNAME:
            BLangXMLQName xmlqNameA = (BLangXMLQName) nodeA;
            // Fixed: was `(BLangXMLQName) nodeA`, which made B alias A.
            BLangXMLQName xmlqNameB = (BLangXMLQName) nodeB;
            return equality(xmlqNameA.localname, xmlqNameB.localname)
                    && equality(xmlqNameA.prefix, xmlqNameB.prefix);
        case XML_ELEMENT_LITERAL:
            BLangXMLElementLiteral eleLiteralA = (BLangXMLElementLiteral) nodeA;
            BLangXMLElementLiteral eleLiteralB = (BLangXMLElementLiteral) nodeB;
            isEqual = equality(eleLiteralA.startTagName, eleLiteralB.startTagName)
                    && equality(eleLiteralA.endTagName, eleLiteralB.endTagName);
            for (int i = 0; isEqual && i < eleLiteralA.attributes.size(); i++) {
                BLangExpression exprA = eleLiteralA.attributes.get(i);
                BLangExpression exprB = eleLiteralB.attributes.get(i);
                isEqual = equality(exprA, exprB);
            }
            for (int i = 0; isEqual && i < eleLiteralA.children.size(); i++) {
                BLangExpression exprA = eleLiteralA.children.get(i);
                BLangExpression exprB = eleLiteralB.children.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case XML_COMMENT_LITERAL:
            BLangXMLCommentLiteral commentliteralA = (BLangXMLCommentLiteral) nodeA;
            BLangXMLCommentLiteral commentliteralB = (BLangXMLCommentLiteral) nodeB;
            isEqual = equality(commentliteralA.concatExpr, commentliteralB.concatExpr);
            for (int i = 0; isEqual && i < commentliteralA.textFragments.size(); i++) {
                BLangExpression exprA = commentliteralA.textFragments.get(i);
                BLangExpression exprB = commentliteralB.textFragments.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case XML_QUOTED_STRING:
            BLangXMLQuotedString quotedLiteralA = (BLangXMLQuotedString) nodeA;
            BLangXMLQuotedString quotedLiteralB = (BLangXMLQuotedString) nodeB;
            isEqual = equality(quotedLiteralA.concatExpr, quotedLiteralB.concatExpr);
            for (int i = 0; isEqual && i < quotedLiteralA.textFragments.size(); i++) {
                BLangExpression exprA = quotedLiteralA.textFragments.get(i);
                BLangExpression exprB = quotedLiteralB.textFragments.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case XMLNS:
            BLangXMLNS xmlnsA = (BLangXMLNS) nodeA;
            BLangXMLNS xmlnsB = (BLangXMLNS) nodeB;
            return equality(xmlnsA.prefix, xmlnsB.prefix) && equality(xmlnsA.namespaceURI, xmlnsB.namespaceURI);
        case XML_PI_LITERAL:
            BLangXMLProcInsLiteral insLiteralA = (BLangXMLProcInsLiteral) nodeA;
            BLangXMLProcInsLiteral insLiteralB = (BLangXMLProcInsLiteral) nodeB;
            isEqual = equality(insLiteralA.target, insLiteralB.target)
                    && equality(insLiteralA.dataConcatExpr, insLiteralB.dataConcatExpr);
            for (int i = 0; isEqual && i < insLiteralA.dataFragments.size(); i++) {
                BLangExpression exprA = insLiteralA.dataFragments.get(i);
                BLangExpression exprB = insLiteralB.dataFragments.get(i);
                isEqual = equality(exprA, exprB);
            }
            return isEqual;
        case IDENTIFIER:
            BLangIdentifier identifierA = (BLangIdentifier) nodeA;
            BLangIdentifier identifierB = (BLangIdentifier) nodeB;
            return identifierA.value.equals(identifierB.value);
        case SIMPLE_VARIABLE_REF:
            BLangSimpleVarRef simpleVarRefA = (BLangSimpleVarRef) nodeA;
            BLangSimpleVarRef simpleVarRefB = (BLangSimpleVarRef) nodeB;
            BSymbol symbolA = simpleVarRefA.symbol;
            BSymbol symbolB = simpleVarRefB.symbol;
            if (symbolA != null && symbolB != null
                    && (Symbols.isFlagOn(symbolA.flags, Flags.CONSTANT)
                    && Symbols.isFlagOn(symbolB.flags, Flags.CONSTANT))) {
                // Two constant refs are equal when their constant values match.
                return (((BConstantSymbol) symbolA).value).value
                        .equals((((BConstantSymbol) symbolB).value).value);
            } else {
                // NOTE(review): compares BLangIdentifier objects with equals();
                // verify BLangIdentifier overrides equals, else this is
                // reference equality.
                return simpleVarRefA.variableName.equals(simpleVarRefB.variableName);
            }
        case STRING_TEMPLATE_LITERAL:
            BLangStringTemplateLiteral stringTemplateLiteralA = (BLangStringTemplateLiteral) nodeA;
            BLangStringTemplateLiteral stringTemplateLiteralB = (BLangStringTemplateLiteral) nodeB;
            for (int i = 0; isEqual && i < stringTemplateLiteralA.exprs.size(); i++) {
                BLangExpression exprA = stringTemplateLiteralA.exprs.get(i);
                BLangExpression exprB = stringTemplateLiteralB.exprs.get(i);
                isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB);
            }
            return isEqual;
        case LIST_CONSTRUCTOR_EXPR:
            BLangListConstructorExpr listConstructorExprA = (BLangListConstructorExpr) nodeA;
            BLangListConstructorExpr listConstructorExprB = (BLangListConstructorExpr) nodeB;
            for (int i = 0; isEqual && i < listConstructorExprA.exprs.size(); i++) {
                BLangExpression exprA = listConstructorExprA.exprs.get(i);
                BLangExpression exprB = listConstructorExprB.exprs.get(i);
                isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB);
            }
            return isEqual;
        case TABLE_CONSTRUCTOR_EXPR:
            BLangTableConstructorExpr tableConstructorExprA = (BLangTableConstructorExpr) nodeA;
            BLangTableConstructorExpr tableConstructorExprB = (BLangTableConstructorExpr) nodeB;
            for (int i = 0; isEqual && i < tableConstructorExprA.recordLiteralList.size(); i++) {
                BLangExpression exprA = tableConstructorExprA.recordLiteralList.get(i);
                BLangExpression exprB = tableConstructorExprB.recordLiteralList.get(i);
                isEqual = getTypeEquality(exprA.getBType(), exprB.getBType()) && equality(exprA, exprB);
            }
            return isEqual;
        case TYPE_CONVERSION_EXPR:
            BLangTypeConversionExpr typeConversionExprA = (BLangTypeConversionExpr) nodeA;
            BLangTypeConversionExpr typeConversionExprB = (BLangTypeConversionExpr) nodeB;
            return equality(typeConversionExprA.expr, typeConversionExprB.expr);
        case BINARY_EXPR:
            BLangBinaryExpr binaryExprA = (BLangBinaryExpr) nodeA;
            BLangBinaryExpr binaryExprB = (BLangBinaryExpr) nodeB;
            return equality(binaryExprA.lhsExpr, binaryExprB.lhsExpr)
                    && equality(binaryExprA.rhsExpr, binaryExprB.rhsExpr);
        case UNARY_EXPR:
            BLangUnaryExpr unaryExprA = (BLangUnaryExpr) nodeA;
            BLangUnaryExpr unaryExprB = (BLangUnaryExpr) nodeB;
            return equality(unaryExprA.expr, unaryExprB.expr);
        case TYPE_TEST_EXPR:
            BLangTypeTestExpr typeTestExprA = (BLangTypeTestExpr) nodeA;
            BLangTypeTestExpr typeTestExprB = (BLangTypeTestExpr) nodeB;
            return equality(typeTestExprA.expr, typeTestExprB.expr);
        case TERNARY_EXPR:
            BLangTernaryExpr ternaryExprA = (BLangTernaryExpr) nodeA;
            BLangTernaryExpr ternaryExprB = (BLangTernaryExpr) nodeB;
            return equality(ternaryExprA.expr, ternaryExprB.expr)
                    && equality(ternaryExprA.thenExpr, ternaryExprB.thenExpr)
                    && equality(ternaryExprA.elseExpr, ternaryExprB.elseExpr);
        case GROUP_EXPR:
            BLangGroupExpr groupExprA = (BLangGroupExpr) nodeA;
            // Fixed: was `(BLangGroupExpr) nodeA`, which made B alias A.
            BLangGroupExpr groupExprB = (BLangGroupExpr) nodeB;
            return equality(groupExprA.expression, groupExprB.expression);
        default:
            return false;
    }
}
/**
 * Computes a structural hash for a constant expression tree.
 * Intended to mirror the structural {@code equality(...)} check used for
 * constant-expression comparison; non-constant expressions are reported
 * as errors.
 *
 * FIX: the literal check previously used the non-short-circuit bitwise
 * {@code |} instead of the logical {@code ||}. Both operands are
 * side-effect free so behavior was the same, but {@code ||} is the
 * idiomatic and intended operator for boolean disjunction.
 *
 * NOTE(review): some branches fold into {@code result} with a 31-multiplier
 * while the LITERAL and SIMPLE_VARIABLE_REF branches overwrite {@code result}
 * via {@code Objects.hash} — presumably intentional since those are leaf
 * nodes, but worth confirming against the equality logic.
 *
 * @param node constant expression node (may be null)
 * @return hash of the node; 0 for null
 */
public Integer hash(Node node) {
    int result = 0;
    if (node == null) {
        return result;
    }
    if (node.getKind() == NodeKind.RECORD_LITERAL_EXPR) {
        BLangRecordLiteral recordLiteral = (BLangRecordLiteral) node;
        for (RecordLiteralNode.RecordField entry : recordLiteral.fields) {
            result = 31 * result + hash(entry);
        }
    } else if (node.getKind() == NodeKind.RECORD_LITERAL_KEY_VALUE) {
        BLangRecordLiteral.BLangRecordKeyValueField field = (BLangRecordLiteral.BLangRecordKeyValueField) node;
        result = 31 * result + hash(field.key.expr) + hash(field.valueExpr);
    } else if (node.getKind() == NodeKind.ARRAY_LITERAL_EXPR) {
        BLangListConstructorExpr.BLangArrayLiteral arrayLiteral =
                (BLangListConstructorExpr.BLangArrayLiteral) node;
        for (BLangExpression expr : arrayLiteral.exprs) {
            result = 31 * result + hash(expr);
        }
    } else if (node.getKind() == NodeKind.LITERAL || node.getKind() == NodeKind.NUMERIC_LITERAL) {
        // Leaf node: hash the literal value directly.
        BLangLiteral literal = (BLangLiteral) node;
        result = Objects.hash(literal.value);
    } else if (node.getKind() == NodeKind.XML_TEXT_LITERAL) {
        BLangXMLTextLiteral literal = (BLangXMLTextLiteral) node;
        result = 31 * result + hash(literal.concatExpr);
        for (BLangExpression expr : literal.textFragments) {
            result = result * 31 + hash(expr);
        }
    } else if (node.getKind() == NodeKind.XML_ATTRIBUTE) {
        BLangXMLAttribute attribute = (BLangXMLAttribute) node;
        result = 31 * result + hash(attribute.name) + hash(attribute.value);
    } else if (node.getKind() == NodeKind.XML_QNAME) {
        BLangXMLQName xmlqName = (BLangXMLQName) node;
        result = 31 * result + hash(xmlqName.localname) + hash(xmlqName.prefix);
    } else if (node.getKind() == NodeKind.XML_COMMENT_LITERAL) {
        BLangXMLCommentLiteral literal = (BLangXMLCommentLiteral) node;
        result = 31 * result + hash(literal.concatExpr);
        for (BLangExpression expr : literal.textFragments) {
            result = result * 31 + hash(expr);
        }
    } else if (node.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
        BLangXMLElementLiteral literal = (BLangXMLElementLiteral) node;
        result = 31 * result + hash(literal.startTagName) + hash(literal.endTagName);
        for (BLangExpression expr : literal.attributes) {
            result = 31 * result + hash(expr);
        }
        for (BLangExpression expr : literal.children) {
            result = 31 * result + hash(expr);
        }
    } else if (node.getKind() == NodeKind.XML_QUOTED_STRING) {
        BLangXMLQuotedString literal = (BLangXMLQuotedString) node;
        result = 31 * result + hash(literal.concatExpr);
        for (BLangExpression expr : literal.textFragments) {
            result = result * 31 + hash(expr);
        }
    } else if (node.getKind() == NodeKind.XMLNS) {
        BLangXMLNS xmlns = (BLangXMLNS) node;
        result = result * 31 + hash(xmlns.prefix) + hash(xmlns.namespaceURI);
    } else if (node.getKind() == NodeKind.XML_PI_LITERAL) {
        BLangXMLProcInsLiteral literal = (BLangXMLProcInsLiteral) node;
        result = 31 * result + hash(literal.target) + hash(literal.dataConcatExpr);
        for (BLangExpression expr : literal.dataFragments) {
            result = result * 31 + hash(expr);
        }
    } else if (node.getKind() == NodeKind.IDENTIFIER) {
        BLangIdentifier identifier = (BLangIdentifier) node;
        result = identifier.value.hashCode();
    } else if (node.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) node;
        BSymbol symbol = simpleVarRef.symbol;
        if (symbol != null && Symbols.isFlagOn(symbol.flags, Flags.CONSTANT)) {
            // A constant reference hashes by its resolved constant value.
            BConstantSymbol constantSymbol = (BConstantSymbol) symbol;
            result = Objects.hash(constantSymbol.value.value);
        } else {
            result = simpleVarRef.variableName.hashCode();
        }
    } else if (node.getKind() == NodeKind.STRING_TEMPLATE_LITERAL) {
        BLangStringTemplateLiteral stringTemplateLiteral = (BLangStringTemplateLiteral) node;
        for (BLangExpression expr : stringTemplateLiteral.exprs) {
            result = result * 31 + getTypeHash(stringTemplateLiteral.getBType()) + hash(expr);
        }
    } else if (node.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) {
        BLangListConstructorExpr listConstructorExpr = (BLangListConstructorExpr) node;
        for (BLangExpression expr : listConstructorExpr.exprs) {
            result = result * 31 + getTypeHash(listConstructorExpr.getBType()) + hash(expr);
        }
    } else if (node.getKind() == NodeKind.TABLE_CONSTRUCTOR_EXPR) {
        BLangTableConstructorExpr tableConstructorExpr = (BLangTableConstructorExpr) node;
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            result = result * 31 + getTypeHash(tableConstructorExpr.getBType()) + hash(recordLiteral);
        }
    } else if (node.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
        BLangTypeConversionExpr typeConversionExpr = (BLangTypeConversionExpr) node;
        result = 31 * result + hash(typeConversionExpr.expr);
    } else if (node.getKind() == NodeKind.BINARY_EXPR) {
        BLangBinaryExpr binaryExpr = (BLangBinaryExpr) node;
        result = 31 * result + hash(binaryExpr.lhsExpr) + hash(binaryExpr.rhsExpr);
    } else if (node.getKind() == NodeKind.UNARY_EXPR) {
        BLangUnaryExpr unaryExpr = (BLangUnaryExpr) node;
        result = 31 * result + hash(unaryExpr.expr);
    } else if (node.getKind() == NodeKind.TYPE_TEST_EXPR) {
        BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) node;
        result = 31 * result + hash(typeTestExpr.expr);
    } else if (node.getKind() == NodeKind.TERNARY_EXPR) {
        BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) node;
        result = 31 * result + hash(ternaryExpr.expr) + hash(ternaryExpr.thenExpr) + hash(ternaryExpr.elseExpr);
    } else if (node.getKind() == NodeKind.GROUP_EXPR) {
        BLangGroupExpr groupExpr = (BLangGroupExpr) node;
        result = 31 * result + hash(groupExpr.expression);
    } else {
        // Anything else is not a constant expression.
        dlog.error(((BLangExpression) node).pos, DiagnosticErrorCode.EXPRESSION_IS_NOT_A_CONSTANT_EXPRESSION);
    }
    return result;
}
/** Hashes a type by its tag and name; kept stable because it feeds the structural {@link #hash} above. */
private Integer getTypeHash(BType type) {
    return Objects.hash(type.tag, type.name);
}
/** Two types are considered equal here when either is assignable to the other. */
private boolean getTypeEquality(BType typeA, BType typeB) {
    if (types.isAssignable(typeA, typeB)) {
        return true;
    }
    return types.isAssignable(typeB, typeA);
}
/**
 * Projects the given record literal onto the requested field names, in order.
 * Missing fields map to null entries (HashMap.get of an absent key).
 *
 * @param literal    record literal whose fields are indexed by name
 * @param fieldNames key field names, in the order the result should follow
 * @return value expressions for each field name, in the same order
 */
private List<BLangExpression> createKeyArray(BLangRecordLiteral literal, List<String> fieldNames) {
    Map<String, BLangExpression> valuesByKey = new HashMap<>();
    for (RecordLiteralNode.RecordField recordField : literal.fields) {
        if (recordField.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValue =
                    (BLangRecordLiteral.BLangRecordKeyValueField) recordField;
            valuesByKey.put(keyValue.key.expr.toString(), keyValue.valueExpr);
        } else if (recordField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field: the var-name field is both the key and the value expression.
            BLangRecordLiteral.BLangRecordVarNameField varNameField =
                    (BLangRecordLiteral.BLangRecordVarNameField) recordField;
            valuesByKey.put(varNameField.getVariableName().value, varNameField);
        }
    }
    List<BLangExpression> keyArray = new ArrayList<>(fieldNames.size());
    for (String fieldName : fieldNames) {
        keyArray.add(valuesByKey.get(fieldName));
    }
    return keyArray;
}
/**
 * Resolves the key field names of a table constructor: first from the table
 * type's own field-name list, then from an explicit key specifier, otherwise
 * an empty (mutable) list.
 */
private List<String> getFieldNames(BLangTableConstructorExpr constructorExpr) {
    BType referredType = Types.getReferredType(constructorExpr.getBType());
    if (referredType.tag == TypeTags.TABLE) {
        List<String> typeFieldNames = ((BTableType) referredType).fieldNameList;
        if (typeFieldNames != null) {
            return typeFieldNames;
        }
    }
    BLangTableKeySpecifier keySpecifier = constructorExpr.tableKeySpecifier;
    if (keySpecifier == null || keySpecifier.fieldNameIdentifierList.isEmpty()) {
        return new ArrayList<>();
    }
    return keySpecifier.fieldNameIdentifierList.stream()
            .map(identifier -> ((BLangIdentifier) identifier).value)
            .collect(Collectors.toList());
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    // Analyze each field of the literal: key/value pairs, shorthand
    // var-name fields, and spread-operator fields.
    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            if (keyValueField.key.computedKey) {
                // Computed keys ([expr]: value) contain an expression of their own.
                analyzeNode(keyValueField.key.expr, env);
            }
            analyzeNode(keyValueField.valueExpr, env);
            continue;
        }
        if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            analyzeNode((BLangRecordLiteral.BLangRecordVarNameField) field, env);
        } else {
            analyzeNode(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env);
        }
    }
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    // Reading a variable marks it as used.
    this.unusedErrorVarsDeclaredWithVar.remove(varRefExpr.symbol);
    if (isNotVariableReferenceLVExpr(varRefExpr)) {
        // Only non-lvalue references count as a "use" of a local variable.
        this.unusedLocalVariables.remove(varRefExpr.symbol);
    }
    checkVarRef(varRefExpr.symbol, varRefExpr.pos);
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Reads of `self.field` are checked for initialization; writes (lvalues) are not.
    if (!fieldAccessExpr.isLValue && isObjectMemberAccessWithSelf(fieldAccessExpr)) {
        checkVarRef(fieldAccessExpr.symbol, fieldAccessExpr.pos);
    }
    analyzeNode(fieldAccessExpr.expr, env);
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    // Same treatment as plain field access: check `self.field` reads, then the receiver.
    if (!nsPrefixedFieldBasedAccess.isLValue && isObjectMemberAccessWithSelf(nsPrefixedFieldBasedAccess)) {
        checkVarRef(nsPrefixedFieldBasedAccess.symbol, nsPrefixedFieldBasedAccess.pos);
    }
    analyzeNode(nsPrefixedFieldBasedAccess.expr, env);
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Analyze both the indexed expression and the index expression.
    analyzeNode(indexAccessExpr.expr, env);
    analyzeNode(indexAccessExpr.indexExpr, env);
}
@Override
public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
    // Each component of a multi-key index is analyzed independently.
    tableMultiKeyExpr.multiKeyIndexExprs.forEach(value -> analyzeNode(value, env));
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // Only the accessed XML expression carries dataflow facts.
    analyzeNode(xmlElementAccess.expr, env);
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    analyzeNode(xmlNavigation.expr, env);
    // FIX: the null check was inverted (`== null`), so a present child index
    // was never analyzed while an absent one was passed to analyzeNode
    // (a no-op, since analyzeNode ignores null). Analyze it only when present.
    if (xmlNavigation.childIndex != null) {
        analyzeNode(xmlNavigation.childIndex, env);
    }
}
@Override
public void visit(BLangInvocation invocationExpr) {
    // Analyze the receiver first (may be null for plain function calls;
    // analyzeNode ignores null).
    analyzeNode(invocationExpr.expr, env);
    BSymbol symbol = invocationExpr.symbol;
    // Invoking a function value counts as using the variable that holds it.
    this.unusedLocalVariables.remove(symbol);
    // For same-module functions, bail out early if globals they may touch
    // are not yet initialized; only the var-ref check is recorded then.
    if (isFunctionOrMethodDefinedInCurrentModule(symbol.owner, env) &&
            !isGlobalVarsInitialized(invocationExpr.pos, invocationExpr)) {
        checkVarRef(symbol, invocationExpr.pos);
        return;
    }
    // Passing `self` (as receiver or argument) requires all of its fields
    // to be initialized; each check reports and aborts on failure.
    if (!isFieldsInitializedForSelfArgument(invocationExpr)) {
        return;
    }
    if (!isFieldsInitializedForSelfInvocation(invocationExpr.requiredArgs, invocationExpr.pos)) {
        return;
    }
    if (!isFieldsInitializedForSelfInvocation(invocationExpr.restArgs, invocationExpr.pos)) {
        return;
    }
    checkVarRef(symbol, invocationExpr.pos);
    invocationExpr.requiredArgs.forEach(expr -> analyzeNode(expr, env));
    invocationExpr.restArgs.forEach(expr -> analyzeNode(expr, env));
    // Record dependency edges for the global-variable dependency graph.
    BSymbol owner = this.env.scope.owner;
    if (owner.kind == SymbolKind.FUNCTION) {
        // Caller is a function: it depends on the invoked function symbol
        // resolved from the current scope.
        BInvokableSymbol invokableOwnerSymbol = (BInvokableSymbol) owner;
        Name name = names.fromIdNode(invocationExpr.name);
        BSymbol dependsOnFunctionSym = symResolver.lookupSymbolInMainSpace(this.env, name);
        if (symTable.notFoundSymbol != dependsOnFunctionSym) {
            addDependency(invokableOwnerSymbol, dependsOnFunctionSym);
        }
    } else if (symbol != null && symbol.kind == SymbolKind.FUNCTION) {
        // Otherwise: if the current dependent is a global variable, it
        // depends on the invoked function.
        BInvokableSymbol invokableProviderSymbol = (BInvokableSymbol) symbol;
        BSymbol curDependent = this.currDependentSymbolDeque.peek();
        if (curDependent != null && isGlobalVarSymbol(curDependent)) {
            addDependency(curDependent, invokableProviderSymbol);
        }
    }
}
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Analyze the positional arguments, then the named arguments.
    errorConstructorExpr.positionalArgs.forEach(arg -> analyzeNode(arg, env));
    errorConstructorExpr.namedArgs.forEach(namedArg -> analyzeNode(namedArg, env));
}
@Override
public void visit(BLangActionInvocation actionInvocation) {
    // Action invocations share the ordinary invocation analysis.
    this.visit((BLangInvocation) actionInvocation);
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Visit every clause (from/where/select/...) of the query in order.
    queryExpr.getQueryClauses().forEach(clause -> analyzeNode(clause, env));
}
@Override
public void visit(BLangFromClause fromClause) {
    // Track the from-clause binding variable as unused, except for range
    // expressions, then analyze the iterated collection.
    BLangExpression iterable = fromClause.collection;
    if (isNotRangeExpr(iterable)) {
        populateUnusedVariableMapForMembers(this.unusedLocalVariables,
                (BLangVariable) fromClause.variableDefinitionNode.getVariable());
    }
    analyzeNode(iterable, env);
}
@Override
public void visit(BLangJoinClause joinClause) {
    // Track the join binding variable, analyze the joined collection, and
    // then the optional on-clause.
    populateUnusedVariableMapForMembers(this.unusedLocalVariables,
            (BLangVariable) joinClause.variableDefinitionNode.getVariable());
    analyzeNode(joinClause.collection, env);
    if (joinClause.onClause == null) {
        return;
    }
    analyzeNode((BLangNode) joinClause.onClause, env);
}
@Override
public void visit(BLangLetClause letClause) {
    // Each let declaration is analyzed via its variable definition node.
    letClause.letVarDeclarations.forEach(letVar -> analyzeNode((BLangNode) letVar.definitionNode, env));
}
@Override
public void visit(BLangWhereClause whereClause) {
    // Only the filter expression carries dataflow facts.
    analyzeNode(whereClause.expression, env);
}
@Override
public void visit(BLangOnClause onClause) {
    // Analyze both sides of the join condition.
    analyzeNode(onClause.lhsExpr, env);
    analyzeNode(onClause.rhsExpr, env);
}
@Override
public void visit(BLangOrderKey orderKeyClause) {
    // Analyze the key expression used for ordering.
    analyzeNode(orderKeyClause.expression, env);
}
@Override
public void visit(BLangOrderByClause orderByClause) {
    // Analyze each order-by key in turn.
    orderByClause.orderByKeyList.forEach(value -> analyzeNode((BLangNode) value, env));
}
@Override
public void visit(BLangSelectClause selectClause) {
    // Only the projection expression carries dataflow facts.
    analyzeNode(selectClause.expression, env);
}
@Override
public void visit(BLangOnConflictClause onConflictClause) {
    // Analyze the conflict-handler expression.
    analyzeNode(onConflictClause.expression, env);
}
@Override
public void visit(BLangLimitClause limitClause) {
    // Analyze the limit count expression.
    analyzeNode(limitClause.expression, env);
}
@Override
public void visit(BLangDoClause doClause) {
    // The do-clause body is an ordinary block.
    analyzeNode(doClause.body, env);
}
@Override
public void visit(BLangOnFailClause onFailClause) {
    // Declare the captured error variable, then analyze the handler body.
    analyzeNode((BLangVariable) onFailClause.variableDefinitionNode.getVariable(), env);
    analyzeNode(onFailClause.body, env);
}
/**
 * When the invocation receiver is the `self` keyword, verifies that every
 * field of the enclosing object is initialized. Reports an error listing
 * the uninitialized fields and returns false otherwise.
 */
private boolean isFieldsInitializedForSelfArgument(BLangInvocation invocationExpr) {
    BLangExpression receiver = invocationExpr.expr;
    if (receiver == null || !isSelfKeyWordExpr(receiver)) {
        return true;
    }
    BObjectType objectType = (BObjectType) ((BLangSimpleVarRef) receiver).symbol.type;
    StringBuilder uninitializedFields = getUninitializedFieldsForSelfKeyword(objectType);
    if (uninitializedFields.length() == 0) {
        return true;
    }
    this.dlog.error(invocationExpr.pos, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_FIELDS,
            uninitializedFields.toString());
    return false;
}
/**
 * Verifies that whenever `self` is passed as an argument, every field of
 * the enclosing object is initialized. Reports and returns false on the
 * first offending argument.
 */
private boolean isFieldsInitializedForSelfInvocation(List<BLangExpression> argExpressions,
                                                     Location location) {
    for (BLangExpression argExpr : argExpressions) {
        if (!isSelfKeyWordExpr(argExpr)) {
            continue;
        }
        StringBuilder uninitializedFields =
                getUninitializedFieldsForSelfKeyword((BObjectType) ((BLangSimpleVarRef) argExpr).symbol.type);
        if (uninitializedFields.length() != 0) {
            this.dlog.error(location, DiagnosticErrorCode.CONTAINS_UNINITIALIZED_FIELDS,
                    uninitializedFields.toString());
            return false;
        }
    }
    return true;
}
/** True when the expression is a simple variable reference to `self`. */
private boolean isSelfKeyWordExpr(BLangExpression expr) {
    if (expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    String referencedName = ((BLangSimpleVarRef) expr).getVariableName().getValue();
    return Names.SELF.value.equals(referencedName);
}
/**
 * Builds a comma-separated list of the object's fields that are still
 * uninitialized according to {@code uninitializedVars}. Empty when all
 * fields are initialized.
 */
private StringBuilder getUninitializedFieldsForSelfKeyword(BObjectType objType) {
    StringBuilder uninitializedFields = new StringBuilder();
    for (BField field : objType.fields.values()) {
        if (!this.uninitializedVars.containsKey(field.symbol)) {
            continue;
        }
        // Separate entries with ", " after the first one.
        if (uninitializedFields.length() > 0) {
            uninitializedFields.append(", ");
        }
        uninitializedFields.append(field.symbol.getName().value);
    }
    return uninitializedFields;
}
/** True for package-level variable or constant symbols. */
private boolean isGlobalVarSymbol(BSymbol symbol) {
    if (symbol == null || symbol.owner == null || symbol.owner.tag != SymTag.PACKAGE) {
        return false;
    }
    return isVariableOrConstant(symbol);
}
/** True when the symbol's tag marks it as a variable or a constant. */
private boolean isVariableOrConstant(BSymbol symbol) {
    if (symbol == null) {
        return false;
    }
    boolean isVariable = (symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE;
    boolean isConstant = (symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
    return isVariable || isConstant;
}
/**
 * Register dependent symbol to the provider symbol.
 * Let global int a = b, a depend on b.
 * Let func foo() { returns b + 1; }, where b is a global var, then foo depends on b.
 *
 * @param dependent dependent.
 * @param provider object which provides a value.
 */
private void addDependency(BSymbol dependent, BSymbol provider) {
    // Only intra-package dependencies are tracked.
    // NOTE(review): pkgID is compared with reference equality (!=) — this
    // presumably relies on PackageID instances being interned; confirm,
    // otherwise equals() would be needed here.
    if (provider == null || dependent == null || dependent.pkgID != provider.pkgID) {
        return;
    }
    // LinkedHashSet keeps provider insertion order for deterministic iteration.
    Set<BSymbol> providers = globalNodeDependsOn.computeIfAbsent(dependent, s -> new LinkedHashSet<>());
    providers.add(provider);
    addFunctionToGlobalVarDependency(dependent, provider);
}
/**
 * Records the function/global-variable dependency edge, filtering out
 * dependents that are neither functions nor globals, and providers that
 * are non-global variables/constants.
 */
private void addFunctionToGlobalVarDependency(BSymbol dependent, BSymbol provider) {
    boolean dependentTracked = dependent.kind == SymbolKind.FUNCTION || isGlobalVarSymbol(dependent);
    if (!dependentTracked) {
        return;
    }
    if (isVariableOrConstant(provider) && !isGlobalVarSymbol(provider)) {
        return;
    }
    this.functionToDependency.computeIfAbsent(dependent, key -> new HashSet<>()).add(provider);
}
@Override
public void visit(BLangTypeInit typeInitExpr) {
    // Analyze constructor arguments, then record that the current dependent
    // symbol depends on the instantiated type.
    for (BLangExpression argExpr : typeInitExpr.argsExpr) {
        analyzeNode(argExpr, env);
    }
    BSymbol dependent = this.currDependentSymbolDeque.peek();
    if (dependent != null) {
        addDependency(dependent, Types.getReferredType(typeInitExpr.getBType()).tsymbol);
    }
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    // Analyze the condition and both branches.
    analyzeNode(ternaryExpr.expr, env);
    analyzeNode(ternaryExpr.thenExpr, env);
    analyzeNode(ternaryExpr.elseExpr, env);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
    // Analyze the awaited expression.
    analyzeNode(waitExpr.getExpression(), env);
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Nothing to analyze for a worker flush expression.
}
@Override
public void visit(BLangWaitForAllExpr waitForAllExpr) {
    // For each key/value pair, analyze the value expression when present,
    // otherwise fall back to the key expression.
    waitForAllExpr.keyValuePairs.forEach(keyValue ->
            analyzeNode(keyValue.valueExpr != null ? keyValue.valueExpr : keyValue.keyExpr, env));
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Analyze both operands, left first.
    analyzeNode(binaryExpr.lhsExpr, env);
    analyzeNode(binaryExpr.rhsExpr, env);
}
@Override
public void visit(BLangElvisExpr elvisExpr) {
    // Analyze both sides of the elvis (`?:`) expression.
    analyzeNode(elvisExpr.lhsExpr, env);
    analyzeNode(elvisExpr.rhsExpr, env);
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized expression delegates to its inner expression.
    analyzeNode(groupExpr.expression, env);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // Analyze the single operand.
    analyzeNode(unaryExpr.expr, env);
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // Analyze the converted expression.
    analyzeNode(conversionExpr.expr, env);
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Only the attribute value expression carries dataflow facts.
    analyzeNode(xmlAttribute.value, env);
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    // Analyze children, then attributes, then inline namespace declarations
    // (same order as the original forEach calls).
    for (BLangNode child : xmlElementLiteral.children) {
        analyzeNode(child, env);
    }
    for (BLangNode attribute : xmlElementLiteral.attributes) {
        analyzeNode(attribute, env);
    }
    for (BLangNode namespaceDecl : xmlElementLiteral.inlineNamespaces) {
        analyzeNode(namespaceDecl, env);
    }
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Analyze each interpolated text fragment.
    xmlTextLiteral.textFragments.forEach(expr -> analyzeNode(expr, env));
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Analyze each interpolated comment fragment.
    xmlCommentLiteral.textFragments.forEach(expr -> analyzeNode(expr, env));
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Analyze each data fragment of the processing instruction.
    xmlProcInsLiteral.dataFragments.forEach(expr -> analyzeNode(expr, env));
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Analyze each interpolated fragment of the quoted string.
    xmlQuotedString.textFragments.forEach(expr -> analyzeNode(expr, env));
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // Analyze each interpolated expression of the template.
    stringTemplateLiteral.exprs.forEach(expr -> analyzeNode(expr, env));
}
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    // Analyze the static string parts first, then the interpolations.
    rawTemplateLiteral.strings.forEach(stringPart -> analyzeNode(stringPart, env));
    rawTemplateLiteral.insertions.forEach(insertion -> analyzeNode(insertion, env));
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Analyze the lambda body inside a fresh function environment derived
    // from the function symbol's scope.
    BLangFunction function = bLangLambdaFunction.function;
    SymbolEnv functionEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    visitFunctionBodyWithDynamicEnv(function, functionEnv);
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // Analyze the spread (rest-args) expression.
    analyzeNode(bLangVarArgsExpression.expr, env);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // Analyze the value expression of the named argument.
    analyzeNode(bLangNamedArgsExpression.expr, env);
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Nothing to analyze for an is-assignable expression.
}
@Override
public void visit(BLangMatchExpression matchExpression) {
    // Analyze the matched expression, then every pattern clause.
    analyzeNode(matchExpression.expr, env);
    matchExpression.patternClauses.forEach(pattern -> analyzeNode(pattern, env));
}
@Override
public void visit(BLangMatchExprPatternClause matchExprPatternClause) {
    // Analyze the clause's result expression.
    analyzeNode(matchExprPatternClause.expr, env);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // Analyze the expression wrapped by `check`.
    analyzeNode(checkedExpr.expr, env);
}
@Override
public void visit(BLangCheckPanickedExpr checkPanicExpr) {
    // Analyze the expression wrapped by `checkpanic`.
    analyzeNode(checkPanicExpr.expr, env);
}
@Override
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
    // Analyze each item in the XML sequence.
    bLangXMLSequenceLiteral.xmlItems.forEach(xml -> analyzeNode(xml, env));
}
@Override
public void visit(BLangExpressionStmt exprStmtNode) {
    // An expression statement delegates to its expression.
    analyzeNode(exprStmtNode.expr, env);
}
@Override
public void visit(BLangAnnotation annotationNode) {
    // Nothing to analyze for annotation declarations.
}
@Override
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    // Nothing to analyze for annotation attachments.
}
@Override
public void visit(BLangRetry retryNode) {
    // Analyze the retry body, then the optional on-fail handler.
    analyzeNode(retryNode.retryBody, env);
    if (retryNode.onFailClause == null) {
        return;
    }
    analyzeNode(retryNode.onFailClause, env);
}
@Override
public void visit(BLangRetryTransaction retryTransaction) {
    // Delegate to the wrapped transaction node.
    analyzeNode(retryTransaction.transaction, env);
}
@Override
public void visit(BLangContinue continueNode) {
    // `continue` ends the current flow; following statements are unreachable.
    terminateFlow();
}
@Override
public void visit(BLangTypedescExpr accessExpr) {
    // Nothing to analyze for a typedesc expression.
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // Nothing to analyze for an XML qualified name.
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Closed-over variables must be initialized before the arrow function
    // captures them; capturing also counts as a use.
    for (ClosureVarSymbol closureVarSymbol : bLangArrowFunction.closureVarSymbols) {
        BSymbol capturedSymbol = closureVarSymbol.bSymbol;
        if (this.uninitializedVars.containsKey(capturedSymbol)) {
            this.dlog.error(closureVarSymbol.diagnosticLocation,
                    DiagnosticErrorCode.USAGE_OF_UNINITIALIZED_VARIABLE, capturedSymbol);
        }
        this.unusedErrorVarsDeclaredWithVar.remove(capturedSymbol);
        this.unusedLocalVariables.remove(capturedSymbol);
    }
}
@Override
public void visit(BLangValueType valueType) {
    // Nothing to analyze for a built-in value type node.
}
@Override
public void visit(BLangConstant constant) {
    // Analyze the constant's initializer with the constant symbol (when
    // resolvable) pushed as the current dependent.
    if (constant.symbol == null) {
        analyzeNode(constant.expr, env);
        return;
    }
    this.currDependentSymbolDeque.push(constant.symbol);
    try {
        analyzeNode(constant.expr, env);
    } finally {
        // Always restore the dependent stack, even if analysis throws.
        this.currDependentSymbolDeque.pop();
    }
}
@Override
public void visit(BLangArrayType arrayType) {
    // Delegate to the element type node.
    analyzeNode(arrayType.getElementType(), env);
}
@Override
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    // Nothing to analyze for a built-in reference type node.
}
@Override
public void visit(BLangConstrainedType constrainedType) {
    // Delegate to the constraint type node.
    analyzeNode(constrainedType.constraint, env);
}
@Override
public void visit(BLangStreamType streamType) {
    // Analyze both the element constraint and the completion/error type.
    analyzeNode(streamType.constraint, env);
    analyzeNode(streamType.error, env);
}
@Override
public void visit(BLangTableTypeNode tableType) {
    // Analyze the row constraint, then the optional key type constraint.
    analyzeNode(tableType.constraint, env);
    if (tableType.tableKeyTypeConstraint == null) {
        return;
    }
    analyzeNode(tableType.tableKeyTypeConstraint.keyType, env);
}
@Override
public void visit(BLangUserDefinedType userDefinedType) {
    // Referencing a user-defined type records a dependency from the current
    // dependent symbol to the referred type symbol.
    if (this.currDependentSymbolDeque.isEmpty()) {
        return;
    }
    BType referredType = Types.getReferredType(userDefinedType.getBType());
    if (referredType == symTable.semanticError) {
        return;
    }
    recordGlobalVariableReferenceRelationship(referredType.tsymbol);
}
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    // The generic `function` type has no parameter/return type nodes to analyze.
    if (functionTypeNode.flagSet.contains(Flag.ANY_FUNCTION)) {
        return;
    }
    functionTypeNode.params.forEach(param -> analyzeNode(param.typeNode, env));
    analyzeNode(functionTypeNode.returnTypeNode, env);
}
@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
    // Analyze every member type of the union.
    unionTypeNode.memberTypeNodes.forEach(typeNode -> analyzeNode(typeNode, env));
}
@Override
public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
    // Analyze every constituent type of the intersection.
    intersectionTypeNode.constituentTypeNodes.forEach(constituent -> analyzeNode(constituent, env));
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    // Object type nodes are analyzed elsewhere; nothing to do here.
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    BTypeSymbol tsymbol = Types.getReferredType(recordTypeNode.getBType()).tsymbol;
    // Included (referenced) types: analyze and record the reference edge.
    for (TypeNode type : recordTypeNode.getTypeReferences()) {
        BLangType bLangType = (BLangType) type;
        analyzeNode(bLangType, env);
        recordGlobalVariableReferenceRelationship(
                Types.getReferredType(bLangType.getBType()).tsymbol);
    }
    // Fields: record a type dependency from the record to each field type,
    // analyze the field, and record the field-symbol reference.
    for (BLangSimpleVariable field : recordTypeNode.fields) {
        addTypeDependency(tsymbol, Types.getReferredType(field.getBType()), new HashSet<>());
        analyzeNode(field, env);
        recordGlobalVariableReferenceRelationship(field.symbol);
    }
}
/**
 * Recursively records dependencies from {@code dependentTypeSymbol} to the
 * type symbols reachable through {@code providerType}, unwrapping unions,
 * arrays, maps, and type references. {@code unresolvedTypes} guards against
 * infinite recursion on cyclic types.
 */
private void addTypeDependency(BTypeSymbol dependentTypeSymbol, BType providerType, Set<BType> unresolvedTypes) {
    if (unresolvedTypes.contains(providerType)) {
        // Already visiting this type — break the cycle.
        return;
    }
    unresolvedTypes.add(providerType);
    switch (providerType.tag) {
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) providerType).getMemberTypes()) {
                BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(memberType);
                addTypeDependency(dependentTypeSymbol, effectiveType, unresolvedTypes);
            }
            break;
        case TypeTags.ARRAY:
            addTypeDependency(dependentTypeSymbol,
                    types.getTypeWithEffectiveIntersectionTypes(((BArrayType) providerType).getElementType()),
                    unresolvedTypes);
            break;
        case TypeTags.MAP:
            addTypeDependency(dependentTypeSymbol,
                    types.getTypeWithEffectiveIntersectionTypes(((BMapType) providerType).getConstraint()),
                    unresolvedTypes);
            break;
        case TypeTags.TYPEREFDESC:
            addTypeDependency(dependentTypeSymbol, Types.getReferredType(providerType),
                    unresolvedTypes);
            break;
        default:
            // Leaf type: record the direct dependency on its symbol.
            addDependency(dependentTypeSymbol, providerType.tsymbol);
    }
}
@Override
public void visit(BLangFiniteTypeNode finiteTypeNode) {
    // Analyze each value in the finite type's value space.
    finiteTypeNode.valueSpace.forEach(value -> analyzeNode(value, env));
}
@Override
public void visit(BLangTupleTypeNode tupleTypeNode) {
    // Analyze each member type of the tuple.
    tupleTypeNode.memberTypeNodes.forEach(type -> analyzeNode(type, env));
}
@Override
public void visit(BLangMarkdownDocumentationLine bLangMarkdownDocumentationLine) {
    // Documentation nodes carry no dataflow facts.
}
@Override
public void visit(BLangMarkdownParameterDocumentation bLangDocumentationParameter) {
    // Documentation nodes carry no dataflow facts.
}
@Override
public void visit(BLangMarkdownReturnParameterDocumentation bLangMarkdownReturnParameterDocumentation) {
    // Documentation nodes carry no dataflow facts.
}
@Override
public void visit(BLangMarkdownDocumentation bLangMarkdownDocumentation) {
    // Documentation nodes carry no dataflow facts.
}
@Override
public void visit(BLangTestablePackage testablePkgNode) {
    // Testable packages are not analyzed by this pass.
}
@Override
public void visit(BLangImportPackage importPkgNode) {
    // Imports carry no dataflow facts.
}
@Override
public void visit(BLangIdentifier identifierNode) {
    // Bare identifiers carry no dataflow facts.
}
@Override
public void visit(BLangPanic panicNode) {
    analyzeNode(panicNode.expr, env);
    // `panic` ends the current flow; following statements are unreachable.
    terminateFlow();
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    // Analyze the trapped expression.
    analyzeNode(trapExpr.expr, env);
}
// NOTE(review): unlike the surrounding visit methods this one lacks
// @Override — confirm whether it actually overrides a supertype method.
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // The current dependent depends on the service's type symbol, and the
    // service type depends on the service declaration's symbol.
    if (this.currDependentSymbolDeque.peek() != null) {
        addDependency(this.currDependentSymbolDeque.peek(),
                Types.getReferredType(serviceConstructorExpr.getBType()).tsymbol);
    }
    addDependency(Types.getReferredType(serviceConstructorExpr.getBType()).tsymbol,
            serviceConstructorExpr.serviceNode.symbol);
    analyzeNode(serviceConstructorExpr.serviceNode, env);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    // Analyze both the tested expression and the tested type node.
    analyzeNode(typeTestExpr.expr, env);
    analyzeNode(typeTestExpr.typeNode, env);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Analyze the expression whose annotation is accessed.
    analyzeNode(annotAccessExpr.expr, env);
}
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
    // Nothing to analyze for an inferred typedesc default.
}
@Override
public void visit(BLangErrorType errorType) {
    // Nothing to analyze for an error type node.
}
@Override
public void visit(BLangRecordDestructure recordDestructure) {
    // Analyze the RHS first, then process the destructured assignment targets.
    analyzeNode(recordDestructure.expr, env);
    checkAssignment(recordDestructure.varRef);
}
@Override
public void visit(BLangErrorDestructure errorDestructure) {
    // Analyze the RHS first, then process the destructured assignment targets.
    analyzeNode(errorDestructure.expr, env);
    checkAssignment(errorDestructure.varRef);
}
@Override
public void visit(BLangTupleVarRef tupleVarRefExpr) {
    // Analyze each member reference of the tuple.
    tupleVarRefExpr.expressions.forEach(expr -> analyzeNode(expr, env));
}
@Override
public void visit(BLangRecordVarRef varRefExpr) {
    // Analyze the variable reference of each record field binding.
    varRefExpr.recordRefFields.forEach(expr -> analyzeNode(expr.variableReference, env));
}
@Override
public void visit(BLangErrorVarRef varRefExpr) {
    // Analyze the message, the optional cause, each named detail argument,
    // and finally the rest binding (analyzeNode ignores nulls).
    analyzeNode(varRefExpr.message, env);
    if (varRefExpr.cause != null) {
        analyzeNode(varRefExpr.cause, env);
    }
    varRefExpr.detail.forEach(namedArg -> analyzeNode(namedArg.expr, env));
    analyzeNode(varRefExpr.restVar, env);
}
@Override
public void visit(BLangTupleVariable bLangTupleVariable) {
    // Analyze the declared type, register the bound names as unused, then
    // analyze the initializer with this variable as the current dependent.
    analyzeNode(bLangTupleVariable.typeNode, env);
    populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangTupleVariable);
    this.currDependentSymbolDeque.push(bLangTupleVariable.symbol);
    analyzeNode(bLangTupleVariable.expr, env);
    this.currDependentSymbolDeque.pop();
}
@Override
public void visit(BLangTupleVariableDef bLangTupleVariableDef) {
    // Delegate to the contained tuple variable.
    analyzeNode(bLangTupleVariableDef.var, env);
}
@Override
public void visit(BLangRecordVariable bLangRecordVariable) {
    // Same shape as the tuple-variable visit: type, unused registration,
    // then initializer under this variable as dependent.
    analyzeNode(bLangRecordVariable.typeNode, env);
    populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangRecordVariable);
    this.currDependentSymbolDeque.push(bLangRecordVariable.symbol);
    analyzeNode(bLangRecordVariable.expr, env);
    this.currDependentSymbolDeque.pop();
}
@Override
public void visit(BLangRecordVariableDef bLangRecordVariableDef) {
    // Delegate to the contained record variable.
    analyzeNode(bLangRecordVariableDef.var, env);
}
@Override
public void visit(BLangErrorVariable bLangErrorVariable) {
    // Same shape as the tuple-variable visit: type, unused registration,
    // then initializer under this variable as dependent.
    analyzeNode(bLangErrorVariable.typeNode, env);
    populateUnusedVariableMapForNonSimpleBindingPatternVariables(this.unusedLocalVariables, bLangErrorVariable);
    this.currDependentSymbolDeque.push(bLangErrorVariable.symbol);
    analyzeNode(bLangErrorVariable.expr, env);
    this.currDependentSymbolDeque.pop();
}
@Override
public void visit(BLangErrorVariableDef bLangErrorVariableDef) {
    // Delegate to the contained error variable.
    analyzeNode(bLangErrorVariableDef.errorVariable, env);
}
@Override
public void visit(BLangMatchStaticBindingPatternClause bLangMatchStaticBindingPatternClause) {
    // Only the clause body is analyzed.
    analyzeNode(bLangMatchStaticBindingPatternClause.body, env);
}
@Override
public void visit(BLangMatchStructuredBindingPatternClause bLangMatchStructuredBindingPatternClause) {
    // Only the clause body is analyzed.
    analyzeNode(bLangMatchStructuredBindingPatternClause.body, env);
}
/**
 * Marks the variable as uninitialized unless a status is already recorded
 * (an existing PARTIAL_INIT/UN_INIT entry must not be overwritten).
 */
private void addUninitializedVar(BLangVariable variable) {
    // Idiom: putIfAbsent replaces the containsKey + put pair; behavior is
    // identical since the stored value is never null.
    this.uninitializedVars.putIfAbsent(variable.symbol, InitStatus.UN_INIT);
}
/**
 * Analyze a branch and returns the set of uninitialized variables for that branch.
 * This method will not update the current uninitialized variables set.
 *
 * @param node Branch node to be analyzed
 * @param env Symbol environment
 * @return Result of the branch.
 */
private BranchResult analyzeBranch(BLangNode node, SymbolEnv env) {
    // Snapshot the surrounding state, analyze the branch on a private copy,
    // then restore the snapshot so the caller's view is untouched.
    Map<BSymbol, InitStatus> outerUninitializedVars = this.uninitializedVars;
    boolean outerFlowTerminated = this.flowTerminated;
    this.uninitializedVars = copyUninitializedVars();
    this.flowTerminated = false;
    analyzeNode(node, env);
    BranchResult branchResult = new BranchResult(this.uninitializedVars, this.flowTerminated);
    this.uninitializedVars = outerUninitializedVars;
    this.flowTerminated = outerFlowTerminated;
    return branchResult;
}
/** Shallow copy of the current uninitialized-variables map. */
private Map<BSymbol, InitStatus> copyUninitializedVars() {
    Map<BSymbol, InitStatus> snapshot = new HashMap<>(this.uninitializedVars);
    return snapshot;
}
/** Visits the node (null-safe) with {@code env} installed as the current environment. */
private void analyzeNode(BLangNode node, SymbolEnv env) {
    if (node == null) {
        // Swapping env in and straight back out would be a no-op.
        return;
    }
    SymbolEnv enclosingEnv = this.env;
    this.env = env;
    node.accept(this);
    this.env = enclosingEnv;
}
/**
 * Merges the uninitialized-variable maps of two branches:
 * a variable uninitialized in both branches keeps its (merged) status,
 * while one uninitialized in only one branch becomes PARTIAL_INIT.
 */
private Map<BSymbol, InitStatus> mergeUninitializedVars(Map<BSymbol, InitStatus> firstUninitVars,
                                                        Map<BSymbol, InitStatus> secondUninitVars) {
    // Symbols present in both maps.
    List<BSymbol> intersection = new ArrayList<>(firstUninitVars.keySet());
    intersection.retainAll(secondUninitVars.keySet());
    return Stream.concat(firstUninitVars.entrySet().stream(), secondUninitVars.entrySet().stream())
            .collect(Collectors.toMap(entry -> entry.getKey(),
                    // Only-in-one-branch => partially initialized.
                    entry -> intersection.contains(entry.getKey()) ? entry.getValue() : InitStatus.PARTIAL_INIT,
                    // Duplicate key (in both maps): PARTIAL_INIT wins over UN_INIT.
                    (a, b) -> {
                        if (a == InitStatus.PARTIAL_INIT || b == InitStatus.PARTIAL_INIT) {
                            return InitStatus.PARTIAL_INIT;
                        }
                        return InitStatus.UN_INIT;
                    }));
}
private void checkVarRef(BSymbol symbol, Location pos) {
recordGlobalVariableReferenceRelationship(symbol);
InitStatus initStatus = this.uninitializedVars.get(symbol);
if (initStatus == null) {
return;
}
if (initStatus == InitStatus.UN_INIT) {
this.dlog.error(pos, DiagnosticErrorCode.USAGE_OF_UNINITIALIZED_VARIABLE, symbol);
return;
}
this.dlog.error(pos, DiagnosticErrorCode.PARTIALLY_INITIALIZED_VARIABLE, symbol);
}
private void recordGlobalVariableReferenceRelationship(BSymbol symbol) {
if (this.env.scope == null) {
return;
}
boolean globalVarSymbol = isGlobalVarSymbol(symbol);
BSymbol ownerSymbol = this.env.scope.owner;
boolean isInPkgLevel = ownerSymbol.getKind() == SymbolKind.PACKAGE;
if (isInPkgLevel && (globalVarSymbol || symbol instanceof BTypeSymbol)
|| (ownerSymbol.tag == SymTag.LET && globalVarSymbol)) {
BSymbol dependent = this.currDependentSymbolDeque.peek();
addDependency(dependent, symbol);
} else if (ownerSymbol.kind == SymbolKind.FUNCTION && globalVarSymbol) {
BInvokableSymbol invokableOwnerSymbol = (BInvokableSymbol) ownerSymbol;
addDependency(invokableOwnerSymbol, symbol);
} else if (ownerSymbol.kind == SymbolKind.OBJECT && globalVarSymbol) {
addDependency(ownerSymbol, symbol);
} else if (ownerSymbol.kind == SymbolKind.RECORD && globalVarSymbol) {
addDependency(ownerSymbol, symbol);
}
}
private boolean isObjectMemberAccessWithSelf(BLangAccessExpression fieldAccessExpr) {
if (fieldAccessExpr.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
return false;
}
return Names.SELF.value.equals(((BLangSimpleVarRef) fieldAccessExpr.expr).variableName.value);
}
private void checkAssignment(BLangExpression varRef) {
NodeKind kind = varRef.getKind();
switch (kind) {
case RECORD_VARIABLE_REF:
BLangRecordVarRef recordVarRef = (BLangRecordVarRef) varRef;
recordVarRef.recordRefFields.forEach(field -> checkAssignment(field.variableReference));
if (recordVarRef.restParam != null) {
checkAssignment((BLangExpression) recordVarRef.restParam);
}
return;
case TUPLE_VARIABLE_REF:
BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) varRef;
tupleVarRef.expressions.forEach(this::checkAssignment);
if (tupleVarRef.restParam != null) {
checkAssignment((BLangExpression) tupleVarRef.restParam);
}
return;
case ERROR_VARIABLE_REF:
BLangErrorVarRef errorVarRef = (BLangErrorVarRef) varRef;
if (errorVarRef.message != null) {
checkAssignment(errorVarRef.message);
}
if (errorVarRef.cause != null) {
checkAssignment(errorVarRef.cause);
}
for (BLangNamedArgsExpression expression : errorVarRef.detail) {
checkAssignment(expression);
this.uninitializedVars.remove(((BLangVariableReference) expression.expr).symbol);
}
if (errorVarRef.restVar != null) {
checkAssignment(errorVarRef.restVar);
}
return;
case INDEX_BASED_ACCESS_EXPR:
case FIELD_BASED_ACCESS_EXPR:
BLangAccessExpression accessExpr = (BLangAccessExpression) varRef;
BLangExpression expr = accessExpr.expr;
BType type = Types.getReferredType(expr.getBType());
if (isObjectMemberAccessWithSelf(accessExpr)) {
BObjectType objectType = (BObjectType) type;
BSymbol symbol = accessExpr.symbol;
if (this.uninitializedVars.containsKey(symbol)) {
this.uninitializedVars.remove(symbol);
return;
}
String fieldName = ((BLangFieldBasedAccess) varRef).field.value;
checkFinalEntityUpdate(varRef.pos, fieldName, objectType.fields.get(fieldName).symbol);
return;
}
if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
checkFinalObjectFieldUpdate((BLangFieldBasedAccess) accessExpr);
}
analyzeNode(expr, env);
if (kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
analyzeNode(((BLangIndexBasedAccess) varRef).indexExpr, env);
}
return;
default:
break;
}
if (kind != NodeKind.SIMPLE_VARIABLE_REF && kind != NodeKind.XML_ATTRIBUTE_ACCESS_EXPR) {
return;
}
if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
BSymbol symbol = ((BLangSimpleVarRef) varRef).symbol;
checkFinalEntityUpdate(varRef.pos, varRef, symbol);
BSymbol owner = this.currDependentSymbolDeque.peek();
addFunctionToGlobalVarDependency(owner, ((BLangSimpleVarRef) varRef).symbol);
}
this.uninitializedVars.remove(((BLangVariableReference) varRef).symbol);
}
private void checkFinalObjectFieldUpdate(BLangFieldBasedAccess fieldAccess) {
BLangExpression expr = fieldAccess.expr;
BType exprType = Types.getReferredType(expr.getBType());
if (types.isSubTypeOfBaseType(exprType, TypeTags.OBJECT) &&
isFinalFieldInAllObjects(fieldAccess.pos, exprType, fieldAccess.field.value)) {
dlog.error(fieldAccess.pos, DiagnosticErrorCode.CANNOT_UPDATE_FINAL_OBJECT_FIELD, fieldAccess.symbol);
}
}
private boolean isFinalFieldInAllObjects(Location pos, BType btype, String fieldName) {
BType type = Types.getReferredType(btype);
if (type.tag == TypeTags.OBJECT) {
BField field = ((BObjectType) type).fields.get(fieldName);
if (field != null) {
return Symbols.isFlagOn(field.symbol.flags, Flags.FINAL);
}
BObjectTypeSymbol objTypeSymbol = (BObjectTypeSymbol) type.tsymbol;
Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objTypeSymbol.name.value, fieldName));
BSymbol funcSymbol = symResolver.resolveObjectMethod(pos, env, funcName, objTypeSymbol);
return funcSymbol != null;
}
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (!isFinalFieldInAllObjects(pos, memberType, fieldName)) {
return false;
}
}
return true;
}
private void checkFinalEntityUpdate(Location pos, Object field, BSymbol symbol) {
if (symbol == null || !Symbols.isFlagOn(symbol.flags, Flags.FINAL)) {
return;
}
if (!this.uninitializedVars.containsKey(symbol)) {
dlog.error(pos, DiagnosticErrorCode.CANNOT_ASSIGN_VALUE_FINAL, symbol);
return;
}
InitStatus initStatus = this.uninitializedVars.get(symbol);
if (initStatus == InitStatus.PARTIAL_INIT) {
dlog.error(pos, DiagnosticErrorCode.CANNOT_ASSIGN_VALUE_TO_POTENTIALLY_INITIALIZED_FINAL, symbol);
}
}
private void terminateFlow() {
this.flowTerminated = true;
}
private void checkUnusedImports(List<BLangImportPackage> imports) {
for (BLangImportPackage importStmt : imports) {
if (importStmt.symbol == null || importStmt.symbol.isUsed ||
Names.IGNORE.value.equals(importStmt.alias.value)) {
continue;
}
dlog.error(importStmt.alias.pos, DiagnosticErrorCode.UNUSED_MODULE_PREFIX, importStmt.alias.value);
}
}
private void checkUnusedErrorVarsDeclaredWithVar() {
for (Map.Entry<BSymbol, Location> entry : this.unusedErrorVarsDeclaredWithVar.entrySet()) {
this.dlog.error(entry.getValue(), DiagnosticErrorCode.UNUSED_VARIABLE_WITH_INFERRED_TYPE_INCLUDING_ERROR,
entry.getKey().name);
}
}
private void emitUnusedVariableWarnings(Map<BSymbol, Location> unusedLocalVariables) {
for (Map.Entry<BSymbol, Location> entry : unusedLocalVariables.entrySet()) {
this.dlog.warning(entry.getValue(), DiagnosticWarningCode.UNUSED_LOCAL_VARIABLE, entry.getKey().name);
}
}
private boolean addVarIfInferredTypeIncludesError(BLangSimpleVariable variable) {
BType typeIntersection =
types.getTypeIntersection(Types.IntersectionContext.compilerInternalIntersectionContext(),
variable.getBType(), symTable.errorType, env);
if (typeIntersection != null &&
typeIntersection != symTable.semanticError && typeIntersection != symTable.noType) {
unusedErrorVarsDeclaredWithVar.put(variable.symbol, variable.pos);
return true;
}
return false;
}
private boolean isLocalVariableDefinedWithNonWildCardBindingPattern(BLangSimpleVariable variable) {
if (isWildCardBindingPattern(variable)) {
return false;
}
return isLocalVariable(variable.symbol);
}
private boolean isWildCardBindingPattern(BLangSimpleVariable variable) {
return Names.IGNORE.value.equals(variable.name.value);
}
private boolean isWildCardBindingPattern(BVarSymbol symbol) {
return Names.IGNORE == symbol.name;
}
private boolean isLocalVariable(BVarSymbol symbol) {
if (symbol == null) {
return false;
}
BSymbol owner = symbol.owner;
if (owner == null || owner.tag == SymTag.PACKAGE) {
return false;
}
if (owner.tag == SymTag.LET) {
return true;
}
if (owner.tag != SymTag.FUNCTION) {
return false;
}
long flags = symbol.flags;
SymbolKind kind = symbol.kind;
if (kind == SymbolKind.PATH_PARAMETER || kind == SymbolKind.PATH_REST_PARAMETER) {
return false;
}
return !Symbols.isFlagOn(flags, Flags.REQUIRED_PARAM)
&& !Symbols.isFlagOn(flags, Flags.DEFAULTABLE_PARAM)
&& !Symbols.isFlagOn(flags, Flags.INCLUDED)
&& !Symbols.isFlagOn(flags, Flags.REST_PARAM);
}
private void populateUnusedVariableMapForNonSimpleBindingPatternVariables(
Map<BSymbol, Location> unusedLocalVariables, BLangVariable variable) {
if (!isLocalVariable(variable.symbol)) {
return;
}
populateUnusedVariableMapForMembers(unusedLocalVariables, variable);
}
private void populateUnusedVariableMapForMembers(Map<BSymbol, Location> unusedLocalVariables,
BLangVariable variable) {
if (variable == null) {
return;
}
switch (variable.getKind()) {
case VARIABLE:
BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable;
if (!isWildCardBindingPattern(simpleVariable)) {
unusedLocalVariables.put(simpleVariable.symbol, simpleVariable.pos);
}
break;
case RECORD_VARIABLE:
BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
for (BLangRecordVariable.BLangRecordVariableKeyValue member : recordVariable.variableList) {
populateUnusedVariableMapForMembers(unusedLocalVariables, member.valueBindingPattern);
}
populateUnusedVariableMapForMembers(unusedLocalVariables, (BLangVariable) recordVariable.restParam);
break;
case TUPLE_VARIABLE:
BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
for (BLangVariable memberVariable : tupleVariable.memberVariables) {
populateUnusedVariableMapForMembers(unusedLocalVariables, memberVariable);
}
populateUnusedVariableMapForMembers(unusedLocalVariables, tupleVariable.restVariable);
break;
case ERROR_VARIABLE:
BLangErrorVariable errorVariable = (BLangErrorVariable) variable;
populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.message);
populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.cause);
for (BLangErrorVariable.BLangErrorDetailEntry member : errorVariable.detail) {
populateUnusedVariableMapForMembers(unusedLocalVariables, member.valueBindingPattern);
}
populateUnusedVariableMapForMembers(unusedLocalVariables, errorVariable.restDetail);
break;
}
}
private boolean isNotVariableReferenceLVExpr(BLangSimpleVarRef varRefExpr) {
if (!varRefExpr.isLValue) {
return true;
}
BLangNode parent = varRefExpr.parent;
return parent != null && parent.getKind() != NodeKind.ASSIGNMENT;
}
private boolean isNotRangeExpr(BLangExpression collection) {
if (collection.getKind() != NodeKind.BINARY_EXPR) {
return true;
}
OperatorKind opKind = ((BLangBinaryExpr) collection).opKind;
return opKind != OperatorKind.HALF_OPEN_RANGE && opKind != OperatorKind.CLOSED_RANGE;
}
private boolean isFunctionOrMethodDefinedInCurrentModule(BSymbol owner, SymbolEnv env) {
if (Symbols.isFlagOn(owner.flags, Flags.CLASS)) {
return owner.owner == getEnclPkgSymbol(env);
}
return owner == getEnclPkgSymbol(env);
}
private BPackageSymbol getEnclPkgSymbol(SymbolEnv env) {
BLangPackage enclPkg = env.enclPkg;
if (enclPkg != null) {
return enclPkg.symbol;
}
SymbolEnv enclEnv = env.enclEnv;
if (enclEnv == null) {
return null;
}
return getEnclPkgSymbol(enclEnv);
}
private enum InitStatus {
UN_INIT, PARTIAL_INIT
}
private class BranchResult {
Map<BSymbol, InitStatus> uninitializedVars;
boolean flowTerminated;
BranchResult(Map<BSymbol, InitStatus> uninitializedVars, boolean flowTerminated) {
this.uninitializedVars = uninitializedVars;
this.flowTerminated = flowTerminated;
}
}
} |
Yes. is there any issue with that? | public void visit(BLangObjectConstructorExpression objectCtorExpression, AnalyzerData data) {
BLangClassDefinition classNode = objectCtorExpression.classNode;
classNode.oceEnvData.capturedClosureEnv = data.env;
BLangClassDefinition originalClass = classNode.oceEnvData.originalClass;
if (originalClass.cloneRef != null && !objectCtorExpression.defined) {
classNode = (BLangClassDefinition) originalClass.cloneRef;
symbolEnter.defineClassDefinition(classNode, data.env);
objectCtorExpression.defined = true;
}
BObjectType objectType;
if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
objectType = (BObjectType) objectCtorExpression.classNode.getBType();
BType effectiveType = Types.getEffectiveType(Types.getReferredType(objectCtorExpression.expectedType));
if (effectiveType.tag == TypeTags.OBJECT) {
BObjectType expObjType = (BObjectType) Types.getReferredType(effectiveType);
objectType.typeIdSet = expObjType.typeIdSet;
} else if (effectiveType.tag != TypeTags.NONE) {
if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
objectCtorExpression.expectedType);
data.resultType = symTable.semanticError;
return;
}
}
}
BLangTypeInit cIExpr = objectCtorExpression.typeInit;
BType actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, data.env);
if (actualType == symTable.semanticError) {
data.resultType = symTable.semanticError;
return;
}
BObjectType actualObjectType = (BObjectType) actualType;
List<BLangType> typeRefs = classNode.typeRefs;
SymbolEnv typeDefEnv = SymbolEnv.createObjectConstructorObjectEnv(classNode, data.env);
classNode.oceEnvData.typeInit = objectCtorExpression.typeInit;
dlog.unmute();
if (Symbols.isFlagOn(data.expType.flags, Flags.READONLY)) {
handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, false, data);
} else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
Flags.READONLY)) {
handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, true, data);
} else {
semanticAnalyzer.analyzeNode(classNode, typeDefEnv);
}
dlog.unmute();
markConstructedObjectIsolatedness(actualObjectType);
if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
checkInvocationParam(cIExpr.initInvocation, data);
cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
} else {
if (!isValidInitInvocation(cIExpr, (BObjectType) actualType, data)) {
return;
}
}
if (cIExpr.initInvocation.getBType() == null) {
cIExpr.initInvocation.setBType(symTable.nilType);
}
BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType(), data);
data.resultType = types.checkType(cIExpr, actualTypeInitType, data.expType);
} | BType effectiveType = Types.getEffectiveType(Types.getReferredType(objectCtorExpression.expectedType)); | public void visit(BLangObjectConstructorExpression objectCtorExpression, AnalyzerData data) {
BLangClassDefinition classNode = objectCtorExpression.classNode;
classNode.oceEnvData.capturedClosureEnv = data.env;
BLangClassDefinition originalClass = classNode.oceEnvData.originalClass;
if (originalClass.cloneRef != null && !objectCtorExpression.defined) {
classNode = (BLangClassDefinition) originalClass.cloneRef;
symbolEnter.defineClassDefinition(classNode, data.env);
objectCtorExpression.defined = true;
}
BObjectType objectType;
if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
objectType = (BObjectType) objectCtorExpression.classNode.getBType();
BType effectiveType = Types.getEffectiveType(Types.getReferredType(objectCtorExpression.expectedType));
if (effectiveType.tag == TypeTags.OBJECT) {
BObjectType expObjType = (BObjectType) Types.getReferredType(effectiveType);
objectType.typeIdSet = expObjType.typeIdSet;
} else if (effectiveType.tag != TypeTags.NONE) {
if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
objectCtorExpression.expectedType);
data.resultType = symTable.semanticError;
return;
}
}
}
BLangTypeInit cIExpr = objectCtorExpression.typeInit;
BType actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, data.env);
if (actualType == symTable.semanticError) {
data.resultType = symTable.semanticError;
return;
}
BObjectType actualObjectType = (BObjectType) actualType;
List<BLangType> typeRefs = classNode.typeRefs;
SymbolEnv typeDefEnv = SymbolEnv.createObjectConstructorObjectEnv(classNode, data.env);
classNode.oceEnvData.typeInit = objectCtorExpression.typeInit;
dlog.unmute();
if (Symbols.isFlagOn(data.expType.flags, Flags.READONLY)) {
handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, false, data);
} else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
Flags.READONLY)) {
handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, true, data);
} else {
semanticAnalyzer.analyzeNode(classNode, typeDefEnv);
}
dlog.unmute();
markConstructedObjectIsolatedness(actualObjectType);
if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
checkInvocationParam(cIExpr.initInvocation, data);
cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
} else {
if (!isValidInitInvocation(cIExpr, (BObjectType) actualType, data)) {
return;
}
}
if (cIExpr.initInvocation.getBType() == null) {
cIExpr.initInvocation.setBType(symTable.nilType);
}
BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType(), data);
data.resultType = types.checkType(cIExpr, actualTypeInitType, data.expType);
} | class InferredTupleDetails {
List<BType> fixedMemberTypes = new ArrayList<>();
List<BType> restMemberTypes = new ArrayList<>();
} | class InferredTupleDetails {
List<BType> fixedMemberTypes = new ArrayList<>();
List<BType> restMemberTypes = new ArrayList<>();
} |
That's nice! We can use that actually by checking if `result` is `Multi` and doing this special handling in such a case. | private void handleStreaming(ResteasyReactiveRequestContext requestContext, Publisher<?> result, boolean json) {
result.subscribe(new StreamingMultiSubscriber(requestContext, json));
} | result.subscribe(new StreamingMultiSubscriber(requestContext, json)); | private void handleStreaming(ResteasyReactiveRequestContext requestContext, Publisher<?> result, boolean json) {
result.subscribe(new StreamingMultiSubscriber(requestContext, json));
} | class AbstractMultiSubscriber implements Subscriber<Object> {
protected Subscription subscription;
protected ResteasyReactiveRequestContext requestContext;
private boolean weClosed = false;
AbstractMultiSubscriber(ResteasyReactiveRequestContext requestContext) {
this.requestContext = requestContext;
requestContext.restart(AWOL, true);
requestContext.serverResponse().addCloseHandler(() -> {
if (!weClosed && this.subscription != null) {
subscription.cancel();
}
});
}
@Override
public void onSubscribe(Subscription s) {
this.subscription = s;
s.request(1);
}
@Override
public void onComplete() {
weClosed = true;
requestContext.serverResponse().end();
requestContext.serverRequest().closeConnection();
requestContext.close();
}
@Override
public void onError(Throwable t) {
handleException(requestContext, t);
}
protected void handleException(ResteasyReactiveRequestContext requestContext, Throwable t) {
if (requestContext.serverResponse().headWritten()) {
log.error("Exception in SSE server handling, impossible to send it to client", t);
} else {
requestContext.resume(t);
}
}
} | class AbstractMultiSubscriber implements Subscriber<Object> {
protected Subscription subscription;
protected ResteasyReactiveRequestContext requestContext;
private boolean weClosed = false;
AbstractMultiSubscriber(ResteasyReactiveRequestContext requestContext) {
this.requestContext = requestContext;
requestContext.restart(AWOL, true);
requestContext.serverResponse().addCloseHandler(() -> {
if (!weClosed && this.subscription != null) {
subscription.cancel();
}
});
}
@Override
public void onSubscribe(Subscription s) {
this.subscription = s;
s.request(1);
}
@Override
public void onComplete() {
weClosed = true;
requestContext.serverResponse().end();
requestContext.serverRequest().closeConnection();
requestContext.close();
}
@Override
public void onError(Throwable t) {
handleException(requestContext, t);
}
protected void handleException(ResteasyReactiveRequestContext requestContext, Throwable t) {
if (requestContext.serverResponse().headWritten()) {
log.error("Exception in SSE server handling, impossible to send it to client", t);
} else {
requestContext.resume(t);
}
}
} |
IoT Hub query tests add a buffer or retry to account for any sort of propagation delay between when a twin is created and when query can find it. Does this test pass reliably? | public void validQuerySucceeds(HttpClient httpClient, DigitalTwinsServiceVersion serviceVersion) {
DigitalTwinsAsyncClient asyncClient = getAsyncClient(httpClient, serviceVersion);
String floorModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.FLOOR_MODEL_ID_PREFIX, asyncClient, randomIntegerStringGenerator);
String roomModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.ROOM_MODEL_ID_PREFIX, asyncClient, randomIntegerStringGenerator);
String roomTwinId = UniqueIdHelper.getUniqueDigitalTwinId(TestAssetDefaults.ROOM_TWIN_ID_PREFIX, asyncClient, randomIntegerStringGenerator);
try {
String roomModelPayload = TestAssetsHelper.getRoomModelPayload(roomModelId, floorModelId);
StepVerifier.create(asyncClient.createModels(new ArrayList<>(Arrays.asList(roomModelPayload))))
.assertNext(response ->
assertThat(response.size())
.as("Created models successfully")
.isEqualTo(1))
.verifyComplete();
String roomTwin = TestAssetsHelper.getRoomTwinPayload(roomModelId);
StepVerifier.create(asyncClient.createDigitalTwinWithResponse(roomTwinId, roomTwin))
.assertNext(response ->
assertThat(response.getStatusCode())
.as("Created digitaltwin successfully")
.isEqualTo(HttpURLConnection.HTTP_OK))
.verifyComplete();
String queryString = "SELECT * FROM digitaltwins where IsOccupied = true";
StepVerifier.create(asyncClient.query(queryString, BasicDigitalTwin.class))
.thenConsumeWhile(dt -> {
assertThat(dt.getCustomProperties().get("IsOccupied"))
.as("IsOccupied should be true")
.isEqualTo(true);
return true;
})
.verifyComplete();
}
finally {
try {
if (roomTwinId != null) {
asyncClient.deleteDigitalTwin(roomTwinId).block();
}
if (roomModelId != null){
asyncClient.deleteModel(roomModelId).block();
}
}
catch (Exception ex)
{
fail("Failed to cleanup due to: ", ex);
}
}
} | StepVerifier.create(asyncClient.query(queryString, BasicDigitalTwin.class)) | public void validQuerySucceeds(HttpClient httpClient, DigitalTwinsServiceVersion serviceVersion) {
DigitalTwinsAsyncClient asyncClient = getAsyncClient(httpClient, serviceVersion);
String floorModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.FLOOR_MODEL_ID_PREFIX, asyncClient, randomIntegerStringGenerator);
String roomModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.ROOM_MODEL_ID_PREFIX, asyncClient, randomIntegerStringGenerator);
String roomTwinId = UniqueIdHelper.getUniqueDigitalTwinId(TestAssetDefaults.ROOM_TWIN_ID_PREFIX, asyncClient, randomIntegerStringGenerator);
try {
String roomModelPayload = TestAssetsHelper.getRoomModelPayload(roomModelId, floorModelId);
StepVerifier.create(asyncClient.createModels(new ArrayList<>(Arrays.asList(roomModelPayload))))
.assertNext(response ->
assertThat(response.size())
.as("Created models successfully")
.isEqualTo(1))
.verifyComplete();
String roomTwin = TestAssetsHelper.getRoomTwinPayload(roomModelId);
StepVerifier.create(asyncClient.createDigitalTwinWithResponse(roomTwinId, roomTwin))
.assertNext(response ->
assertThat(response.getStatusCode())
.as("Created digitaltwin successfully")
.isEqualTo(HttpURLConnection.HTTP_OK))
.verifyComplete();
String queryString = "SELECT * FROM digitaltwins where IsOccupied = true";
StepVerifier.create(asyncClient.query(queryString, BasicDigitalTwin.class))
.thenConsumeWhile(dt -> {
assertThat(dt.getCustomProperties().get("IsOccupied"))
.as("IsOccupied should be true")
.isEqualTo(true);
return true;
})
.verifyComplete();
}
finally {
try {
if (roomTwinId != null) {
asyncClient.deleteDigitalTwin(roomTwinId).block();
}
if (roomModelId != null){
asyncClient.deleteModel(roomModelId).block();
}
}
catch (Exception ex)
{
fail("Failed to cleanup due to: ", ex);
}
}
} | class QueryAsyncTests extends QueryTestBase{
private final ClientLogger logger = new ClientLogger(ComponentsTests.class);
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.digitaltwins.core.TestHelper
@Override
} | class QueryAsyncTests extends QueryTestBase{
private final ClientLogger logger = new ClientLogger(ComponentsTests.class);
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.digitaltwins.core.TestHelper
@Override
} |
The deactivate implementation in the config server client only have retries over the three servers, as it needs to hit the correct one. There is no other retrying logic there, for that path. | private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) {
try {
return retrying(10, () -> {
logger.log("Deactivating deployment of " + id.application() + " in " + id.type().zone(controller.system()) + " ...");
controller.applications().deactivate(id.application(), id.type().zone(controller.system()));
return running;
});
}
catch (RuntimeException e) {
logger.log(WARNING, "Failed deleting application " + id.application(), e);
return Optional.of(error);
}
} | return retrying(10, () -> { | private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) {
try {
return retrying(10, () -> {
logger.log("Deactivating deployment of " + id.application() + " in " + id.type().zone(controller.system()) + " ...");
controller.applications().deactivate(id.application(), id.type().zone(controller.system()));
return running;
});
}
catch (RuntimeException e) {
logger.log(WARNING, "Failed deleting application " + id.application(), e);
return Optional.of(error);
}
} | class InternalStepRunner implements StepRunner {
private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName());
private static final NodeResources DEFAULT_TESTER_RESOURCES =
new NodeResources(1, 4, 50, 0.3, NodeResources.DiskSpeed.any);
private static final NodeResources DEFAULT_TESTER_RESOURCES_AWS =
new NodeResources(2, 8, 50, 0.3, NodeResources.DiskSpeed.any);
static final Duration endpointTimeout = Duration.ofMinutes(15);
static final Duration testerTimeout = Duration.ofMinutes(30);
static final Duration installationTimeout = Duration.ofMinutes(150);
static final Duration certificateTimeout = Duration.ofMinutes(300);
private final Controller controller;
private final TestConfigSerializer testConfigSerializer;
private final DeploymentFailureMails mails;
public InternalStepRunner(Controller controller) {
this.controller = controller;
this.testConfigSerializer = new TestConfigSerializer(controller.system());
this.mails = new DeploymentFailureMails(controller.zoneRegistry());
}
@Override
public Optional<RunStatus> run(LockedStep step, RunId id) {
DualLogger logger = new DualLogger(id, step.get());
try {
switch (step.get()) {
case deployTester: return deployTester(id, logger);
case deployInitialReal: return deployInitialReal(id, logger);
case installInitialReal: return installInitialReal(id, logger);
case deployReal: return deployReal(id, logger);
case installTester: return installTester(id, logger);
case installReal: return installReal(id, logger);
case startTests: return startTests(id, logger);
case endTests: return endTests(id, logger);
case copyVespaLogs: return copyVespaLogs(id, logger);
case deactivateReal: return deactivateReal(id, logger);
case deactivateTester: return deactivateTester(id, logger);
case report: return report(id, logger);
default: throw new AssertionError("Unknown step '" + step + "'!");
}
}
catch (UncheckedIOException e) {
logger.logWithInternalException(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e), e);
return Optional.empty();
}
catch (RuntimeException e) {
logger.log(WARNING, "Unexpected exception running " + id, e);
if (JobProfile.of(id.type()).alwaysRun().contains(step.get())) {
logger.log("Will keep trying, as this is a cleanup step.");
return Optional.empty();
}
return Optional.of(error);
}
}
private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " +
versions.sourcePlatform().orElse(versions.targetPlatform()) +
" and application version " +
versions.sourceApplication().orElse(versions.targetApplication()).id() + " ...");
return deployReal(id, true, versions, logger);
}
private Optional<RunStatus> deployReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " + versions.targetPlatform() +
" and application version " + versions.targetApplication().id() + " ...");
return deployReal(id, false, versions, logger);
}
private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, Versions versions, DualLogger logger) {
Optional<ApplicationPackage> applicationPackage = id.type().environment().isManuallyDeployed()
? Optional.of(new ApplicationPackage(controller.applications().applicationStore()
.getDev(id.application(), id.type().zone(controller.system()))))
: Optional.empty();
Optional<Version> vespaVersion = id.type().environment().isManuallyDeployed()
? Optional.of(versions.targetPlatform())
: Optional.empty();
return deploy(id.application(),
id.type(),
() -> controller.applications().deploy(id.application(),
id.type().zone(controller.system()),
applicationPackage,
new DeployOptions(false,
vespaVersion,
false,
setTheStage)),
logger);
}
private Optional<RunStatus> deployTester(RunId id, DualLogger logger) {
Version platform = controller.jobController().run(id).get().versions().targetPlatform();
logger.log("Deploying the tester container on platform " + platform + " ...");
return deploy(id.tester().id(),
id.type(),
() -> controller.applications().deployTester(id.tester(),
testerPackage(id),
id.type().zone(controller.system()),
new DeployOptions(true,
Optional.of(platform),
false,
false)),
logger);
}
private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, DualLogger logger) {
try {
PrepareResponse prepareResponse = deployment.get().prepareResponse();
if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) {
List<String> messages = new ArrayList<>();
messages.add("Deploy failed due to non-compatible changes that require re-feed.");
messages.add("Your options are:");
messages.add("1. Revert the incompatible changes.");
messages.add("2. If you think it is safe in your case, you can override this validation, see");
messages.add(" http:
messages.add("3. Deploy as a new application under a different name.");
messages.add("Illegal actions:");
prepareResponse.configChangeActions.refeedActions.stream()
.filter(action -> ! action.allowed)
.flatMap(action -> action.messages.stream())
.forEach(messages::add);
messages.add("Details:");
prepareResponse.log.stream()
.map(entry -> entry.message)
.forEach(messages::add);
logger.log(messages);
return Optional.of(deploymentFailed);
}
if (prepareResponse.configChangeActions.restartActions.isEmpty())
logger.log("No services requiring restart.");
else
prepareResponse.configChangeActions.restartActions.stream()
.flatMap(action -> action.services.stream())
.map(service -> service.hostName)
.sorted().distinct()
.map(Hostname::new)
.forEach(hostname -> {
controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname));
logger.log("Restarting services on host " + hostname.id() + ".");
});
logger.log("Deployment successful.");
if (prepareResponse.message != null)
logger.log(prepareResponse.message);
return Optional.of(running);
}
catch (ConfigServerException e) {
if ( e.getErrorCode() == ACTIVATION_CONFLICT
|| e.getErrorCode() == APPLICATION_LOCK_FAILURE
|| e.getErrorCode() == PARENT_HOST_NOT_READY
|| e.getErrorCode() == CERTIFICATE_NOT_READY
|| e.getErrorCode() == LOAD_BALANCER_NOT_READY) {
logger.log("Will retry, because of '" + e.getErrorCode() + "' deploying:\n" + e.getMessage());
return Optional.empty();
}
if ( e.getErrorCode() == INVALID_APPLICATION_PACKAGE
|| e.getErrorCode() == BAD_REQUEST
|| e.getErrorCode() == OUT_OF_CAPACITY) {
logger.log("Deployment failed: " + e.getMessage());
return Optional.of(e.getErrorCode() == OUT_OF_CAPACITY ? outOfCapacity : deploymentFailed);
}
throw e;
}
}
/** Checks convergence of the initial (source) platform and application versions; delegates to {@link #installReal(RunId, boolean, DualLogger)} with setTheStage = true. */
private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) {
return installReal(id, true, logger);
}
/** Checks convergence of the target platform and application versions; delegates to {@link #installReal(RunId, boolean, DualLogger)} with setTheStage = false. */
private Optional<RunStatus> installReal(RunId id, DualLogger logger) {
return installReal(id, false, logger);
}
/**
 * Checks whether the real deployment has converged on the versions expected for this run.
 *
 * When {@code setTheStage} is true, checks against the source platform/application versions
 * (falling back to the targets when no sources are set); otherwise against the targets.
 *
 * @return running when nodes, services, endpoints and containers are all ready;
 *         installationFailed when the deployment expired or the installation timeout passed;
 *         error when endpoints failed to appear within the endpoint timeout;
 *         empty to indicate the step should be retried later.
 */
private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) {
Optional<Deployment> deployment = deployment(id.application(), id.type());
if ( ! deployment.isPresent()) {
logger.log(INFO, "Deployment expired before installation was successful.");
return Optional.of(installationFailed);
}
Versions versions = controller.jobController().run(id).get().versions();
Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform();
ApplicationVersion application = setTheStage ? versions.sourceApplication().orElse(versions.targetApplication()) : versions.targetApplication();
logger.log("Checking installation of " + platform + " and " + application.id() + " ...");
// Readiness is checked in stages: node/service convergence first, then endpoints, then container health.
if ( nodesConverged(id.application(), id.type(), platform, logger)
&& servicesConverged(id.application(), id.type(), platform, logger)) {
if (endpointsAvailable(id.application(), id.type().zone(controller.system()), logger)) {
if (containersAreUp(id.application(), id.type().zone(controller.system()), logger)) {
logger.log("Installation succeeded!");
return Optional.of(running);
}
}
// Endpoints get a shorter timeout than the overall installation.
else if (timedOut(id, deployment.get(), endpointTimeout)) {
logger.log(WARNING, "Endpoints failed to show up within " + endpointTimeout.toMinutes() + " minutes!");
return Optional.of(error);
}
}
if (timedOut(id, deployment.get(), installationTimeout)) {
logger.log(INFO, "Installation failed to complete within " + installationTimeout.toMinutes() + " minutes!");
return Optional.of(installationFailed);
}
logger.log("Installation not yet complete.");
return Optional.empty();
}
/**
 * Checks whether the tester container deployment has converged on the target platform version.
 *
 * Mirrors {@link #installReal(RunId, boolean, DualLogger)}, but operates on the tester
 * application id, and any failure here maps to error rather than installationFailed.
 *
 * @return running when the tester is fully up; error on expiry or timeout; empty to retry later.
 */
private Optional<RunStatus> installTester(RunId id, DualLogger logger) {
// Note: expiry is checked against the *real* deployment, whose lifetime bounds the tester's.
Optional<Deployment> deployment = deployment(id.application(), id.type());
if ( ! deployment.isPresent()) {
logger.log(WARNING, "Deployment expired before installation of tester was successful.");
return Optional.of(error);
}
Version platform = controller.jobController().run(id).get().versions().targetPlatform();
logger.log("Checking installation of tester container ...");
if ( nodesConverged(id.tester().id(), id.type(), platform, logger)
&& servicesConverged(id.tester().id(), id.type(), platform, logger)) {
if (endpointsAvailable(id.tester().id(), id.type().zone(controller.system()), logger)) {
if (containersAreUp(id.tester().id(), id.type().zone(controller.system()), logger)) {
logger.log("Tester container successfully installed!");
return Optional.of(running);
}
}
else if (timedOut(id, deployment.get(), endpointTimeout)) {
logger.log(WARNING, "Tester failed to show up within " + endpointTimeout.toMinutes() + " minutes!");
return Optional.of(error);
}
}
if (timedOut(id, deployment.get(), testerTimeout)) {
logger.log(WARNING, "Installation of tester failed to complete within " + testerTimeout.toMinutes() + " minutes of real deployment!");
return Optional.of(error);
}
logger.log("Installation of tester not yet complete.");
return Optional.empty();
}
/** Returns true iff all containers in the deployment give 100 consecutive 200 OK responses on /status.html. */
private boolean containersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) {
var endpoints = controller.applications().clusterEndpoints(Set.of(new DeploymentId(id, zoneId)));
// No endpoints registered for the zone yet: containers cannot be verified as up.
if ( ! endpoints.containsKey(zoneId))
return false;
for (URI endpoint : endpoints.get(zoneId).values()) {
// Tester containers have their own readiness probe; real containers use the ordinary one.
boolean ready = id.instance().isTester() ?
controller.jobController().cloud().testerReady(endpoint)
: controller.jobController().cloud().ready(endpoint);
if (!ready) {
logger.log("Failed to get 100 consecutive OKs from " + endpoint);
return false;
}
}
return true;
}
/**
 * Returns whether cluster endpoints for the given deployment are registered and all resolve in DNS.
 * Logs the discovered endpoints on success.
 */
private boolean endpointsAvailable(ApplicationId id, ZoneId zone, DualLogger logger) {
logger.log("Attempting to find deployment endpoints ...");
var endpoints = controller.applications().clusterEndpoints(Set.of(new DeploymentId(id, zone)));
if ( ! endpoints.containsKey(zone)) {
logger.log("Endpoints not yet ready.");
return false;
}
// All endpoints must resolve in DNS before the deployment is considered reachable.
for (var endpoint : endpoints.get(zone).values())
if ( ! controller.jobController().cloud().exists(endpoint)) {
logger.log(INFO, "DNS lookup yielded no IP address for '" + endpoint + "'.");
return false;
}
logEndpoints(endpoints, logger);
return true;
}
/** Writes the resolved endpoint URIs, grouped by zone, to the job log as one multi-line entry. */
private void logEndpoints(Map<ZoneId, Map<ClusterSpec.Id, URI>> endpoints, DualLogger logger) {
    List<String> lines = new ArrayList<>();
    lines.add("Found endpoints:");
    for (Map.Entry<ZoneId, Map<ClusterSpec.Id, URI>> zoneEntry : endpoints.entrySet()) {
        lines.add("- " + zoneEntry.getKey());
        for (Map.Entry<ClusterSpec.Id, URI> clusterEntry : zoneEntry.getValue().entrySet())
            lines.add(" |-- " + clusterEntry.getValue() + " (" + clusterEntry.getKey() + ")");
    }
    logger.log(lines);
}
/**
 * Returns whether all active and reserved nodes of the deployment run the target platform version,
 * with no pending restarts or reboots. Logs one formatted status line per node.
 */
private boolean nodesConverged(ApplicationId id, JobType type, Version target, DualLogger logger) {
List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(type.zone(controller.system()), id, ImmutableSet.of(active, reserved));
// One aligned status line per node: hostname, service state, version (with pending change), restart and reboot status.
List<String> statuses = nodes.stream()
.map(node -> String.format("%70s: %-16s%-25s%-32s%s",
node.hostname(),
node.serviceState(),
node.wantedVersion() + (node.currentVersion().equals(node.wantedVersion()) ? "" : " <-- " + node.currentVersion()),
node.restartGeneration() >= node.wantedRestartGeneration() ? ""
: "restart pending (" + node.wantedRestartGeneration() + " <-- " + node.restartGeneration() + ")",
node.rebootGeneration() >= node.wantedRebootGeneration() ? ""
: "reboot pending (" + node.wantedRebootGeneration() + " <-- " + node.rebootGeneration() + ")"))
.collect(Collectors.toList());
logger.log(statuses);
return nodes.stream().allMatch(node -> node.currentVersion().equals(target)
&& node.restartGeneration() >= node.wantedRestartGeneration()
&& node.rebootGeneration() >= node.wantedRebootGeneration());
}
/**
 * Returns whether all services of the deployment are on the wanted config generation,
 * as reported by the config server. Logs one line per service that has not yet converged.
 */
private boolean servicesConverged(ApplicationId id, JobType type, Version platform, DualLogger logger) {
var convergence = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(id, type.zone(controller.system())),
Optional.of(platform));
if (convergence.isEmpty()) {
logger.log("Config status not currently available -- will retry.");
return false;
}
logger.log("Wanted config generation is " + convergence.get().wantedGeneration());
// Log only the stragglers: services whose current generation differs from the wanted one.
List<String> statuses = convergence.get().services().stream()
.filter(serviceStatus -> serviceStatus.currentGeneration() != convergence.get().wantedGeneration())
.map(serviceStatus -> String.format("%70s: %11s on port %4d has config generation %s",
serviceStatus.host().value(),
serviceStatus.type(),
serviceStatus.port(),
serviceStatus.currentGeneration() == -1 ? "not started!" : Long.toString(serviceStatus.currentGeneration())))
.collect(Collectors.toList());
logger.log(statuses);
if (statuses.isEmpty())
logger.log("All services on wanted config generation.");
return convergence.get().converged();
}
/**
 * Asks the tester container to start running the test suite for this run, against the endpoints
 * of the deployment under test and all production deployments of the instance.
 *
 * @return running when tests were started; aborted when the deployment expired;
 *         error when endpoints vanished past their timeout, or the tester went bad;
 *         empty to retry later.
 */
private Optional<RunStatus> startTests(RunId id, DualLogger logger) {
    Optional<Deployment> deployment = deployment(id.application(), id.type());
    if (deployment.isEmpty()) {
        logger.log(INFO, "Deployment expired before tests could start.");
        return Optional.of(aborted);
    }
    // Tests run against all production zones of the instance, plus the zone of this run.
    var deployments = controller.applications().requireInstance(id.application())
                                .productionDeployments().keySet().stream()
                                .map(zone -> new DeploymentId(id.application(), zone))
                                .collect(Collectors.toSet());
    deployments.add(new DeploymentId(id.application(), id.type().zone(controller.system())));
    logger.log("Attempting to find endpoints ...");
    var endpoints = controller.applications().clusterEndpoints(deployments);
    if ( ! endpoints.containsKey(id.type().zone(controller.system())) && timedOut(id, deployment.get(), endpointTimeout)) {
        logger.log(WARNING, "Endpoints for the deployment to test vanished again, while it was still active!");
        return Optional.of(error);
    }
    logEndpoints(endpoints, logger);
    Optional<URI> testerEndpoint = controller.jobController().testerEndpoint(id);
    if (testerEndpoint.isEmpty()) {
        // Bug fix: the previous version called testerEndpoint.get() unconditionally below, throwing
        // NoSuchElementException when the endpoint was missing but the timeout had not yet passed.
        // Now we fail on timeout, and otherwise retry later, mirroring endTests().
        if (timedOut(id, deployment.get(), endpointTimeout)) {
            logger.log(WARNING, "Endpoints for the tester container vanished again, while it was still active!");
            return Optional.of(error);
        }
        logger.log("Endpoints for the tester container not yet available -- will retry.");
        return Optional.empty();
    }
    if ( ! controller.jobController().cloud().testerReady(testerEndpoint.get())) {
        logger.log(WARNING, "Tester container went bad!");
        return Optional.of(error);
    }
    logger.log("Starting tests ...");
    controller.jobController().cloud().startTests(testerEndpoint.get(),
                                                  TesterCloud.Suite.of(id.type()),
                                                  testConfigSerializer.configJson(id.application(),
                                                                                  id.type(),
                                                                                  true,
                                                                                  endpoints,
                                                                                  controller.applications().contentClustersByZone(deployments)));
    return Optional.of(running);
}
/**
 * Polls the tester for the outcome of the tests, after updating the stored test log.
 *
 * @return running on success, testFailure/error on failure, aborted when the deployment
 *         or tester certificate expired, and empty while tests are still running.
 */
private Optional<RunStatus> endTests(RunId id, DualLogger logger) {
if ( ! deployment(id.application(), id.type()).isPresent()) {
logger.log(INFO, "Deployment expired before tests could complete.");
return Optional.of(aborted);
}
// An expired tester certificate means we can no longer talk to the tester; abort.
Optional<X509Certificate> testerCertificate = controller.jobController().run(id).get().testerCertificate();
if (testerCertificate.isPresent()) {
try {
testerCertificate.get().checkValidity(Date.from(controller.clock().instant()));
}
catch (CertificateExpiredException | CertificateNotYetValidException e) {
logger.log(INFO, "Tester certificate expired before tests could complete.");
return Optional.of(aborted);
}
}
Optional<URI> testerEndpoint = controller.jobController().testerEndpoint(id);
if ( ! testerEndpoint.isPresent()) {
logger.log("Endpoints for tester not found -- trying again later.");
return Optional.empty();
}
controller.jobController().updateTestLog(id);
// Map the tester's reported status to a run status for this step.
TesterCloud.Status testStatus = controller.jobController().cloud().getStatus(testerEndpoint.get());
switch (testStatus) {
case NOT_STARTED:
throw new IllegalStateException("Tester reports tests not started, even though they should have!");
case RUNNING:
return Optional.empty();
case FAILURE:
logger.log("Tests failed.");
return Optional.of(testFailure);
case ERROR:
logger.log(INFO, "Tester failed running its tests!");
return Optional.of(error);
case SUCCESS:
logger.log("Tests completed successfully.");
return Optional.of(running);
default:
throw new IllegalStateException("Unknown status '" + testStatus + "'!");
}
}
/** Copies the Vespa log of the deployment, if it still exists, into the stored log of this run. */
private Optional<RunStatus> copyVespaLogs(RunId id, DualLogger logger) {
    boolean deploymentStillExists = deployment(id.application(), id.type()).isPresent();
    if ( ! deploymentStillExists)
        return Optional.of(running); // Nothing to copy; the deployment is already gone.
    try {
        controller.jobController().updateVespaLog(id);
        return Optional.of(running);
    }
    catch (Exception e) {
        logger.log(INFO, "Failure getting vespa logs for " + id, e);
        return Optional.of(error);
    }
}
/**
 * Deactivates the tester application of this run, retrying the operation up to 10 extra times
 * before giving up with error.
 */
private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) {
try {
return retrying(10, () -> {
logger.log("Deactivating tester of " + id.application() + " in " + id.type().zone(controller.system()) + " ...");
controller.jobController().deactivateTester(id.tester(), id.type());
return running;
});
}
catch (RuntimeException e) {
logger.log(WARNING, "Failed deleting tester of " + id.application(), e);
return Optional.of(error);
}
}
/**
 * Runs the given task until it succeeds, at most 1 + retries times, returning its first
 * successful result; otherwise throws the first exception, with later ones suppressed.
 */
private static Optional<RunStatus> retrying(int retries, Supplier<RunStatus> task) {
    RuntimeException firstFailure = null;
    while (true) {
        try {
            return Optional.of(task.get());
        }
        catch (RuntimeException e) {
            if (firstFailure == null)
                firstFailure = e;
            else
                firstFailure.addSuppressed(e);
        }
        if (--retries < 0)
            throw firstFailure;
    }
}
/** Reports the outcome of the run: sends failure notifications if the run has failed. */
private Optional<RunStatus> report(RunId id, DualLogger logger) {
try {
controller.jobController().active(id).ifPresent(run -> {
if (run.hasFailed())
sendNotification(run, logger);
});
}
catch (IllegalStateException e) {
// The run appears to no longer be active; treat as an error for this step.
logger.log(INFO, "Job '" + id.type() + "' no longer supposed to run?", e);
return Optional.of(error);
}
return Optional.of(running);
}
/** Sends a mail with a notification of a failed run, if one should be sent. */
private void sendNotification(Run run, DualLogger logger) {
Application application = controller.applications().requireApplication(TenantAndApplicationId.from(run.id().application()));
Notifications notifications = application.deploymentSpec().requireInstance(run.id().application().instance()).notifications();
// The run failed on a new commit when the change being deployed matches this run's target application version.
boolean newCommit = application.change().application()
.map(run.versions().targetApplication()::equals)
.orElse(false);
When when = newCommit ? failingCommit : failing;
List<String> recipients = new ArrayList<>(notifications.emailAddressesFor(when));
// Optionally include the commit author, when configured for this notification condition.
if (notifications.emailRolesFor(when).contains(author))
run.versions().targetApplication().authorEmail().ifPresent(recipients::add);
if (recipients.isEmpty())
return;
try {
// Pick the mail template matching the failure status; out-of-capacity mails only for production jobs.
if (run.status() == outOfCapacity && run.id().type().isProduction())
controller.serviceRegistry().mailer().send(mails.outOfCapacity(run.id(), recipients));
if (run.status() == deploymentFailed)
controller.serviceRegistry().mailer().send(mails.deploymentFailure(run.id(), recipients));
if (run.status() == installationFailed)
controller.serviceRegistry().mailer().send(mails.installationFailure(run.id(), recipients));
if (run.status() == testFailure)
controller.serviceRegistry().mailer().send(mails.testFailure(run.id(), recipients));
if (run.status() == error)
controller.serviceRegistry().mailer().send(mails.systemError(run.id(), recipients));
}
catch (RuntimeException e) {
// A notification failure must not fail the report step; log and carry on.
logger.log(INFO, "Exception trying to send mail for " + run.id(), e);
}
}
/** Returns the deployment of the real application in the zone of the given job, if it exists. */
private Optional<Deployment> deployment(ApplicationId id, JobType type) {
return Optional.ofNullable(application(id).deployments().get(type.zone(controller.system())));
}
/** Returns the real application with the given id. */
private Instance application(ApplicationId id) {
// NOTE(review): the no-op lock acquisition presumably acts as a synchronization point
// before reading the instance -- confirm against lockApplicationOrThrow semantics.
controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), __ -> { });
return controller.applications().requireInstance(id);
}
/**
 * Returns whether the time since deployment is more than the zone deployment expiry, or the given timeout.
 *
 * We time out the job before the deployment expires, for zone where deployments are not persistent,
 * to be able to collect the Vespa log from the deployment. Thus, the lower of the zone's deployment expiry,
 * and the given default installation timeout, minus one minute, is used as a timeout threshold.
 */
private boolean timedOut(RunId id, Deployment deployment, Duration defaultTimeout) {
Run run = controller.jobController().run(id).get();
// A deployment made after this run started belongs to a later change; this run has not timed out against it.
if (run.start().isAfter(deployment.at()))
return false;
// Use the zone's deployment TTL when it is shorter than the given default.
Duration timeout = controller.zoneRegistry().getDeploymentTimeToLive(deployment.zone())
.filter(zoneTimeout -> zoneTimeout.compareTo(defaultTimeout) < 0)
.orElse(defaultTimeout);
// The one-minute margin leaves time to collect logs before the deployment itself expires.
return deployment.at().isBefore(controller.clock().instant().minus(timeout.minus(Duration.ofMinutes(1))));
}
/** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */
private ApplicationPackage testerPackage(RunId id) {
ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication();
DeploymentSpec spec = controller.applications().requireApplication(TenantAndApplicationId.from(id.application())).deploymentSpec();
ZoneId zone = id.type().zone(controller.system());
// Tester certificates are only used for test jobs in public systems.
boolean useTesterCertificate = controller.system().isPublic() && id.type().isTest();
// Generate services.xml with either a configured tester flavor, or a default sized for the zone.
byte[] servicesXml = servicesXml(controller.zoneRegistry().accessControlDomain(),
! controller.system().isPublic(),
useTesterCertificate,
testerFlavorFor(id, spec)
.map(NodeResources::fromLegacyName)
.orElse(zone.region().value().contains("aws-") ?
DEFAULT_TESTER_RESOURCES_AWS : DEFAULT_TESTER_RESOURCES));
byte[] testPackage = controller.applications().applicationStore().getTester(id.application().tenant(), id.application().application(), version);
byte[] deploymentXml = deploymentXml(id.tester(),
spec.requireInstance(id.application().instance()).athenzDomain(),
spec.requireInstance(id.application().instance()).athenzService(zone.environment(), zone.region()));
// Assemble the stored test package plus the generated files (and certificate, when used) into one zip.
try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) {
zipBuilder.add(testPackage);
zipBuilder.add("services.xml", servicesXml);
zipBuilder.add("deployment.xml", deploymentXml);
if (useTesterCertificate)
appendAndStoreCertificate(zipBuilder, id);
zipBuilder.close();
return new ApplicationPackage(zipBuilder.toByteArray());
}
}
/**
 * Generates a key pair and self-signed certificate for the tester of this run, stores the
 * certificate with the run, and adds the PEM-encoded key and certificate to the package.
 */
private void appendAndStoreCertificate(ZipBuilder zipBuilder, RunId id) {
KeyPair keyPair = KeyUtils.generateKeypair(KeyAlgorithm.RSA, 2048);
// The subject name identifies the tester instance, job type and run number.
X500Principal subject = new X500Principal("CN=" + id.tester().id().toFullString() + "." + id.type() + "." + id.number());
X509Certificate certificate = X509CertificateBuilder.fromKeypair(keyPair,
subject,
controller.clock().instant(),
controller.clock().instant().plus(certificateTimeout),
SignatureAlgorithm.SHA512_WITH_RSA,
BigInteger.valueOf(1))
.build();
controller.jobController().storeTesterCertificate(id, certificate);
zipBuilder.add("artifacts/key", KeyUtils.toPem(keyPair.getPrivate()).getBytes(UTF_8));
zipBuilder.add("artifacts/cert", X509CertificateUtils.toPem(certificate).getBytes(UTF_8));
}
/**
 * Returns the tester flavor configured for the first deployment step which targets the
 * environment of this run; throws when no step does.
 */
private static Optional<String> testerFlavorFor(RunId id, DeploymentSpec spec) {
    for (DeploymentSpec.Step candidate : spec.steps()) {
        if ( ! candidate.deploysTo(id.type().environment()))
            continue;
        return candidate.zones().get(0).testerFlavor();
    }
    throw new IllegalStateException("No step deploys to the zone this run is for!");
}
/**
 * Returns the generated services.xml content for the tester application.
 *
 * @param domain               Athenz domain used for access control of the tester endpoints
 * @param useAthenzCredentials whether the test runner should use Athenz credentials
 * @param useTesterCertificate whether the test runner should use the generated tester certificate
 * @param resources            node resources to request for the tester container
 */
static byte[] servicesXml(AthenzDomain domain, boolean useAthenzCredentials, boolean useTesterCertificate,
NodeResources resources) {
// Reserve a fixed amount of memory for the jdisc container; half of the rest goes to surefire.
int jdiscMemoryGb = 2;
int jdiscMemoryPct = (int) Math.ceil(100 * jdiscMemoryGb / resources.memoryGb());
int testMemoryMb = (int) (1024 * (resources.memoryGb() - jdiscMemoryGb) / 2);
String resourceString = String.format(Locale.ENGLISH,
"<resources vcpu=\"%.2f\" memory=\"%.2fGb\" disk=\"%.2fGb\" disk-speed=\"%s\"/>",
resources.vcpu(), resources.memoryGb(), resources.diskGb(), resources.diskSpeed().name());
/* TODO after 18 November 2019, include storageType:
String resourceString = String.format(Locale.ENGLISH,
"<resources vcpu=\"%.2f\" memory=\"%.2fGb\" disk=\"%.2fGb\" disk-speed=\"%s\" storage-type=\"%s\"/>",
resources.vcpu(), resources.memoryGb(), resources.diskGb(), resources.diskSpeed().name(), resources.storageType().name());
*/
// Special-case mapping for the CD domain when locating SIA key and certificate files.
AthenzDomain idDomain = ("vespa.vespa.cd".equals(domain.value()) ? AthenzDomain.from("vespa.vespa") : domain);
String servicesXml =
"<?xml version='1.0' encoding='UTF-8'?>\n" +
"<services xmlns:deploy='vespa' version='1.0'>\n" +
"    <container version='1.0' id='tester'>\n" +
"\n" +
"        <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" +
"            <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" +
"                <artifactsPath>artifacts</artifactsPath>\n" +
"                <surefireMemoryMb>" + testMemoryMb + "</surefireMemoryMb>\n" +
"                <useAthenzCredentials>" + useAthenzCredentials + "</useAthenzCredentials>\n" +
"                <useTesterCertificate>" + useTesterCertificate + "</useTesterCertificate>\n" +
"            </config>\n" +
"        </component>\n" +
"\n" +
"        <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" +
"            <binding>http:
"        </handler>\n" +
"\n" +
"        <http>\n" +
"            <!-- Make sure 4080 is the first port. This will be used by the config server. -->\n" +
"            <server id='default' port='4080'/>\n" +
"            <server id='testertls4443' port='4443'>\n" +
"                <config name=\"jdisc.http.connector\">\n" +
"                    <tlsClientAuthEnforcer>\n" +
"                        <enable>true</enable>\n" +
"                        <pathWhitelist>\n" +
"                            <item>/status.html</item>\n" +
"                            <item>/state/v1/config</item>\n" +
"                        </pathWhitelist>\n" +
"                    </tlsClientAuthEnforcer>\n" +
"                </config>\n" +
"                <ssl>\n" +
"                    <private-key-file>/var/lib/sia/keys/" + idDomain.value() + ".tenant.key.pem</private-key-file>\n" +
"                    <certificate-file>/var/lib/sia/certs/" + idDomain.value() + ".tenant.cert.pem</certificate-file>\n" +
"                    <ca-certificates-file>/opt/yahoo/share/ssl/certs/athenz_certificate_bundle.pem</ca-certificates-file>\n" +
"                    <client-authentication>want</client-authentication>\n" +
"                </ssl>\n" +
"            </server>\n" +
"            <filtering>\n" +
"                <access-control domain='" + domain.value() + "'>\n" +
"                    <exclude>\n" +
"                        <binding>http:
"                    </exclude>\n" +
"                </access-control>\n" +
"                <request-chain id=\"testrunner-api\">\n" +
"                    <filter id='authz-filter' class='com.yahoo.jdisc.http.filter.security.athenz.AthenzAuthorizationFilter' bundle=\"jdisc-security-filters\">\n" +
"                        <config name=\"jdisc.http.filter.security.athenz.athenz-authorization-filter\">\n" +
"                            <credentialsToVerify>TOKEN_ONLY</credentialsToVerify>\n" +
"                            <roleTokenHeaderName>Yahoo-Role-Auth</roleTokenHeaderName>\n" +
"                        </config>\n" +
"                        <component id=\"com.yahoo.jdisc.http.filter.security.athenz.StaticRequestResourceMapper\" bundle=\"jdisc-security-filters\">\n" +
"                            <config name=\"jdisc.http.filter.security.athenz.static-request-resource-mapper\">\n" +
"                                <resourceName>" + domain.value() + ":tester-application</resourceName>\n" +
"                                <action>deploy</action>\n" +
"                            </config>\n" +
"                        </component>\n" +
"                    </filter>\n" +
"                </request-chain>\n" +
"            </filtering>\n" +
"        </http>\n" +
"\n" +
"        <nodes count=\"1\" allocated-memory=\"" + jdiscMemoryPct + "%\">\n" +
"            " + resourceString + "\n" +
"        </nodes>\n" +
"    </container>\n" +
"</services>\n";
return servicesXml.getBytes(UTF_8);
}
/** Returns a dummy deployment xml which sets up the service identity for the tester, if present. */
private static byte[] deploymentXml(TesterId id, Optional<AthenzDomain> athenzDomain, Optional<AthenzService> athenzService) {
// Athenz attributes are only emitted when the corresponding values are present.
String deploymentSpec =
"<?xml version='1.0' encoding='UTF-8'?>\n" +
"<deployment version=\"1.0\" " +
athenzDomain.map(domain -> "athenz-domain=\"" + domain.value() + "\" ").orElse("") +
athenzService.map(service -> "athenz-service=\"" + service.value() + "\" ").orElse("") + ">" +
"  <instance id=\"" + id.id().instance().value() + "\" />" +
"</deployment>";
return deploymentSpec.getBytes(UTF_8);
}
/** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */
private class DualLogger {
// The run and step this logger writes entries for.
private final RunId id;
private final Step step;
private DualLogger(RunId id, Step step) {
this.id = id;
this.step = step;
}
/** Logs the given messages as one INFO entry, to the job log only. */
private void log(String... messages) {
log(List.of(messages));
}
/** Logs the given messages as one INFO entry, to the job log only. */
private void log(List<String> messages) {
controller.jobController().log(id, step, INFO, messages);
}
/** Logs the given message at the given level, to both the job log and the class logger. */
private void log(Level level, String message) {
log(level, message, null);
}
// Only the class logger receives the exception; the job log gets the plain message.
private void logWithInternalException(Level level, String message, Throwable thrown) {
logger.log(level, id + " at " + step + ": " + message, thrown);
controller.jobController().log(id, step, level, message);
}
// The job log entry has the stack trace appended to the message, when an exception is given.
private void log(Level level, String message, Throwable thrown) {
logger.log(level, id + " at " + step + ": " + message, thrown);
if (thrown != null) {
ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream();
thrown.printStackTrace(new PrintStream(traceBuffer));
message += "\n" + traceBuffer;
}
controller.jobController().log(id, step, level, message);
}
}
} | class InternalStepRunner implements StepRunner {
// Process-level logger; per-run logging goes through DualLogger, which also writes to the job log.
private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName());
// Default node resources for the tester container; AWS zones get a larger default.
private static final NodeResources DEFAULT_TESTER_RESOURCES =
new NodeResources(1, 4, 50, 0.3, NodeResources.DiskSpeed.any);
private static final NodeResources DEFAULT_TESTER_RESOURCES_AWS =
new NodeResources(2, 8, 50, 0.3, NodeResources.DiskSpeed.any);
// Timeouts for the phases of a run; evaluated by timedOut() against the deployment time.
static final Duration endpointTimeout = Duration.ofMinutes(15);
static final Duration testerTimeout = Duration.ofMinutes(30);
static final Duration installationTimeout = Duration.ofMinutes(150);
static final Duration certificateTimeout = Duration.ofMinutes(300);
private final Controller controller;
private final TestConfigSerializer testConfigSerializer;
private final DeploymentFailureMails mails;
/** Creates a step runner backed by the given controller. */
public InternalStepRunner(Controller controller) {
this.controller = controller;
this.testConfigSerializer = new TestConfigSerializer(controller.system());
this.mails = new DeploymentFailureMails(controller.zoneRegistry());
}
/**
 * Dispatches the given step of the given run to its implementation.
 *
 * @return the resulting run status, or empty when the step should be retried later.
 */
@Override
public Optional<RunStatus> run(LockedStep step, RunId id) {
DualLogger logger = new DualLogger(id, step.get());
try {
switch (step.get()) {
case deployTester: return deployTester(id, logger);
case deployInitialReal: return deployInitialReal(id, logger);
case installInitialReal: return installInitialReal(id, logger);
case deployReal: return deployReal(id, logger);
case installTester: return installTester(id, logger);
case installReal: return installReal(id, logger);
case startTests: return startTests(id, logger);
case endTests: return endTests(id, logger);
case copyVespaLogs: return copyVespaLogs(id, logger);
case deactivateReal: return deactivateReal(id, logger);
case deactivateTester: return deactivateTester(id, logger);
case report: return report(id, logger);
default: throw new AssertionError("Unknown step '" + step + "'!");
}
}
// IO problems are treated as transient: the step is retried.
catch (UncheckedIOException e) {
logger.logWithInternalException(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e), e);
return Optional.empty();
}
// Other runtime failures fail the run, except for cleanup steps, which keep retrying.
catch (RuntimeException e) {
logger.log(WARNING, "Unexpected exception running " + id, e);
if (JobProfile.of(id.type()).alwaysRun().contains(step.get())) {
logger.log("Will keep trying, as this is a cleanup step.");
return Optional.empty();
}
return Optional.of(error);
}
}
/** Deploys the source (initial) platform and application versions, falling back to the targets when no sources are set. */
private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " +
versions.sourcePlatform().orElse(versions.targetPlatform()) +
" and application version " +
versions.sourceApplication().orElse(versions.targetApplication()).id() + " ...");
return deployReal(id, true, versions, logger);
}
/** Deploys the target platform and application versions of this run. */
private Optional<RunStatus> deployReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " + versions.targetPlatform() +
" and application version " + versions.targetApplication().id() + " ...");
return deployReal(id, false, versions, logger);
}
/**
 * Deploys the real application for this run.
 *
 * Manually deployed environments use the stored dev application package and a pinned
 * platform version; other environments let the config server pick both.
 */
private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, Versions versions, DualLogger logger) {
Optional<ApplicationPackage> applicationPackage = id.type().environment().isManuallyDeployed()
? Optional.of(new ApplicationPackage(controller.applications().applicationStore()
.getDev(id.application(), id.type().zone(controller.system()))))
: Optional.empty();
Optional<Version> vespaVersion = id.type().environment().isManuallyDeployed()
? Optional.of(versions.targetPlatform())
: Optional.empty();
return deploy(id.application(),
id.type(),
() -> controller.applications().deploy(id.application(),
id.type().zone(controller.system()),
applicationPackage,
new DeployOptions(false,
vespaVersion,
false,
setTheStage)),
logger);
}
/** Deploys the tester container for this run, on the target platform version, with a generated tester package. */
private Optional<RunStatus> deployTester(RunId id, DualLogger logger) {
Version platform = controller.jobController().run(id).get().versions().targetPlatform();
logger.log("Deploying the tester container on platform " + platform + " ...");
return deploy(id.tester().id(),
id.type(),
() -> controller.applications().deployTester(id.tester(),
testerPackage(id),
id.type().zone(controller.system()),
new DeployOptions(true,
Optional.of(platform),
false,
false)),
logger);
}
/**
 * Runs the given deployment and interprets its outcome.
 *
 * Disallowed re-feed actions fail the deployment with an explanatory log; required
 * restart actions trigger restarts of the affected hosts. Config server errors are
 * classified as retryable (empty result) or terminal (deploymentFailed/outOfCapacity),
 * and anything else is rethrown.
 */
private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, DualLogger logger) {
try {
PrepareResponse prepareResponse = deployment.get().prepareResponse();
// Re-feed actions which are not allowed fail the deployment, with guidance to the user.
if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) {
List<String> messages = new ArrayList<>();
messages.add("Deploy failed due to non-compatible changes that require re-feed.");
messages.add("Your options are:");
messages.add("1. Revert the incompatible changes.");
messages.add("2. If you think it is safe in your case, you can override this validation, see");
messages.add("    http:
messages.add("3. Deploy as a new application under a different name.");
messages.add("Illegal actions:");
prepareResponse.configChangeActions.refeedActions.stream()
.filter(action -> ! action.allowed)
.flatMap(action -> action.messages.stream())
.forEach(messages::add);
messages.add("Details:");
prepareResponse.log.stream()
.map(entry -> entry.message)
.forEach(messages::add);
logger.log(messages);
return Optional.of(deploymentFailed);
}
// Restart each affected host once, even when several services on it require restart.
if (prepareResponse.configChangeActions.restartActions.isEmpty())
logger.log("No services requiring restart.");
else
prepareResponse.configChangeActions.restartActions.stream()
.flatMap(action -> action.services.stream())
.map(service -> service.hostName)
.sorted().distinct()
.map(Hostname::new)
.forEach(hostname -> {
controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname));
logger.log("Restarting services on host " + hostname.id() + ".");
});
logger.log("Deployment successful.");
if (prepareResponse.message != null)
logger.log(prepareResponse.message);
return Optional.of(running);
}
catch (ConfigServerException e) {
// Transient conditions: retry the step later.
if ( e.getErrorCode() == ACTIVATION_CONFLICT
|| e.getErrorCode() == APPLICATION_LOCK_FAILURE
|| e.getErrorCode() == PARENT_HOST_NOT_READY
|| e.getErrorCode() == CERTIFICATE_NOT_READY
|| e.getErrorCode() == LOAD_BALANCER_NOT_READY) {
logger.log("Will retry, because of '" + e.getErrorCode() + "' deploying:\n" + e.getMessage());
return Optional.empty();
}
// Terminal conditions: fail the run with the matching status.
if ( e.getErrorCode() == INVALID_APPLICATION_PACKAGE
|| e.getErrorCode() == BAD_REQUEST
|| e.getErrorCode() == OUT_OF_CAPACITY) {
logger.log("Deployment failed: " + e.getMessage());
return Optional.of(e.getErrorCode() == OUT_OF_CAPACITY ? outOfCapacity : deploymentFailed);
}
throw e;
}
}
/** Checks convergence of the initial (source) platform and application versions; delegates to {@link #installReal(RunId, boolean, DualLogger)} with setTheStage = true. */
private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) {
return installReal(id, true, logger);
}
/** Checks convergence of the target platform and application versions; delegates to {@link #installReal(RunId, boolean, DualLogger)} with setTheStage = false. */
private Optional<RunStatus> installReal(RunId id, DualLogger logger) {
return installReal(id, false, logger);
}
/**
 * Checks whether the real deployment has converged on the versions expected for this run:
 * the source versions when {@code setTheStage} is true (falling back to the targets), the
 * target versions otherwise.
 *
 * @return running when fully ready; installationFailed on expiry or installation timeout;
 *         error when endpoints failed to appear in time; empty to retry later.
 */
private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) {
Optional<Deployment> deployment = deployment(id.application(), id.type());
if ( ! deployment.isPresent()) {
logger.log(INFO, "Deployment expired before installation was successful.");
return Optional.of(installationFailed);
}
Versions versions = controller.jobController().run(id).get().versions();
Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform();
ApplicationVersion application = setTheStage ? versions.sourceApplication().orElse(versions.targetApplication()) : versions.targetApplication();
logger.log("Checking installation of " + platform + " and " + application.id() + " ...");
// Readiness is checked in stages: node/service convergence, then endpoints, then container health.
if ( nodesConverged(id.application(), id.type(), platform, logger)
&& servicesConverged(id.application(), id.type(), platform, logger)) {
if (endpointsAvailable(id.application(), id.type().zone(controller.system()), logger)) {
if (containersAreUp(id.application(), id.type().zone(controller.system()), logger)) {
logger.log("Installation succeeded!");
return Optional.of(running);
}
}
else if (timedOut(id, deployment.get(), endpointTimeout)) {
logger.log(WARNING, "Endpoints failed to show up within " + endpointTimeout.toMinutes() + " minutes!");
return Optional.of(error);
}
}
if (timedOut(id, deployment.get(), installationTimeout)) {
logger.log(INFO, "Installation failed to complete within " + installationTimeout.toMinutes() + " minutes!");
return Optional.of(installationFailed);
}
logger.log("Installation not yet complete.");
return Optional.empty();
}
    /**
     * Checks whether the tester deployment of the given run has converged on the target platform,
     * and whether the tester container is up and serving.
     *
     * Returns empty to indicate the step is not yet done and should be retried later.
     */
    private Optional<RunStatus> installTester(RunId id, DualLogger logger) {
        Optional<Deployment> deployment = deployment(id.application(), id.type());
        if ( ! deployment.isPresent()) {
            logger.log(WARNING, "Deployment expired before installation of tester was successful.");
            return Optional.of(error);
        }
        Version platform = controller.jobController().run(id).get().versions().targetPlatform();
        logger.log("Checking installation of tester container ...");
        if (   nodesConverged(id.tester().id(), id.type(), platform, logger)
            && servicesConverged(id.tester().id(), id.type(), platform, logger)) {
            if (endpointsAvailable(id.tester().id(), id.type().zone(controller.system()), logger)) {
                if (containersAreUp(id.tester().id(), id.type().zone(controller.system()), logger)) {
                    logger.log("Tester container successfully installed!");
                    return Optional.of(running);
                }
            }
            // Converged, but the tester endpoint never showed up in time.
            else if (timedOut(id, deployment.get(), endpointTimeout)) {
                logger.log(WARNING, "Tester failed to show up within " + endpointTimeout.toMinutes() + " minutes!");
                return Optional.of(error);
            }
        }
        // Tester timeout is measured from the real deployment, not from the tester deployment.
        if (timedOut(id, deployment.get(), testerTimeout)) {
            logger.log(WARNING, "Installation of tester failed to complete within " + testerTimeout.toMinutes() + " minutes of real deployment!");
            return Optional.of(error);
        }
        logger.log("Installation of tester not yet complete.");
        return Optional.empty();  // Not terminal -- the step will be retried.
    }
/** Returns true iff all containers in the deployment give 100 consecutive 200 OK responses on /status.html. */
private boolean containersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) {
var endpoints = controller.applications().clusterEndpoints(Set.of(new DeploymentId(id, zoneId)));
if ( ! endpoints.containsKey(zoneId))
return false;
for (URI endpoint : endpoints.get(zoneId).values()) {
boolean ready = id.instance().isTester() ?
controller.jobController().cloud().testerReady(endpoint)
: controller.jobController().cloud().ready(endpoint);
if (!ready) {
logger.log("Failed to get 100 consecutive OKs from " + endpoint);
return false;
}
}
return true;
}
    /** Returns whether cluster endpoints exist for the deployment and all resolve in DNS; logs them when available. */
    private boolean endpointsAvailable(ApplicationId id, ZoneId zone, DualLogger logger) {
        logger.log("Attempting to find deployment endpoints ...");
        var endpoints = controller.applications().clusterEndpoints(Set.of(new DeploymentId(id, zone)));
        if ( ! endpoints.containsKey(zone)) {
            logger.log("Endpoints not yet ready.");
            return false;
        }
        // Every endpoint must have a DNS entry before the deployment counts as reachable.
        for (var endpoint : endpoints.get(zone).values())
            if ( ! controller.jobController().cloud().exists(endpoint)) {
                logger.log(INFO, "DNS lookup yielded no IP address for '" + endpoint + "'.");
                return false;
            }
        logEndpoints(endpoints, logger);
        return true;
    }
private void logEndpoints(Map<ZoneId, Map<ClusterSpec.Id, URI>> endpoints, DualLogger logger) {
List<String> messages = new ArrayList<>();
messages.add("Found endpoints:");
endpoints.forEach((zone, uris) -> {
messages.add("- " + zone);
uris.forEach((cluster, uri) -> messages.add(" |-- " + uri + " (" + cluster + ")"));
});
logger.log(messages);
}
    /**
     * Returns whether all active and reserved nodes of the deployment are on the target platform
     * version and have no pending restarts or reboots. Logs one status line per node.
     */
    private boolean nodesConverged(ApplicationId id, JobType type, Version target, DualLogger logger) {
        List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(type.zone(controller.system()), id, ImmutableSet.of(active, reserved));
        List<String> statuses = nodes.stream()
                .map(node -> String.format("%70s: %-16s%-25s%-32s%s",
                                           node.hostname(),
                                           node.serviceState(),
                                           // Show the current version only when it differs from the wanted one.
                                           node.wantedVersion() + (node.currentVersion().equals(node.wantedVersion()) ? "" : " <-- " + node.currentVersion()),
                                           node.restartGeneration() >= node.wantedRestartGeneration() ? ""
                                                   : "restart pending (" + node.wantedRestartGeneration() + " <-- " + node.restartGeneration() + ")",
                                           node.rebootGeneration() >= node.wantedRebootGeneration() ? ""
                                                   : "reboot pending (" + node.wantedRebootGeneration() + " <-- " + node.rebootGeneration() + ")"))
                .collect(Collectors.toList());
        logger.log(statuses);
        return nodes.stream().allMatch(node -> node.currentVersion().equals(target)
                                            && node.restartGeneration() >= node.wantedRestartGeneration()
                                            && node.rebootGeneration() >= node.wantedRebootGeneration());
    }
    /**
     * Returns whether all services of the deployment have converged on the wanted config generation
     * for the given platform. Logs a status line for each service which is still behind.
     */
    private boolean servicesConverged(ApplicationId id, JobType type, Version platform, DualLogger logger) {
        var convergence = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(id, type.zone(controller.system())),
                                                                                         Optional.of(platform));
        if (convergence.isEmpty()) {
            logger.log("Config status not currently available -- will retry.");
            return false;
        }
        logger.log("Wanted config generation is " + convergence.get().wantedGeneration());
        // Only list services which have not yet reached the wanted generation.
        List<String> statuses = convergence.get().services().stream()
                .filter(serviceStatus -> serviceStatus.currentGeneration() != convergence.get().wantedGeneration())
                .map(serviceStatus -> String.format("%70s: %11s on port %4d has config generation %s",
                                                    serviceStatus.host().value(),
                                                    serviceStatus.type(),
                                                    serviceStatus.port(),
                                                    // A generation of -1 means the service has not started at all.
                                                    serviceStatus.currentGeneration() == -1 ? "not started!" : Long.toString(serviceStatus.currentGeneration())))
                .collect(Collectors.toList());
        logger.log(statuses);
        if (statuses.isEmpty())
            logger.log("All services on wanted config generation.");
        return convergence.get().converged();
    }
private Optional<RunStatus> startTests(RunId id, DualLogger logger) {
Optional<Deployment> deployment = deployment(id.application(), id.type());
if (deployment.isEmpty()) {
logger.log(INFO, "Deployment expired before tests could start.");
return Optional.of(aborted);
}
var deployments = controller.applications().requireInstance(id.application())
.productionDeployments().keySet().stream()
.map(zone -> new DeploymentId(id.application(), zone))
.collect(Collectors.toSet());
deployments.add(new DeploymentId(id.application(), id.type().zone(controller.system())));
logger.log("Attempting to find endpoints ...");
var endpoints = controller.applications().clusterEndpoints(deployments);
if ( ! endpoints.containsKey(id.type().zone(controller.system())) && timedOut(id, deployment.get(), endpointTimeout)) {
logger.log(WARNING, "Endpoints for the deployment to test vanished again, while it was still active!");
return Optional.of(error);
}
logEndpoints(endpoints, logger);
Optional<URI> testerEndpoint = controller.jobController().testerEndpoint(id);
if (testerEndpoint.isEmpty() && timedOut(id, deployment.get(), endpointTimeout)) {
logger.log(WARNING, "Endpoints for the tester container vanished again, while it was still active!");
return Optional.of(error);
}
if ( ! controller.jobController().cloud().testerReady(testerEndpoint.get())) {
logger.log(WARNING, "Tester container went bad!");
return Optional.of(error);
}
logger.log("Starting tests ...");
controller.jobController().cloud().startTests(testerEndpoint.get(),
TesterCloud.Suite.of(id.type()),
testConfigSerializer.configJson(id.application(),
id.type(),
true,
endpoints,
controller.applications().contentClustersByZone(deployments)));
return Optional.of(running);
}
    /**
     * Polls the tester for test progress and translates its status into a step outcome.
     * Returns empty while tests are still running, so the step is retried.
     */
    private Optional<RunStatus> endTests(RunId id, DualLogger logger) {
        if ( ! deployment(id.application(), id.type()).isPresent()) {
            logger.log(INFO, "Deployment expired before tests could complete.");
            return Optional.of(aborted);
        }
        // A tester certificate is only present for runs which use one; abort if it is no longer valid.
        Optional<X509Certificate> testerCertificate = controller.jobController().run(id).get().testerCertificate();
        if (testerCertificate.isPresent()) {
            try {
                testerCertificate.get().checkValidity(Date.from(controller.clock().instant()));
            }
            catch (CertificateExpiredException | CertificateNotYetValidException e) {
                logger.log(INFO, "Tester certificate expired before tests could complete.");
                return Optional.of(aborted);
            }
        }
        Optional<URI> testerEndpoint = controller.jobController().testerEndpoint(id);
        if ( ! testerEndpoint.isPresent()) {
            logger.log("Endpoints for tester not found -- trying again later.");
            return Optional.empty();
        }
        controller.jobController().updateTestLog(id);
        TesterCloud.Status testStatus = controller.jobController().cloud().getStatus(testerEndpoint.get());
        switch (testStatus) {
            case NOT_STARTED:
                throw new IllegalStateException("Tester reports tests not started, even though they should have!");
            case RUNNING:
                return Optional.empty();  // Still running; retry later.
            case FAILURE:
                logger.log("Tests failed.");
                return Optional.of(testFailure);
            case ERROR:
                logger.log(INFO, "Tester failed running its tests!");
                return Optional.of(error);
            case SUCCESS:
                logger.log("Tests completed successfully.");
                return Optional.of(running);
            default:
                throw new IllegalStateException("Unknown status '" + testStatus + "'!");
        }
    }
private Optional<RunStatus> copyVespaLogs(RunId id, DualLogger logger) {
if (deployment(id.application(), id.type()).isPresent())
try {
controller.jobController().updateVespaLog(id);
}
catch (Exception e) {
logger.log(INFO, "Failure getting vespa logs for " + id, e);
return Optional.of(error);
}
return Optional.of(running);
}
    /** Deactivates the tester application of the given run, retrying up to 10 times before failing the step. */
    private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) {
        try {
            return retrying(10, () -> {
                logger.log("Deactivating tester of " + id.application() + " in " + id.type().zone(controller.system()) + " ...");
                controller.jobController().deactivateTester(id.tester(), id.type());
                return running;
            });
        }
        catch (RuntimeException e) {
            // All attempts failed; e carries the later failures as suppressed exceptions.
            logger.log(WARNING, "Failed deleting tester of " + id.application(), e);
            return Optional.of(error);
        }
    }
private static Optional<RunStatus> retrying(int retries, Supplier<RunStatus> task) {
RuntimeException exception = null;
do {
try {
return Optional.of(task.get());
}
catch (RuntimeException e) {
if (exception == null)
exception = e;
else
exception.addSuppressed(e);
}
} while (--retries >= 0);
throw exception;
}
    /** Reports the outcome of the run: sends a failure notification if the run is still active and has failed. */
    private Optional<RunStatus> report(RunId id, DualLogger logger) {
        try {
            controller.jobController().active(id).ifPresent(run -> {
                if (run.hasFailed())
                    sendNotification(run, logger);
            });
        }
        catch (IllegalStateException e) {
            // Thrown when the job is no longer registered to run; treat as a step error.
            logger.log(INFO, "Job '" + id.type() + "' no longer supposed to run?", e);
            return Optional.of(error);
        }
        return Optional.of(running);
    }
    /** Sends a mail with a notification of a failed run, if one should be sent. */
    private void sendNotification(Run run, DualLogger logger) {
        Application application = controller.applications().requireApplication(TenantAndApplicationId.from(run.id().application()));
        Notifications notifications = application.deploymentSpec().requireInstance(run.id().application().instance()).notifications();
        // The run failed on a new commit when its target application version is the change being rolled out.
        boolean newCommit = application.change().application()
                                       .map(run.versions().targetApplication()::equals)
                                       .orElse(false);
        When when = newCommit ? failingCommit : failing;
        List<String> recipients = new ArrayList<>(notifications.emailAddressesFor(when));
        // The commit author is added only when the spec asks for author notifications, and an email is known.
        if (notifications.emailRolesFor(when).contains(author))
            run.versions().targetApplication().authorEmail().ifPresent(recipients::add);
        if (recipients.isEmpty())
            return;
        try {
            // Out-of-capacity failures are only mailed for production jobs.
            if (run.status() == outOfCapacity && run.id().type().isProduction())
                controller.serviceRegistry().mailer().send(mails.outOfCapacity(run.id(), recipients));
            if (run.status() == deploymentFailed)
                controller.serviceRegistry().mailer().send(mails.deploymentFailure(run.id(), recipients));
            if (run.status() == installationFailed)
                controller.serviceRegistry().mailer().send(mails.installationFailure(run.id(), recipients));
            if (run.status() == testFailure)
                controller.serviceRegistry().mailer().send(mails.testFailure(run.id(), recipients));
            if (run.status() == error)
                controller.serviceRegistry().mailer().send(mails.systemError(run.id(), recipients));
        }
        catch (RuntimeException e) {
            // Failure to send mail must not fail the reporting step.
            logger.log(INFO, "Exception trying to send mail for " + run.id(), e);
        }
    }
    /** Returns the deployment of the real application in the zone of the given job, if it exists. */
    private Optional<Deployment> deployment(ApplicationId id, JobType type) {
        return Optional.ofNullable(application(id).deployments().get(type.zone(controller.system())));
    }
    /** Returns the real application with the given id. */
    private Instance application(ApplicationId id) {
        // Takes and immediately releases the application lock before reading -- presumably to
        // serialise this read against concurrent writers; confirm intent with the lock semantics.
        controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), __ -> { });
        return controller.applications().requireInstance(id);
    }
/**
* Returns whether the time since deployment is more than the zone deployment expiry, or the given timeout.
*
* We time out the job before the deployment expires, for zone where deployments are not persistent,
* to be able to collect the Vespa log from the deployment. Thus, the lower of the zone's deployment expiry,
* and the given default installation timeout, minus one minute, is used as a timeout threshold.
*/
    private boolean timedOut(RunId id, Deployment deployment, Duration defaultTimeout) {
        Run run = controller.jobController().run(id).get();
        // A deployment made after this run started belongs to a newer run; never time out on it.
        if (run.start().isAfter(deployment.at()))
            return false;
        // Use the zone's deployment TTL when it is shorter than the default timeout.
        Duration timeout = controller.zoneRegistry().getDeploymentTimeToLive(deployment.zone())
                                     .filter(zoneTimeout -> zoneTimeout.compareTo(defaultTimeout) < 0)
                                     .orElse(defaultTimeout);
        // Subtract one minute, to leave time to collect the Vespa log before the deployment expires.
        return deployment.at().isBefore(controller.clock().instant().minus(timeout.minus(Duration.ofMinutes(1))));
    }
    /** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */
    private ApplicationPackage testerPackage(RunId id) {
        ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication();
        DeploymentSpec spec = controller.applications().requireApplication(TenantAndApplicationId.from(id.application())).deploymentSpec();
        ZoneId zone = id.type().zone(controller.system());
        // Tester certificates are only used for test jobs in public systems.
        boolean useTesterCertificate = controller.system().isPublic() && id.type().isTest();
        // Resources come from the spec's tester flavor, or cloud-specific defaults.
        byte[] servicesXml = servicesXml(controller.zoneRegistry().accessControlDomain(),
                                         ! controller.system().isPublic(),
                                         useTesterCertificate,
                                         testerFlavorFor(id, spec)
                                                 .map(NodeResources::fromLegacyName)
                                                 .orElse(zone.region().value().contains("aws-") ?
                                                         DEFAULT_TESTER_RESOURCES_AWS : DEFAULT_TESTER_RESOURCES));
        byte[] testPackage = controller.applications().applicationStore().getTester(id.application().tenant(), id.application().application(), version);
        byte[] deploymentXml = deploymentXml(id.tester(),
                                             spec.requireInstance(id.application().instance()).athenzDomain(),
                                             spec.requireInstance(id.application().instance()).athenzService(zone.environment(), zone.region()));
        try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) {
            zipBuilder.add(testPackage);
            zipBuilder.add("services.xml", servicesXml);
            zipBuilder.add("deployment.xml", deploymentXml);
            if (useTesterCertificate)
                appendAndStoreCertificate(zipBuilder, id);
            zipBuilder.close();
            return new ApplicationPackage(zipBuilder.toByteArray());
        }
    }
    /**
     * Generates a self-signed certificate for the tester of the given run, stores it with the run,
     * and adds the private key and certificate PEMs to the package under artifacts/.
     */
    private void appendAndStoreCertificate(ZipBuilder zipBuilder, RunId id) {
        KeyPair keyPair = KeyUtils.generateKeypair(KeyAlgorithm.RSA, 2048);
        X500Principal subject = new X500Principal("CN=" + id.tester().id().toFullString() + "." + id.type() + "." + id.number());
        X509Certificate certificate = X509CertificateBuilder.fromKeypair(keyPair,
                                                                         subject,
                                                                         controller.clock().instant(),
                                                                         controller.clock().instant().plus(certificateTimeout),
                                                                         SignatureAlgorithm.SHA512_WITH_RSA,
                                                                         BigInteger.valueOf(1))
                                                            .build();
        // Stored so the controller can later validate the certificate the tester presents.
        controller.jobController().storeTesterCertificate(id, certificate);
        zipBuilder.add("artifacts/key", KeyUtils.toPem(keyPair.getPrivate()).getBytes(UTF_8));
        zipBuilder.add("artifacts/cert", X509CertificateUtils.toPem(certificate).getBytes(UTF_8));
    }
private static Optional<String> testerFlavorFor(RunId id, DeploymentSpec spec) {
for (DeploymentSpec.Step step : spec.steps())
if (step.deploysTo(id.type().environment()))
return step.zones().get(0).testerFlavor();
throw new IllegalStateException("No step deploys to the zone this run is for!");
}
    /** Returns the generated services.xml content for the tester application. */
    static byte[] servicesXml(AthenzDomain domain, boolean useAthenzCredentials, boolean useTesterCertificate,
                              NodeResources resources) {
        // Memory reserved for the jdisc container itself; the percentage below tells the config
        // model how much of the node's memory the container may use.
        int jdiscMemoryGb = 2;
        int jdiscMemoryPct = (int) Math.ceil(100 * jdiscMemoryGb / resources.memoryGb());
        // Half of the remaining memory goes to the Surefire JVM running the tests.
        int testMemoryMb = (int) (1024 * (resources.memoryGb() - jdiscMemoryGb) / 2);
        String resourceString = String.format(Locale.ENGLISH,
                                              "<resources vcpu=\"%.2f\" memory=\"%.2fGb\" disk=\"%.2fGb\" disk-speed=\"%s\"/>",
                                              resources.vcpu(), resources.memoryGb(), resources.diskGb(), resources.diskSpeed().name());
        /* TODO after 18 November 2019, include storageType:
        String resourceString = String.format(Locale.ENGLISH,
                                              "<resources vcpu=\"%.2f\" memory=\"%.2fGb\" disk=\"%.2fGb\" disk-speed=\"%s\" storage-type=\"%s\"/>",
                                              resources.vcpu(), resources.memoryGb(), resources.diskGb(), resources.diskSpeed().name(), resources.storageType().name());
         */
        // The CD system's domain is mapped to the production domain for identity purposes.
        AthenzDomain idDomain = ("vespa.vespa.cd".equals(domain.value()) ? AthenzDomain.from("vespa.vespa") : domain);
        String servicesXml =
                "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<services xmlns:deploy='vespa' version='1.0'>\n" +
                "    <container version='1.0' id='tester'>\n" +
                "\n" +
                "        <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" +
                "            <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" +
                "                <artifactsPath>artifacts</artifactsPath>\n" +
                "                <surefireMemoryMb>" + testMemoryMb + "</surefireMemoryMb>\n" +
                "                <useAthenzCredentials>" + useAthenzCredentials + "</useAthenzCredentials>\n" +
                "                <useTesterCertificate>" + useTesterCertificate + "</useTesterCertificate>\n" +
                "            </config>\n" +
                "        </component>\n" +
                "\n" +
                "        <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" +
                // NOTE(review): the binding URL below appears truncated ("http:") in this copy --
                // likely lost in extraction; confirm against the original source file.
                "            <binding>http:
                "        </handler>\n" +
                "\n" +
                "        <http>\n" +
                "            <!-- Make sure 4080 is the first port. This will be used by the config server. -->\n" +
                "            <server id='default' port='4080'/>\n" +
                "            <server id='testertls4443' port='4443'>\n" +
                "                <config name=\"jdisc.http.connector\">\n" +
                "                    <tlsClientAuthEnforcer>\n" +
                "                        <enable>true</enable>\n" +
                "                        <pathWhitelist>\n" +
                "                            <item>/status.html</item>\n" +
                "                            <item>/state/v1/config</item>\n" +
                "                        </pathWhitelist>\n" +
                "                    </tlsClientAuthEnforcer>\n" +
                "                </config>\n" +
                "                <ssl>\n" +
                "                    <private-key-file>/var/lib/sia/keys/" + idDomain.value() + ".tenant.key.pem</private-key-file>\n" +
                "                    <certificate-file>/var/lib/sia/certs/" + idDomain.value() + ".tenant.cert.pem</certificate-file>\n" +
                "                    <ca-certificates-file>/opt/yahoo/share/ssl/certs/athenz_certificate_bundle.pem</ca-certificates-file>\n" +
                "                    <client-authentication>want</client-authentication>\n" +
                "                </ssl>\n" +
                "            </server>\n" +
                "            <filtering>\n" +
                "                <access-control domain='" + domain.value() + "'>\n" +
                "                    <exclude>\n" +
                // NOTE(review): this binding URL also appears truncated ("http:") -- confirm.
                "                        <binding>http:
                "                    </exclude>\n" +
                "                </access-control>\n" +
                "                <request-chain id=\"testrunner-api\">\n" +
                "                    <filter id='authz-filter' class='com.yahoo.jdisc.http.filter.security.athenz.AthenzAuthorizationFilter' bundle=\"jdisc-security-filters\">\n" +
                "                        <config name=\"jdisc.http.filter.security.athenz.athenz-authorization-filter\">\n" +
                "                            <credentialsToVerify>TOKEN_ONLY</credentialsToVerify>\n" +
                "                            <roleTokenHeaderName>Yahoo-Role-Auth</roleTokenHeaderName>\n" +
                "                        </config>\n" +
                "                        <component id=\"com.yahoo.jdisc.http.filter.security.athenz.StaticRequestResourceMapper\" bundle=\"jdisc-security-filters\">\n" +
                "                            <config name=\"jdisc.http.filter.security.athenz.static-request-resource-mapper\">\n" +
                "                                <resourceName>" + domain.value() + ":tester-application</resourceName>\n" +
                "                                <action>deploy</action>\n" +
                "                            </config>\n" +
                "                        </component>\n" +
                "                    </filter>\n" +
                "                </request-chain>\n" +
                "            </filtering>\n" +
                "        </http>\n" +
                "\n" +
                "        <nodes count=\"1\" allocated-memory=\"" + jdiscMemoryPct + "%\">\n" +
                "            " + resourceString + "\n" +
                "        </nodes>\n" +
                "    </container>\n" +
                "</services>\n";
        return servicesXml.getBytes(UTF_8);
    }
/** Returns a dummy deployment xml which sets up the service identity for the tester, if present. */
private static byte[] deploymentXml(TesterId id, Optional<AthenzDomain> athenzDomain, Optional<AthenzService> athenzService) {
String deploymentSpec =
"<?xml version='1.0' encoding='UTF-8'?>\n" +
"<deployment version=\"1.0\" " +
athenzDomain.map(domain -> "athenz-domain=\"" + domain.value() + "\" ").orElse("") +
athenzService.map(service -> "athenz-service=\"" + service.value() + "\" ").orElse("") + ">" +
" <instance id=\"" + id.id().instance().value() + "\" />" +
"</deployment>";
return deploymentSpec.getBytes(UTF_8);
}
    /** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */
    private class DualLogger {

        private final RunId id;
        private final Step step;

        private DualLogger(RunId id, Step step) {
            this.id = id;
            this.step = step;
        }

        // Logs the given messages at INFO, to the job record only.
        private void log(String... messages) {
            log(List.of(messages));
        }

        private void log(List<String> messages) {
            controller.jobController().log(id, step, INFO, messages);
        }

        private void log(Level level, String message) {
            log(level, message, null);
        }

        // Logs the throwable to the internal logger only, keeping the stack trace out of the
        // job record -- presumably to hide internal details from tenants; confirm intent.
        private void logWithInternalException(Level level, String message, Throwable thrown) {
            logger.log(level, id + " at " + step + ": " + message, thrown);
            controller.jobController().log(id, step, level, message);
        }

        // Logs to both destinations; when a throwable is given, its stack trace is appended
        // to the message stored in the job record.
        private void log(Level level, String message, Throwable thrown) {
            logger.log(level, id + " at " + step + ": " + message, thrown);
            if (thrown != null) {
                ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream();
                thrown.printStackTrace(new PrintStream(traceBuffer));
                message += "\n" + traceBuffer;
            }
            controller.jobController().log(id, step, level, message);
        }

    }
} |
oh no. how sad :( is this global for all Java? | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/resources/"
+ "sample-forms/forms/selectionMarkForm.pdf");
byte[] fileContent = Files.readAllBytes(sourceFile.toPath());
String modelId = "{modelId}";
PollerFlux<FormRecognizerOperationResult, List<RecognizedForm>> recognizeFormPoller;
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
recognizeFormPoller = client.beginRecognizeCustomForms(modelId, toFluxByteBuffer(targetStream),
sourceFile.length(), new RecognizeCustomFormsOptions().setFieldElementsIncluded(true));
}
Mono<List<RecognizedForm>> recognizeFormResult =
recognizeFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm form = recognizedForms.get(i);
System.out.printf("----------- Recognized custom form info for page %d -----------%n", i);
System.out.printf("Form type: %s%n", form.getFormType());
System.out.printf("Form has form type confidence : %.2f%n", form.getFormTypeConfidence());
System.out.printf("Form was analyzed with model with ID: %s%n", form.getModelId());
form.getFields().forEach((label, formField) -> {
System.out.printf("Field '%s' has label '%s' with confidence score of %.2f.%n", label,
formField.getLabelData().getText(),
formField.getConfidence());
});
final List<FormPage> pages = form.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("------- Recognizing info on page %s of Form ------- %n", i1);
System.out.printf("Has width: %f, angle: %.2f, height: %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells()
.forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().stream()
.filter(formContent -> formContent instanceof FormSelectionMark)
.map(formContent -> (FormSelectionMark) (formContent))
.forEach(selectionMark ->
System.out.printf("Page: %s, Selection mark is %s within bounding box %s has a "
+ "confidence score %.2f.%n",
selectionMark.getPageNumber(),
selectionMark.getState(),
selectionMark.getBoundingBox().toString(),
selectionMark.getConfidence()));
});
System.out.println();
}
}
}
});
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/resources/" | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/resources/"
+ "sample-forms/forms/selectionMarkForm.pdf");
byte[] fileContent = Files.readAllBytes(sourceFile.toPath());
String modelId = "{modelId}";
PollerFlux<FormRecognizerOperationResult, List<RecognizedForm>> recognizeFormPoller;
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
recognizeFormPoller = client.beginRecognizeCustomForms(modelId, toFluxByteBuffer(targetStream),
sourceFile.length(), new RecognizeCustomFormsOptions().setFieldElementsIncluded(true));
}
Mono<List<RecognizedForm>> recognizeFormResult =
recognizeFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm form = recognizedForms.get(i);
System.out.printf("----------- Recognized custom form info for page %d -----------%n", i);
System.out.printf("Form type: %s%n", form.getFormType());
System.out.printf("Form has form type confidence : %.2f%n", form.getFormTypeConfidence());
System.out.printf("Form was analyzed with model with ID: %s%n", form.getModelId());
form.getFields().forEach((label, formField) -> {
System.out.printf("Field '%s' has label '%s' with confidence score of %.2f.%n", label,
formField.getLabelData().getText(),
formField.getConfidence());
});
final List<FormPage> pages = form.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("------- Recognizing info on page %s of Form ------- %n", i1);
System.out.printf("Has width: %f, angle: %.2f, height: %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells()
.forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().stream()
.filter(formContent -> formContent instanceof FormSelectionMark)
.map(formContent -> (FormSelectionMark) (formContent))
.forEach(selectionMark ->
System.out.printf("Page: %s, Selection mark is %s within bounding box %s has a "
+ "confidence score %.2f.%n",
selectionMark.getPageNumber(),
selectionMark.getState(),
selectionMark.getBoundingBox().toString(),
selectionMark.getConfidence()));
});
System.out.println();
}
}
}
});
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class RecognizeCustomFormsAsyncWithSelectionMarks {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class RecognizeCustomFormsAsyncWithSelectionMarks {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
Step one, in order to be compatible, fe generate dsl can not affect exist logic, what do you think? | public String toJson() {
StringWriter writer = new StringWriter();
try {
JsonGenerator gen = mapper.getFactory().createGenerator(writer);
this.toJson(gen);
gen.flush();
gen.close();
} catch (IOException e) {
LOG.warn("QueryBuilder toJson error");
return null;
}
return writer.toString();
} | LOG.warn("QueryBuilder toJson error"); | public String toJson() {
StringWriter writer = new StringWriter();
try {
JsonGenerator gen = mapper.getFactory().createGenerator(writer);
this.toJson(gen);
gen.flush();
gen.close();
} catch (IOException e) {
LOG.warn("QueryBuilder toJson error", e);
return null;
}
return writer.toString();
} | class QueryBuilder {
private static final Logger LOG = LogManager.getLogger(QueryBuilder.class);
final ObjectMapper mapper = new ObjectMapper();
/**
* Convert query to JSON format
*
* @param out used to generate JSON elements
* @throws IOException if IO error occurred
*/
abstract void toJson(JsonGenerator out) throws IOException;
/**
* Convert query to JSON format and catch error.
**/
} | class QueryBuilder {
private static final Logger LOG = LogManager.getLogger(QueryBuilder.class);
final ObjectMapper mapper = new ObjectMapper();
/**
* Convert query to JSON format
*
* @param out used to generate JSON elements
* @throws IOException if IO error occurred
*/
abstract void toJson(JsonGenerator out) throws IOException;
/**
* Convert query to JSON format and catch error.
**/
} |
```suggestion // An object containing the details of the recently updated key rotation policy will be returned by the update method. ``` | public static void main(String[] args) {
KeyClient keyClient = new KeyClientBuilder()
.vaultUrl("https:
.credential(new DefaultAzureCredentialBuilder().build())
.buildClient();
KeyVaultKey originalKey = keyClient.createRsaKey(new CreateRsaKeyOptions("MyRsaKey").setKeySize(2048));
System.out.printf("Key created with name %s and type %s%n", originalKey.getName(), originalKey.getKeyType());
KeyVaultKey manuallyRotatedKey = keyClient.rotateKey("MyRsaKey");
System.out.printf("Rotated key with name %s%n", manuallyRotatedKey.getName());
List<KeyRotationLifetimeAction> keyRotationLifetimeActionList = new ArrayList<>();
KeyRotationLifetimeAction rotateLifetimeAction = new KeyRotationLifetimeAction(KeyRotationPolicyAction.ROTATE)
.setTimeAfterCreate("P90D");
keyRotationLifetimeActionList.add(rotateLifetimeAction);
KeyRotationPolicyProperties keyRotationPolicyProperties = new KeyRotationPolicyProperties()
.setLifetimeActions(keyRotationLifetimeActionList)
.setExpiryTime("P6M");
KeyRotationPolicy keyRotationPolicy =
keyClient.updateKeyRotationPolicy("MyRsaKey", keyRotationPolicyProperties);
System.out.printf("Updated key rotation policy with id: %s%n", keyRotationPolicy.getId());
} | public static void main(String[] args) {
KeyClient keyClient = new KeyClientBuilder()
.vaultUrl("https:
.credential(new DefaultAzureCredentialBuilder().build())
.buildClient();
String keyName = "MyKey";
KeyVaultKey originalKey = keyClient.createRsaKey(new CreateRsaKeyOptions(keyName).setKeySize(2048));
System.out.printf("Key created with name %s and type %s%n", originalKey.getName(), originalKey.getKeyType());
List<KeyRotationLifetimeAction> keyRotationLifetimeActionList = new ArrayList<>();
KeyRotationLifetimeAction rotateLifetimeAction = new KeyRotationLifetimeAction(KeyRotationPolicyAction.ROTATE)
.setTimeAfterCreate("P90D");
keyRotationLifetimeActionList.add(rotateLifetimeAction);
KeyRotationPolicyProperties keyRotationPolicyProperties = new KeyRotationPolicyProperties()
.setLifetimeActions(keyRotationLifetimeActionList)
.setExpiryTime("P6M");
KeyRotationPolicy keyRotationPolicy =
keyClient.updateKeyRotationPolicy(keyName, keyRotationPolicyProperties);
System.out.printf("Updated key rotation policy with id: %s%n", keyRotationPolicy.getId());
KeyVaultKey manuallyRotatedKey = keyClient.rotateKey(keyName);
System.out.printf("Rotated key with name %s%n", manuallyRotatedKey.getName());
} | class KeyRotation {
/**
* Authenticates with the key vault and shows set key rotation policies and manually rotate keys in Key Vault to
* create a new key version.
*
* @param args Unused. Arguments to the program.
* @throws IllegalArgumentException when an invalid key vault endpoint is passed.
*/
} | class KeyRotation {
/**
* Authenticates with the key vault and shows set key rotation policies and manually rotate keys in Key Vault to
* create a new key version.
*
* @param args Unused. Arguments to the program.
* @throws IllegalArgumentException when an invalid key vault endpoint is passed.
*/
} |
|
If more than one of WANT_TO_RETIRE, ipAddresses, additionalIpAddresses are specified in the patch request, then this PR will invoke `nodeRepository.list(lock)` that many times. This happens e.g. on provisioning, when the last two are specified. At the risk of making this much more convoluted, consider listing at most once. | private Node applyField(Node node, String name, Inspector value) {
switch (name) {
case "currentRebootGeneration" :
return node.withCurrentRebootGeneration(asLong(value), clock.instant());
case "currentRestartGeneration" :
return patchCurrentRestartGeneration(asLong(value));
case "currentDockerImage" :
if (node.flavor().getType() != Flavor.Type.DOCKER_CONTAINER)
throw new IllegalArgumentException("Docker image can only be set for docker containers");
return node.with(node.status().withDockerImage(DockerImage.fromString(asString(value))));
case "vespaVersion" :
case "currentVespaVersion" :
return node.with(node.status().withVespaVersion(Version.fromString(asString(value))));
case "currentOsVersion" :
return node.withCurrentOsVersion(Version.fromString(asString(value)), clock.instant());
case "currentFirmwareCheck":
return node.withFirmwareVerifiedAt(Instant.ofEpochMilli(asLong(value)));
case "failCount" :
return node.with(node.status().setFailCount(asLong(value).intValue()));
case "flavor" :
return node.with(nodeFlavors.getFlavorOrThrow(asString(value)));
case "parentHostname" :
return node.withParentHostname(asString(value));
case "ipAddresses" :
return IP.Config.verify(node.with(node.ipConfig().with(asStringSet(value))), nodes.get());
case "additionalIpAddresses" :
return IP.Config.verify(node.with(node.ipConfig().with(IP.Pool.of(asStringSet(value)))), nodes.get());
case WANT_TO_RETIRE :
return node.withWantToRetire(asBoolean(value), Agent.operator, clock.instant());
case "wantToDeprovision" :
if (node.type() != NodeType.host && asBoolean(value))
throw new IllegalArgumentException("wantToDeprovision can only be set for hosts");
return node.with(node.status().withWantToDeprovision(asBoolean(value)));
case "reports" :
return nodeWithPatchedReports(node, value);
case "openStackId" :
return node.withOpenStackId(asString(value));
case "diskGb":
case "minDiskAvailableGb":
return node.with(node.flavor().with(node.flavor().resources().withDiskGb(value.asDouble())));
case "memoryGb":
case "minMainMemoryAvailableGb":
return node.with(node.flavor().with(node.flavor().resources().withMemoryGb(value.asDouble())));
case "vcpu":
case "minCpuCores":
return node.with(node.flavor().with(node.flavor().resources().withVcpu(value.asDouble())));
case "fastDisk":
return node.with(node.flavor().with(node.flavor().resources().with(value.asBool() ? fast : slow)));
case "remoteStorage":
return node.with(node.flavor().with(node.flavor().resources().with(value.asBool() ? remote : local)));
case "bandwidthGbps":
return node.with(node.flavor().with(node.flavor().resources().withBandwidthGbps(value.asDouble())));
case "modelName":
return value.type() == Type.NIX ? node.withoutModelName() : node.withModelName(asString(value));
case "requiredDiskSpeed":
return patchRequiredDiskSpeed(asString(value));
case "reservedTo":
return value.type() == Type.NIX ? node.withoutReservedTo() : node.withReservedTo(TenantName.from(value.asString()));
default :
throw new IllegalArgumentException("Could not apply field '" + name + "' on a node: No such modifiable field");
}
} | return IP.Config.verify(node.with(node.ipConfig().with(IP.Pool.of(asStringSet(value)))), nodes.get()); | private Node applyField(Node node, String name, Inspector value) {
switch (name) {
case "currentRebootGeneration" :
return node.withCurrentRebootGeneration(asLong(value), clock.instant());
case "currentRestartGeneration" :
return patchCurrentRestartGeneration(asLong(value));
case "currentDockerImage" :
if (node.flavor().getType() != Flavor.Type.DOCKER_CONTAINER)
throw new IllegalArgumentException("Docker image can only be set for docker containers");
return node.with(node.status().withDockerImage(DockerImage.fromString(asString(value))));
case "vespaVersion" :
case "currentVespaVersion" :
return node.with(node.status().withVespaVersion(Version.fromString(asString(value))));
case "currentOsVersion" :
return node.withCurrentOsVersion(Version.fromString(asString(value)), clock.instant());
case "currentFirmwareCheck":
return node.withFirmwareVerifiedAt(Instant.ofEpochMilli(asLong(value)));
case "failCount" :
return node.with(node.status().setFailCount(asLong(value).intValue()));
case "flavor" :
return node.with(nodeFlavors.getFlavorOrThrow(asString(value)));
case "parentHostname" :
return node.withParentHostname(asString(value));
case "ipAddresses" :
return IP.Config.verify(node.with(node.ipConfig().with(asStringSet(value))), nodes.get());
case "additionalIpAddresses" :
return IP.Config.verify(node.with(node.ipConfig().with(IP.Pool.of(asStringSet(value)))), nodes.get());
case WANT_TO_RETIRE :
return node.withWantToRetire(asBoolean(value), Agent.operator, clock.instant());
case "wantToDeprovision" :
if (node.type() != NodeType.host && asBoolean(value))
throw new IllegalArgumentException("wantToDeprovision can only be set for hosts");
return node.with(node.status().withWantToDeprovision(asBoolean(value)));
case "reports" :
return nodeWithPatchedReports(node, value);
case "openStackId" :
return node.withOpenStackId(asString(value));
case "diskGb":
case "minDiskAvailableGb":
return node.with(node.flavor().with(node.flavor().resources().withDiskGb(value.asDouble())));
case "memoryGb":
case "minMainMemoryAvailableGb":
return node.with(node.flavor().with(node.flavor().resources().withMemoryGb(value.asDouble())));
case "vcpu":
case "minCpuCores":
return node.with(node.flavor().with(node.flavor().resources().withVcpu(value.asDouble())));
case "fastDisk":
return node.with(node.flavor().with(node.flavor().resources().with(value.asBool() ? fast : slow)));
case "remoteStorage":
return node.with(node.flavor().with(node.flavor().resources().with(value.asBool() ? remote : local)));
case "bandwidthGbps":
return node.with(node.flavor().with(node.flavor().resources().withBandwidthGbps(value.asDouble())));
case "modelName":
return value.type() == Type.NIX ? node.withoutModelName() : node.withModelName(asString(value));
case "requiredDiskSpeed":
return patchRequiredDiskSpeed(asString(value));
case "reservedTo":
return value.type() == Type.NIX ? node.withoutReservedTo() : node.withReservedTo(TenantName.from(value.asString()));
default :
throw new IllegalArgumentException("Could not apply field '" + name + "' on a node: No such modifiable field");
}
} | class NodePatcher {
private static final String WANT_TO_RETIRE = "wantToRetire";
private final NodeFlavors nodeFlavors;
private final Inspector inspector;
private final Supplier<LockedNodeList> nodes;
private final Clock clock;
private Node node;
public NodePatcher(NodeFlavors nodeFlavors, InputStream json, Node node, Supplier<LockedNodeList> nodes, Clock clock) {
this.nodeFlavors = nodeFlavors;
this.node = node;
this.nodes = nodes;
this.clock = clock;
try {
this.inspector = SlimeUtils.jsonToSlime(IOUtils.readBytes(json, 1000 * 1000)).get();
} catch (IOException e) {
throw new UncheckedIOException("Error reading request body", e);
}
}
/**
* Apply the json to the node and return all nodes affected by the patch.
* More than 1 node may be affected if e.g. the node is a Docker host, which may have
* children that must be updated in a consistent manner.
*/
public List<Node> apply() {
List<Node> patchedNodes = new ArrayList<>();
inspector.traverse((String name, Inspector value) -> {
try {
node = applyField(node, name, value);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Could not set field '" + name + "'", e);
}
try {
patchedNodes.addAll(applyFieldRecursive(name, value));
} catch (IllegalArgumentException e) {
}
} );
patchedNodes.add(node);
return patchedNodes;
}
private List<Node> applyFieldRecursive(String name, Inspector value) {
switch (name) {
case WANT_TO_RETIRE:
List<Node> childNodes = node.type().isDockerHost() ? nodes.get().childrenOf(node).asList() : List.of();
return childNodes.stream()
.map(child -> applyField(child, name, value))
.collect(Collectors.toList());
default :
throw new IllegalArgumentException("Field " + name + " is not recursive");
}
}
private Node nodeWithPatchedReports(Node node, Inspector reportsInspector) {
Node patchedNode;
if (reportsInspector.type() == Type.NIX) {
patchedNode = node.with(new Reports());
} else {
var reportsBuilder = new Reports.Builder(node.reports());
reportsInspector.traverse((ObjectTraverser) (reportId, reportInspector) -> {
if (reportInspector.type() == Type.NIX) {
reportsBuilder.clearReport(reportId);
} else {
reportsBuilder.setReport(Report.fromSlime(reportId, reportInspector));
}
});
patchedNode = node.with(reportsBuilder.build());
}
boolean hadHardFailReports = node.reports().getReports().stream()
.anyMatch(r -> r.getType() == Report.Type.HARD_FAIL);
boolean hasHardFailReports = patchedNode.reports().getReports().stream()
.anyMatch(r -> r.getType() == Report.Type.HARD_FAIL);
if (hadHardFailReports != hasHardFailReports) {
if ((hasHardFailReports && node.state() == Node.State.failed) || node.state() == Node.State.parked)
return patchedNode;
patchedNode = patchedNode.with(patchedNode.status().withWantToDeprovision(hasHardFailReports));
}
return patchedNode;
}
private Set<String> asStringSet(Inspector field) {
if ( ! field.type().equals(Type.ARRAY))
throw new IllegalArgumentException("Expected an ARRAY value, got a " + field.type());
TreeSet<String> strings = new TreeSet<>();
for (int i = 0; i < field.entries(); i++) {
Inspector entry = field.entry(i);
if ( ! entry.type().equals(Type.STRING))
throw new IllegalArgumentException("Expected a STRING value, got a " + entry.type());
strings.add(entry.asString());
}
return strings;
}
private Node patchRequiredDiskSpeed(String value) {
Optional<Allocation> allocation = node.allocation();
if (allocation.isPresent())
return node.with(allocation.get().withRequestedResources(
allocation.get().requestedResources().with(NodeResources.DiskSpeed.valueOf(value))));
else
throw new IllegalArgumentException("Node is not allocated");
}
private Node patchCurrentRestartGeneration(Long value) {
Optional<Allocation> allocation = node.allocation();
if (allocation.isPresent())
return node.with(allocation.get().withRestart(allocation.get().restartGeneration().withCurrent(value)));
else
throw new IllegalArgumentException("Node is not allocated");
}
private Long asLong(Inspector field) {
if ( ! field.type().equals(Type.LONG))
throw new IllegalArgumentException("Expected a LONG value, got a " + field.type());
return field.asLong();
}
private String asString(Inspector field) {
if ( ! field.type().equals(Type.STRING))
throw new IllegalArgumentException("Expected a STRING value, got a " + field.type());
return field.asString();
}
private Optional<String> asOptionalString(Inspector field) {
return field.type().equals(Type.NIX) ? Optional.empty() : Optional.of(asString(field));
}
private Optional<String> removeQuotedNulls(Optional<String> value) {
return value.filter(v -> !v.equals("null"));
}
private boolean asBoolean(Inspector field) {
if ( ! field.type().equals(Type.BOOL))
throw new IllegalArgumentException("Expected a BOOL value, got a " + field.type());
return field.asBool();
}
} | class NodePatcher {
private static final String WANT_TO_RETIRE = "wantToRetire";
private final NodeFlavors nodeFlavors;
private final Inspector inspector;
private final Supplier<LockedNodeList> nodes;
private final Clock clock;
private Node node;
public NodePatcher(NodeFlavors nodeFlavors, InputStream json, Node node, Supplier<LockedNodeList> nodes, Clock clock) {
this.nodeFlavors = nodeFlavors;
this.node = node;
this.nodes = nodes;
this.clock = clock;
try {
this.inspector = SlimeUtils.jsonToSlime(IOUtils.readBytes(json, 1000 * 1000)).get();
} catch (IOException e) {
throw new UncheckedIOException("Error reading request body", e);
}
}
/**
* Apply the json to the node and return all nodes affected by the patch.
* More than 1 node may be affected if e.g. the node is a Docker host, which may have
* children that must be updated in a consistent manner.
*/
public List<Node> apply() {
List<Node> patchedNodes = new ArrayList<>();
inspector.traverse((String name, Inspector value) -> {
try {
node = applyField(node, name, value);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Could not set field '" + name + "'", e);
}
try {
patchedNodes.addAll(applyFieldRecursive(name, value));
} catch (IllegalArgumentException e) {
}
} );
patchedNodes.add(node);
return patchedNodes;
}
private List<Node> applyFieldRecursive(String name, Inspector value) {
switch (name) {
case WANT_TO_RETIRE:
List<Node> childNodes = node.type().isDockerHost() ? nodes.get().childrenOf(node).asList() : List.of();
return childNodes.stream()
.map(child -> applyField(child, name, value))
.collect(Collectors.toList());
default :
throw new IllegalArgumentException("Field " + name + " is not recursive");
}
}
private Node nodeWithPatchedReports(Node node, Inspector reportsInspector) {
Node patchedNode;
if (reportsInspector.type() == Type.NIX) {
patchedNode = node.with(new Reports());
} else {
var reportsBuilder = new Reports.Builder(node.reports());
reportsInspector.traverse((ObjectTraverser) (reportId, reportInspector) -> {
if (reportInspector.type() == Type.NIX) {
reportsBuilder.clearReport(reportId);
} else {
reportsBuilder.setReport(Report.fromSlime(reportId, reportInspector));
}
});
patchedNode = node.with(reportsBuilder.build());
}
boolean hadHardFailReports = node.reports().getReports().stream()
.anyMatch(r -> r.getType() == Report.Type.HARD_FAIL);
boolean hasHardFailReports = patchedNode.reports().getReports().stream()
.anyMatch(r -> r.getType() == Report.Type.HARD_FAIL);
if (hadHardFailReports != hasHardFailReports) {
if ((hasHardFailReports && node.state() == Node.State.failed) || node.state() == Node.State.parked)
return patchedNode;
patchedNode = patchedNode.with(patchedNode.status().withWantToDeprovision(hasHardFailReports));
}
return patchedNode;
}
private Set<String> asStringSet(Inspector field) {
if ( ! field.type().equals(Type.ARRAY))
throw new IllegalArgumentException("Expected an ARRAY value, got a " + field.type());
TreeSet<String> strings = new TreeSet<>();
for (int i = 0; i < field.entries(); i++) {
Inspector entry = field.entry(i);
if ( ! entry.type().equals(Type.STRING))
throw new IllegalArgumentException("Expected a STRING value, got a " + entry.type());
strings.add(entry.asString());
}
return strings;
}
private Node patchRequiredDiskSpeed(String value) {
Optional<Allocation> allocation = node.allocation();
if (allocation.isPresent())
return node.with(allocation.get().withRequestedResources(
allocation.get().requestedResources().with(NodeResources.DiskSpeed.valueOf(value))));
else
throw new IllegalArgumentException("Node is not allocated");
}
private Node patchCurrentRestartGeneration(Long value) {
Optional<Allocation> allocation = node.allocation();
if (allocation.isPresent())
return node.with(allocation.get().withRestart(allocation.get().restartGeneration().withCurrent(value)));
else
throw new IllegalArgumentException("Node is not allocated");
}
private Long asLong(Inspector field) {
if ( ! field.type().equals(Type.LONG))
throw new IllegalArgumentException("Expected a LONG value, got a " + field.type());
return field.asLong();
}
private String asString(Inspector field) {
if ( ! field.type().equals(Type.STRING))
throw new IllegalArgumentException("Expected a STRING value, got a " + field.type());
return field.asString();
}
private Optional<String> asOptionalString(Inspector field) {
return field.type().equals(Type.NIX) ? Optional.empty() : Optional.of(asString(field));
}
private Optional<String> removeQuotedNulls(Optional<String> value) {
return value.filter(v -> !v.equals("null"));
}
private boolean asBoolean(Inspector field) {
if ( ! field.type().equals(Type.BOOL))
throw new IllegalArgumentException("Expected a BOOL value, got a " + field.type());
return field.asBool();
}
} |
Shall we remove metric publishing from here and publish error scenario in response callback notify success method | private void dispatchWithDataBinding(ObjectValue msgObj, BType intendedType, byte[] data) {
try {
Object typeBoundData = bindDataToIntendedType(data, intendedType);
CountDownLatch countDownLatch = new CountDownLatch(1);
executeResource(msgObj, countDownLatch, typeBoundData);
countDownLatch.await();
} catch (NumberFormatException e) {
ErrorValue dataBindError = Utils
.createNatsError("The received message is unsupported by the resource signature");
ErrorHandler.dispatchError(serviceObject, msgObj, dataBindError, runtime, connectedUrl);
NatsMetricsUtil.reportConsumerError(connectedUrl, msgObj.getStringValue(Constants.SUBJECT),
NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED);
} catch (ErrorValue e) {
ErrorHandler.dispatchError(serviceObject, msgObj, e, runtime, connectedUrl);
NatsMetricsUtil.reportConsumerError(connectedUrl, msgObj.getStringValue(Constants.SUBJECT),
NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
NatsMetricsUtil.reportConsumerError(connectedUrl, msgObj.getStringValue(Constants.SUBJECT),
NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED);
throw Utils.createNatsError(Constants.THREAD_INTERRUPTED_ERROR);
}
} | NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED); | private void dispatchWithDataBinding(ObjectValue msgObj, BType intendedType, byte[] data) {
try {
Object typeBoundData = bindDataToIntendedType(data, intendedType);
CountDownLatch countDownLatch = new CountDownLatch(1);
executeResource(msgObj, countDownLatch, typeBoundData);
countDownLatch.await();
} catch (NumberFormatException e) {
ErrorValue dataBindError = Utils
.createNatsError("The received message is unsupported by the resource signature");
ErrorHandler.dispatchError(serviceObject, msgObj, dataBindError, runtime, connectedUrl);
} catch (ErrorValue e) {
ErrorHandler.dispatchError(serviceObject, msgObj, e, runtime, connectedUrl);
} catch (InterruptedException e) {
NatsMetricsUtil.reportConsumerError(connectedUrl, msgObj.getStringValue(Constants.SUBJECT),
NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED);
Thread.currentThread().interrupt();
throw Utils.createNatsError(Constants.THREAD_INTERRUPTED_ERROR);
}
} | class DefaultMessageHandler implements MessageHandler {
private ObjectValue serviceObject;
private String connectedUrl;
private BRuntime runtime;
DefaultMessageHandler(ObjectValue serviceObject, BRuntime runtime, String connectedUrl) {
this.serviceObject = serviceObject;
this.runtime = runtime;
this.connectedUrl = connectedUrl;
}
/**
* {@inheritDoc}
*/
@Override
public void onMessage(Message message) {
NatsMetricsUtil.reportConsume(connectedUrl, message.getSubject(), message.getData().length);
ArrayValue msgData = new ArrayValueImpl(message.getData());
ObjectValue msgObj = BallerinaValues.createObjectValue(Constants.NATS_PACKAGE_ID,
Constants.NATS_MESSAGE_OBJ_NAME, message.getSubject(), msgData, message.getReplyTo());
AttachedFunction onMessage = getAttachedFunction(serviceObject, ON_MESSAGE_RESOURCE);
BType[] parameterTypes = onMessage.getParameterType();
if (parameterTypes.length == 1) {
dispatch(msgObj);
} else {
BType intendedTypeForData = parameterTypes[1];
dispatchWithDataBinding(msgObj, intendedTypeForData, message.getData());
}
}
/**
* Dispatch only the message to the onMessage resource.
*
* @param msgObj Message object
*/
private void dispatch(ObjectValue msgObj) {
CountDownLatch countDownLatch = new CountDownLatch(1);
executeResource(msgObj, countDownLatch);
try {
countDownLatch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
NatsMetricsUtil.reportConsumerError(connectedUrl, msgObj.getStringValue(Constants.SUBJECT),
NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED);
throw Utils.createNatsError(Constants.THREAD_INTERRUPTED_ERROR);
}
}
/**
* Dispatch message and type bound data to the onMessage resource.
*
* @param msgObj Message object
* @param intendedType Message type for data binding
* @param data Message data
*/
private void executeResource(ObjectValue msgObj, CountDownLatch countDownLatch) {
String subject = msgObj.getStringValue(Constants.SUBJECT);
if (ObserveUtils.isTracingEnabled()) {
Map<String, Object> properties = new HashMap<>();
NatsObserverContext observerContext = new NatsObserverContext(NatsObservabilityConstants.CONTEXT_CONSUMER,
connectedUrl,
msgObj.getStringValue(Constants.SUBJECT));
properties.put(ObservabilityConstants.KEY_OBSERVER_CONTEXT, observerContext);
runtime.invokeMethodAsync(serviceObject, ON_MESSAGE_RESOURCE,
new ResponseCallback(countDownLatch, connectedUrl, subject),
properties, msgObj, Boolean.TRUE);
} else {
runtime.invokeMethodAsync(serviceObject, ON_MESSAGE_RESOURCE,
new ResponseCallback(countDownLatch, connectedUrl, subject),
msgObj, Boolean.TRUE);
}
}
private void executeResource(ObjectValue msgObj, CountDownLatch countDownLatch, Object typeBoundData) {
String subject = msgObj.getStringValue(Constants.SUBJECT);
if (ObserveUtils.isTracingEnabled()) {
Map<String, Object> properties = new HashMap<>();
NatsObserverContext observerContext = new NatsObserverContext(NatsObservabilityConstants.CONTEXT_CONSUMER,
connectedUrl,
msgObj.getStringValue(Constants.SUBJECT));
properties.put(ObservabilityConstants.KEY_OBSERVER_CONTEXT, observerContext);
runtime.invokeMethodAsync(serviceObject, ON_MESSAGE_RESOURCE,
new ResponseCallback(countDownLatch, connectedUrl, subject),
properties, msgObj, true, typeBoundData, true);
} else {
runtime.invokeMethodAsync(serviceObject, ON_MESSAGE_RESOURCE,
new ResponseCallback(countDownLatch, connectedUrl, subject),
msgObj, true, typeBoundData, true);
}
}
/**
* Represents the callback which will be triggered upon submitting to resource.
*/
public static class ResponseCallback implements CallableUnitCallback {
private CountDownLatch countDownLatch;
private String subject;
private String connectedUrl;
ResponseCallback(CountDownLatch countDownLatch, String connectedUrl, String subject) {
this.countDownLatch = countDownLatch;
this.connectedUrl = connectedUrl;
this.subject = subject;
}
/**
* {@inheritDoc}
*/
@Override
public void notifySuccess() {
NatsMetricsUtil.reportDelivery(connectedUrl, subject);
countDownLatch.countDown();
}
/**
* {@inheritDoc}
*/
@Override
public void notifyFailure(ErrorValue error) {
ErrorHandlerUtils.printError(error);
NatsMetricsUtil.reportConsumerError(connectedUrl, subject,
NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED);
countDownLatch.countDown();
}
}
} | class DefaultMessageHandler implements MessageHandler {
private ObjectValue serviceObject;
private String connectedUrl;
private BRuntime runtime;
DefaultMessageHandler(ObjectValue serviceObject, BRuntime runtime, String connectedUrl) {
this.serviceObject = serviceObject;
this.runtime = runtime;
this.connectedUrl = connectedUrl;
}
/**
* {@inheritDoc}
*/
@Override
public void onMessage(Message message) {
NatsMetricsUtil.reportConsume(connectedUrl, message.getSubject(), message.getData().length);
ArrayValue msgData = new ArrayValueImpl(message.getData());
ObjectValue msgObj = BallerinaValues.createObjectValue(Constants.NATS_PACKAGE_ID,
Constants.NATS_MESSAGE_OBJ_NAME, message.getSubject(), msgData, message.getReplyTo());
AttachedFunction onMessage = getAttachedFunction(serviceObject, ON_MESSAGE_RESOURCE);
BType[] parameterTypes = onMessage.getParameterType();
if (parameterTypes.length == 1) {
dispatch(msgObj);
} else {
BType intendedTypeForData = parameterTypes[1];
dispatchWithDataBinding(msgObj, intendedTypeForData, message.getData());
}
}
/**
* Dispatch only the message to the onMessage resource.
*
* @param msgObj Message object
*/
private void dispatch(ObjectValue msgObj) {
CountDownLatch countDownLatch = new CountDownLatch(1);
executeResource(msgObj, countDownLatch);
try {
countDownLatch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
NatsMetricsUtil.reportConsumerError(connectedUrl, msgObj.getStringValue(Constants.SUBJECT),
NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED);
throw Utils.createNatsError(Constants.THREAD_INTERRUPTED_ERROR);
}
}
/**
* Dispatch message and type bound data to the onMessage resource.
*
* @param msgObj Message object
* @param intendedType Message type for data binding
* @param data Message data
*/
private void executeResource(ObjectValue msgObj, CountDownLatch countDownLatch) {
String subject = msgObj.getStringValue(Constants.SUBJECT);
if (ObserveUtils.isTracingEnabled()) {
Map<String, Object> properties = new HashMap<>();
NatsObserverContext observerContext = new NatsObserverContext(NatsObservabilityConstants.CONTEXT_CONSUMER,
connectedUrl,
msgObj.getStringValue(Constants.SUBJECT));
properties.put(ObservabilityConstants.KEY_OBSERVER_CONTEXT, observerContext);
runtime.invokeMethodAsync(serviceObject, ON_MESSAGE_RESOURCE,
new ResponseCallback(countDownLatch, connectedUrl, subject),
properties, msgObj, Boolean.TRUE);
} else {
runtime.invokeMethodAsync(serviceObject, ON_MESSAGE_RESOURCE,
new ResponseCallback(countDownLatch, connectedUrl, subject),
msgObj, Boolean.TRUE);
}
}
private void executeResource(ObjectValue msgObj, CountDownLatch countDownLatch, Object typeBoundData) {
String subject = msgObj.getStringValue(Constants.SUBJECT);
if (ObserveUtils.isTracingEnabled()) {
Map<String, Object> properties = new HashMap<>();
NatsObserverContext observerContext = new NatsObserverContext(NatsObservabilityConstants.CONTEXT_CONSUMER,
connectedUrl,
msgObj.getStringValue(Constants.SUBJECT));
properties.put(ObservabilityConstants.KEY_OBSERVER_CONTEXT, observerContext);
runtime.invokeMethodAsync(serviceObject, ON_MESSAGE_RESOURCE,
new ResponseCallback(countDownLatch, connectedUrl, subject),
properties, msgObj, true, typeBoundData, true);
} else {
runtime.invokeMethodAsync(serviceObject, ON_MESSAGE_RESOURCE,
new ResponseCallback(countDownLatch, connectedUrl, subject),
msgObj, true, typeBoundData, true);
}
}
/**
 * Callback fired when the asynchronously dispatched resource call finishes.
 * On success it reports delivery metrics; on failure it prints the error and reports a
 * consumer error. In both cases the latch is released so message processing can proceed.
 */
public static class ResponseCallback implements CallableUnitCallback {

    private final CountDownLatch countDownLatch;
    private final String connectedUrl;
    private final String subject;

    ResponseCallback(CountDownLatch countDownLatch, String connectedUrl, String subject) {
        this.countDownLatch = countDownLatch;
        this.connectedUrl = connectedUrl;
        this.subject = subject;
    }

    /** {@inheritDoc} */
    @Override
    public void notifySuccess() {
        NatsMetricsUtil.reportDelivery(connectedUrl, subject);
        countDownLatch.countDown();
    }

    /** {@inheritDoc} */
    @Override
    public void notifyFailure(ErrorValue error) {
        ErrorHandlerUtils.printError(error);
        NatsMetricsUtil.reportConsumerError(connectedUrl, subject,
                NatsObservabilityConstants.ERROR_TYPE_MSG_RECEIVED);
        // Release the latch even on failure so the consumer loop never deadlocks.
        countDownLatch.countDown();
    }
}
} |
Isn't this provided to the environment through the provision endpoint? | public RemoteEnvironment createEnvironment(Environment environment) throws Exception {
Preconditions.checkState(
environment
.getUrn()
.equals(BeamUrns.getUrn(RunnerApi.StandardEnvironments.Environments.DOCKER)),
"The passed environment does not contain a DockerPayload.");
final RunnerApi.DockerPayload dockerPayload =
RunnerApi.DockerPayload.parseFrom(environment.getPayload());
final String workerId = idGenerator.getId();
String containerImage = dockerPayload.getContainerImage();
String loggingEndpoint = loggingServiceServer.getApiServiceDescriptor().getUrl();
String artifactEndpoint = retrievalServiceServer.getApiServiceDescriptor().getUrl();
String provisionEndpoint = provisioningServiceServer.getApiServiceDescriptor().getUrl();
String controlEndpoint = controlServiceServer.getApiServiceDescriptor().getUrl();
ImmutableList.Builder<String> dockerOptsBuilder =
ImmutableList.<String>builder()
.addAll(gcsCredentialArgs())
.add("--network=host")
.add("--env=DOCKER_MAC_CONTAINER=" + System.getenv("DOCKER_MAC_CONTAINER"));
Boolean retainDockerContainer =
pipelineOptions.as(ManualDockerEnvironmentOptions.class).getRetainDockerContainers();
if (!retainDockerContainer) {
dockerOptsBuilder.add("--rm");
}
String semiPersistDir = pipelineOptions.as(RemoteEnvironmentOptions.class).getSemiPersistDir();
ImmutableList.Builder<String> argsBuilder =
ImmutableList.<String>builder()
.add(String.format("--id=%s", workerId))
.add(String.format("--logging_endpoint=%s", loggingEndpoint))
.add(String.format("--artifact_endpoint=%s", artifactEndpoint))
.add(String.format("--provision_endpoint=%s", provisionEndpoint))
.add(String.format("--control_endpoint=%s", controlEndpoint));
if (semiPersistDir != null) {
argsBuilder.add(String.format("--semi_persist_dir=%s", semiPersistDir));
}
LOG.debug("Creating Docker Container with ID {}", workerId);
String containerId = null;
InstructionRequestHandler instructionHandler = null;
try {
containerId = docker.runImage(containerImage, dockerOptsBuilder.build(), argsBuilder.build());
LOG.debug("Created Docker Container with Container ID {}", containerId);
try {
instructionHandler = clientSource.take(workerId, Duration.ofMinutes(1));
} catch (TimeoutException timeoutEx) {
RuntimeException runtimeException =
new RuntimeException(
String.format(
"Docker container %s failed to start up successfully within 1 minute.",
containerImage),
timeoutEx);
try {
String containerLogs = docker.getContainerLogs(containerId);
LOG.error("Docker container {} logs:\n{}", containerId, containerLogs);
} catch (Exception getLogsException) {
runtimeException.addSuppressed(getLogsException);
}
throw runtimeException;
} catch (InterruptedException interruptEx) {
Thread.currentThread().interrupt();
throw new RuntimeException(interruptEx);
}
} catch (Exception e) {
if (containerId != null) {
try {
docker.killContainer(containerId);
if (!retainDockerContainer) {
docker.removeContainer(containerId);
}
} catch (Exception dockerException) {
e.addSuppressed(dockerException);
}
}
throw e;
}
return DockerContainerEnvironment.create(
docker, environment, containerId, instructionHandler, retainDockerContainer);
} | if (semiPersistDir != null) { | public RemoteEnvironment createEnvironment(Environment environment) throws Exception {
Preconditions.checkState(
environment
.getUrn()
.equals(BeamUrns.getUrn(RunnerApi.StandardEnvironments.Environments.DOCKER)),
"The passed environment does not contain a DockerPayload.");
final RunnerApi.DockerPayload dockerPayload =
RunnerApi.DockerPayload.parseFrom(environment.getPayload());
final String workerId = idGenerator.getId();
String containerImage = dockerPayload.getContainerImage();
String loggingEndpoint = loggingServiceServer.getApiServiceDescriptor().getUrl();
String artifactEndpoint = retrievalServiceServer.getApiServiceDescriptor().getUrl();
String provisionEndpoint = provisioningServiceServer.getApiServiceDescriptor().getUrl();
String controlEndpoint = controlServiceServer.getApiServiceDescriptor().getUrl();
ImmutableList.Builder<String> dockerOptsBuilder =
ImmutableList.<String>builder()
.addAll(gcsCredentialArgs())
.add("--network=host")
.add("--env=DOCKER_MAC_CONTAINER=" + System.getenv("DOCKER_MAC_CONTAINER"));
Boolean retainDockerContainer =
pipelineOptions.as(ManualDockerEnvironmentOptions.class).getRetainDockerContainers();
if (!retainDockerContainer) {
dockerOptsBuilder.add("--rm");
}
String semiPersistDir = pipelineOptions.as(RemoteEnvironmentOptions.class).getSemiPersistDir();
ImmutableList.Builder<String> argsBuilder =
ImmutableList.<String>builder()
.add(String.format("--id=%s", workerId))
.add(String.format("--logging_endpoint=%s", loggingEndpoint))
.add(String.format("--artifact_endpoint=%s", artifactEndpoint))
.add(String.format("--provision_endpoint=%s", provisionEndpoint))
.add(String.format("--control_endpoint=%s", controlEndpoint));
if (semiPersistDir != null) {
argsBuilder.add(String.format("--semi_persist_dir=%s", semiPersistDir));
}
LOG.debug("Creating Docker Container with ID {}", workerId);
String containerId = null;
InstructionRequestHandler instructionHandler = null;
try {
containerId = docker.runImage(containerImage, dockerOptsBuilder.build(), argsBuilder.build());
LOG.debug("Created Docker Container with Container ID {}", containerId);
try {
instructionHandler = clientSource.take(workerId, Duration.ofMinutes(1));
} catch (TimeoutException timeoutEx) {
RuntimeException runtimeException =
new RuntimeException(
String.format(
"Docker container %s failed to start up successfully within 1 minute.",
containerImage),
timeoutEx);
try {
String containerLogs = docker.getContainerLogs(containerId);
LOG.error("Docker container {} logs:\n{}", containerId, containerLogs);
} catch (Exception getLogsException) {
runtimeException.addSuppressed(getLogsException);
}
throw runtimeException;
} catch (InterruptedException interruptEx) {
Thread.currentThread().interrupt();
throw new RuntimeException(interruptEx);
}
} catch (Exception e) {
if (containerId != null) {
try {
docker.killContainer(containerId);
if (!retainDockerContainer) {
docker.removeContainer(containerId);
}
} catch (Exception dockerException) {
e.addSuppressed(dockerException);
}
}
throw e;
}
return DockerContainerEnvironment.create(
docker, environment, containerId, instructionHandler, retainDockerContainer);
} | class DockerEnvironmentFactory implements EnvironmentFactory {
private static final Logger LOG = LoggerFactory.getLogger(DockerEnvironmentFactory.class);
/**
 * Static factory: wires a {@code DockerEnvironmentFactory} to the gRPC servers that a
 * docker-hosted SDK harness connects back to (control, logging, artifact retrieval,
 * provisioning).
 *
 * @param docker command wrapper used to run/kill containers
 * @param clientSource pool from which the control client of a started worker is taken
 * @param idGenerator produces unique worker ids
 * @param pipelineOptions consulted for docker-related pipeline flags
 */
static DockerEnvironmentFactory forServicesWithDocker(
DockerCommand docker,
GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
GrpcFnServer<GrpcLoggingService> loggingServiceServer,
GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
ControlClientPool.Source clientSource,
IdGenerator idGenerator,
PipelineOptions pipelineOptions) {
// NOTE: the constructor takes idGenerator before clientSource — the argument order
// below intentionally differs from this method's parameter order.
return new DockerEnvironmentFactory(
docker,
controlServiceServer,
loggingServiceServer,
retrievalServiceServer,
provisioningServiceServer,
idGenerator,
clientSource,
pipelineOptions);
}
private final DockerCommand docker;
private final GrpcFnServer<FnApiControlClientPoolService> controlServiceServer;
private final GrpcFnServer<GrpcLoggingService> loggingServiceServer;
private final GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer;
private final GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer;
private final IdGenerator idGenerator;
private final ControlClientPool.Source clientSource;
private final PipelineOptions pipelineOptions;
/**
 * Private constructor; use {@code forServicesWithDocker} to obtain instances.
 * Stores references to the gRPC servers whose endpoints are handed to each new
 * docker container as startup arguments.
 */
private DockerEnvironmentFactory(
DockerCommand docker,
GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
GrpcFnServer<GrpcLoggingService> loggingServiceServer,
GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
IdGenerator idGenerator,
ControlClientPool.Source clientSource,
PipelineOptions pipelineOptions) {
this.docker = docker;
this.controlServiceServer = controlServiceServer;
this.loggingServiceServer = loggingServiceServer;
this.retrievalServiceServer = retrievalServiceServer;
this.provisioningServiceServer = provisioningServiceServer;
this.idGenerator = idGenerator;
this.clientSource = clientSource;
this.pipelineOptions = pipelineOptions;
}
/** Creates a new, active {@link RemoteEnvironment} backed by a local Docker container. */
/**
 * Builds docker arguments that bind-mount the host's gcloud configuration into the
 * container at {@code /root/.config/gcloud} so the harness can pick up GCS credentials.
 * Honors {@code CLOUDSDK_CONFIG} when set, otherwise falls back to the default
 * {@code ~/.config/gcloud} location. Returns an empty list when no local gcloud
 * config directory exists.
 *
 * <p>Note: the stray {@code @Override} previously on this method was removed — a
 * private method overrides nothing, and the annotation is a compile error here.
 */
private List<String> gcsCredentialArgs() {
    String dockerGcloudConfig = "/root/.config/gcloud";
    String localGcloudConfig =
        firstNonNull(
            System.getenv("CLOUDSDK_CONFIG"),
            Paths.get(System.getProperty("user.home"), ".config", "gcloud").toString());
    // Only mount if the config directory actually exists on the host.
    if (Files.exists(Paths.get(localGcloudConfig))) {
        return ImmutableList.of(
            "--mount",
            String.format("type=bind,src=%s,dst=%s", localGcloudConfig, dockerGcloudConfig));
    } else {
        return ImmutableList.of();
    }
}
/**
* NOTE: Deployment on Macs is intended for local development. As of 18.03, Docker-for-Mac does
* not implement host networking (--networking=host is effectively a no-op). Instead, we use a
* special DNS entry that points to the host:
* https:
* hostname has historically changed between versions, so this is subject to breakages and will
* likely only support the latest version at any time.
*/
static class DockerOnMac {
    // DNS name Docker-for-Mac exposes for reaching the host from inside a container.
    private static final String DOCKER_FOR_MAC_HOST = "host.docker.internal";
    private static final boolean RUNNING_INSIDE_DOCKER_ON_MAC =
        "1".equals(System.getenv("DOCKER_MAC_CONTAINER"));
    // Ports in [8100, 8200] are rotated through when running inside Docker-for-Mac.
    private static final int MAC_PORT_START = 8100;
    private static final int MAC_PORT_END = 8200;
    private static final AtomicInteger MAC_PORT = new AtomicInteger(MAC_PORT_START);

    /** Returns a ServerFactory whose URLs route through the Docker-for-Mac host alias. */
    static ServerFactory getServerFactory() {
        ServerFactory.UrlFactory urlFactory =
            (host, port) -> HostAndPort.fromParts(DOCKER_FOR_MAC_HOST, port).toString();
        if (!RUNNING_INSIDE_DOCKER_ON_MAC) {
            return ServerFactory.createWithUrlFactory(urlFactory);
        }
        // Inside a container we must also pick ports from the forwarded range,
        // wrapping back to the start once the end of the range is reached.
        return ServerFactory.createWithUrlFactoryAndPortSupplier(
            urlFactory,
            () -> MAC_PORT.getAndUpdate(p -> p == MAC_PORT_END ? MAC_PORT_START : p + 1));
    }
}
/** Provider for DockerEnvironmentFactory. */
public static class Provider implements EnvironmentFactory.Provider {

    private final PipelineOptions pipelineOptions;

    public Provider(PipelineOptions options) {
        this.pipelineOptions = options;
    }

    @Override
    public EnvironmentFactory createEnvironmentFactory(
        GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
        GrpcFnServer<GrpcLoggingService> loggingServiceServer,
        GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
        GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
        ControlClientPool clientPool,
        IdGenerator idGenerator) {
        return DockerEnvironmentFactory.forServicesWithDocker(
            DockerCommand.getDefault(),
            controlServiceServer,
            loggingServiceServer,
            retrievalServiceServer,
            provisioningServiceServer,
            clientPool.getSource(),
            idGenerator,
            pipelineOptions);
    }

    @Override
    public ServerFactory getServerFactory() {
        Platform platform = getPlatform();
        if (platform == Platform.MAC) {
            return DockerOnMac.getServerFactory();
        }
        if (platform != Platform.LINUX) {
            // Unknown platform: still usable, but warn that this path is untested.
            LOG.warn("Unknown Docker platform. Falling back to default server factory");
        }
        return ServerFactory.createDefault();
    }

    private static Platform getPlatform() {
        String osName = System.getProperty("os.name").toLowerCase();
        // DOCKER_MAC_CONTAINER marks a JVM that is already inside a Docker-for-Mac container.
        if (osName.startsWith("mac") || DockerOnMac.RUNNING_INSIDE_DOCKER_ON_MAC) {
            return Platform.MAC;
        }
        if (osName.startsWith("linux")) {
            return Platform.LINUX;
        }
        return Platform.OTHER;
    }

    private enum Platform {
        MAC,
        LINUX,
        OTHER,
    }
}
} | class DockerEnvironmentFactory implements EnvironmentFactory {
private static final Logger LOG = LoggerFactory.getLogger(DockerEnvironmentFactory.class);
static DockerEnvironmentFactory forServicesWithDocker(
DockerCommand docker,
GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
GrpcFnServer<GrpcLoggingService> loggingServiceServer,
GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
ControlClientPool.Source clientSource,
IdGenerator idGenerator,
PipelineOptions pipelineOptions) {
return new DockerEnvironmentFactory(
docker,
controlServiceServer,
loggingServiceServer,
retrievalServiceServer,
provisioningServiceServer,
idGenerator,
clientSource,
pipelineOptions);
}
private final DockerCommand docker;
private final GrpcFnServer<FnApiControlClientPoolService> controlServiceServer;
private final GrpcFnServer<GrpcLoggingService> loggingServiceServer;
private final GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer;
private final GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer;
private final IdGenerator idGenerator;
private final ControlClientPool.Source clientSource;
private final PipelineOptions pipelineOptions;
private DockerEnvironmentFactory(
DockerCommand docker,
GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
GrpcFnServer<GrpcLoggingService> loggingServiceServer,
GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
IdGenerator idGenerator,
ControlClientPool.Source clientSource,
PipelineOptions pipelineOptions) {
this.docker = docker;
this.controlServiceServer = controlServiceServer;
this.loggingServiceServer = loggingServiceServer;
this.retrievalServiceServer = retrievalServiceServer;
this.provisioningServiceServer = provisioningServiceServer;
this.idGenerator = idGenerator;
this.clientSource = clientSource;
this.pipelineOptions = pipelineOptions;
}
/** Creates a new, active {@link RemoteEnvironment} backed by a local Docker container. */
@Override
private List<String> gcsCredentialArgs() {
String dockerGcloudConfig = "/root/.config/gcloud";
String localGcloudConfig =
firstNonNull(
System.getenv("CLOUDSDK_CONFIG"),
Paths.get(System.getProperty("user.home"), ".config", "gcloud").toString());
if (Files.exists(Paths.get(localGcloudConfig))) {
return ImmutableList.of(
"--mount",
String.format("type=bind,src=%s,dst=%s", localGcloudConfig, dockerGcloudConfig));
} else {
return ImmutableList.of();
}
}
/**
* NOTE: Deployment on Macs is intended for local development. As of 18.03, Docker-for-Mac does
* not implement host networking (--networking=host is effectively a no-op). Instead, we use a
* special DNS entry that points to the host:
* https:
* hostname has historically changed between versions, so this is subject to breakages and will
* likely only support the latest version at any time.
*/
static class DockerOnMac {
private static final String DOCKER_FOR_MAC_HOST = "host.docker.internal";
private static final boolean RUNNING_INSIDE_DOCKER_ON_MAC =
"1".equals(System.getenv("DOCKER_MAC_CONTAINER"));
private static final int MAC_PORT_START = 8100;
private static final int MAC_PORT_END = 8200;
private static final AtomicInteger MAC_PORT = new AtomicInteger(MAC_PORT_START);
static ServerFactory getServerFactory() {
ServerFactory.UrlFactory dockerUrlFactory =
(host, port) -> HostAndPort.fromParts(DOCKER_FOR_MAC_HOST, port).toString();
if (RUNNING_INSIDE_DOCKER_ON_MAC) {
return ServerFactory.createWithUrlFactoryAndPortSupplier(
dockerUrlFactory,
() -> MAC_PORT.getAndUpdate(val -> val == MAC_PORT_END ? MAC_PORT_START : val + 1));
} else {
return ServerFactory.createWithUrlFactory(dockerUrlFactory);
}
}
}
/** Provider for DockerEnvironmentFactory. */
public static class Provider implements EnvironmentFactory.Provider {
private final PipelineOptions pipelineOptions;
public Provider(PipelineOptions options) {
this.pipelineOptions = options;
}
@Override
public EnvironmentFactory createEnvironmentFactory(
GrpcFnServer<FnApiControlClientPoolService> controlServiceServer,
GrpcFnServer<GrpcLoggingService> loggingServiceServer,
GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer,
GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer,
ControlClientPool clientPool,
IdGenerator idGenerator) {
return DockerEnvironmentFactory.forServicesWithDocker(
DockerCommand.getDefault(),
controlServiceServer,
loggingServiceServer,
retrievalServiceServer,
provisioningServiceServer,
clientPool.getSource(),
idGenerator,
pipelineOptions);
}
@Override
public ServerFactory getServerFactory() {
switch (getPlatform()) {
case LINUX:
return ServerFactory.createDefault();
case MAC:
return DockerOnMac.getServerFactory();
default:
LOG.warn("Unknown Docker platform. Falling back to default server factory");
return ServerFactory.createDefault();
}
}
private static Platform getPlatform() {
String osName = System.getProperty("os.name").toLowerCase();
if (osName.startsWith("mac") || DockerOnMac.RUNNING_INSIDE_DOCKER_ON_MAC) {
return Platform.MAC;
} else if (osName.startsWith("linux")) {
return Platform.LINUX;
}
return Platform.OTHER;
}
private enum Platform {
MAC,
LINUX,
OTHER,
}
}
} |
I'm wondering whether we should use `CompletableFuture` here, instead. That way, we could collect any `RuntimeException` (which would be otherwise swallowed by the concurrent execution). WDYT? :thinking: | void testCancelExecutionInteractiveMode() throws Exception {
final MockExecutor mockExecutor = new MockExecutor(new SqlParserHelper(), true);
Path historyFilePath = historyTempFile();
InputStream inputStream = new ByteArrayInputStream("SELECT 1;\nHELP;\n ".getBytes());
OutputStream outputStream = new ByteArrayOutputStream(248);
CountDownLatch closedLatch = new CountDownLatch(1);
try (Terminal terminal = TerminalUtils.createDumbTerminal(inputStream, outputStream);
CliClient client =
new CliClient(() -> terminal, mockExecutor, historyFilePath, null)) {
Thread thread =
new Thread(
() -> {
try {
client.executeInInteractiveMode();
} catch (Exception ignore) {
} finally {
closedLatch.countDown();
}
});
thread.start();
while (!mockExecutor.isAwait) {
Thread.sleep(10);
}
terminal.raise(Terminal.Signal.INT);
CommonTestUtils.waitUntilCondition(
() -> outputStream.toString().contains(CliStrings.MESSAGE_HELP));
closedLatch.await(30, TimeUnit.SECONDS);
}
} | } finally { | void testCancelExecutionInteractiveMode() throws Exception {
final MockExecutor mockExecutor = new MockExecutor(new SqlParserHelper(), true);
Path historyFilePath = historyTempFile();
InputStream inputStream = new ByteArrayInputStream("SELECT 1;\nHELP;\n ".getBytes());
OutputStream outputStream = new ByteArrayOutputStream(248);
try (Terminal terminal = TerminalUtils.createDumbTerminal(inputStream, outputStream);
CliClient client =
new CliClient(() -> terminal, mockExecutor, historyFilePath, null)) {
CheckedThread thread =
new CheckedThread() {
@Override
public void go() {
client.executeInInteractiveMode();
}
};
thread.start();
while (!mockExecutor.isAwait) {
Thread.sleep(10);
}
terminal.raise(Terminal.Signal.INT);
CommonTestUtils.waitUntilCondition(
() -> outputStream.toString().contains(CliStrings.MESSAGE_HELP));
thread.sync();
}
} | class CliClientTest {
private static final String INSERT_INTO_STATEMENT =
"INSERT INTO MyTable SELECT * FROM MyOtherTable";
private static final String INSERT_OVERWRITE_STATEMENT =
"INSERT OVERWRITE MyTable SELECT * FROM MyOtherTable";
private static final String ORIGIN_SQL = "SELECT pos\t FROM source_table;\n";
private static final String SQL_WITHOUT_COMPLETER = "SELECT pos FROM source_table;";
private static final String SQL_WITH_COMPLETER = "SELECT POSITION FROM source_table;";
@Test
void testUpdateSubmission() throws Exception {
verifyUpdateSubmission(INSERT_INTO_STATEMENT, false, false);
verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, false, false);
}
@Test
void testFailedUpdateSubmission() throws Exception {
verifyUpdateSubmission(INSERT_INTO_STATEMENT, true, true);
verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, true, true);
}
@Test
void testExecuteSqlFile() throws Exception {
MockExecutor executor = new MockExecutor();
executeSqlFromContent(
executor,
String.join(
";\n",
Arrays.asList(
INSERT_INTO_STATEMENT, "", INSERT_OVERWRITE_STATEMENT, "\n")));
assertThat(executor.receivedStatement).contains(INSERT_OVERWRITE_STATEMENT);
}
@Test
void testExecuteSqlFileWithoutSqlCompleter() throws Exception {
MockExecutor executor = new MockExecutor(new SqlParserHelper(), false);
executeSqlFromContent(executor, ORIGIN_SQL);
assertThat(executor.receivedStatement).contains(SQL_WITHOUT_COMPLETER);
}
@Test
void testExecuteSqlInteractiveWithSqlCompleter() throws Exception {
final MockExecutor mockExecutor = new MockExecutor(new SqlParserHelper(), false);
InputStream inputStream = new ByteArrayInputStream(ORIGIN_SQL.getBytes());
OutputStream outputStream = new ByteArrayOutputStream(256);
try (Terminal terminal = new DumbTerminal(inputStream, outputStream);
CliClient client =
new CliClient(() -> terminal, mockExecutor, historyTempFile(), null)) {
client.executeInInteractiveMode();
assertThat(mockExecutor.receivedStatement).contains(SQL_WITH_COMPLETER);
}
}
@Test
void testSqlCompletion() throws IOException {
verifySqlCompletion("", 0, Arrays.asList("CLEAR", "HELP", "EXIT", "QUIT", "RESET", "SET"));
verifySqlCompletion("SELE", 4, Collections.emptyList());
verifySqlCompletion("QU", 2, Collections.singletonList("QUIT"));
verifySqlCompletion("qu", 2, Collections.singletonList("QUIT"));
verifySqlCompletion(" qu", 2, Collections.singletonList("QUIT"));
verifySqlCompletion("set ", 3, Collections.emptyList());
verifySqlCompletion("show t ", 6, Collections.emptyList());
verifySqlCompletion("show ", 4, Collections.emptyList());
verifySqlCompletion("show modules", 12, Collections.emptyList());
}
@Test
void testHistoryFile() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
InputStream inputStream = new ByteArrayInputStream("help;\nuse catalog cat;\n".getBytes());
Path historyFilePath = historyTempFile();
try (Terminal terminal =
new DumbTerminal(inputStream, new TerminalUtils.MockOutputStream());
CliClient client =
new CliClient(() -> terminal, mockExecutor, historyFilePath, null)) {
client.executeInInteractiveMode();
List<String> content = Files.readAllLines(historyFilePath);
assertThat(content).hasSize(2);
assertThat(content.get(0)).contains("help");
assertThat(content.get(1)).contains("use catalog cat");
}
}
@Test
void testGetEOFinNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList("DESC MyOtherTable;", "SHOW TABLES");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
executeSqlFromContent(mockExecutor, content);
assertThat(mockExecutor.receivedStatement).contains(statements.get(1));
}
@Test
void testUnknownStatementInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"ERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
executeSqlFromContent(mockExecutor, content);
assertThat(statements.get(0)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
void testFailedExecutionInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.failExecution = true;
executeSqlFromContent(mockExecutor, content);
assertThat(statements.get(0)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
void testIllegalResultModeInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"SELECT * FROM MyOtherTable;",
"HELP;",
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
String output = executeSqlFromContent(mockExecutor, content);
assertThat(output)
.contains(
"In non-interactive mode, it only supports to use TABLEAU as value of "
+ "sql-client.execution.result-mode when execute query. Please add "
+ "'SET sql-client.execution.result-mode=TABLEAU;' in the sql file.");
}
@Test
void testCancelExecutionInNonInteractiveMode() throws Exception {
    final List<String> statements =
        Arrays.asList(
            "HELP;",
            "CREATE TABLE tbl( -- comment\n"
                + "-- comment with ;\n"
                + "id INT,\n"
                + "name STRING\n"
                + ") WITH (\n"
                + " 'connector' = 'values'\n"
                + ");\n",
            "INSERT INTO \n" + "MyOtherTable VALUES (1, 101), (2, 102);",
            "DESC MyOtherTable;",
            "SHOW TABLES;",
            "QUIT;\n");
    // The INSERT is the statement that blocks (sync executor) and is interrupted.
    final int hookIndex = 2;
    String content = String.join("\n", statements);
    final MockExecutor mockExecutor = new MockExecutor(new SqlParserHelper(), true);
    Path historyFilePath = historyTempFile();
    OutputStream outputStream = new ByteArrayOutputStream(256);
    try (CliClient client =
            new CliClient(
                () -> TerminalUtils.createDumbTerminal(outputStream),
                mockExecutor,
                historyFilePath,
                null)) {
        Thread thread = new Thread(() -> client.executeInNonInteractiveMode(content));
        thread.start();
        // Wait until the executor is parked inside its blocking sleep before interrupting.
        while (!mockExecutor.isAwait) {
            Thread.sleep(10);
        }
        thread.interrupt();
        // join() instead of busy-waiting on isAlive(): blocks efficiently until the
        // CLI thread has fully unwound.
        thread.join();
        assertThat(outputStream.toString())
            .contains("java.lang.InterruptedException: sleep interrupted");
    }
    // The interrupted INSERT must be the last statement the executor received.
    assertThat(statements.get(hookIndex)).isEqualTo(mockExecutor.receivedStatement.trim());
}
/**
 * Helper asserting that submitting {@code statement} through the CLI either reports
 * {@code MESSAGE_SQL_EXECUTION_ERROR} (when {@code testFailure}) or forwards the
 * statement verbatim (modulo formatting) to the executor.
 *
 * <p>The stray {@code @Test} annotation previously on this method was removed: this is a
 * private, parameterized helper, and JUnit 5 rejects private {@code @Test} methods.
 */
private void verifyUpdateSubmission(
        String statement, boolean failExecution, boolean testFailure) throws Exception {
    final MockExecutor mockExecutor = new MockExecutor();
    mockExecutor.failExecution = failExecution;
    String result = executeSqlFromContent(mockExecutor, statement);
    if (testFailure) {
        assertThat(result).contains(MESSAGE_SQL_EXECUTION_ERROR);
    } else {
        assertThat(result).doesNotContain(MESSAGE_SQL_EXECUTION_ERROR);
        // Compare formatted forms so trailing whitespace/semicolon differences don't fail.
        assertThat(SqlMultiLineParser.formatSqlFile(statement))
            .isEqualTo(SqlMultiLineParser.formatSqlFile(mockExecutor.receivedStatement));
    }
}
private void verifySqlCompletion(String statement, int position, List<String> expectedHints)
throws IOException {
final MockExecutor mockExecutor = new MockExecutor();
final SqlCompleter completer = new SqlCompleter(mockExecutor);
final SqlMultiLineParser parser =
new SqlMultiLineParser(
new SqlCommandParserImpl(),
mockExecutor,
CliClient.ExecutionMode.INTERACTIVE_EXECUTION);
try (Terminal terminal = TerminalUtils.createDumbTerminal()) {
final LineReader reader = LineReaderBuilder.builder().terminal(terminal).build();
final ParsedLine parsedLine =
parser.parse(statement, position, Parser.ParseContext.COMPLETE);
final List<Candidate> candidates = new ArrayList<>();
final List<String> results = new ArrayList<>();
completer.complete(reader, parsedLine, candidates);
candidates.forEach(item -> results.add(item.value()));
assertThat(results.containsAll(expectedHints)).isTrue();
assertThat(statement).isEqualTo(mockExecutor.receivedStatement);
assertThat(position).isEqualTo(mockExecutor.receivedPosition);
}
}
/**
 * Creates a throwaway history file for the CLI under test.
 * Uses {@link java.nio.file.Files#createTempFile} (already used elsewhere in this class)
 * instead of the legacy {@code File.createTempFile(...).toPath()} round-trip.
 */
private Path historyTempFile() throws IOException {
    return Files.createTempFile("history", "tmp");
}
/**
 * Runs {@code content} through a CliClient in non-interactive mode against the given
 * mock executor and returns everything the client wrote to its terminal.
 *
 * @param executor mock that records received statements and can simulate failures
 * @param content  full SQL script text (statements separated by semicolons/newlines)
 * @return the captured terminal output
 */
private String executeSqlFromContent(MockExecutor executor, String content) throws IOException {
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
executor,
historyTempFile(),
null)) {
client.executeInNonInteractiveMode(content);
}
return outputStream.toString();
}
/**
 * Executor stub used by the CLI tests. Records the last statement/position it received
 * and, when constructed with {@code isSync = true}, blocks inside a long sleep on
 * modify/query statements so tests can interrupt/cancel a running execution.
 */
private static class MockExecutor implements Executor {
// When true, executeStatement throws immediately instead of executing.
public boolean failExecution;
// Mirrors the TABLE_DML_SYNC setting; enables the blocking-sleep path below.
public volatile boolean isSync;
// Set just before the blocking sleep starts; tests poll this to know when to interrupt.
public volatile boolean isAwait = false;
// Last statement/completion position passed in, for assertions.
public String receivedStatement;
public int receivedPosition;
public final Configuration configuration;
private final SqlParserHelper helper;
public MockExecutor() {
this(new SqlParserHelper(), false);
}
public MockExecutor(SqlParserHelper helper, boolean isSync) {
this.helper = helper;
this.configuration = new Configuration();
this.isSync = isSync;
configuration.set(TABLE_DML_SYNC, isSync);
// Registers the test catalog tables the parser resolves against.
helper.registerTables();
}
@Override
public void configureSession(String statement) {}
@Override
public ReadableConfig getSessionConfig() {
return configuration;
}
@Override
public Map<String, String> getSessionConfigMap() {
return configuration.toMap();
}
@Override
public StatementResult executeStatement(String statement) {
receivedStatement = statement;
if (failExecution) {
throw new SqlExecutionException("Fail execution.");
}
Operation operation;
try {
operation = helper.getSqlParser().parse(statement).get(0);
} catch (Exception e) {
throw new SqlExecutionException("Failed to parse statement.", e);
}
if (operation instanceof ModifyOperation || operation instanceof QueryOperation) {
if (isSync) {
// Signal the test, then park; an interrupt surfaces as SqlExecutionException,
// which is what the cancellation tests assert on.
isAwait = true;
try {
Thread.sleep(60_000L);
} catch (InterruptedException e) {
throw new SqlExecutionException("Fail to execute", e);
}
}
// Fake single-row result (-1L) standing in for a job submission/query result.
return new StatementResult(
ResolvedSchema.of(Column.physical("result", DataTypes.BIGINT())),
CloseableIterator.adapterForIterator(
Collections.singletonList((RowData) GenericRowData.of(-1L))
.iterator()),
operation instanceof QueryOperation,
ResultKind.SUCCESS_WITH_CONTENT,
JobID.generate(),
SIMPLE_ROW_DATA_TO_STRING_CONVERTER);
} else {
// Non-DML/query statements report plain OK with no job id.
return new StatementResult(
TableResultImpl.TABLE_RESULT_OK.getResolvedSchema(),
TableResultImpl.TABLE_RESULT_OK.collectInternal(),
false,
ResultKind.SUCCESS,
null);
}
}
@Override
public List<String> completeStatement(String statement, int position) {
receivedStatement = statement;
receivedPosition = position;
return Arrays.asList(helper.getSqlParser().getCompletionHints(statement, position));
}
@Override
public void close() {
}
}
} | class CliClientTest {
private static final String INSERT_INTO_STATEMENT =
"INSERT INTO MyTable SELECT * FROM MyOtherTable";
private static final String INSERT_OVERWRITE_STATEMENT =
"INSERT OVERWRITE MyTable SELECT * FROM MyOtherTable";
private static final String ORIGIN_SQL = "SELECT pos\t FROM source_table;\n";
private static final String SQL_WITHOUT_COMPLETER = "SELECT pos FROM source_table;";
private static final String SQL_WITH_COMPLETER = "SELECT POSITION FROM source_table;";
@Test
void testUpdateSubmission() throws Exception {
verifyUpdateSubmission(INSERT_INTO_STATEMENT, false, false);
verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, false, false);
}
@Test
void testFailedUpdateSubmission() throws Exception {
verifyUpdateSubmission(INSERT_INTO_STATEMENT, true, true);
verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, true, true);
}
@Test
void testExecuteSqlFile() throws Exception {
MockExecutor executor = new MockExecutor();
executeSqlFromContent(
executor,
String.join(
";\n",
Arrays.asList(
INSERT_INTO_STATEMENT, "", INSERT_OVERWRITE_STATEMENT, "\n")));
assertThat(executor.receivedStatement).contains(INSERT_OVERWRITE_STATEMENT);
}
@Test
void testExecuteSqlFileWithoutSqlCompleter() throws Exception {
MockExecutor executor = new MockExecutor(new SqlParserHelper(), false);
executeSqlFromContent(executor, ORIGIN_SQL);
assertThat(executor.receivedStatement).contains(SQL_WITHOUT_COMPLETER);
}
@Test
void testExecuteSqlInteractiveWithSqlCompleter() throws Exception {
final MockExecutor mockExecutor = new MockExecutor(new SqlParserHelper(), false);
InputStream inputStream = new ByteArrayInputStream(ORIGIN_SQL.getBytes());
OutputStream outputStream = new ByteArrayOutputStream(256);
try (Terminal terminal = new DumbTerminal(inputStream, outputStream);
CliClient client =
new CliClient(() -> terminal, mockExecutor, historyTempFile(), null)) {
client.executeInInteractiveMode();
assertThat(mockExecutor.receivedStatement).contains(SQL_WITH_COMPLETER);
}
}
@Test
void testSqlCompletion() throws IOException {
verifySqlCompletion("", 0, Arrays.asList("CLEAR", "HELP", "EXIT", "QUIT", "RESET", "SET"));
verifySqlCompletion("SELE", 4, Collections.emptyList());
verifySqlCompletion("QU", 2, Collections.singletonList("QUIT"));
verifySqlCompletion("qu", 2, Collections.singletonList("QUIT"));
verifySqlCompletion(" qu", 2, Collections.singletonList("QUIT"));
verifySqlCompletion("set ", 3, Collections.emptyList());
verifySqlCompletion("show t ", 6, Collections.emptyList());
verifySqlCompletion("show ", 4, Collections.emptyList());
verifySqlCompletion("show modules", 12, Collections.emptyList());
}
@Test
void testHistoryFile() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
InputStream inputStream = new ByteArrayInputStream("help;\nuse catalog cat;\n".getBytes());
Path historyFilePath = historyTempFile();
try (Terminal terminal =
new DumbTerminal(inputStream, new TerminalUtils.MockOutputStream());
CliClient client =
new CliClient(() -> terminal, mockExecutor, historyFilePath, null)) {
client.executeInInteractiveMode();
List<String> content = Files.readAllLines(historyFilePath);
assertThat(content).hasSize(2);
assertThat(content.get(0)).contains("help");
assertThat(content.get(1)).contains("use catalog cat");
}
}
@Test
void testGetEOFinNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList("DESC MyOtherTable;", "SHOW TABLES");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
executeSqlFromContent(mockExecutor, content);
assertThat(mockExecutor.receivedStatement).contains(statements.get(1));
}
@Test
void testUnknownStatementInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"ERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
executeSqlFromContent(mockExecutor, content);
assertThat(statements.get(0)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
void testFailedExecutionInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.failExecution = true;
executeSqlFromContent(mockExecutor, content);
assertThat(statements.get(0)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
void testIllegalResultModeInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"SELECT * FROM MyOtherTable;",
"HELP;",
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
String output = executeSqlFromContent(mockExecutor, content);
assertThat(output)
.contains(
"In non-interactive mode, it only supports to use TABLEAU as value of "
+ "sql-client.execution.result-mode when execute query. Please add "
+ "'SET sql-client.execution.result-mode=TABLEAU;' in the sql file.");
}
@Test
void testCancelExecutionInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"HELP;",
"CREATE TABLE tbl( -- comment\n"
+ "-- comment with ;\n"
+ "id INT,\n"
+ "name STRING\n"
+ ") WITH (\n"
+ " 'connector' = 'values'\n"
+ ");\n",
"INSERT INTO \n" + "MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;",
"QUIT;\n");
final int hookIndex = 2;
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor(new SqlParserHelper(), true);
Path historyFilePath = historyTempFile();
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
mockExecutor,
historyFilePath,
null)) {
Thread thread = new Thread(() -> client.executeInNonInteractiveMode(content));
thread.start();
while (!mockExecutor.isAwait) {
Thread.sleep(10);
}
thread.interrupt();
while (thread.isAlive()) {
Thread.sleep(10);
}
assertThat(outputStream.toString())
.contains("java.lang.InterruptedException: sleep interrupted");
}
assertThat(statements.get(hookIndex)).isEqualTo(mockExecutor.receivedStatement.trim());
}
@Test
private void verifyUpdateSubmission(
String statement, boolean failExecution, boolean testFailure) throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.failExecution = failExecution;
String result = executeSqlFromContent(mockExecutor, statement);
if (testFailure) {
assertThat(result).contains(MESSAGE_SQL_EXECUTION_ERROR);
} else {
assertThat(result).doesNotContain(MESSAGE_SQL_EXECUTION_ERROR);
assertThat(SqlMultiLineParser.formatSqlFile(statement))
.isEqualTo(SqlMultiLineParser.formatSqlFile(mockExecutor.receivedStatement));
}
}
private void verifySqlCompletion(String statement, int position, List<String> expectedHints)
throws IOException {
final MockExecutor mockExecutor = new MockExecutor();
final SqlCompleter completer = new SqlCompleter(mockExecutor);
final SqlMultiLineParser parser =
new SqlMultiLineParser(
new SqlCommandParserImpl(),
mockExecutor,
CliClient.ExecutionMode.INTERACTIVE_EXECUTION);
try (Terminal terminal = TerminalUtils.createDumbTerminal()) {
final LineReader reader = LineReaderBuilder.builder().terminal(terminal).build();
final ParsedLine parsedLine =
parser.parse(statement, position, Parser.ParseContext.COMPLETE);
final List<Candidate> candidates = new ArrayList<>();
final List<String> results = new ArrayList<>();
completer.complete(reader, parsedLine, candidates);
candidates.forEach(item -> results.add(item.value()));
assertThat(results.containsAll(expectedHints)).isTrue();
assertThat(statement).isEqualTo(mockExecutor.receivedStatement);
assertThat(position).isEqualTo(mockExecutor.receivedPosition);
}
}
private Path historyTempFile() throws IOException {
return File.createTempFile("history", "tmp").toPath();
}
private String executeSqlFromContent(MockExecutor executor, String content) throws IOException {
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
executor,
historyTempFile(),
null)) {
client.executeInNonInteractiveMode(content);
}
return outputStream.toString();
}
private static class MockExecutor implements Executor {
public boolean failExecution;
public volatile boolean isSync;
public volatile boolean isAwait = false;
public String receivedStatement;
public int receivedPosition;
public final Configuration configuration;
private final SqlParserHelper helper;
public MockExecutor() {
this(new SqlParserHelper(), false);
}
public MockExecutor(SqlParserHelper helper, boolean isSync) {
this.helper = helper;
this.configuration = new Configuration();
this.isSync = isSync;
configuration.set(TABLE_DML_SYNC, isSync);
helper.registerTables();
}
@Override
public void configureSession(String statement) {}
@Override
public ReadableConfig getSessionConfig() {
return configuration;
}
@Override
public Map<String, String> getSessionConfigMap() {
return configuration.toMap();
}
@Override
public StatementResult executeStatement(String statement) {
receivedStatement = statement;
if (failExecution) {
throw new SqlExecutionException("Fail execution.");
}
Operation operation;
try {
operation = helper.getSqlParser().parse(statement).get(0);
} catch (Exception e) {
throw new SqlExecutionException("Failed to parse statement.", e);
}
if (operation instanceof ModifyOperation || operation instanceof QueryOperation) {
if (isSync) {
isAwait = true;
try {
Thread.sleep(60_000L);
} catch (InterruptedException e) {
throw new SqlExecutionException("Fail to execute", e);
}
}
return new StatementResult(
ResolvedSchema.of(Column.physical("result", DataTypes.BIGINT())),
CloseableIterator.adapterForIterator(
Collections.singletonList((RowData) GenericRowData.of(-1L))
.iterator()),
operation instanceof QueryOperation,
ResultKind.SUCCESS_WITH_CONTENT,
JobID.generate(),
SIMPLE_ROW_DATA_TO_STRING_CONVERTER);
} else {
return new StatementResult(
TableResultImpl.TABLE_RESULT_OK.getResolvedSchema(),
TableResultImpl.TABLE_RESULT_OK.collectInternal(),
false,
ResultKind.SUCCESS,
null);
}
}
@Override
public List<String> completeStatement(String statement, int position) {
receivedStatement = statement;
receivedPosition = position;
return Arrays.asList(helper.getSqlParser().getCompletionHints(statement, position));
}
@Override
public void close() {
}
}
} |
Can we make this initilization as a default approach, rather than in an else block | public void visit(BLangTypeDefinition astTypeDefinition) {
Visibility visibility = getVisibility(astTypeDefinition.symbol);
List<BIRFunction> attachedFuncs;
int typeTag = astTypeDefinition.symbol.type.tag;
if (typeTag == TypeTags.OBJECT || typeTag == TypeTags.RECORD) {
attachedFuncs = new ArrayList<>();
} else {
attachedFuncs = null;
}
BIRTypeDefinition typeDef = new BIRTypeDefinition(astTypeDefinition.pos,
astTypeDefinition.symbol.name,
visibility,
astTypeDefinition.typeNode.type,
attachedFuncs);
typeDefs.put(astTypeDefinition.symbol, typeDef);
this.env.enclPkg.typeDefs.add(typeDef);
typeDef.index = this.env.enclPkg.typeDefs.size() - 1;
} | attachedFuncs = null; | public void visit(BLangTypeDefinition astTypeDefinition) {
Visibility visibility = getVisibility(astTypeDefinition.symbol);
List<BIRFunction> attachedFuncs;
int typeTag = astTypeDefinition.symbol.type.tag;
if (typeTag == TypeTags.OBJECT || typeTag == TypeTags.RECORD) {
attachedFuncs = new ArrayList<>();
} else {
attachedFuncs = null;
}
BIRTypeDefinition typeDef = new BIRTypeDefinition(astTypeDefinition.pos,
astTypeDefinition.symbol.name,
visibility,
astTypeDefinition.typeNode.type,
attachedFuncs);
typeDefs.put(astTypeDefinition.symbol, typeDef);
this.env.enclPkg.typeDefs.add(typeDef);
typeDef.index = this.env.enclPkg.typeDefs.size() - 1;
} | class BIRGen extends BLangNodeVisitor {
private static final CompilerContext.Key<BIRGen> BIR_GEN =
new CompilerContext.Key<>();
private BIRGenEnv env;
private Names names;
private final SymbolTable symTable;
private boolean varAssignment = false;
private Map<BTypeSymbol, BIRTypeDefinition> typeDefs = new LinkedHashMap<>();
public static BIRGen getInstance(CompilerContext context) {
BIRGen birGen = context.get(BIR_GEN);
if (birGen == null) {
birGen = new BIRGen(context);
}
return birGen;
}
private BIRGen(CompilerContext context) {
context.put(BIR_GEN, this);
this.names = Names.getInstance(context);
this.symTable = SymbolTable.getInstance(context);
}
public BLangPackage genBIR(BLangPackage astPkg) {
astPkg.accept(this);
return astPkg;
}
public void visit(BLangPackage astPkg) {
BIRPackage birPkg = new BIRPackage(astPkg.pos, astPkg.packageID.orgName,
astPkg.packageID.name, astPkg.packageID.version);
astPkg.symbol.bir = birPkg;
this.env = new BIRGenEnv(birPkg);
astPkg.imports.forEach(impPkg -> impPkg.accept(this));
astPkg.typeDefinitions.forEach(astTypeDef -> astTypeDef.accept(this));
astPkg.globalVars.forEach(astGlobalVar -> astGlobalVar.accept(this));
astPkg.initFunction.accept(this);
astPkg.functions.forEach(astFunc -> astFunc.accept(this));
}
public void visit(BLangImportPackage impPkg) {
this.env.enclPkg.importModules.add(new BIRNode.BIRImportModule(impPkg.pos, impPkg.symbol.pkgID.orgName,
impPkg.symbol.pkgID.name, impPkg.symbol.pkgID.version));
}
public void visit(BLangFunction astFunc) {
Visibility visibility = getVisibility(astFunc.symbol);
BInvokableType type = astFunc.symbol.getType();
BIRFunction birFunc = new BIRFunction(astFunc.pos, astFunc.symbol.name, visibility, type);
birFunc.isDeclaration = Symbols.isNative(astFunc.symbol);
birFunc.argsCount = astFunc.requiredParams.size() +
astFunc.defaultableParams.size() + (astFunc.restParam != null ? 1 : 0);
if (astFunc.flagSet.contains(Flag.ATTACHED)) {
BTypeSymbol tsymbol = astFunc.receiver.type.tsymbol;
typeDefs.get(tsymbol).attachedFuncs.add(birFunc);
} else {
this.env.enclPkg.functions.add(birFunc);
}
this.env.enclFunc = birFunc;
if (astFunc.symbol.retType != null && astFunc.symbol.retType.tag != TypeTags.NIL) {
BIRVariableDcl retVarDcl = new BIRVariableDcl(astFunc.pos, astFunc.symbol.retType,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.RETURN);
birFunc.localVars.add(retVarDcl);
}
for (BLangVariable requiredParam : astFunc.requiredParams) {
BIRVariableDcl birVarDcl = new BIRVariableDcl(requiredParam.pos, requiredParam.symbol.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.ARG);
birFunc.localVars.add(birVarDcl);
this.env.symbolVarMap.put(requiredParam.symbol, birVarDcl);
}
BIRBasicBlock entryBB = new BIRBasicBlock(this.env.nextBBId(names));
birFunc.basicBlocks.add(entryBB);
this.env.enclBB = entryBB;
astFunc.body.accept(this);
birFunc.basicBlocks.add(this.env.returnBB);
this.env.clear();
birFunc.basicBlocks.forEach(bb -> bb.id = this.env.nextBBId(names));
this.env.clear();
}
public void visit(BLangBlockStmt astBlockStmt) {
for (BLangStatement astStmt : astBlockStmt.stmts) {
astStmt.accept(this);
}
BIRBasicBlock enclBB = this.env.enclBB;
if (enclBB.instructions.size() == 0 && enclBB.terminator == null) {
enclBB.terminator = new BIRTerminator.GOTO(null, this.env.returnBB);
}
}
public void visit(BLangSimpleVariableDef astVarDefStmt) {
BIRVariableDcl birVarDcl = new BIRVariableDcl(astVarDefStmt.pos, astVarDefStmt.var.symbol.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.LOCAL);
this.env.enclFunc.localVars.add(birVarDcl);
this.env.symbolVarMap.put(astVarDefStmt.var.symbol, birVarDcl);
if (astVarDefStmt.var.expr == null) {
return;
}
astVarDefStmt.var.expr.accept(this);
BIROperand varRef = new BIROperand(birVarDcl);
emit(new Move(astVarDefStmt.pos, this.env.targetOperand, varRef));
}
public void visit(BLangSimpleVariable varNode) {
Visibility visibility = getVisibility(varNode.symbol);
BIRGlobalVariableDcl birVarDcl = new BIRGlobalVariableDcl(varNode.pos, visibility, varNode.symbol.type,
this.env.nextGlobalVarId(names), VarScope.GLOBAL, VarKind.GLOBAL);
this.env.enclPkg.globalVars.add(birVarDcl);
this.env.globalVarMap.put(varNode.symbol, birVarDcl);
}
public void visit(BLangAssignment astAssignStmt) {
astAssignStmt.expr.accept(this);
this.varAssignment = true;
astAssignStmt.varRef.accept(this);
this.varAssignment = false;
}
public void visit(BLangExpressionStmt exprStmtNode) {
exprStmtNode.expr.accept(this);
}
public void visit(BLangInvocation invocationExpr) {
BIRBasicBlock thenBB = new BIRBasicBlock(this.env.nextBBId(names));
this.env.enclFunc.basicBlocks.add(thenBB);
List<BLangExpression> requiredArgs = invocationExpr.requiredArgs;
List<BLangExpression> restArgs = invocationExpr.restArgs;
List<BIROperand> args = new ArrayList<>();
for (BLangExpression requiredArg : requiredArgs) {
requiredArg.accept(this);
args.add(this.env.targetOperand);
}
for (BLangExpression arg : restArgs) {
if (arg instanceof BLangArrayLiteral) {
BLangArrayLiteral arrArg = (BLangArrayLiteral) arg;
List<BLangExpression> exprs = arrArg.exprs;
for (BLangExpression expr : exprs) {
if (expr instanceof BLangTypeConversionExpr) {
BLangExpression innerExpr = ((BLangTypeConversionExpr) expr).expr;
innerExpr.accept(this);
args.add(this.env.targetOperand);
} else {
expr.accept(this);
args.add(this.env.targetOperand);
}
}
}
}
BIROperand lhsOp = null;
if (invocationExpr.type.tag != TypeTags.NIL) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(invocationExpr.type, this.env.nextLocalVarId(names),
VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
lhsOp = new BIROperand(tempVarDcl);
this.env.targetOperand = lhsOp;
}
if (!invocationExpr.async) {
this.env.enclBB.terminator = new BIRTerminator.Call(invocationExpr.pos,
InstructionKind.CALL, invocationExpr.symbol.pkgID,
names.fromString(invocationExpr.name.value), args, lhsOp, thenBB);
} else {
this.env.enclBB.terminator = new BIRTerminator.AsyncCall(invocationExpr.pos,
InstructionKind.ASYNC_CALL, invocationExpr.symbol.pkgID,
names.fromString(invocationExpr.name.value), args, lhsOp, thenBB);
}
this.env.enclBB = thenBB;
}
public void visit(BLangReturn astReturnStmt) {
if (astReturnStmt.expr.type.tag != TypeTags.NIL) {
astReturnStmt.expr.accept(this);
BIROperand retVarRef = new BIROperand(this.env.enclFunc.localVars.get(0));
emit(new Move(astReturnStmt.pos, this.env.targetOperand, retVarRef));
}
if (this.env.returnBB == null) {
BIRBasicBlock returnBB = new BIRBasicBlock(this.env.nextBBId(names));
returnBB.terminator = new BIRTerminator.Return(astReturnStmt.pos);
this.env.returnBB = returnBB;
}
this.env.enclBB.terminator = new BIRTerminator.GOTO(astReturnStmt.pos, this.env.returnBB);
}
public void visit(BLangIf astIfStmt) {
astIfStmt.expr.accept(this);
BIROperand ifExprResult = this.env.targetOperand;
BIRBasicBlock thenBB = new BIRBasicBlock(this.env.nextBBId(names));
this.env.enclFunc.basicBlocks.add(thenBB);
BIRBasicBlock nextBB = new BIRBasicBlock(this.env.nextBBId(names));
BIRTerminator.Branch branchIns = new BIRTerminator.Branch(astIfStmt.pos, ifExprResult, thenBB, null);
this.env.enclBB.terminator = branchIns;
this.env.enclBB = thenBB;
astIfStmt.body.accept(this);
if (this.env.enclBB.terminator == null) {
this.env.enclBB.terminator = new BIRTerminator.GOTO(null, nextBB);
}
if (astIfStmt.elseStmt != null) {
BIRBasicBlock elseBB = new BIRBasicBlock(this.env.nextBBId(names));
this.env.enclFunc.basicBlocks.add(elseBB);
branchIns.falseBB = elseBB;
this.env.enclBB = elseBB;
astIfStmt.elseStmt.accept(this);
if (this.env.enclBB.terminator == null) {
this.env.enclBB.terminator = new BIRTerminator.GOTO(null, nextBB);
}
} else {
branchIns.falseBB = nextBB;
}
this.env.enclFunc.basicBlocks.add(nextBB);
this.env.enclBB = nextBB;
}
public void visit(BLangWhile astWhileStmt) {
BIRBasicBlock whileExprBB = new BIRBasicBlock(this.env.nextBBId(names));
this.env.enclFunc.basicBlocks.add(whileExprBB);
this.env.enclBB.terminator = new BIRTerminator.GOTO(astWhileStmt.pos, whileExprBB);
this.env.enclBB = whileExprBB;
astWhileStmt.expr.accept(this);
BIROperand whileExprResult = this.env.targetOperand;
BIRBasicBlock whileBodyBB = new BIRBasicBlock(this.env.nextBBId(names));
this.env.enclFunc.basicBlocks.add(whileBodyBB);
BIRBasicBlock whileEndBB = new BIRBasicBlock(this.env.nextBBId(names));
whileExprBB.terminator = new BIRTerminator.Branch(astWhileStmt.pos, whileExprResult, whileBodyBB, whileEndBB);
this.env.enclBB = whileBodyBB;
astWhileStmt.body.accept(this);
if (this.env.enclBB.terminator == null) {
this.env.enclBB.terminator = new BIRTerminator.GOTO(null, whileExprBB);
} else {
throw new RuntimeException("there cannot be a terminator in while body basic block");
}
this.env.enclFunc.basicBlocks.add(whileEndBB);
this.env.enclBB = whileEndBB;
}
public void visit(BLangLiteral astLiteralExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astLiteralExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
emit(new BIRNonTerminator.ConstantLoad(astLiteralExpr.pos,
astLiteralExpr.value, astLiteralExpr.type, toVarRef));
this.env.targetOperand = toVarRef;
}
public void visit(BLangMapLiteral astMapLiteralExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astMapLiteralExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
emit(new BIRNonTerminator.NewStructure(astMapLiteralExpr.pos, astMapLiteralExpr.type, toVarRef));
this.env.targetOperand = toVarRef;
for (BLangRecordKeyValue keyValue : astMapLiteralExpr.keyValuePairs) {
BLangExpression keyExpr = keyValue.key.expr;
keyExpr.accept(this);
BIROperand keyRegIndex = this.env.targetOperand;
BLangExpression valueExpr = keyValue.valueExpr;
valueExpr.accept(this);
BIROperand rhsOp = this.env.targetOperand;
emit(new BIRNonTerminator.FieldAccess(astMapLiteralExpr.pos,
InstructionKind.MAP_STORE, toVarRef, keyRegIndex, rhsOp));
}
this.env.targetOperand = toVarRef;
}
public void visit(BLangTypeConversionExpr astTypeConversionExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astTypeConversionExpr.targetType,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
astTypeConversionExpr.expr.accept(this);
BIROperand rhsOp = this.env.targetOperand;
emit(new BIRNonTerminator.TypeCast(astTypeConversionExpr.pos, toVarRef, rhsOp));
this.env.targetOperand = toVarRef;
}
public void visit(BLangStructLiteral astStructLiteralExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astStructLiteralExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
emit(new BIRNonTerminator.NewStructure(astStructLiteralExpr.pos, astStructLiteralExpr.type, toVarRef));
this.env.targetOperand = toVarRef;
BRecordTypeSymbol structSymbol = (BRecordTypeSymbol) astStructLiteralExpr.type.tsymbol;
if (astStructLiteralExpr.initializer != null) {
}
for (BLangRecordKeyValue keyValue : astStructLiteralExpr.keyValuePairs) {
BLangRecordKey key = keyValue.key;
key.expr.accept(this);
BIROperand keyRegIndex = this.env.targetOperand;
keyValue.valueExpr.accept(this);
BIROperand valueRegIndex = this.env.targetOperand;
emit(new FieldAccess(astStructLiteralExpr.pos,
InstructionKind.MAP_STORE, toVarRef, keyRegIndex, valueRegIndex));
}
this.env.targetOperand = toVarRef;
}
@Override
public void visit(BLangTypeInit connectorInitExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(connectorInitExpr.type, this.env.nextLocalVarId(names),
VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
emit(new BIRNonTerminator.NewInstance(connectorInitExpr.pos,
typeDefs.get(connectorInitExpr.type.tsymbol),
toVarRef));
this.env.targetOperand = toVarRef;
}
@Override
public void visit(BLangSimpleVarRef.BLangFieldVarRef fieldVarRef) {
}
public void visit(BLangArrayLiteral astArrayLiteralExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astArrayLiteralExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
long size = astArrayLiteralExpr.type.tag == TypeTags.ARRAY &&
((BArrayType) astArrayLiteralExpr.type).state != BArrayState.UNSEALED ?
(long) ((BArrayType) astArrayLiteralExpr.type).size : -1L;
BLangLiteral literal = new BLangLiteral();
literal.pos = astArrayLiteralExpr.pos;
literal.value = size;
literal.type = symTable.intType;
literal.accept(this);
BIROperand sizeOp = this.env.targetOperand;
emit(new BIRNonTerminator.NewArray(astArrayLiteralExpr.pos, astArrayLiteralExpr.type, toVarRef, sizeOp));
for (int i = 0; i < astArrayLiteralExpr.exprs.size(); i++) {
BLangExpression argExpr = astArrayLiteralExpr.exprs.get(i);
argExpr.accept(this);
BIROperand exprIndex = this.env.targetOperand;
BLangLiteral indexLiteral = new BLangLiteral();
indexLiteral.pos = astArrayLiteralExpr.pos;
indexLiteral.value = (long) i;
indexLiteral.type = symTable.intType;
indexLiteral.accept(this);
BIROperand arrayIndex = this.env.targetOperand;
emit(new BIRNonTerminator.FieldAccess(astArrayLiteralExpr.pos,
InstructionKind.ARRAY_STORE, toVarRef, arrayIndex, exprIndex));
}
this.env.targetOperand = toVarRef;
}
@Override
public void visit(BLangMapAccessExpr astMapAccessExpr) {
visitIndexBased(astMapAccessExpr);
}
@Override
public void visit(BLangStructFieldAccessExpr astStructFieldAccessExpr) {
visitIndexBased(astStructFieldAccessExpr);
}
private void visitIndexBased(BLangIndexBasedAccess astIndexBasedAccessExpr) {
boolean variableStore = this.varAssignment;
this.varAssignment = false;
if (variableStore) {
BIROperand rhsOp = this.env.targetOperand;
astIndexBasedAccessExpr.expr.accept(this);
BIROperand varRefRegIndex = this.env.targetOperand;
astIndexBasedAccessExpr.indexExpr.accept(this);
BIROperand keyRegIndex = this.env.targetOperand;
emit(new BIRNonTerminator.FieldAccess(astIndexBasedAccessExpr.pos,
InstructionKind.MAP_STORE, varRefRegIndex, keyRegIndex, rhsOp));
} else {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astIndexBasedAccessExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand tempVarRef = new BIROperand(tempVarDcl);
astIndexBasedAccessExpr.expr.accept(this);
BIROperand varRefRegIndex = this.env.targetOperand;
astIndexBasedAccessExpr.indexExpr.accept(this);
BIROperand keyRegIndex = this.env.targetOperand;
emit(new BIRNonTerminator.FieldAccess(astIndexBasedAccessExpr.pos,
InstructionKind.MAP_LOAD, tempVarRef, keyRegIndex, varRefRegIndex));
this.env.targetOperand = tempVarRef;
}
this.varAssignment = variableStore;
}
public void visit(BLangArrayAccessExpr astArrayAccessExpr) {
boolean variableStore = this.varAssignment;
this.varAssignment = false;
if (variableStore) {
BIROperand rhsOp = this.env.targetOperand;
astArrayAccessExpr.expr.accept(this);
BIROperand varRefRegIndex = this.env.targetOperand;
astArrayAccessExpr.indexExpr.accept(this);
BIROperand keyRegIndex = this.env.targetOperand;
emit(new BIRNonTerminator.FieldAccess(astArrayAccessExpr.pos,
InstructionKind.ARRAY_STORE, varRefRegIndex, keyRegIndex, rhsOp));
} else {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astArrayAccessExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand tempVarRef = new BIROperand(tempVarDcl);
astArrayAccessExpr.expr.accept(this);
BIROperand varRefRegIndex = this.env.targetOperand;
astArrayAccessExpr.indexExpr.accept(this);
BIROperand keyRegIndex = this.env.targetOperand;
emit(new BIRNonTerminator.FieldAccess(astArrayAccessExpr.pos,
InstructionKind.ARRAY_LOAD, tempVarRef, keyRegIndex, varRefRegIndex));
this.env.targetOperand = tempVarRef;
}
this.varAssignment = variableStore;
}
@Override
public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(bracedOrTupleExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
long size = bracedOrTupleExpr.expressions.size();
BLangLiteral literal = new BLangLiteral();
literal.pos = bracedOrTupleExpr.pos;
literal.value = size;
literal.type = symTable.intType;
literal.accept(this);
BIROperand sizeOp = this.env.targetOperand;
emit(new BIRNonTerminator.NewArray(bracedOrTupleExpr.pos, bracedOrTupleExpr.type, toVarRef, sizeOp));
for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) {
BLangExpression argExpr = bracedOrTupleExpr.expressions.get(i);
argExpr.accept(this);
BIROperand exprIndex = this.env.targetOperand;
BLangLiteral indexLiteral = new BLangLiteral();
indexLiteral.pos = bracedOrTupleExpr.pos;
indexLiteral.value = (long) i;
indexLiteral.type = symTable.intType;
indexLiteral.accept(this);
BIROperand arrayIndex = this.env.targetOperand;
emit(new BIRNonTerminator.FieldAccess(bracedOrTupleExpr.pos,
InstructionKind.ARRAY_STORE, toVarRef, arrayIndex, exprIndex));
}
this.env.targetOperand = toVarRef;
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(symTable.booleanType,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
isLikeExpr.expr.accept(this);
BIROperand exprIndex = this.env.targetOperand;
emit(new BIRNonTerminator.IsLike(isLikeExpr.pos, isLikeExpr.typeNode.type, toVarRef, exprIndex));
this.env.targetOperand = toVarRef;
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(symTable.booleanType,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand toVarRef = new BIROperand(tempVarDcl);
typeTestExpr.expr.accept(this);
BIROperand exprIndex = this.env.targetOperand;
emit(new BIRNonTerminator.TypeTest(typeTestExpr.pos, typeTestExpr.typeNode.type, toVarRef, exprIndex));
this.env.targetOperand = toVarRef;
}
@Override
public void visit(BLangLocalVarRef astVarRefExpr) {
boolean variableStore = this.varAssignment;
this.varAssignment = false;
if (variableStore) {
BIROperand varRef = new BIROperand(this.env.symbolVarMap.get(astVarRefExpr.symbol));
emit(new Move(astVarRefExpr.pos, this.env.targetOperand, varRef));
} else {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astVarRefExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand tempVarRef = new BIROperand(tempVarDcl);
BIROperand fromVarRef = new BIROperand(this.env.symbolVarMap.get(astVarRefExpr.symbol));
emit(new Move(astVarRefExpr.pos, fromVarRef, tempVarRef));
this.env.targetOperand = tempVarRef;
}
this.varAssignment = variableStore;
}
public void visit(BLangPackageVarRef astPackageVarRefExpr) {
boolean variableStore = this.varAssignment;
this.varAssignment = false;
if (variableStore) {
BIROperand varRef = new BIROperand(this.env.globalVarMap.get(astPackageVarRefExpr.symbol));
emit(new Move(astPackageVarRefExpr.pos, this.env.targetOperand, varRef));
} else {
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astPackageVarRefExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand tempVarRef = new BIROperand(tempVarDcl);
BIROperand fromVarRef = new BIROperand(this.env.globalVarMap.get(astPackageVarRefExpr.symbol));
emit(new Move(astPackageVarRefExpr.pos, fromVarRef, tempVarRef));
this.env.targetOperand = tempVarRef;
}
this.varAssignment = variableStore;
}
public void visit(BLangBinaryExpr astBinaryExpr) {
astBinaryExpr.lhsExpr.accept(this);
BIROperand rhsOp1 = this.env.targetOperand;
astBinaryExpr.rhsExpr.accept(this);
BIROperand rhsOp2 = this.env.targetOperand;
BIRVariableDcl tempVarDcl = new BIRVariableDcl(astBinaryExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarDcl);
BIROperand lhsOp = new BIROperand(tempVarDcl);
this.env.targetOperand = lhsOp;
BinaryOp binaryIns = new BinaryOp(astBinaryExpr.pos, getBinaryInstructionKind(astBinaryExpr.opKind),
astBinaryExpr.type, lhsOp, rhsOp1, rhsOp2);
emit(binaryIns);
}
public void visit(BLangErrorConstructorExpr errorExpr) {
BIRVariableDcl tempVarError = new BIRVariableDcl(errorExpr.type,
this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
this.env.enclFunc.localVars.add(tempVarError);
BIROperand lhsOp = new BIROperand(tempVarError);
this.env.targetOperand = lhsOp;
errorExpr.reasonExpr.accept(this);
BIROperand reasonOp = this.env.targetOperand;
errorExpr.detailsExpr.accept(this);
BIROperand detailsOp = this.env.targetOperand;
BIRNonTerminator.NewError newError = new BIRNonTerminator.NewError(errorExpr.pos, InstructionKind.NEW_ERROR,
lhsOp, reasonOp, detailsOp);
emit(newError);
this.env.targetOperand = lhsOp;
}
private Visibility getVisibility(BSymbol symbol) {
if (Symbols.isPublic(symbol)) {
return Visibility.PUBLIC;
} else if (Symbols.isPrivate(symbol)) {
return Visibility.PRIVATE;
} else {
return Visibility.PACKAGE_PRIVATE;
}
}
private void emit(BIRInstruction instruction) {
this.env.enclBB.instructions.add(instruction);
}
private InstructionKind getBinaryInstructionKind(OperatorKind opKind) {
switch (opKind) {
case ADD:
return InstructionKind.ADD;
case SUB:
return InstructionKind.SUB;
case MUL:
return InstructionKind.MUL;
case DIV:
return InstructionKind.DIV;
case MOD:
return InstructionKind.MOD;
case EQUAL:
return InstructionKind.EQUAL;
case NOT_EQUAL:
return InstructionKind.NOT_EQUAL;
case GREATER_THAN:
return InstructionKind.GREATER_THAN;
case GREATER_EQUAL:
return InstructionKind.GREATER_EQUAL;
case LESS_THAN:
return InstructionKind.LESS_THAN;
case LESS_EQUAL:
return InstructionKind.LESS_EQUAL;
default:
throw new IllegalStateException("unsupported binary operation: " + opKind.value());
}
}
} | class BIRGen extends BLangNodeVisitor {
// Per-CompilerContext singleton key.
private static final CompilerContext.Key<BIRGen> BIR_GEN =
        new CompilerContext.Key<>();
// Mutable generation state: current package, function, basic block, operands.
private BIRGenEnv env;
private Names names;
private final SymbolTable symTable;
// True while visiting the LHS of an assignment; flips var-ref visits into store mode.
private boolean varAssignment = false;
// Type symbol -> generated BIR type definition (used for attached functions and new-instance).
private Map<BTypeSymbol, BIRTypeDefinition> typeDefs = new LinkedHashMap<>();
/**
 * Returns the BIRGen registered in the given context, creating (and
 * self-registering, via the constructor) one on first use.
 */
public static BIRGen getInstance(CompilerContext context) {
    BIRGen instance = context.get(BIR_GEN);
    return instance != null ? instance : new BIRGen(context);
}
// Private: obtain instances through getInstance(). Registers itself in the context.
private BIRGen(CompilerContext context) {
    context.put(BIR_GEN, this);
    this.names = Names.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
}
/**
 * Entry point: generates BIR for the package by visiting its AST and
 * returns the same (now BIR-annotated) package node.
 */
public BLangPackage genBIR(BLangPackage astPkg) {
    astPkg.accept(this);
    return astPkg;
}
/**
 * Creates the BIRPackage and visits package members. Order matters:
 * type definitions must exist before functions (attached functions are
 * added to their type's def), and the init function runs before others.
 */
public void visit(BLangPackage astPkg) {
    BIRPackage birPkg = new BIRPackage(astPkg.pos, astPkg.packageID.orgName,
            astPkg.packageID.name, astPkg.packageID.version);
    astPkg.symbol.bir = birPkg;
    this.env = new BIRGenEnv(birPkg);
    astPkg.imports.forEach(impPkg -> impPkg.accept(this));
    astPkg.typeDefinitions.forEach(astTypeDef -> astTypeDef.accept(this));
    astPkg.globalVars.forEach(astGlobalVar -> astGlobalVar.accept(this));
    astPkg.initFunction.accept(this);
    astPkg.functions.forEach(astFunc -> astFunc.accept(this));
}
// Records each import as a BIR import-module entry on the enclosing package.
public void visit(BLangImportPackage impPkg) {
    this.env.enclPkg.importModules.add(new BIRNode.BIRImportModule(impPkg.pos, impPkg.symbol.pkgID.orgName,
            impPkg.symbol.pkgID.name, impPkg.symbol.pkgID.version));
}
/**
 * Generates a BIRFunction for an AST function: declares the return and
 * parameter variables, builds the entry basic block, lowers the body,
 * then renumbers all basic blocks to a stable order.
 */
public void visit(BLangFunction astFunc) {
    Visibility visibility = getVisibility(astFunc.symbol);
    BInvokableType type = astFunc.symbol.getType();
    BIRFunction birFunc = new BIRFunction(astFunc.pos, astFunc.symbol.name, visibility, type);
    birFunc.isDeclaration = Symbols.isNative(astFunc.symbol);
    birFunc.argsCount = astFunc.requiredParams.size() +
            astFunc.defaultableParams.size() + (astFunc.restParam != null ? 1 : 0);
    if (astFunc.flagSet.contains(Flag.ATTACHED)) {
        // Attached functions hang off their receiver type's BIR type definition.
        BTypeSymbol tsymbol = astFunc.receiver.type.tsymbol;
        typeDefs.get(tsymbol).attachedFuncs.add(birFunc);
    } else {
        this.env.enclPkg.functions.add(birFunc);
    }
    this.env.enclFunc = birFunc;
    // The RETURN variable, when present, is always localVars[0] — visit(BLangReturn) relies on this.
    if (astFunc.symbol.retType != null && astFunc.symbol.retType.tag != TypeTags.NIL) {
        BIRVariableDcl retVarDcl = new BIRVariableDcl(astFunc.pos, astFunc.symbol.retType,
                this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.RETURN);
        birFunc.localVars.add(retVarDcl);
    }
    for (BLangVariable requiredParam : astFunc.requiredParams) {
        BIRVariableDcl birVarDcl = new BIRVariableDcl(requiredParam.pos, requiredParam.symbol.type,
                this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.ARG);
        birFunc.localVars.add(birVarDcl);
        this.env.symbolVarMap.put(requiredParam.symbol, birVarDcl);
    }
    BIRBasicBlock entryBB = new BIRBasicBlock(this.env.nextBBId(names));
    birFunc.basicBlocks.add(entryBB);
    this.env.enclBB = entryBB;
    astFunc.body.accept(this);
    birFunc.basicBlocks.add(this.env.returnBB);
    // First clear() resets the BB id counter so the renumbering below starts fresh;
    // the second clear() resets all state for the next function. Presumably
    // intentional — confirm against BIRGenEnv.clear() semantics before changing.
    this.env.clear();
    birFunc.basicBlocks.forEach(bb -> bb.id = this.env.nextBBId(names));
    this.env.clear();
}
/**
 * Lowers each statement of a block. If the current basic block ends up
 * empty and unterminated (e.g. an empty function body), jump straight
 * to the shared return block so the CFG stays well formed.
 */
public void visit(BLangBlockStmt astBlockStmt) {
    for (BLangStatement astStmt : astBlockStmt.stmts) {
        astStmt.accept(this);
    }
    BIRBasicBlock enclBB = this.env.enclBB;
    if (enclBB.instructions.size() == 0 && enclBB.terminator == null) {
        enclBB.terminator = new BIRTerminator.GOTO(null, this.env.returnBB);
    }
}
/**
 * Declares a local variable and, when an initializer is present, lowers it
 * and moves its result into the new variable.
 */
public void visit(BLangSimpleVariableDef astVarDefStmt) {
    BIRVariableDcl birVarDcl = new BIRVariableDcl(astVarDefStmt.pos, astVarDefStmt.var.symbol.type,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.LOCAL);
    this.env.enclFunc.localVars.add(birVarDcl);
    // Remember the mapping so later var refs resolve to this BIR variable.
    this.env.symbolVarMap.put(astVarDefStmt.var.symbol, birVarDcl);
    if (astVarDefStmt.var.expr == null) {
        return; // declaration without initializer
    }
    astVarDefStmt.var.expr.accept(this);
    BIROperand varRef = new BIROperand(birVarDcl);
    // Move the initializer's result (left in targetOperand) into the variable.
    emit(new Move(astVarDefStmt.pos, this.env.targetOperand, varRef));
}
// Declares a module-level (global) variable and records its symbol mapping.
public void visit(BLangSimpleVariable varNode) {
    Visibility visibility = getVisibility(varNode.symbol);
    BIRGlobalVariableDcl birVarDcl = new BIRGlobalVariableDcl(varNode.pos, visibility, varNode.symbol.type,
            this.env.nextGlobalVarId(names), VarScope.GLOBAL, VarKind.GLOBAL);
    this.env.enclPkg.globalVars.add(birVarDcl);
    this.env.globalVarMap.put(varNode.symbol, birVarDcl);
}
/**
 * Lowers an assignment: first evaluates the RHS (result lands in
 * targetOperand), then visits the LHS with varAssignment set so the
 * var-ref visitor emits a store instead of a load.
 */
public void visit(BLangAssignment astAssignStmt) {
    astAssignStmt.expr.accept(this);
    this.varAssignment = true;
    astAssignStmt.varRef.accept(this);
    this.varAssignment = false;
}
// An expression statement lowers to its expression; the result is discarded.
public void visit(BLangExpressionStmt exprStmtNode) {
    exprStmtNode.expr.accept(this);
}
/**
 * Lowers a function invocation into a CALL/ASYNC_CALL terminator. A call
 * ends the current basic block; execution continues in a fresh "then" block.
 */
public void visit(BLangInvocation invocationExpr) {
    BIRBasicBlock thenBB = new BIRBasicBlock(this.env.nextBBId(names));
    this.env.enclFunc.basicBlocks.add(thenBB);
    List<BLangExpression> requiredArgs = invocationExpr.requiredArgs;
    List<BLangExpression> restArgs = invocationExpr.restArgs;
    List<BIROperand> args = new ArrayList<>();
    for (BLangExpression requiredArg : requiredArgs) {
        requiredArg.accept(this);
        args.add(this.env.targetOperand);
    }
    // Rest args are flattened out of their array literal; conversion wrappers
    // are unwrapped to the underlying expression.
    // NOTE(review): rest args that are NOT array literals are silently
    // skipped here — confirm whether that case can occur at this stage.
    for (BLangExpression arg : restArgs) {
        if (arg instanceof BLangArrayLiteral) {
            BLangArrayLiteral arrArg = (BLangArrayLiteral) arg;
            List<BLangExpression> exprs = arrArg.exprs;
            for (BLangExpression expr : exprs) {
                if (expr instanceof BLangTypeConversionExpr) {
                    BLangExpression innerExpr = ((BLangTypeConversionExpr) expr).expr;
                    innerExpr.accept(this);
                    args.add(this.env.targetOperand);
                } else {
                    expr.accept(this);
                    args.add(this.env.targetOperand);
                }
            }
        }
    }
    // Only allocate a result temp when the callee actually returns a value.
    BIROperand lhsOp = null;
    if (invocationExpr.type.tag != TypeTags.NIL) {
        BIRVariableDcl tempVarDcl = new BIRVariableDcl(invocationExpr.type, this.env.nextLocalVarId(names),
                VarScope.FUNCTION, VarKind.TEMP);
        this.env.enclFunc.localVars.add(tempVarDcl);
        lhsOp = new BIROperand(tempVarDcl);
        this.env.targetOperand = lhsOp;
    }
    if (!invocationExpr.async) {
        this.env.enclBB.terminator = new BIRTerminator.Call(invocationExpr.pos,
                InstructionKind.CALL, invocationExpr.symbol.pkgID,
                names.fromString(invocationExpr.name.value), args, lhsOp, thenBB);
    } else {
        this.env.enclBB.terminator = new BIRTerminator.AsyncCall(invocationExpr.pos,
                InstructionKind.ASYNC_CALL, invocationExpr.symbol.pkgID,
                names.fromString(invocationExpr.name.value), args, lhsOp, thenBB);
    }
    // Subsequent instructions go into the continuation block.
    this.env.enclBB = thenBB;
}
/**
 * Lowers a return statement: moves the return value into the function's
 * RETURN variable (localVars[0], created in visit(BLangFunction)) and
 * jumps to the shared, lazily created return basic block.
 */
public void visit(BLangReturn astReturnStmt) {
    if (astReturnStmt.expr.type.tag != TypeTags.NIL) {
        astReturnStmt.expr.accept(this);
        BIROperand retVarRef = new BIROperand(this.env.enclFunc.localVars.get(0));
        emit(new Move(astReturnStmt.pos, this.env.targetOperand, retVarRef));
    }
    // All returns in a function funnel through one RETURN-terminated block.
    if (this.env.returnBB == null) {
        BIRBasicBlock returnBB = new BIRBasicBlock(this.env.nextBBId(names));
        returnBB.terminator = new BIRTerminator.Return(astReturnStmt.pos);
        this.env.returnBB = returnBB;
    }
    this.env.enclBB.terminator = new BIRTerminator.GOTO(astReturnStmt.pos, this.env.returnBB);
}
/**
 * Lowers if/else into a BRANCH terminator:
 * current BB --(cond)--> thenBB / elseBB (or nextBB when no else),
 * with both arms falling through to nextBB unless already terminated.
 */
public void visit(BLangIf astIfStmt) {
    astIfStmt.expr.accept(this);
    BIROperand ifExprResult = this.env.targetOperand;
    BIRBasicBlock thenBB = new BIRBasicBlock(this.env.nextBBId(names));
    this.env.enclFunc.basicBlocks.add(thenBB);
    // nextBB is registered on the function only at the end, after the arms.
    BIRBasicBlock nextBB = new BIRBasicBlock(this.env.nextBBId(names));
    BIRTerminator.Branch branchIns = new BIRTerminator.Branch(astIfStmt.pos, ifExprResult, thenBB, null);
    this.env.enclBB.terminator = branchIns;
    this.env.enclBB = thenBB;
    astIfStmt.body.accept(this);
    // The then-arm may already end in return/goto; only add a fall-through if not.
    if (this.env.enclBB.terminator == null) {
        this.env.enclBB.terminator = new BIRTerminator.GOTO(null, nextBB);
    }
    if (astIfStmt.elseStmt != null) {
        BIRBasicBlock elseBB = new BIRBasicBlock(this.env.nextBBId(names));
        this.env.enclFunc.basicBlocks.add(elseBB);
        branchIns.falseBB = elseBB;
        this.env.enclBB = elseBB;
        astIfStmt.elseStmt.accept(this);
        if (this.env.enclBB.terminator == null) {
            this.env.enclBB.terminator = new BIRTerminator.GOTO(null, nextBB);
        }
    } else {
        branchIns.falseBB = nextBB;
    }
    this.env.enclFunc.basicBlocks.add(nextBB);
    this.env.enclBB = nextBB;
}
/**
 * Lowers a while loop:
 * enclBB -> whileExprBB --(cond)--> whileBodyBB -> whileExprBB (back edge)
 *                          \--(false)--> whileEndBB.
 */
public void visit(BLangWhile astWhileStmt) {
    BIRBasicBlock whileExprBB = new BIRBasicBlock(this.env.nextBBId(names));
    this.env.enclFunc.basicBlocks.add(whileExprBB);
    this.env.enclBB.terminator = new BIRTerminator.GOTO(astWhileStmt.pos, whileExprBB);
    this.env.enclBB = whileExprBB;
    astWhileStmt.expr.accept(this);
    BIROperand whileExprResult = this.env.targetOperand;
    BIRBasicBlock whileBodyBB = new BIRBasicBlock(this.env.nextBBId(names));
    this.env.enclFunc.basicBlocks.add(whileBodyBB);
    BIRBasicBlock whileEndBB = new BIRBasicBlock(this.env.nextBBId(names));
    whileExprBB.terminator = new BIRTerminator.Branch(astWhileStmt.pos, whileExprResult, whileBodyBB, whileEndBB);
    this.env.enclBB = whileBodyBB;
    astWhileStmt.body.accept(this);
    // Close the loop with a back edge to the condition block.
    if (this.env.enclBB.terminator == null) {
        this.env.enclBB.terminator = new BIRTerminator.GOTO(null, whileExprBB);
    } else {
        throw new RuntimeException("there cannot be a terminator in while body basic block");
    }
    this.env.enclFunc.basicBlocks.add(whileEndBB);
    this.env.enclBB = whileEndBB;
}
/**
 * Materializes a constant literal: emits a CONSTANT_LOAD into a fresh temp
 * and publishes that temp as the current expression result.
 */
public void visit(BLangLiteral astLiteralExpr) {
    BIRVariableDcl constTemp = new BIRVariableDcl(astLiteralExpr.type,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(constTemp);
    BIROperand resultOp = new BIROperand(constTemp);
    emit(new BIRNonTerminator.ConstantLoad(astLiteralExpr.pos,
            astLiteralExpr.value, astLiteralExpr.type, resultOp));
    this.env.targetOperand = resultOp;
}
/**
 * Lowers a map literal: NEW_STRUCTURE for the container, then one
 * MAP_STORE per key/value pair. The container temp is the result.
 */
public void visit(BLangMapLiteral astMapLiteralExpr) {
    BIRVariableDcl tempVarDcl = new BIRVariableDcl(astMapLiteralExpr.type,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarDcl);
    BIROperand toVarRef = new BIROperand(tempVarDcl);
    emit(new BIRNonTerminator.NewStructure(astMapLiteralExpr.pos, astMapLiteralExpr.type, toVarRef));
    // NOTE(review): this assignment is immediately overwritten by the child
    // visits below and re-done at the end — looks redundant; confirm.
    this.env.targetOperand = toVarRef;
    for (BLangRecordKeyValue keyValue : astMapLiteralExpr.keyValuePairs) {
        BLangExpression keyExpr = keyValue.key.expr;
        keyExpr.accept(this);
        BIROperand keyRegIndex = this.env.targetOperand;
        BLangExpression valueExpr = keyValue.valueExpr;
        valueExpr.accept(this);
        BIROperand rhsOp = this.env.targetOperand;
        emit(new BIRNonTerminator.FieldAccess(astMapLiteralExpr.pos,
                InstructionKind.MAP_STORE, toVarRef, keyRegIndex, rhsOp));
    }
    // Restore the map temp as the expression result.
    this.env.targetOperand = toVarRef;
}
/**
 * Lowers a type conversion: evaluates the source expression, then emits a
 * TYPE_CAST into a temp of the target type, which becomes the result.
 */
public void visit(BLangTypeConversionExpr astTypeConversionExpr) {
    BIRVariableDcl castTemp = new BIRVariableDcl(astTypeConversionExpr.targetType,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(castTemp);
    BIROperand resultOp = new BIROperand(castTemp);
    astTypeConversionExpr.expr.accept(this);
    BIROperand sourceOp = this.env.targetOperand;
    emit(new BIRNonTerminator.TypeCast(astTypeConversionExpr.pos, resultOp, sourceOp));
    this.env.targetOperand = resultOp;
}
/**
 * Lowers a record/struct literal: emits NEW_STRUCTURE for the container and
 * one MAP_STORE per explicit key/value pair. The container temp is the result.
 */
public void visit(BLangStructLiteral astStructLiteralExpr) {
    BIRVariableDcl tempVarDcl = new BIRVariableDcl(astStructLiteralExpr.type,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarDcl);
    BIROperand toVarRef = new BIROperand(tempVarDcl);
    emit(new BIRNonTerminator.NewStructure(astStructLiteralExpr.pos, astStructLiteralExpr.type, toVarRef));
    this.env.targetOperand = toVarRef;
    // TODO: astStructLiteralExpr.initializer is not lowered yet (the previous
    // code only had an empty if-block and an unused symbol lookup here).
    for (BLangRecordKeyValue keyValue : astStructLiteralExpr.keyValuePairs) {
        BLangRecordKey key = keyValue.key;
        key.expr.accept(this);
        BIROperand keyRegIndex = this.env.targetOperand;
        keyValue.valueExpr.accept(this);
        BIROperand valueRegIndex = this.env.targetOperand;
        emit(new FieldAccess(astStructLiteralExpr.pos,
                InstructionKind.MAP_STORE, toVarRef, keyRegIndex, valueRegIndex));
    }
    // Children overwrote the target operand; restore the struct temp as the result.
    this.env.targetOperand = toVarRef;
}
// Lowers `new T(...)` into a NEW_INSTANCE referring to T's BIR type definition.
@Override
public void visit(BLangTypeInit connectorInitExpr) {
    BIRVariableDcl tempVarDcl = new BIRVariableDcl(connectorInitExpr.type, this.env.nextLocalVarId(names),
            VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarDcl);
    BIROperand toVarRef = new BIROperand(tempVarDcl);
    emit(new BIRNonTerminator.NewInstance(connectorInitExpr.pos,
            typeDefs.get(connectorInitExpr.type.tsymbol),
            toVarRef));
    this.env.targetOperand = toVarRef;
}
// Intentionally empty: field var refs are not lowered here yet.
// TODO confirm whether this should emit a field load/store.
@Override
public void visit(BLangSimpleVarRef.BLangFieldVarRef fieldVarRef) {
}
/**
 * Lowers an array literal: NEW_ARRAY with the (sealed) size or -1 for
 * unsealed arrays, then one ARRAY_STORE per element at its index.
 */
public void visit(BLangArrayLiteral astArrayLiteralExpr) {
    BIRVariableDcl tempVarDcl = new BIRVariableDcl(astArrayLiteralExpr.type,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarDcl);
    BIROperand toVarRef = new BIROperand(tempVarDcl);
    // -1 signals an unsealed (growable) array to the NEW_ARRAY instruction.
    long size = astArrayLiteralExpr.type.tag == TypeTags.ARRAY &&
            ((BArrayType) astArrayLiteralExpr.type).state != BArrayState.UNSEALED ?
            (long) ((BArrayType) astArrayLiteralExpr.type).size : -1L;
    // The size is materialized as an int literal so it becomes an operand.
    BLangLiteral literal = new BLangLiteral();
    literal.pos = astArrayLiteralExpr.pos;
    literal.value = size;
    literal.type = symTable.intType;
    literal.accept(this);
    BIROperand sizeOp = this.env.targetOperand;
    emit(new BIRNonTerminator.NewArray(astArrayLiteralExpr.pos, astArrayLiteralExpr.type, toVarRef, sizeOp));
    for (int i = 0; i < astArrayLiteralExpr.exprs.size(); i++) {
        BLangExpression argExpr = astArrayLiteralExpr.exprs.get(i);
        argExpr.accept(this);
        BIROperand exprIndex = this.env.targetOperand;
        // Each index is also materialized as a literal operand.
        BLangLiteral indexLiteral = new BLangLiteral();
        indexLiteral.pos = astArrayLiteralExpr.pos;
        indexLiteral.value = (long) i;
        indexLiteral.type = symTable.intType;
        indexLiteral.accept(this);
        BIROperand arrayIndex = this.env.targetOperand;
        emit(new BIRNonTerminator.FieldAccess(astArrayLiteralExpr.pos,
                InstructionKind.ARRAY_STORE, toVarRef, arrayIndex, exprIndex));
    }
    this.env.targetOperand = toVarRef;
}
// Map index access shares the generic index-based lowering.
@Override
public void visit(BLangMapAccessExpr astMapAccessExpr) {
    visitIndexBased(astMapAccessExpr);
}
// Struct field access is lowered the same way as a map access.
@Override
public void visit(BLangStructFieldAccessExpr astStructFieldAccessExpr) {
    visitIndexBased(astStructFieldAccessExpr);
}
/**
 * Shared lowering for index-based access (maps, struct fields).
 * In store mode (LHS of an assignment) it emits MAP_STORE, consuming the
 * RHS value that visit(BLangAssignment) left in targetOperand; otherwise
 * it emits MAP_LOAD into a fresh temp.
 */
private void visitIndexBased(BLangIndexBasedAccess astIndexBasedAccessExpr) {
    boolean variableStore = this.varAssignment;
    // Sub-expressions (container, index) must be visited in load mode.
    this.varAssignment = false;
    if (variableStore) {
        // Capture the RHS operand before child visits overwrite targetOperand.
        BIROperand rhsOp = this.env.targetOperand;
        astIndexBasedAccessExpr.expr.accept(this);
        BIROperand varRefRegIndex = this.env.targetOperand;
        astIndexBasedAccessExpr.indexExpr.accept(this);
        BIROperand keyRegIndex = this.env.targetOperand;
        emit(new BIRNonTerminator.FieldAccess(astIndexBasedAccessExpr.pos,
                InstructionKind.MAP_STORE, varRefRegIndex, keyRegIndex, rhsOp));
    } else {
        BIRVariableDcl tempVarDcl = new BIRVariableDcl(astIndexBasedAccessExpr.type,
                this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
        this.env.enclFunc.localVars.add(tempVarDcl);
        BIROperand tempVarRef = new BIROperand(tempVarDcl);
        astIndexBasedAccessExpr.expr.accept(this);
        BIROperand varRefRegIndex = this.env.targetOperand;
        astIndexBasedAccessExpr.indexExpr.accept(this);
        BIROperand keyRegIndex = this.env.targetOperand;
        emit(new BIRNonTerminator.FieldAccess(astIndexBasedAccessExpr.pos,
                InstructionKind.MAP_LOAD, tempVarRef, keyRegIndex, varRefRegIndex));
        this.env.targetOperand = tempVarRef;
    }
    this.varAssignment = variableStore;
}
/**
 * Lowers array index access; mirrors visitIndexBased but with
 * ARRAY_STORE / ARRAY_LOAD instruction kinds.
 */
public void visit(BLangArrayAccessExpr astArrayAccessExpr) {
    boolean variableStore = this.varAssignment;
    this.varAssignment = false;
    if (variableStore) {
        // RHS value was left in targetOperand by the assignment lowering.
        BIROperand rhsOp = this.env.targetOperand;
        astArrayAccessExpr.expr.accept(this);
        BIROperand varRefRegIndex = this.env.targetOperand;
        astArrayAccessExpr.indexExpr.accept(this);
        BIROperand keyRegIndex = this.env.targetOperand;
        emit(new BIRNonTerminator.FieldAccess(astArrayAccessExpr.pos,
                InstructionKind.ARRAY_STORE, varRefRegIndex, keyRegIndex, rhsOp));
    } else {
        BIRVariableDcl tempVarDcl = new BIRVariableDcl(astArrayAccessExpr.type,
                this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
        this.env.enclFunc.localVars.add(tempVarDcl);
        BIROperand tempVarRef = new BIROperand(tempVarDcl);
        astArrayAccessExpr.expr.accept(this);
        BIROperand varRefRegIndex = this.env.targetOperand;
        astArrayAccessExpr.indexExpr.accept(this);
        BIROperand keyRegIndex = this.env.targetOperand;
        emit(new BIRNonTerminator.FieldAccess(astArrayAccessExpr.pos,
                InstructionKind.ARRAY_LOAD, tempVarRef, keyRegIndex, varRefRegIndex));
        this.env.targetOperand = tempVarRef;
    }
    this.varAssignment = variableStore;
}
/**
 * Lowers a tuple (or braced) expression as a fixed-size array:
 * NEW_ARRAY sized to the member count, then ARRAY_STORE per member.
 */
@Override
public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) {
    BIRVariableDcl tempVarDcl = new BIRVariableDcl(bracedOrTupleExpr.type,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarDcl);
    BIROperand toVarRef = new BIROperand(tempVarDcl);
    long size = bracedOrTupleExpr.expressions.size();
    // Materialize the size as an int literal operand.
    BLangLiteral literal = new BLangLiteral();
    literal.pos = bracedOrTupleExpr.pos;
    literal.value = size;
    literal.type = symTable.intType;
    literal.accept(this);
    BIROperand sizeOp = this.env.targetOperand;
    emit(new BIRNonTerminator.NewArray(bracedOrTupleExpr.pos, bracedOrTupleExpr.type, toVarRef, sizeOp));
    for (int i = 0; i < bracedOrTupleExpr.expressions.size(); i++) {
        BLangExpression argExpr = bracedOrTupleExpr.expressions.get(i);
        argExpr.accept(this);
        BIROperand exprIndex = this.env.targetOperand;
        BLangLiteral indexLiteral = new BLangLiteral();
        indexLiteral.pos = bracedOrTupleExpr.pos;
        indexLiteral.value = (long) i;
        indexLiteral.type = symTable.intType;
        indexLiteral.accept(this);
        BIROperand arrayIndex = this.env.targetOperand;
        emit(new BIRNonTerminator.FieldAccess(bracedOrTupleExpr.pos,
                InstructionKind.ARRAY_STORE, toVarRef, arrayIndex, exprIndex));
    }
    this.env.targetOperand = toVarRef;
}
// Lowers `x is like T` into an IS_LIKE instruction producing a boolean temp.
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    BIRVariableDcl tempVarDcl = new BIRVariableDcl(symTable.booleanType,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarDcl);
    BIROperand toVarRef = new BIROperand(tempVarDcl);
    isLikeExpr.expr.accept(this);
    BIROperand exprIndex = this.env.targetOperand;
    emit(new BIRNonTerminator.IsLike(isLikeExpr.pos, isLikeExpr.typeNode.type, toVarRef, exprIndex));
    this.env.targetOperand = toVarRef;
}
// Lowers `x is T` into a TYPE_TEST instruction producing a boolean temp.
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BIRVariableDcl tempVarDcl = new BIRVariableDcl(symTable.booleanType,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarDcl);
    BIROperand toVarRef = new BIROperand(tempVarDcl);
    typeTestExpr.expr.accept(this);
    BIROperand exprIndex = this.env.targetOperand;
    emit(new BIRNonTerminator.TypeTest(typeTestExpr.pos, typeTestExpr.typeNode.type, toVarRef, exprIndex));
    this.env.targetOperand = toVarRef;
}
/**
 * Lowers a local variable reference. Store mode moves the pending RHS
 * (left in targetOperand by the assignment) into the variable; load mode
 * copies the variable into a fresh temp that becomes the result.
 */
@Override
public void visit(BLangLocalVarRef astVarRefExpr) {
    boolean variableStore = this.varAssignment;
    this.varAssignment = false;
    if (variableStore) {
        BIROperand varRef = new BIROperand(this.env.symbolVarMap.get(astVarRefExpr.symbol));
        emit(new Move(astVarRefExpr.pos, this.env.targetOperand, varRef));
    } else {
        BIRVariableDcl tempVarDcl = new BIRVariableDcl(astVarRefExpr.type,
                this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
        this.env.enclFunc.localVars.add(tempVarDcl);
        BIROperand tempVarRef = new BIROperand(tempVarDcl);
        BIROperand fromVarRef = new BIROperand(this.env.symbolVarMap.get(astVarRefExpr.symbol));
        emit(new Move(astVarRefExpr.pos, fromVarRef, tempVarRef));
        this.env.targetOperand = tempVarRef;
    }
    this.varAssignment = variableStore;
}
/**
 * Lowers a module-level variable reference; same load/store scheme as
 * visit(BLangLocalVarRef) but resolved through the global variable map.
 */
public void visit(BLangPackageVarRef astPackageVarRefExpr) {
    boolean variableStore = this.varAssignment;
    this.varAssignment = false;
    if (variableStore) {
        BIROperand varRef = new BIROperand(this.env.globalVarMap.get(astPackageVarRefExpr.symbol));
        emit(new Move(astPackageVarRefExpr.pos, this.env.targetOperand, varRef));
    } else {
        BIRVariableDcl tempVarDcl = new BIRVariableDcl(astPackageVarRefExpr.type,
                this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
        this.env.enclFunc.localVars.add(tempVarDcl);
        BIROperand tempVarRef = new BIROperand(tempVarDcl);
        BIROperand fromVarRef = new BIROperand(this.env.globalVarMap.get(astPackageVarRefExpr.symbol));
        emit(new Move(astPackageVarRefExpr.pos, fromVarRef, tempVarRef));
        this.env.targetOperand = tempVarRef;
    }
    this.varAssignment = variableStore;
}
/**
 * Lowers a binary expression: evaluates both operands (each leaves its
 * result in targetOperand) and emits the corresponding binary instruction
 * into a fresh temp, which becomes the result.
 */
public void visit(BLangBinaryExpr astBinaryExpr) {
    astBinaryExpr.lhsExpr.accept(this);
    BIROperand rhsOp1 = this.env.targetOperand;
    astBinaryExpr.rhsExpr.accept(this);
    BIROperand rhsOp2 = this.env.targetOperand;
    BIRVariableDcl tempVarDcl = new BIRVariableDcl(astBinaryExpr.type,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarDcl);
    BIROperand lhsOp = new BIROperand(tempVarDcl);
    this.env.targetOperand = lhsOp;
    BinaryOp binaryIns = new BinaryOp(astBinaryExpr.pos, getBinaryInstructionKind(astBinaryExpr.opKind),
            astBinaryExpr.type, lhsOp, rhsOp1, rhsOp2);
    emit(binaryIns);
}
/**
 * Lowers an {@code error(reason, details)} constructor into a NEW_ERROR
 * instruction whose result lives in a fresh temp variable.
 */
public void visit(BLangErrorConstructorExpr errorExpr) {
    BIRVariableDcl tempVarError = new BIRVariableDcl(errorExpr.type,
            this.env.nextLocalVarId(names), VarScope.FUNCTION, VarKind.TEMP);
    this.env.enclFunc.localVars.add(tempVarError);
    BIROperand lhsOp = new BIROperand(tempVarError);
    this.env.targetOperand = lhsOp;
    // Each child visit leaves its result operand in env.targetOperand.
    errorExpr.reasonExpr.accept(this);
    BIROperand reasonOp = this.env.targetOperand;
    errorExpr.detailsExpr.accept(this);
    BIROperand detailsOp = this.env.targetOperand;
    BIRNonTerminator.NewError newError = new BIRNonTerminator.NewError(errorExpr.pos, InstructionKind.NEW_ERROR,
            lhsOp, reasonOp, detailsOp);
    emit(newError);
    // Restore the error temp as this expression's result.
    this.env.targetOperand = lhsOp;
}
/**
 * Derives the BIR visibility from a symbol's flags: public first, then
 * private, defaulting to package (module) visibility.
 */
private Visibility getVisibility(BSymbol symbol) {
    return Symbols.isPublic(symbol) ? Visibility.PUBLIC
            : Symbols.isPrivate(symbol) ? Visibility.PRIVATE
            : Visibility.PACKAGE_PRIVATE;
}
// Appends a non-terminator instruction to the current basic block.
private void emit(BIRInstruction instruction) {
    this.env.enclBB.instructions.add(instruction);
}
/**
 * Translates an AST binary operator into the matching BIR instruction kind.
 * Enum constants are compared by identity, which is safe for Java enums.
 *
 * @throws IllegalStateException for operators with no BIR lowering yet
 */
private InstructionKind getBinaryInstructionKind(OperatorKind opKind) {
    // Arithmetic operators.
    if (opKind == OperatorKind.ADD) {
        return InstructionKind.ADD;
    } else if (opKind == OperatorKind.SUB) {
        return InstructionKind.SUB;
    } else if (opKind == OperatorKind.MUL) {
        return InstructionKind.MUL;
    } else if (opKind == OperatorKind.DIV) {
        return InstructionKind.DIV;
    } else if (opKind == OperatorKind.MOD) {
        return InstructionKind.MOD;
    }
    // Comparison operators.
    if (opKind == OperatorKind.EQUAL) {
        return InstructionKind.EQUAL;
    } else if (opKind == OperatorKind.NOT_EQUAL) {
        return InstructionKind.NOT_EQUAL;
    } else if (opKind == OperatorKind.GREATER_THAN) {
        return InstructionKind.GREATER_THAN;
    } else if (opKind == OperatorKind.GREATER_EQUAL) {
        return InstructionKind.GREATER_EQUAL;
    } else if (opKind == OperatorKind.LESS_THAN) {
        return InstructionKind.LESS_THAN;
    } else if (opKind == OperatorKind.LESS_EQUAL) {
        return InstructionKind.LESS_EQUAL;
    }
    throw new IllegalStateException("unsupported binary operation: " + opKind.value());
}
} |
The nullability of the inferred type may be incorrect when the input is nullable — why is the return type DataTypes.MULTISET(argumentDataTypes.get(0)).notNull(), which is explicitly non-nullable? | public Optional<DataType> inferType(CallContext callContext) {
List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
if (argumentDataTypes.size() != 1) {
return Optional.empty();
}
return Optional.of(DataTypes.MULTISET(argumentDataTypes.get(0)).notNull());
} | } | public Optional<DataType> inferType(CallContext callContext) {
List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
if (argumentDataTypes.size() != 1) {
return Optional.empty();
}
return Optional.of(DataTypes.MULTISET(argumentDataTypes.get(0)).notNull());
} | class CollectTypeStrategy implements TypeStrategy {
@Override
} | class CollectTypeStrategy implements TypeStrategy {
@Override
} |
As far as I have understood, this whole method is actually not doing anything for `coordinatorClient`. Shouldn't we just add a shortcut? ``` if (coordinatorClient == null) { return Collections.emptyList(); } ``` Then we could simplify `notifyCheckpointComplete` and just have some `checkState(coordinatorClient != null)` when it actually comes to committing. | public List<PulsarPartitionSplit> snapshotState(long checkpointId) {
LOG.debug("Trigger the new transaction for downstream readers.");
List<PulsarPartitionSplit> splits =
((PulsarUnorderedFetcherManager<OUT>) splitFetcherManager)
.snapshotState(checkpointId);
if (splits.isEmpty() && transactionsOfFinishedSplits.isEmpty()) {
transactionsToCommit.put(checkpointId, Collections.emptyList());
} else {
List<TxnID> txnIDs =
transactionsToCommit.computeIfAbsent(checkpointId, id -> new ArrayList<>());
for (PulsarPartitionSplit split : splits) {
TxnID uncommittedTransactionId = split.getUncommittedTransactionId();
if (uncommittedTransactionId != null) {
txnIDs.add(uncommittedTransactionId);
}
}
}
return splits;
} | .snapshotState(checkpointId); | public List<PulsarPartitionSplit> snapshotState(long checkpointId) {
LOG.debug("Trigger the new transaction for downstream readers.");
List<PulsarPartitionSplit> splits =
((PulsarUnorderedFetcherManager<OUT>) splitFetcherManager)
.snapshotState(checkpointId);
if (coordinatorClient != null) {
List<TxnID> txnIDs =
transactionsToCommit.computeIfAbsent(checkpointId, id -> new ArrayList<>());
for (PulsarPartitionSplit split : splits) {
TxnID uncommittedTransactionId = split.getUncommittedTransactionId();
if (uncommittedTransactionId != null) {
txnIDs.add(uncommittedTransactionId);
}
}
}
return splits;
} | class PulsarUnorderedSourceReader<OUT> extends PulsarSourceReaderBase<OUT> {
private static final Logger LOG = LoggerFactory.getLogger(PulsarUnorderedSourceReader.class);
// Client used to commit Pulsar transactions on checkpoint completion.
private final TransactionCoordinatorClient coordinatorClient;
// Checkpoint id -> transactions opened up to that checkpoint, in checkpoint order.
private final SortedMap<Long, List<TxnID>> transactionsToCommit;
// Uncommitted transactions belonging to splits that already finished.
private final List<TxnID> transactionsOfFinishedSplits;
/**
 * Creates an unordered Pulsar source reader; transaction bookkeeping maps
 * are wrapped in synchronized views because they are touched from the
 * checkpoint and fetcher paths.
 */
public PulsarUnorderedSourceReader(
        FutureCompletingBlockingQueue<RecordsWithSplitIds<PulsarMessage<OUT>>> elementsQueue,
        Supplier<PulsarUnorderedPartitionSplitReader<OUT>> splitReaderSupplier,
        Configuration configuration,
        SourceReaderContext context,
        SourceConfiguration sourceConfiguration,
        PulsarClient pulsarClient,
        PulsarAdmin pulsarAdmin,
        TransactionCoordinatorClient coordinatorClient) {
    super(
            elementsQueue,
            new PulsarUnorderedFetcherManager<>(elementsQueue, splitReaderSupplier::get),
            configuration,
            context,
            sourceConfiguration,
            pulsarClient,
            pulsarAdmin);
    this.coordinatorClient = coordinatorClient;
    this.transactionsToCommit = Collections.synchronizedSortedMap(new TreeMap<>());
    this.transactionsOfFinishedSplits = Collections.synchronizedList(new ArrayList<>());
}
// Remembers the uncommitted transaction of every finished split so it can
// still be committed on the next completed checkpoint.
@Override
protected void onSplitFinished(Map<String, PulsarPartitionSplitState> finishedSplitIds) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("onSplitFinished event: {}", finishedSplitIds);
    }
    for (Map.Entry<String, PulsarPartitionSplitState> entry : finishedSplitIds.entrySet()) {
        PulsarPartitionSplitState state = entry.getValue();
        TxnID uncommittedTransactionId = state.getUncommittedTransactionId();
        if (uncommittedTransactionId != null) {
            transactionsOfFinishedSplits.add(uncommittedTransactionId);
        }
    }
}
@Override
@Override
/**
 * Commits all transactions belonging to checkpoints up to and including
 * {@code checkpointId}, then drops their bookkeeping entries.
 *
 * <p>Fix: the previous code called {@code transactionsToCommit.remove(...)}
 * inside a for-each over {@code entrySet()}, which throws
 * {@link java.util.ConcurrentModificationException} on the backing TreeMap.
 * Completed keys are now collected first and removed after the iteration.
 */
public void notifyCheckpointComplete(long checkpointId) throws Exception {
    LOG.debug("Committing transactions for checkpoint {}", checkpointId);
    if (coordinatorClient != null) {
        List<Long> committedCheckpointIds = new ArrayList<>();
        // Manual synchronization is required when iterating a
        // Collections.synchronizedSortedMap view.
        synchronized (transactionsToCommit) {
            for (Map.Entry<Long, List<TxnID>> entry : transactionsToCommit.entrySet()) {
                Long currentCheckpointId = entry.getKey();
                if (currentCheckpointId > checkpointId) {
                    continue; // belongs to a later, not yet completed checkpoint
                }
                for (TxnID transaction : entry.getValue()) {
                    coordinatorClient.commit(transaction);
                    transactionsOfFinishedSplits.removeIf(txnID -> txnID.equals(transaction));
                }
                committedCheckpointIds.add(currentCheckpointId);
            }
            // Remove only after iterating to avoid ConcurrentModificationException.
            committedCheckpointIds.forEach(transactionsToCommit::remove);
        }
    } else {
        // No transaction coordinator: just discard stale bookkeeping.
        transactionsToCommit.entrySet().removeIf(e -> e.getKey() < checkpointId);
        transactionsOfFinishedSplits.clear();
    }
}
} | class PulsarUnorderedSourceReader<OUT> extends PulsarSourceReaderBase<OUT> {
private static final Logger LOG = LoggerFactory.getLogger(PulsarUnorderedSourceReader.class);
// Null when the Pulsar cluster has no transaction support; all transaction
// bookkeeping is skipped in that case.
@Nullable private final TransactionCoordinatorClient coordinatorClient;
// Checkpoint id -> transactions opened up to that checkpoint, in checkpoint order.
private final SortedMap<Long, List<TxnID>> transactionsToCommit;
// Uncommitted transactions belonging to splits that already finished.
private final List<TxnID> transactionsOfFinishedSplits;
/**
 * Creates an unordered Pulsar source reader. {@code coordinatorClient} may
 * be null when transactions are unavailable; the bookkeeping maps are
 * synchronized views shared between checkpoint and fetcher paths.
 */
public PulsarUnorderedSourceReader(
        FutureCompletingBlockingQueue<RecordsWithSplitIds<PulsarMessage<OUT>>> elementsQueue,
        Supplier<PulsarUnorderedPartitionSplitReader<OUT>> splitReaderSupplier,
        Configuration configuration,
        SourceReaderContext context,
        SourceConfiguration sourceConfiguration,
        PulsarClient pulsarClient,
        PulsarAdmin pulsarAdmin,
        @Nullable TransactionCoordinatorClient coordinatorClient) {
    super(
            elementsQueue,
            new PulsarUnorderedFetcherManager<>(elementsQueue, splitReaderSupplier::get),
            configuration,
            context,
            sourceConfiguration,
            pulsarClient,
            pulsarAdmin);
    this.coordinatorClient = coordinatorClient;
    this.transactionsToCommit = Collections.synchronizedSortedMap(new TreeMap<>());
    this.transactionsOfFinishedSplits = Collections.synchronizedList(new ArrayList<>());
}
// Remembers the uncommitted transaction of every finished split — but only
// when transactions are enabled (coordinatorClient present).
@Override
protected void onSplitFinished(Map<String, PulsarPartitionSplitState> finishedSplitIds) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("onSplitFinished event: {}", finishedSplitIds);
    }
    if (coordinatorClient != null) {
        for (Map.Entry<String, PulsarPartitionSplitState> entry : finishedSplitIds.entrySet()) {
            PulsarPartitionSplitState state = entry.getValue();
            TxnID uncommittedTransactionId = state.getUncommittedTransactionId();
            if (uncommittedTransactionId != null) {
                transactionsOfFinishedSplits.add(uncommittedTransactionId);
            }
        }
    }
}
@Override
@Override
/**
 * Commits all transactions belonging to checkpoints up to and including
 * {@code checkpointId}, then drops their bookkeeping entries. No-op when
 * transactions are disabled (coordinatorClient is null).
 *
 * <p>Fix: the previous code called {@code transactionsToCommit.remove(...)}
 * inside a for-each over {@code entrySet()}, which throws
 * {@link java.util.ConcurrentModificationException} on the backing TreeMap.
 * Completed keys are now collected first and removed after the iteration.
 */
public void notifyCheckpointComplete(long checkpointId) throws Exception {
    LOG.debug("Committing transactions for checkpoint {}", checkpointId);
    if (coordinatorClient != null) {
        List<Long> committedCheckpointIds = new ArrayList<>();
        // Manual synchronization is required when iterating a
        // Collections.synchronizedSortedMap view.
        synchronized (transactionsToCommit) {
            for (Map.Entry<Long, List<TxnID>> entry : transactionsToCommit.entrySet()) {
                Long currentCheckpointId = entry.getKey();
                if (currentCheckpointId > checkpointId) {
                    continue; // belongs to a later, not yet completed checkpoint
                }
                for (TxnID transaction : entry.getValue()) {
                    coordinatorClient.commit(transaction);
                    transactionsOfFinishedSplits.remove(transaction);
                }
                committedCheckpointIds.add(currentCheckpointId);
            }
            // Remove only after iterating to avoid ConcurrentModificationException.
            committedCheckpointIds.forEach(transactionsToCommit::remove);
        }
    }
}
} |
What about the distributor!? :) | private StorageNode buildSingleNode(DeployState deployState, ContentCluster parent) {
int distributionKey = 0;
StorageNode sNode = new StorageNode(deployState.getProperties(), parent.getStorageCluster(), 1.0, distributionKey , false);
sNode.setHostResource(parent.hostSystem().getHost(Container.SINGLENODE_CONTAINER_SERVICESPEC));
sNode.initService(deployLogger);
PersistenceEngine provider = parent.getPersistence().create(deployState, sNode, storageGroup, null);
new Distributor(deployState.getProperties(), parent.getDistributorNodes(), distributionKey, null, provider);
return sNode;
} | sNode.initService(deployLogger); | private StorageNode buildSingleNode(DeployState deployState, ContentCluster parent) {
int distributionKey = 0;
StorageNode searchNode = new StorageNode(deployState.getProperties(), parent.getStorageCluster(), 1.0, distributionKey , false);
searchNode.setHostResource(parent.hostSystem().getHost(Container.SINGLENODE_CONTAINER_SERVICESPEC));
PersistenceEngine provider = parent.getPersistence().create(deployState, searchNode, storageGroup, null);
searchNode.initService(deployLogger);
Distributor distributor = new Distributor(deployState.getProperties(), parent.getDistributorNodes(), distributionKey, null, provider);
distributor.setHostResource(searchNode.getHostResource());
distributor.initService(deployLogger);
return searchNode;
} | class GroupBuilder {
// The storage group being assembled by this builder.
private final StorageGroup storageGroup;
/* The explicitly defined subgroups of this */
private final List<GroupBuilder> subGroups;
// Builders for the individual nodes declared directly under this group.
private final List<XmlNodeBuilder> nodeBuilders;
/** The nodes explicitly specified as a nodes tag in this group, or empty if none */
private final Optional<NodesSpecification> nodeRequirement;
private final DeployLogger deployLogger;
// Private: instances are created by the enclosing builder machinery.
private GroupBuilder(StorageGroup storageGroup, List<GroupBuilder> subGroups, List<XmlNodeBuilder> nodeBuilders,
                     Optional<NodesSpecification> nodeRequirement, DeployLogger deployLogger) {
    this.storageGroup = storageGroup;
    this.subGroups = subGroups;
    this.nodeBuilders = nodeBuilders;
    this.nodeRequirement = nodeRequirement;
    this.deployLogger = deployLogger;
}
/**
* Builds a storage group for a nonhosted environment
*
* @param owner the cluster owning this
* @param parent the parent storage group, or empty if this is the root group
* @return the storage group build by this
*/
public StorageGroup buildNonHosted(DeployState deployState, ContentCluster owner, Optional<GroupBuilder> parent) {
// Recursively build the explicitly declared subgroups first.
for (GroupBuilder subGroup : subGroups) {
storageGroup.subgroups.add(subGroup.buildNonHosted(deployState, owner, Optional.of(this)));
}
// Then the nodes declared directly in this group.
for (XmlNodeBuilder nodeBuilder : nodeBuilders) {
storageGroup.nodes.add(nodeBuilder.build(deployState, owner, storageGroup));
}
// A root group with no explicit subgroups or nodes gets an implicit single node.
if (parent.isEmpty() && subGroups.isEmpty() && nodeBuilders.isEmpty()) {
storageGroup.nodes.add(buildSingleNode(deployState, owner));
}
return storageGroup;
}
/**
* Builds a storage group for a hosted environment
*
* @param owner the cluster owning this
* @param parent the parent storage group, or empty if this is the root group
* @return the storage group build by this
*/
public StorageGroup buildHosted(DeployState deployState, ContentCluster owner, Optional<GroupBuilder> parent) {
// Hosted environments assign group structure themselves; explicit indexes are rejected.
if (storageGroup.getIndex() != null)
throw new IllegalArgumentException("Specifying individual groups is not supported on hosted applications");
// Provision hosts for this group's node requirement, if one was given.
Map<HostResource, ClusterMembership> hostMapping =
nodeRequirement.isPresent() ?
provisionHosts(nodeRequirement.get(), owner.getStorageCluster().getClusterName(), owner.getRoot().hostSystem(), deployLogger) :
Collections.emptyMap();
Map<Optional<ClusterSpec.Group>, Map<HostResource, ClusterMembership>> hostGroups = collectAllocatedSubgroups(hostMapping);
if (hostGroups.size() > 1) {
// The provisioner allocated hosts into several groups: create one subgroup per allocated group.
if (parent.isPresent())
throw new IllegalArgumentException("Cannot specify groups using the groups attribute in nested content groups");
for (Map.Entry<Optional<ClusterSpec.Group>, Map<HostResource, ClusterMembership>> hostGroup : hostGroups.entrySet()) {
String groupIndex = String.valueOf(hostGroup.getKey().get().index());
StorageGroup subgroup = new StorageGroup(true, groupIndex, groupIndex);
for (Map.Entry<HostResource, ClusterMembership> host : hostGroup.getValue().entrySet()) {
subgroup.nodes.add(createStorageNode(deployState, owner, host.getKey(), subgroup, host.getValue()));
}
storageGroup.subgroups.add(subgroup);
}
}
else {
// Single allocated group: add nodes directly, then build the explicitly declared subgroups.
for (Map.Entry<HostResource, ClusterMembership> host : hostMapping.entrySet()) {
storageGroup.nodes.add(createStorageNode(deployState, owner, host.getKey(), storageGroup, host.getValue()));
}
for (GroupBuilder subGroup : subGroups) {
storageGroup.subgroups.add(subGroup.buildHosted(deployState, owner, Optional.of(this)));
}
}
return storageGroup;
}
/**
 * Groups the provisioned hosts by the cluster spec group they were allocated to.
 *
 * @param hostMapping the host-to-membership mapping returned by provisioning
 * @return a map from (optional) allocated group to the hosts allocated to it,
 *         preserving the iteration order of {@code hostMapping}
 */
private Map<Optional<ClusterSpec.Group>, Map<HostResource, ClusterMembership>> collectAllocatedSubgroups(Map<HostResource, ClusterMembership> hostMapping) {
    // LinkedHashMap at both levels keeps deterministic, allocation-ordered iteration.
    Map<Optional<ClusterSpec.Group>, Map<HostResource, ClusterMembership>> hostsPerGroup = new LinkedHashMap<>();
    for (Map.Entry<HostResource, ClusterMembership> entry : hostMapping.entrySet()) {
        Optional<ClusterSpec.Group> group = entry.getValue().cluster().group();
        hostsPerGroup.computeIfAbsent(group, ignored -> new LinkedHashMap<>())
                     .put(entry.getKey(), entry.getValue());
    }
    return hostsPerGroup;
}
} | class GroupBuilder {
private final StorageGroup storageGroup;
/* The explicitly defined subgroups of this */
private final List<GroupBuilder> subGroups;
private final List<XmlNodeBuilder> nodeBuilders;
/** The nodes explicitly specified as a nodes tag in this group, or empty if none */
private final Optional<NodesSpecification> nodeRequirement;
private final DeployLogger deployLogger;
/**
 * Creates a builder for one storage group.
 *
 * @param storageGroup    the group this builder populates
 * @param subGroups       builders for the explicitly declared subgroups of this group
 * @param nodeBuilders    builders for the nodes declared directly in this group
 * @param nodeRequirement the nodes specification of this group, or empty if none
 * @param deployLogger    logger receiving deploy-time messages
 */
private GroupBuilder(StorageGroup storageGroup, List<GroupBuilder> subGroups, List<XmlNodeBuilder> nodeBuilders,
Optional<NodesSpecification> nodeRequirement, DeployLogger deployLogger) {
this.storageGroup = storageGroup;
this.subGroups = subGroups;
this.nodeBuilders = nodeBuilders;
this.nodeRequirement = nodeRequirement;
this.deployLogger = deployLogger;
}
/**
* Builds a storage group for a nonhosted environment
*
* @param owner the cluster owning this
* @param parent the parent storage group, or empty if this is the root group
* @return the storage group build by this
*/
public StorageGroup buildNonHosted(DeployState deployState, ContentCluster owner, Optional<GroupBuilder> parent) {
// Recursively build the explicitly declared subgroups first.
for (GroupBuilder subGroup : subGroups) {
storageGroup.subgroups.add(subGroup.buildNonHosted(deployState, owner, Optional.of(this)));
}
// Then the nodes declared directly in this group.
for (XmlNodeBuilder nodeBuilder : nodeBuilders) {
storageGroup.nodes.add(nodeBuilder.build(deployState, owner, storageGroup));
}
// A root group with no explicit subgroups or nodes gets an implicit single node.
if (parent.isEmpty() && subGroups.isEmpty() && nodeBuilders.isEmpty()) {
storageGroup.nodes.add(buildSingleNode(deployState, owner));
}
return storageGroup;
}
/**
* Builds a storage group for a hosted environment
*
* @param owner the cluster owning this
* @param parent the parent storage group, or empty if this is the root group
* @return the storage group build by this
*/
public StorageGroup buildHosted(DeployState deployState, ContentCluster owner, Optional<GroupBuilder> parent) {
// Hosted environments assign group structure themselves; explicit indexes are rejected.
if (storageGroup.getIndex() != null)
throw new IllegalArgumentException("Specifying individual groups is not supported on hosted applications");
// Provision hosts for this group's node requirement, if one was given.
Map<HostResource, ClusterMembership> hostMapping =
nodeRequirement.isPresent() ?
provisionHosts(nodeRequirement.get(), owner.getStorageCluster().getClusterName(), owner.getRoot().hostSystem(), deployLogger) :
Collections.emptyMap();
Map<Optional<ClusterSpec.Group>, Map<HostResource, ClusterMembership>> hostGroups = collectAllocatedSubgroups(hostMapping);
if (hostGroups.size() > 1) {
// The provisioner allocated hosts into several groups: create one subgroup per allocated group.
if (parent.isPresent())
throw new IllegalArgumentException("Cannot specify groups using the groups attribute in nested content groups");
for (Map.Entry<Optional<ClusterSpec.Group>, Map<HostResource, ClusterMembership>> hostGroup : hostGroups.entrySet()) {
String groupIndex = String.valueOf(hostGroup.getKey().get().index());
StorageGroup subgroup = new StorageGroup(true, groupIndex, groupIndex);
for (Map.Entry<HostResource, ClusterMembership> host : hostGroup.getValue().entrySet()) {
subgroup.nodes.add(createStorageNode(deployState, owner, host.getKey(), subgroup, host.getValue()));
}
storageGroup.subgroups.add(subgroup);
}
}
else {
// Single allocated group: add nodes directly, then build the explicitly declared subgroups.
for (Map.Entry<HostResource, ClusterMembership> host : hostMapping.entrySet()) {
storageGroup.nodes.add(createStorageNode(deployState, owner, host.getKey(), storageGroup, host.getValue()));
}
for (GroupBuilder subGroup : subGroups) {
storageGroup.subgroups.add(subGroup.buildHosted(deployState, owner, Optional.of(this)));
}
}
return storageGroup;
}
/**
 * Groups the provisioned hosts by the cluster spec group they were allocated to.
 *
 * @param hostMapping the host-to-membership mapping returned by provisioning
 * @return a map from (optional) allocated group to the hosts allocated to it,
 *         preserving the iteration order of {@code hostMapping}
 */
private Map<Optional<ClusterSpec.Group>, Map<HostResource, ClusterMembership>> collectAllocatedSubgroups(Map<HostResource, ClusterMembership> hostMapping) {
    // LinkedHashMap at both levels keeps deterministic, allocation-ordered iteration.
    Map<Optional<ClusterSpec.Group>, Map<HostResource, ClusterMembership>> hostsPerGroup = new LinkedHashMap<>();
    for (Map.Entry<HostResource, ClusterMembership> entry : hostMapping.entrySet()) {
        Optional<ClusterSpec.Group> group = entry.getValue().cluster().group();
        hostsPerGroup.computeIfAbsent(group, ignored -> new LinkedHashMap<>())
                     .put(entry.getKey(), entry.getValue());
    }
    return hostsPerGroup;
}
} |
it is the opinion that is negated, not the aspect :) | public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
.setLanguage("en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
.setLanguage("en")
);
// Opinion mining and batch statistics must be requested explicitly via the options bag.
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true)
.setIncludeStatistics(true);
Response<AnalyzeSentimentResultCollection> response =
textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs, options, Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
// isNegated() is a property of the opinion, not the aspect — the message must say so.
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
});
} | System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n", | public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
.setLanguage("en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
.setLanguage("en")
);
// Opinion mining and batch statistics must be requested explicitly via the options bag.
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true)
.setIncludeStatistics(true);
Response<AnalyzeSentimentResultCollection> response =
textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs, options, Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
// isNegated() is a property of the opinion, not the aspect.
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
});
} | class TextAnalyticsClientJavaDocCodeSnippets {
private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
* Code snippet for creating a {@link TextAnalyticsClient} with pipeline
*/
public void createTextAnalyticsClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for creating a {@link TextAnalyticsClient}
*/
public void createTextAnalyticsClient() {
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for {@link TextAnalyticsClient#detectLanguage(String)}
*/
public void detectLanguage() {
// Detect the primary language of a single document, using default options (no country hint).
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageWithCountryHint() {
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(
"This text is in English", "US");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
DetectLanguageResultCollection resultCollection =
textAnalyticsClient.detectLanguageBatch(documents, "US", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "es")
);
Response<DetectLanguageResultCollection> response =
textAnalyticsClient.detectLanguageBatchWithResponse(detectLanguageInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection detectedLanguageResultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = detectedLanguageResultCollection.getStatistics();
System.out.printf(
"Documents statistics: document count = %s, erroneous document count = %s, transaction count = %s,"
+ " valid document count = %s.%n",
batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(),
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
detectedLanguageResultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntities() {
final CategorizedEntityCollection recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft");
for (CategorizedEntity entity : recognizeEntitiesResult) {
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntitiesWithLanguage() {
final CategorizedEntityCollection recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft", "en");
for (CategorizedEntity entity : recognizeEntitiesResult) {
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
RecognizeEntitiesResultCollection resultCollection =
textAnalyticsClient.recognizeEntitiesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity ->
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en")
);
Response<RecognizeEntitiesResultCollection> response =
textAnalyticsClient.recognizeEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection recognizeEntitiesResultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
recognizeEntitiesResultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity ->
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntities() {
final String document = "Old Faithful is a geyser at Yellowstone Park.";
System.out.println("Linked Entities:");
textAnalyticsClient.recognizeLinkedEntities(document).forEach(linkedEntity -> {
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsClient.recognizeLinkedEntities(document, "en").forEach(linkedEntity -> {
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
RecognizeLinkedEntitiesResultCollection resultCollection =
textAnalyticsClient.recognizeLinkedEntitiesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesBatchMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("2", "Mount Shasta has lenticular clouds.").setLanguage("en")
);
Response<RecognizeLinkedEntitiesResultCollection> response =
textAnalyticsClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrases() {
// Extract key phrases from a single document and print each one.
System.out.println("Extracted phrases:");
for (String keyPhrase : textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.")) {
System.out.printf("%s.%n", keyPhrase);
}
}
/**
* Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(String, String)}
*/
public void extractKeyPhrasesWithLanguage() {
    // Extract key phrases from a single English document and print each one.
    System.out.println("Extracted phrases:");
    for (String keyPhrase : textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.", "en")) {
        System.out.printf("%s.%n", keyPhrase);
    }
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My cat might need to see a veterinarian.",
"The pitot tube is used to measure airspeed."
);
ExtractKeyPhrasesResultCollection resultCollection =
textAnalyticsClient.extractKeyPhrasesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(extractKeyPhraseResult -> {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "My cat might need to see a veterinarian.").setLanguage("en"),
new TextDocumentInput("2", "The pitot tube is used to measure airspeed.").setLanguage("en")
);
Response<ExtractKeyPhrasesResultCollection> response =
textAnalyticsClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(extractKeyPhraseResult -> {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentiment() {
final DocumentSentiment documentSentiment =
textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentWithLanguage() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
// Opinion mining must be requested explicitly via AnalyzeSentimentOptions.
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
// isNegated() is a property of the opinion, not the aspect — the message must say so.
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
AnalyzeSentimentResultCollection resultCollection =
textAnalyticsClient.analyzeSentimentBatch(documents, "en", new TextAnalyticsRequestOptions());
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
// Opinion mining must be requested explicitly via AnalyzeSentimentOptions.
AnalyzeSentimentResultCollection resultCollection = textAnalyticsClient.analyzeSentimentBatch(
documents, "en", new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
// isNegated() is a property of the opinion, not the aspect — the message must say so.
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
.setLanguage("en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
.setLanguage("en")
);
Response<AnalyzeSentimentResultCollection> response =
textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
});
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
} | class TextAnalyticsClientJavaDocCodeSnippets {
private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
* Code snippet for creating a {@link TextAnalyticsClient} with pipeline
*/
public void createTextAnalyticsClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for creating a {@link TextAnalyticsClient}
*/
public void createTextAnalyticsClient() {
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguage() {
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageWithCountryHint() {
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(
"This text is in English", "US");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
DetectLanguageResultCollection resultCollection =
textAnalyticsClient.detectLanguageBatch(documents, "US", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "es")
);
Response<DetectLanguageResultCollection> response =
textAnalyticsClient.detectLanguageBatchWithResponse(detectLanguageInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection detectedLanguageResultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = detectedLanguageResultCollection.getStatistics();
System.out.printf(
"Documents statistics: document count = %s, erroneous document count = %s, transaction count = %s,"
+ " valid document count = %s.%n",
batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(),
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
detectedLanguageResultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntities() {
final CategorizedEntityCollection recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft");
for (CategorizedEntity entity : recognizeEntitiesResult) {
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntitiesWithLanguage() {
final CategorizedEntityCollection recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft", "en");
for (CategorizedEntity entity : recognizeEntitiesResult) {
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
RecognizeEntitiesResultCollection resultCollection =
textAnalyticsClient.recognizeEntitiesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity ->
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en")
);
Response<RecognizeEntitiesResultCollection> response =
textAnalyticsClient.recognizeEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection recognizeEntitiesResultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
recognizeEntitiesResultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity ->
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizePiiEntities() {
for (PiiEntity entity : textAnalyticsClient.recognizePiiEntities("My SSN is 859-98-0987")) {
System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizePiiEntitiesWithLanguage() {
textAnalyticsClient.recognizePiiEntities("My SSN is 859-98-0987", "en")
.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987",
"Visa card 4111 1111 1111 1111"
);
RecognizePiiEntitiesResultCollection resultCollection = textAnalyticsClient.recognizePiiEntitiesBatch(
documents, "en", new TextAnalyticsRequestOptions().setIncludeStatistics(true));
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987"),
new TextDocumentInput("1", "Visa card 4111 1111 1111 1111")
);
Response<RecognizePiiEntitiesResultCollection> response =
textAnalyticsClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
RecognizePiiEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntities() {
final String document = "Old Faithful is a geyser at Yellowstone Park.";
System.out.println("Linked Entities:");
textAnalyticsClient.recognizeLinkedEntities(document).forEach(linkedEntity -> {
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsClient.recognizeLinkedEntities(document, "en").forEach(linkedEntity -> {
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
RecognizeLinkedEntitiesResultCollection resultCollection =
textAnalyticsClient.recognizeLinkedEntitiesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesBatchMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("2", "Mount Shasta has lenticular clouds.").setLanguage("en")
);
Response<RecognizeLinkedEntitiesResultCollection> response =
textAnalyticsClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
for (String keyPhrase : textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.")) {
System.out.printf("%s.%n", keyPhrase);
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.", "en")
.forEach(kegPhrase -> System.out.printf("%s.%n", kegPhrase));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My cat might need to see a veterinarian.",
"The pitot tube is used to measure airspeed."
);
ExtractKeyPhrasesResultCollection resultCollection =
textAnalyticsClient.extractKeyPhrasesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(extractKeyPhraseResult -> {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "My cat might need to see a veterinarian.").setLanguage("en"),
new TextDocumentInput("2", "The pitot tube is used to measure airspeed.").setLanguage("en")
);
Response<ExtractKeyPhrasesResultCollection> response =
textAnalyticsClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(extractKeyPhraseResult -> {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentiment() {
final DocumentSentiment documentSentiment =
textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentWithLanguage() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
AnalyzeSentimentResultCollection resultCollection = textAnalyticsClient.analyzeSentimentBatch(
documents, "en", new TextAnalyticsRequestOptions().setIncludeStatistics(true));
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
AnalyzeSentimentResultCollection resultCollection = textAnalyticsClient.analyzeSentimentBatch(
documents, "en", new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
.setLanguage("en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
.setLanguage("en")
);
Response<AnalyzeSentimentResultCollection> response =
textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
});
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
} |
Shall we pass the logger from outside instead of expose it as `public/(default)`? | private void cleanup() throws Exception {
StreamTask.LOG.debug(
"Cleanup AsyncCheckpointRunnable for checkpoint {} of {}.",
checkpointMetaData.getCheckpointId(),
taskName);
Exception exception = null;
for (OperatorSnapshotFutures operatorSnapshotResult : operatorSnapshotsInProgress.values()) {
if (operatorSnapshotResult != null) {
try {
operatorSnapshotResult.cancel();
} catch (Exception cancelException) {
exception = ExceptionUtils.firstOrSuppressed(cancelException, exception);
}
}
}
if (null != exception) {
throw exception;
}
} | StreamTask.LOG.debug( | private void cleanup() throws Exception {
LOG.debug(
"Cleanup AsyncCheckpointRunnable for checkpoint {} of {}.",
checkpointMetaData.getCheckpointId(),
taskName);
Exception exception = null;
for (OperatorSnapshotFutures operatorSnapshotResult : operatorSnapshotsInProgress.values()) {
if (operatorSnapshotResult != null) {
try {
operatorSnapshotResult.cancel();
} catch (Exception cancelException) {
exception = ExceptionUtils.firstOrSuppressed(cancelException, exception);
}
}
}
if (null != exception) {
throw exception;
}
} | class AsyncCheckpointRunnable implements Runnable, Closeable {
private final String taskName;
private final CloseableRegistry closeableRegistry;
private final Environment taskEnvironment;
private enum AsyncCheckpointState {
RUNNING,
DISCARDED,
COMPLETED
}
private final AsyncExceptionHandler asyncExceptionHandler;
private final Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress;
private final CheckpointMetaData checkpointMetaData;
private final CheckpointMetrics checkpointMetrics;
private final long asyncStartNanos;
private final AtomicReference<AsyncCheckpointState> asyncCheckpointState = new AtomicReference<>(AsyncCheckpointState.RUNNING);
AsyncCheckpointRunnable(
Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress,
CheckpointMetaData checkpointMetaData,
CheckpointMetrics checkpointMetrics,
long asyncStartNanos,
String taskName,
CloseableRegistry closeableRegistry,
Environment taskEnvironment,
AsyncExceptionHandler asyncExceptionHandler) {
this.operatorSnapshotsInProgress = checkNotNull(operatorSnapshotsInProgress);
this.checkpointMetaData = checkNotNull(checkpointMetaData);
this.checkpointMetrics = checkNotNull(checkpointMetrics);
this.asyncStartNanos = asyncStartNanos;
this.taskName = checkNotNull(taskName);
this.closeableRegistry = checkNotNull(closeableRegistry);
this.taskEnvironment = checkNotNull(taskEnvironment);
this.asyncExceptionHandler = checkNotNull(asyncExceptionHandler);
}
@Override
public void run() {
FileSystemSafetyNet.initializeSafetyNetForThread();
try {
closeableRegistry.registerCloseable(this);
TaskStateSnapshot jobManagerTaskOperatorSubtaskStates = new TaskStateSnapshot(operatorSnapshotsInProgress.size());
TaskStateSnapshot localTaskOperatorSubtaskStates = new TaskStateSnapshot(operatorSnapshotsInProgress.size());
for (Map.Entry<OperatorID, OperatorSnapshotFutures> entry : operatorSnapshotsInProgress.entrySet()) {
OperatorID operatorID = entry.getKey();
OperatorSnapshotFutures snapshotInProgress = entry.getValue();
OperatorSnapshotFinalizer finalizedSnapshots =
new OperatorSnapshotFinalizer(snapshotInProgress);
jobManagerTaskOperatorSubtaskStates.putSubtaskStateByOperatorID(
operatorID,
finalizedSnapshots.getJobManagerOwnedState());
localTaskOperatorSubtaskStates.putSubtaskStateByOperatorID(
operatorID,
finalizedSnapshots.getTaskLocalState());
}
final long asyncEndNanos = System.nanoTime();
final long asyncDurationMillis = (asyncEndNanos - asyncStartNanos) / 1_000_000L;
checkpointMetrics.setAsyncDurationMillis(asyncDurationMillis);
if (asyncCheckpointState.compareAndSet(AsyncCheckpointState.RUNNING, AsyncCheckpointState.COMPLETED)) {
reportCompletedSnapshotStates(
jobManagerTaskOperatorSubtaskStates,
localTaskOperatorSubtaskStates,
asyncDurationMillis);
} else {
StreamTask.LOG.debug("{} - asynchronous part of checkpoint {} could not be completed because it was closed before.",
taskName,
checkpointMetaData.getCheckpointId());
}
} catch (Exception e) {
if (StreamTask.LOG.isDebugEnabled()) {
StreamTask.LOG.debug("{} - asynchronous part of checkpoint {} could not be completed.",
taskName,
checkpointMetaData.getCheckpointId(),
e);
}
handleExecutionException(e);
} finally {
closeableRegistry.unregisterCloseable(this);
FileSystemSafetyNet.closeSafetyNetAndGuardedResourcesForThread();
}
}
private void reportCompletedSnapshotStates(
TaskStateSnapshot acknowledgedTaskStateSnapshot,
TaskStateSnapshot localTaskStateSnapshot,
long asyncDurationMillis) {
boolean hasAckState = acknowledgedTaskStateSnapshot.hasState();
boolean hasLocalState = localTaskStateSnapshot.hasState();
Preconditions.checkState(hasAckState || !hasLocalState,
"Found cached state but no corresponding primary state is reported to the job " +
"manager. This indicates a problem.");
taskEnvironment.getTaskStateManager().reportTaskStateSnapshots(
checkpointMetaData,
checkpointMetrics,
hasAckState ? acknowledgedTaskStateSnapshot : null,
hasLocalState ? localTaskStateSnapshot : null);
StreamTask.LOG.debug("{} - finished asynchronous part of checkpoint {}. Asynchronous duration: {} ms",
taskName, checkpointMetaData.getCheckpointId(), asyncDurationMillis);
StreamTask.LOG.trace("{} - reported the following states in snapshot for checkpoint {}: {}.",
taskName, checkpointMetaData.getCheckpointId(), acknowledgedTaskStateSnapshot);
}
// Transitions the checkpoint to DISCARDED (racing against close()/completion), cleans up
// partial state exactly once, and declines the checkpoint at the task environment.
private void handleExecutionException(Exception e) {
    boolean didCleanup = false;
    AsyncCheckpointState currentState = asyncCheckpointState.get();
    // CAS loop: only the thread that successfully flips the state to DISCARDED
    // performs cleanup and the decline call.
    while (AsyncCheckpointState.DISCARDED != currentState) {
        if (asyncCheckpointState.compareAndSet(currentState, AsyncCheckpointState.DISCARDED)) {
            didCleanup = true;
            try {
                cleanup();
            } catch (Exception cleanupException) {
                // Keep the original failure primary; attach cleanup trouble as suppressed.
                e.addSuppressed(cleanupException);
            }
            Exception checkpointException = new Exception(
                "Could not materialize checkpoint " + checkpointMetaData.getCheckpointId() + " for operator " +
                    taskName + '.',
                e);
            try {
                taskEnvironment.declineCheckpoint(checkpointMetaData.getCheckpointId(), checkpointException);
            } catch (Exception unhandled) {
                // Declining itself failed; surface it through the async exception handler instead.
                AsynchronousException asyncException = new AsynchronousException(unhandled);
                asyncExceptionHandler.handleAsyncException("Failure in asynchronous checkpoint materialization", asyncException);
            }
            // Terminate the loop after the successful transition.
            currentState = AsyncCheckpointState.DISCARDED;
        } else {
            // Lost the CAS; re-read the state (another thread may have discarded already).
            currentState = asyncCheckpointState.get();
        }
    }
    if (!didCleanup) {
        // Another thread already discarded; this exception is only a follow-up.
        StreamTask.LOG.trace("Caught followup exception from a failed checkpoint thread. This can be ignored.", e);
    }
}
@Override
public void close() {
    // Only the caller that wins the RUNNING -> DISCARDED transition may release resources;
    // otherwise the checkpoint already completed and its handles must not be cleaned up.
    if (!asyncCheckpointState.compareAndSet(AsyncCheckpointState.RUNNING, AsyncCheckpointState.DISCARDED)) {
        logFailedCleanupAttempt();
        return;
    }
    try {
        cleanup();
    } catch (Exception cleanupException) {
        StreamTask.LOG.warn("Could not properly clean up the async checkpoint runnable.", cleanupException);
    }
}
// Logs that cleanup was skipped because the checkpoint already completed,
// so its state handles are owned elsewhere and must not be discarded here.
private void logFailedCleanupAttempt() {
    StreamTask.LOG.debug("{} - asynchronous checkpointing operation for checkpoint {} has " +
        "already been completed. Thus, the state handles are not cleaned up.",
        taskName,
        checkpointMetaData.getCheckpointId());
}
} | class AsyncCheckpointRunnable implements Runnable, Closeable {
// Logger shared by all async checkpoint runnables.
public static final Logger LOG = LoggerFactory.getLogger(AsyncCheckpointRunnable.class);
// Owning task's name, used only in log and error messages.
private final String taskName;
// Registry of the owning task; registering here lets a task-level close cancel this runnable.
private final CloseableRegistry closeableRegistry;
private final Environment taskEnvironment;
// Lifecycle of the asynchronous checkpoint part; all transitions go through CAS on asyncCheckpointState.
private enum AsyncCheckpointState {
    RUNNING,
    DISCARDED,
    COMPLETED
}
private final AsyncExceptionHandler asyncExceptionHandler;
// Per-operator snapshot futures still being materialized, keyed by operator id.
private final Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress;
private final CheckpointMetaData checkpointMetaData;
private final CheckpointMetrics checkpointMetrics;
// System.nanoTime() at which the async part started; used to compute the async duration metric.
private final long asyncStartNanos;
private final AtomicReference<AsyncCheckpointState> asyncCheckpointState = new AtomicReference<>(AsyncCheckpointState.RUNNING);
/**
 * Creates the runnable that finalizes and reports the given in-progress operator snapshots.
 * All reference arguments are required (checked non-null).
 */
AsyncCheckpointRunnable(
    Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress,
    CheckpointMetaData checkpointMetaData,
    CheckpointMetrics checkpointMetrics,
    long asyncStartNanos,
    String taskName,
    CloseableRegistry closeableRegistry,
    Environment taskEnvironment,
    AsyncExceptionHandler asyncExceptionHandler) {
    this.operatorSnapshotsInProgress = checkNotNull(operatorSnapshotsInProgress);
    this.checkpointMetaData = checkNotNull(checkpointMetaData);
    this.checkpointMetrics = checkNotNull(checkpointMetrics);
    this.asyncStartNanos = asyncStartNanos;
    this.taskName = checkNotNull(taskName);
    this.closeableRegistry = checkNotNull(closeableRegistry);
    this.taskEnvironment = checkNotNull(taskEnvironment);
    this.asyncExceptionHandler = checkNotNull(asyncExceptionHandler);
}
@Override
public void run() {
    // Guard file-system accesses of this thread with a safety net that closes leaked streams.
    FileSystemSafetyNet.initializeSafetyNetForThread();
    try {
        // Register with the task's registry so a task-level close also cancels this runnable.
        closeableRegistry.registerCloseable(this);
        TaskStateSnapshot jobManagerTaskOperatorSubtaskStates = new TaskStateSnapshot(operatorSnapshotsInProgress.size());
        TaskStateSnapshot localTaskOperatorSubtaskStates = new TaskStateSnapshot(operatorSnapshotsInProgress.size());
        // Finalize each operator's snapshot futures and split the result into
        // job-manager-owned and task-local state.
        for (Map.Entry<OperatorID, OperatorSnapshotFutures> entry : operatorSnapshotsInProgress.entrySet()) {
            OperatorID operatorID = entry.getKey();
            OperatorSnapshotFutures snapshotInProgress = entry.getValue();
            OperatorSnapshotFinalizer finalizedSnapshots =
                new OperatorSnapshotFinalizer(snapshotInProgress);
            jobManagerTaskOperatorSubtaskStates.putSubtaskStateByOperatorID(
                operatorID,
                finalizedSnapshots.getJobManagerOwnedState());
            localTaskOperatorSubtaskStates.putSubtaskStateByOperatorID(
                operatorID,
                finalizedSnapshots.getTaskLocalState());
        }
        final long asyncEndNanos = System.nanoTime();
        final long asyncDurationMillis = (asyncEndNanos - asyncStartNanos) / 1_000_000L;
        checkpointMetrics.setAsyncDurationMillis(asyncDurationMillis);
        // Only report if we win the race against close(); otherwise the checkpoint was cancelled.
        if (asyncCheckpointState.compareAndSet(AsyncCheckpointState.RUNNING, AsyncCheckpointState.COMPLETED)) {
            reportCompletedSnapshotStates(
                jobManagerTaskOperatorSubtaskStates,
                localTaskOperatorSubtaskStates,
                asyncDurationMillis);
        } else {
            LOG.debug("{} - asynchronous part of checkpoint {} could not be completed because it was closed before.",
                taskName,
                checkpointMetaData.getCheckpointId());
        }
    } catch (Exception e) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("{} - asynchronous part of checkpoint {} could not be completed.",
                taskName,
                checkpointMetaData.getCheckpointId(),
                e);
        }
        // Discards partial state and declines the checkpoint exactly once.
        handleExecutionException(e);
    } finally {
        closeableRegistry.unregisterCloseable(this);
        FileSystemSafetyNet.closeSafetyNetAndGuardedResourcesForThread();
    }
}
private void reportCompletedSnapshotStates(
        TaskStateSnapshot acknowledgedTaskStateSnapshot,
        TaskStateSnapshot localTaskStateSnapshot,
        long asyncDurationMillis) {
    // Primary (job-manager) state without local state is fine; the reverse is a bug.
    final boolean hasPrimaryState = acknowledgedTaskStateSnapshot.hasState();
    final boolean hasSecondaryState = localTaskStateSnapshot.hasState();
    Preconditions.checkState(hasPrimaryState || !hasSecondaryState,
        "Found cached state but no corresponding primary state is reported to the job " +
            "manager. This indicates a problem.");
    // Report null in place of an empty snapshot so downstream can tell "stateless" apart.
    taskEnvironment.getTaskStateManager().reportTaskStateSnapshots(
        checkpointMetaData,
        checkpointMetrics,
        hasPrimaryState ? acknowledgedTaskStateSnapshot : null,
        hasSecondaryState ? localTaskStateSnapshot : null);
    LOG.debug("{} - finished asynchronous part of checkpoint {}. Asynchronous duration: {} ms",
        taskName, checkpointMetaData.getCheckpointId(), asyncDurationMillis);
    LOG.trace("{} - reported the following states in snapshot for checkpoint {}: {}.",
        taskName, checkpointMetaData.getCheckpointId(), acknowledgedTaskStateSnapshot);
}
// Moves the checkpoint to DISCARDED (at most once, via CAS), cleans up partial state,
// and declines the checkpoint so the job manager learns about the failure.
private void handleExecutionException(Exception e) {
    boolean didCleanup = false;
    AsyncCheckpointState currentState = asyncCheckpointState.get();
    // Loop until the state is DISCARDED; only the winning CAS performs the side effects.
    while (AsyncCheckpointState.DISCARDED != currentState) {
        if (asyncCheckpointState.compareAndSet(currentState, AsyncCheckpointState.DISCARDED)) {
            didCleanup = true;
            try {
                cleanup();
            } catch (Exception cleanupException) {
                // Preserve the original failure; record cleanup trouble as suppressed.
                e.addSuppressed(cleanupException);
            }
            Exception checkpointException = new Exception(
                "Could not materialize checkpoint " + checkpointMetaData.getCheckpointId() + " for operator " +
                    taskName + '.',
                e);
            try {
                taskEnvironment.declineCheckpoint(checkpointMetaData.getCheckpointId(), checkpointException);
            } catch (Exception unhandled) {
                // Could not decline; escalate through the async exception handler.
                AsynchronousException asyncException = new AsynchronousException(unhandled);
                asyncExceptionHandler.handleAsyncException("Failure in asynchronous checkpoint materialization", asyncException);
            }
            // Exit the loop after the successful transition.
            currentState = AsyncCheckpointState.DISCARDED;
        } else {
            // CAS lost; reload and re-check (someone else may have discarded already).
            currentState = asyncCheckpointState.get();
        }
    }
    if (!didCleanup) {
        // The discard happened elsewhere; this exception is only a follow-up.
        LOG.trace("Caught followup exception from a failed checkpoint thread. This can be ignored.", e);
    }
}
@Override
public void close() {
    // Losing the RUNNING -> DISCARDED race means the checkpoint completed; its
    // state handles are owned elsewhere and must not be cleaned up here.
    if (!asyncCheckpointState.compareAndSet(AsyncCheckpointState.RUNNING, AsyncCheckpointState.DISCARDED)) {
        logFailedCleanupAttempt();
        return;
    }
    try {
        cleanup();
    } catch (Exception cleanupException) {
        LOG.warn("Could not properly clean up the async checkpoint runnable.", cleanupException);
    }
}
// Records (debug level) that cleanup was intentionally skipped for an already-completed checkpoint.
private void logFailedCleanupAttempt() {
    LOG.debug("{} - asynchronous checkpointing operation for checkpoint {} has " +
        "already been completed. Thus, the state handles are not cleaned up.",
        taskName,
        checkpointMetaData.getCheckpointId());
}
} |
I can go either way. It seemed like something a customer might be curious about so I went with INFO. I think of VERBOSE as for SDK devs basically. | public MsalToken authenticateWithPublicClientCache(TokenRequestContext request, IAccount account) {
// Resolve the (possibly CAE-specific) public client for this request.
PublicClientApplication pc = getPublicClientInstance(request).getValue();
SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
    new HashSet<>(request.getScopes()));
if (request.getClaims() != null) {
    // Additional claims invalidate cached tokens, so force MSAL to refresh.
    ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
    parametersBuilder.claims(customClaimRequest);
    parametersBuilder.forceRefresh(true);
}
if (account != null) {
    parametersBuilder = parametersBuilder.account(account);
}
parametersBuilder.tenant(
    IdentityUtil.resolveTenantId(tenantId, request, options));
try {
    MsalToken accessToken = new MsalToken(pc.acquireTokenSilently(parametersBuilder.build()).get());
    // Use the cached token only while it is comfortably before expiry;
    // otherwise fall through to a forced refresh below.
    if (OffsetDateTime.now().isBefore(accessToken.getExpiresAt().minus(REFRESH_OFFSET))) {
        return accessToken;
    }
} catch (MalformedURLException e) {
    throw LOGGER.logExceptionAsError(new RuntimeException(e.getMessage(), e));
} catch (ExecutionException | InterruptedException e) {
    if (e.getMessage().contains("Token not found in the cache")) {
        // Cache misses are routine control flow, not user-actionable events; log at
        // verbose for consistency with the other cache-lookup paths in this client.
        LOGGER.verbose("Token not found in the MSAL cache.");
        return null;
    } else {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException(e.getMessage(), null, e));
    }
}
// Cached token missing or near expiry: retry the silent call with forceRefresh.
SilentParameters.SilentParametersBuilder forceParametersBuilder = SilentParameters.builder(
    new HashSet<>(request.getScopes())).forceRefresh(true);
if (request.isCaeEnabled() && request.getClaims() != null) {
    ClaimsRequest customClaimRequest = CustomClaimRequest
        .formatAsClaimsRequest(request.getClaims());
    forceParametersBuilder.claims(customClaimRequest);
}
if (account != null) {
    forceParametersBuilder = forceParametersBuilder.account(account);
}
forceParametersBuilder.tenant(
    IdentityUtil.resolveTenantId(tenantId, request, options));
try {
    return new MsalToken(pc.acquireTokenSilently(forceParametersBuilder.build()).get());
} catch (MalformedURLException e) {
    throw LOGGER.logExceptionAsError(new RuntimeException(e.getMessage(), e));
} catch (ExecutionException | InterruptedException e) {
    if (e.getMessage().contains("Token not found in the cache")) {
        LOGGER.verbose("Token not found in the MSAL cache.");
        return null;
    } else {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException(e.getMessage(), null, e));
    }
}
} | LOGGER.info("Token not found in the MSAL cache."); | public MsalToken authenticateWithPublicClientCache(TokenRequestContext request, IAccount account) {
// Pick the CAE-aware or standard public client for this request.
PublicClientApplication pc = getPublicClientInstance(request).getValue();
SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
    new HashSet<>(request.getScopes()));
if (request.getClaims() != null) {
    // Extra claims invalidate any cached token, so force MSAL to refresh.
    ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
    parametersBuilder.claims(customClaimRequest);
    parametersBuilder.forceRefresh(true);
}
if (account != null) {
    parametersBuilder = parametersBuilder.account(account);
}
parametersBuilder.tenant(
    IdentityUtil.resolveTenantId(tenantId, request, options));
try {
    MsalToken accessToken = new MsalToken(pc.acquireTokenSilently(parametersBuilder.build()).get());
    // Only return the cached token while it is comfortably before expiry.
    if (OffsetDateTime.now().isBefore(accessToken.getExpiresAt().minus(REFRESH_OFFSET))) {
        return accessToken;
    }
} catch (MalformedURLException e) {
    throw LOGGER.logExceptionAsError(new RuntimeException(e.getMessage(), e));
} catch (ExecutionException | InterruptedException e) {
    // NOTE(review): InterruptedException is handled without re-interrupting the thread — confirm intended.
    if (e.getMessage().contains("Token not found in the cache")) {
        // Cache misses are routine; verbose keeps customer logs quiet.
        LOGGER.verbose("Token not found in the MSAL cache.");
        return null;
    } else {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException(e.getMessage(), null, e));
    }
}
// Cache miss or near-expiry token: retry the silent call with forceRefresh.
SilentParameters.SilentParametersBuilder forceParametersBuilder = SilentParameters.builder(
    new HashSet<>(request.getScopes())).forceRefresh(true);
if (request.isCaeEnabled() && request.getClaims() != null) {
    ClaimsRequest customClaimRequest = CustomClaimRequest
        .formatAsClaimsRequest(request.getClaims());
    forceParametersBuilder.claims(customClaimRequest);
}
if (account != null) {
    forceParametersBuilder = forceParametersBuilder.account(account);
}
forceParametersBuilder.tenant(
    IdentityUtil.resolveTenantId(tenantId, request, options));
try {
    return new MsalToken(pc.acquireTokenSilently(forceParametersBuilder.build()).get());
} catch (MalformedURLException e) {
    throw LOGGER.logExceptionAsError(new RuntimeException(e.getMessage(), e));
} catch (ExecutionException | InterruptedException e) {
    if (e.getMessage().contains("Token not found in the cache")) {
        LOGGER.verbose("Token not found in the MSAL cache.");
        return null;
    } else {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException(e.getMessage(), null, e));
    }
}
}
private final SynchronousAccessor<PublicClientApplication> publicClientApplicationAccessor;
// Lazily-initialized MSAL client instances; separate accessors keep the CAE
// (Continuous Access Evaluation) token caches isolated from the standard ones.
private final SynchronousAccessor<PublicClientApplication> publicClientApplicationAccessor;
private final SynchronousAccessor<PublicClientApplication> publicClientApplicationAccessorWithCae;
private final SynchronousAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessor;
private final SynchronousAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessorWithCae;
private final SynchronousAccessor<ConfidentialClientApplication> managedIdentityConfidentialClientApplicationAccessor;
private final SynchronousAccessor<ConfidentialClientApplication> workloadIdentityConfidentialClientApplicationAccessor;
// Caches the client assertion read from file; refreshed on the assertion timeout (default 5 min).
private final SynchronousAccessor<String> clientAssertionAccessor;
/**
 * Creates an IdentitySyncClient with the given options.
 *
 * @param tenantId the tenant ID of the application.
 * @param clientId the client ID of the application.
 * @param clientSecret the client secret of the application.
 * @param certificatePath the path to the PKCS12 or PEM certificate of the application.
 * @param clientAssertionFilePath the path to the file holding the client assertion token.
 * @param resourceId the resource ID of the application
 * @param clientAssertionSupplier the supplier of the client assertion token.
 * @param certificate the PKCS12 or PEM certificate of the application.
 * @param certificatePassword the password protecting the PFX certificate.
 * @param isSharedTokenCacheCredential Indicate whether the credential is
 * {@link com.azure.identity.SharedTokenCacheCredential} or not.
 * @param clientAssertionTimeout the timeout to use for the client assertion.
 * @param options the options configuring the client.
 */
IdentitySyncClient(String tenantId, String clientId, String clientSecret, String certificatePath,
    String clientAssertionFilePath, String resourceId, Supplier<String> clientAssertionSupplier,
    byte[] certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
    Duration clientAssertionTimeout, IdentityClientOptions options) {
    super(tenantId, clientId, clientSecret, certificatePath, clientAssertionFilePath, resourceId, clientAssertionSupplier,
        certificate, certificatePassword, isSharedTokenCacheCredential, clientAssertionTimeout, options);
    this.publicClientApplicationAccessor = new SynchronousAccessor<>(() ->
        this.getPublicClient(isSharedTokenCacheCredential, false));
    this.publicClientApplicationAccessorWithCae = new SynchronousAccessor<>(() ->
        this.getPublicClient(isSharedTokenCacheCredential, true));
    this.confidentialClientApplicationAccessor = new SynchronousAccessor<>(() ->
        this.getConfidentialClient(false));
    this.confidentialClientApplicationAccessorWithCae = new SynchronousAccessor<>(() ->
        this.getConfidentialClient(true));
    this.managedIdentityConfidentialClientApplicationAccessor = new SynchronousAccessor<>(() ->
        this.getManagedIdentityConfidentialClient());
    this.workloadIdentityConfidentialClientApplicationAccessor = new SynchronousAccessor<>(() ->
        this.getWorkloadIdentityConfidentialClient());
    this.clientAssertionAccessor = clientAssertionTimeout == null
        ? new SynchronousAccessor<>(() -> parseClientAssertion(), Duration.ofMinutes(5))
        : new SynchronousAccessor<>(() -> parseClientAssertion(), clientAssertionTimeout);
}
// Reads the client assertion token from the configured file as UTF-8 text.
private String parseClientAssertion() {
    // Fail fast when no assertion file was configured for this credential.
    if (clientAssertionFilePath == null) {
        throw LOGGER.logExceptionAsError(new IllegalStateException(
            "Client Assertion File Path is not provided."
                + " It should be provided to authenticate with client assertion."
        ));
    }
    try {
        byte[] fileBytes = Files.readAllBytes(Paths.get(clientAssertionFilePath));
        return new String(fileBytes, StandardCharsets.UTF_8);
    } catch (IOException e) {
        throw LOGGER.logExceptionAsError(new RuntimeException(e));
    }
}
/**
 * Synchronously acquire a token from Active Directory with a client secret
 * (or, when configured, a client assertion).
 *
 * @param request the details of the token request
 * @return the acquired AccessToken
 */
public AccessToken authenticateWithConfidentialClient(TokenRequestContext request) {
    ConfidentialClientApplication confidentialClient = getConfidentialClientInstance(request).getValue();
    ClientCredentialParameters.ClientCredentialParametersBuilder builder =
        ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
            .tenant(IdentityUtil
                .resolveTenantId(tenantId, request, options));
    if (clientAssertionSupplier != null) {
        // A configured assertion supplier overrides the default client credential.
        builder.clientCredential(ClientCredentialFactory
            .createFromClientAssertion(clientAssertionSupplier.get()));
    }
    try {
        // Blocks on the MSAL future; this client is the synchronous variant.
        return new MsalToken(confidentialClient.acquireToken(builder.build()).get());
    } catch (InterruptedException | ExecutionException e) {
        throw LOGGER.logExceptionAsError(new RuntimeException(e));
    }
}
// Selects the confidential client whose token cache matches the request's CAE setting.
private SynchronousAccessor<ConfidentialClientApplication> getConfidentialClientInstance(TokenRequestContext request) {
    if (request.isCaeEnabled()) {
        return confidentialClientApplicationAccessorWithCae;
    }
    return confidentialClientApplicationAccessor;
}
// Selects the public client whose token cache matches the request's CAE setting.
private SynchronousAccessor<PublicClientApplication> getPublicClientInstance(TokenRequestContext request) {
    if (request.isCaeEnabled()) {
        return publicClientApplicationAccessorWithCae;
    }
    return publicClientApplicationAccessor;
}
/**
 * Synchronously acquire a token via the managed identity confidential client.
 * Any failure is surfaced as a CredentialUnavailableException so chained
 * credentials can fall through to the next credential.
 *
 * @param request the details of the token request
 * @return the acquired AccessToken
 */
public AccessToken authenticateWithManagedIdentityConfidentialClient(TokenRequestContext request) {
    ConfidentialClientApplication confidentialClient = managedIdentityConfidentialClientApplicationAccessor.getValue();
    ClientCredentialParameters.ClientCredentialParametersBuilder builder =
        ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
            .tenant(IdentityUtil
                .resolveTenantId(tenantId, request, options));
    try {
        return new MsalToken(confidentialClient.acquireToken(builder.build()).get());
    } catch (Exception e) {
        throw new CredentialUnavailableException("Managed Identity authentication is not available.", e);
    }
}
/**
 * Attempt to satisfy the request from the confidential client's token cache.
 *
 * @param request the details of the token request
 * @return An access token, or null if no token exists in the cache.
 */
@SuppressWarnings("deprecation")
public AccessToken authenticateWithConfidentialClientCache(TokenRequestContext request) {
    ConfidentialClientApplication confidentialClientApplication = getConfidentialClientInstance(request).getValue();
    SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(new HashSet<>(request.getScopes()))
        .tenant(IdentityUtil.resolveTenantId(tenantId, request, options));
    if (request.isCaeEnabled() && request.getClaims() != null) {
        // A CAE claims challenge invalidates cached tokens, so force a refresh.
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        parametersBuilder.claims(customClaimRequest);
        parametersBuilder.forceRefresh(true);
    }
    try {
        IAuthenticationResult authenticationResult = confidentialClientApplication.acquireTokenSilently(parametersBuilder.build()).get();
        AccessToken accessToken = new MsalToken(authenticationResult);
        // Treat a token close to expiry as unusable so the caller acquires a fresh one.
        if (OffsetDateTime.now().isBefore(accessToken.getExpiresAt().minus(REFRESH_OFFSET))) {
            return accessToken;
        } else {
            throw new IllegalStateException("Received token is close to expiry.");
        }
    } catch (MalformedURLException e) {
        throw LOGGER.logExceptionAsError(new RuntimeException(e.getMessage(), e));
    } catch (ExecutionException | InterruptedException e) {
        if (e.getMessage().contains("Token not found in the cache")) {
            // Cache misses are routine; log at verbose for consistency with the
            // public-client cache path rather than surfacing INFO noise to customers.
            LOGGER.verbose("Token not found in the MSAL cache.");
            return null;
        } else {
            throw LOGGER.logExceptionAsError(new ClientAuthenticationException(e.getMessage(), null, e));
        }
    }
}
/**
* Acquire a token from the currently logged in client.
*
* @param request the details of the token request
* @param account the account used to log in to acquire the last token
* @return An access token, or null if no token exists in the cache.
*/
@SuppressWarnings("deprecation")
/**
* Asynchronously acquire a token from Active Directory with a username and a password.
*
* @param request the details of the token request
* @param username the username of the user
* @param password the password of the user
* @return a Publisher that emits an AccessToken
*/
public MsalToken authenticateWithUsernamePassword(TokenRequestContext request,
String username, String password) {
PublicClientApplication pc = getPublicClientInstance(request).getValue();
UserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder =
buildUsernamePasswordFlowParameters(request, username, password);
try {
return new MsalToken(pc.acquireToken(userNamePasswordParametersBuilder.build()).get());
} catch (Exception e) {
throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Failed to acquire token with username and "
+ "password. To mitigate this issue, please refer to the troubleshooting guidelines "
+ "here at https:
null, e));
}
}
/**
 * Synchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide
 * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a
 * different device.
 *
 * @param request the details of the token request
 * @param deviceCodeConsumer the user provided closure that will consume the device code challenge
 * @return the acquired MsalToken once the device challenge is met
 * @throws ClientAuthenticationException if the device code expires or authentication fails
 */
public MsalToken authenticateWithDeviceCode(TokenRequestContext request,
    Consumer<DeviceCodeInfo> deviceCodeConsumer) {
    PublicClientApplication pc = getPublicClientInstance(request).getValue();
    DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder = buildDeviceCodeFlowParameters(request, deviceCodeConsumer);
    try {
        // Blocks until the user completes the device-code challenge or the code expires.
        return new MsalToken(pc.acquireToken(parametersBuilder.build()).get());
    } catch (Exception e) {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Failed to acquire token with device code.", null, e));
    }
}
/**
 * Synchronously acquire a token from Active Directory by opening a browser and waiting for the user to login. The
 * credential will run a minimal local HttpServer at the given port, so {@code http://localhost:{port}} must be
 * listed as a valid reply URL for the application. (URL reconstructed — the original javadoc text was truncated.)
 *
 * @param request the details of the token request
 * @param port the port on which the HTTP server is listening
 * @param redirectUrl the redirect URL to listen on and receive security code
 * @param loginHint the username suggestion to pre-fill the login page's username/email address field
 * @return the acquired MsalToken
 */
public MsalToken authenticateWithBrowserInteraction(TokenRequestContext request, Integer port,
    String redirectUrl, String loginHint) {
    URI redirectUri;
    String redirect;
    // An explicit port wins over a redirect URL; plain localhost is the fallback.
    if (port != null) {
        redirect = HTTP_LOCALHOST + ":" + port;
    } else if (redirectUrl != null) {
        redirect = redirectUrl;
    } else {
        redirect = HTTP_LOCALHOST;
    }
    try {
        redirectUri = new URI(redirect);
    } catch (URISyntaxException e) {
        throw LOGGER.logExceptionAsError(new RuntimeException(e));
    }
    InteractiveRequestParameters.InteractiveRequestParametersBuilder builder =
        buildInteractiveRequestParameters(request, loginHint, redirectUri);
    PublicClientApplication pc = getPublicClientInstance(request).getValue();
    try {
        // Blocks until the interactive browser flow completes.
        return new MsalToken(pc.acquireToken(builder.build()).get());
    } catch (Exception e) {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
            "Failed to acquire token with Interactive Browser Authentication.", null, e));
    }
}
/**
 * Synchronously acquire a token by shelling out to the Azure CLI ("az account get-access-token").
 *
 * @param request the details of the token request
 * @return the acquired AccessToken
 */
public AccessToken authenticateWithAzureCli(TokenRequestContext request) {
    // The CLI takes a single resource, so collapse the requested scopes first and validate it.
    String resource = ScopeUtil.scopesToResource(request.getScopes());
    try {
        ScopeUtil.validateScope(resource);
    } catch (IllegalArgumentException ex) {
        throw LOGGER.logExceptionAsError(ex);
    }
    StringBuilder command = new StringBuilder("az account get-access-token --output json --resource ");
    command.append(resource);
    String resolvedTenant = IdentityUtil.resolveTenantId(tenantId, request, options);
    // Guard against shell-unsafe characters before the tenant is placed on the command line.
    ValidationUtil.validateTenantIdCharacterRange(resolvedTenant, LOGGER);
    boolean tenantSpecified = !CoreUtils.isNullOrEmpty(resolvedTenant)
        && !resolvedTenant.equals(IdentityUtil.DEFAULT_TENANT);
    if (tenantSpecified) {
        command.append(" --tenant ").append(resolvedTenant);
    }
    try {
        return getTokenFromAzureCLIAuthentication(command);
    } catch (RuntimeException e) {
        if (e instanceof CredentialUnavailableException) {
            throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, (CredentialUnavailableException) e);
        }
        throw LOGGER.logExceptionAsError(e);
    }
}
/**
 * Synchronously acquire a token by shelling out to the Azure Developer CLI ("azd auth token").
 *
 * @param request the details of the token request
 * @return the acquired AccessToken
 * @throws IllegalArgumentException if the request carries no scopes or an invalid scope
 */
public AccessToken authenticateWithAzureDeveloperCli(TokenRequestContext request) {
    StringBuilder azdCommand = new StringBuilder("azd auth token --output json --scope ");
    List<String> scopes = request.getScopes();
    // isEmpty() instead of size() == 0 — idiomatic and equivalent.
    if (scopes.isEmpty()) {
        throw LOGGER.logExceptionAsError(new IllegalArgumentException("Missing scope in request"));
    }
    // Validate every scope before it is placed on the command line.
    scopes.forEach(scope -> {
        try {
            ScopeUtil.validateScope(scope);
        } catch (IllegalArgumentException ex) {
            throw LOGGER.logExceptionAsError(ex);
        }
    });
    azdCommand.append(String.join(" --scope ", scopes));
    String tenant = IdentityUtil.resolveTenantId(tenantId, request, options);
    // Guard against shell-unsafe characters in the tenant id.
    ValidationUtil.validateTenantIdCharacterRange(tenant, LOGGER);
    if (!CoreUtils.isNullOrEmpty(tenant) && !tenant.equals(IdentityUtil.DEFAULT_TENANT)) {
        azdCommand.append(" --tenant-id ").append(tenant);
    }
    try {
        return getTokenFromAzureDeveloperCLIAuthentication(azdCommand);
    } catch (RuntimeException e) {
        // CredentialUnavailableException keeps credential chains falling through.
        throw (e instanceof CredentialUnavailableException
            ? LoggingUtil.logCredentialUnavailableException(LOGGER, options, (CredentialUnavailableException) e)
            : LOGGER.logExceptionAsError(e));
    }
}
/**
 * Synchronously acquire a token using the On-Behalf-Of flow. (Previous javadoc incorrectly
 * described Azure PowerShell authentication.) Parameters are built by
 * {@code buildOBOFlowParameters} — presumably from the user assertion carried on the
 * request; confirm against IdentityClientBase.
 *
 * @param request the details of the token request
 * @return the acquired access token
 * @throws ClientAuthenticationException if the On-Behalf-Of exchange fails
 */
public AccessToken authenticateWithOBO(TokenRequestContext request) {
    ConfidentialClientApplication cc = getConfidentialClientInstance(request).getValue();
    try {
        return new MsalToken(cc.acquireToken(buildOBOFlowParameters(request)).get());
    } catch (Exception e) {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Failed to acquire token with On Behalf Of Authentication.", null, e));
    }
}
/**
 * Exchanges the cached client assertion (read from the configured assertion file)
 * for an access token.
 * NOTE(review): IOException is wrapped as UncheckedIOException here rather than routed
 * through LOGGER.logExceptionAsError like the other paths — confirm this is intentional.
 *
 * @param request the details of the token request
 * @return the acquired AccessToken
 */
public AccessToken authenticateWithExchangeTokenSync(TokenRequestContext request) {
    try {
        String assertionToken = clientAssertionAccessor.getValue();
        return authenticateWithExchangeTokenHelper(request, assertionToken);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
// Adapts this client's synchronous token-exchange flow into MSAL's app-token-provider
// callback, used by the workload identity confidential client.
Function<AppTokenProviderParameters, CompletableFuture<TokenProviderResult>> getWorkloadIdentityTokenProvider() {
    return appTokenProviderParameters -> {
        // Rebuild a TokenRequestContext from MSAL's callback parameters.
        TokenRequestContext trc = new TokenRequestContext()
            .setScopes(new ArrayList<>(appTokenProviderParameters.scopes))
            .setClaims(appTokenProviderParameters.claims)
            .setTenantId(appTokenProviderParameters.tenantId);
        // Performed eagerly (and synchronously) before the future is created.
        AccessToken accessToken = authenticateWithExchangeTokenSync(trc);
        Supplier<TokenProviderResult> tokenProviderResultSupplier = () -> {
            TokenProviderResult result = new TokenProviderResult();
            result.setAccessToken(accessToken.getToken());
            result.setTenantId(trc.getTenantId());
            result.setExpiresInSeconds(accessToken.getExpiresAt().toEpochSecond());
            return result;
        };
        // Honor a user-supplied executor; otherwise fall back to the common pool.
        return options.getExecutorService() != null
            ? CompletableFuture.supplyAsync(tokenProviderResultSupplier, options.getExecutorService())
            : CompletableFuture.supplyAsync(tokenProviderResultSupplier);
    };
}
/**
 * Synchronously acquire a token via the workload identity confidential client.
 * Failures surface as CredentialUnavailableException so credential chains can continue.
 *
 * @param request the details of the token request
 * @return the acquired AccessToken
 */
public AccessToken authenticateWithWorkloadIdentityConfidentialClient(TokenRequestContext request) {
    ConfidentialClientApplication workloadClient =
        workloadIdentityConfidentialClientApplicationAccessor.getValue();
    try {
        ClientCredentialParameters.ClientCredentialParametersBuilder parameters =
            ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
                .tenant(IdentityUtil.resolveTenantId(tenantId, request, options));
        IAuthenticationResult result = workloadClient.acquireToken(parameters.build()).get();
        return new MsalToken(result);
    } catch (Exception e) {
        throw new CredentialUnavailableException("Managed Identity authentication is not available.", e);
    }
}
/**
 * Get the configured identity client options.
 *
 * @return the client options.
 */
public IdentityClientOptions getIdentityClientOptions() {
    return options;
}
@Override
Mono<AccessToken> getTokenFromTargetManagedIdentity(TokenRequestContext tokenRequestContext) {
    // The synchronous client does not implement this reactive path and returns null.
    // NOTE(review): confirm callers of this override tolerate a null Mono.
    return null;
}
} | class IdentitySyncClient extends IdentityClientBase {
private final SynchronousAccessor<PublicClientApplication> publicClientApplicationAccessor;
// Lazily-built MSAL client instances; the *WithCae variants keep CAE token caches
// separate from the standard ones.
private final SynchronousAccessor<PublicClientApplication> publicClientApplicationAccessor;
private final SynchronousAccessor<PublicClientApplication> publicClientApplicationAccessorWithCae;
private final SynchronousAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessor;
private final SynchronousAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessorWithCae;
private final SynchronousAccessor<ConfidentialClientApplication> managedIdentityConfidentialClientApplicationAccessor;
private final SynchronousAccessor<ConfidentialClientApplication> workloadIdentityConfidentialClientApplicationAccessor;
// Client assertion read from file, cached until the assertion timeout elapses (default 5 min).
private final SynchronousAccessor<String> clientAssertionAccessor;
/**
 * Creates an IdentitySyncClient with the given options.
 *
 * @param tenantId the tenant ID of the application.
 * @param clientId the client ID of the application.
 * @param clientSecret the client secret of the application.
 * @param certificatePath the path to the PKCS12 or PEM certificate of the application.
 * @param clientAssertionFilePath the path to the file holding the client assertion token.
 * @param resourceId the resource ID of the application
 * @param clientAssertionSupplier the supplier of the client assertion token.
 * @param certificate the PKCS12 or PEM certificate of the application.
 * @param certificatePassword the password protecting the PFX certificate.
 * @param isSharedTokenCacheCredential Indicate whether the credential is
 * {@link com.azure.identity.SharedTokenCacheCredential} or not.
 * @param clientAssertionTimeout the timeout to use for the client assertion.
 * @param options the options configuring the client.
 */
IdentitySyncClient(String tenantId, String clientId, String clientSecret, String certificatePath,
    String clientAssertionFilePath, String resourceId, Supplier<String> clientAssertionSupplier,
    byte[] certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
    Duration clientAssertionTimeout, IdentityClientOptions options) {
    super(tenantId, clientId, clientSecret, certificatePath, clientAssertionFilePath, resourceId, clientAssertionSupplier,
        certificate, certificatePassword, isSharedTokenCacheCredential, clientAssertionTimeout, options);
    this.publicClientApplicationAccessor = new SynchronousAccessor<>(() ->
        this.getPublicClient(isSharedTokenCacheCredential, false));
    this.publicClientApplicationAccessorWithCae = new SynchronousAccessor<>(() ->
        this.getPublicClient(isSharedTokenCacheCredential, true));
    this.confidentialClientApplicationAccessor = new SynchronousAccessor<>(() ->
        this.getConfidentialClient(false));
    this.confidentialClientApplicationAccessorWithCae = new SynchronousAccessor<>(() ->
        this.getConfidentialClient(true));
    this.managedIdentityConfidentialClientApplicationAccessor = new SynchronousAccessor<>(() ->
        this.getManagedIdentityConfidentialClient());
    this.workloadIdentityConfidentialClientApplicationAccessor = new SynchronousAccessor<>(() ->
        this.getWorkloadIdentityConfidentialClient());
    this.clientAssertionAccessor = clientAssertionTimeout == null
        ? new SynchronousAccessor<>(() -> parseClientAssertion(), Duration.ofMinutes(5))
        : new SynchronousAccessor<>(() -> parseClientAssertion(), clientAssertionTimeout);
}
/**
 * Reads the client assertion from the configured assertion file as UTF-8 text.
 *
 * @return the client assertion read from {@code clientAssertionFilePath}
 * @throws IllegalStateException if no client assertion file path is configured
 * @throws RuntimeException if the file cannot be read
 */
private String parseClientAssertion() {
    if (clientAssertionFilePath == null) {
        throw LOGGER.logExceptionAsError(new IllegalStateException(
            "Client Assertion File Path is not provided."
                + " It should be provided to authenticate with client assertion."
        ));
    }
    try {
        byte[] assertionBytes = Files.readAllBytes(Paths.get(clientAssertionFilePath));
        return new String(assertionBytes, StandardCharsets.UTF_8);
    } catch (IOException e) {
        throw LOGGER.logExceptionAsError(new RuntimeException(e));
    }
}
/**
 * Synchronously acquires a token from Active Directory with the configured confidential
 * client credential, blocking until the token is available.
 * (Note: the previous javadoc said "asynchronously … a Publisher", but this method blocks
 * on the MSAL future and returns the token directly.)
 *
 * @param request the details of the token request
 * @return the acquired {@link AccessToken}
 */
public AccessToken authenticateWithConfidentialClient(TokenRequestContext request) {
    ConfidentialClientApplication confidentialClient = getConfidentialClientInstance(request).getValue();
    ClientCredentialParameters.ClientCredentialParametersBuilder builder =
        ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
            .tenant(IdentityUtil
                .resolveTenantId(tenantId, request, options));
    // A configured client assertion supplier takes precedence as the credential.
    if (clientAssertionSupplier != null) {
        builder.clientCredential(ClientCredentialFactory
            .createFromClientAssertion(clientAssertionSupplier.get()));
    }
    try {
        // acquireToken returns a future; block for the result since this client is synchronous.
        return new MsalToken(confidentialClient.acquireToken(builder.build()).get());
    } catch (InterruptedException | ExecutionException e) {
        throw LOGGER.logExceptionAsError(new RuntimeException(e));
    }
}
/** Returns the confidential client accessor matching the request's CAE setting. */
private SynchronousAccessor<ConfidentialClientApplication> getConfidentialClientInstance(TokenRequestContext request) {
    if (request.isCaeEnabled()) {
        return confidentialClientApplicationAccessorWithCae;
    }
    return confidentialClientApplicationAccessor;
}
/** Returns the public client accessor matching the request's CAE setting. */
private SynchronousAccessor<PublicClientApplication> getPublicClientInstance(TokenRequestContext request) {
    if (request.isCaeEnabled()) {
        return publicClientApplicationAccessorWithCae;
    }
    return publicClientApplicationAccessor;
}
/**
 * Synchronously acquires a token with the managed identity confidential client.
 *
 * @param request the details of the token request
 * @return the acquired {@link AccessToken}
 * @throws CredentialUnavailableException if token acquisition fails for any reason
 */
public AccessToken authenticateWithManagedIdentityConfidentialClient(TokenRequestContext request) {
    ConfidentialClientApplication confidentialClient = managedIdentityConfidentialClientApplicationAccessor.getValue();
    ClientCredentialParameters.ClientCredentialParametersBuilder builder =
        ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
            .tenant(IdentityUtil
                .resolveTenantId(tenantId, request, options));
    try {
        return new MsalToken(confidentialClient.acquireToken(builder.build()).get());
    } catch (Exception e) {
        // Every failure is surfaced as "credential unavailable" — presumably so credential
        // chains can fall through to the next credential; NOTE(review): this also swallows
        // InterruptedException without restoring the interrupt status — confirm intent.
        throw new CredentialUnavailableException("Managed Identity authentication is not available.", e);
    }
}
/**
 * Acquire a token from the confidential client's cache, without hitting the network
 * unless a force-refresh is required by CAE claims.
 *
 * @param request the details of the token request
 * @return An access token, or null if no token exists in the cache.
 */
@SuppressWarnings("deprecation")
public AccessToken authenticateWithConfidentialClientCache(TokenRequestContext request) {
    ConfidentialClientApplication confidentialClientApplication = getConfidentialClientInstance(request).getValue();
    SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(new HashSet<>(request.getScopes()))
        .tenant(IdentityUtil.resolveTenantId(tenantId, request, options));
    // CAE claims invalidate the cached token; force a refresh carrying those claims.
    if (request.isCaeEnabled() && request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        parametersBuilder.claims(customClaimRequest);
        parametersBuilder.forceRefresh(true);
    }
    try {
        IAuthenticationResult authenticationResult = confidentialClientApplication.acquireTokenSilently(parametersBuilder.build()).get();
        AccessToken accessToken = new MsalToken(authenticationResult);
        // Treat tokens close to expiry as a cache miss so a fresh token is fetched.
        if (OffsetDateTime.now().isBefore(accessToken.getExpiresAt().minus(REFRESH_OFFSET))) {
            return accessToken;
        } else {
            throw new IllegalStateException("Received token is close to expiry.");
        }
    } catch (MalformedURLException e) {
        throw LOGGER.logExceptionAsError(new RuntimeException(e.getMessage(), e));
    } catch (ExecutionException | InterruptedException e) {
        if (e instanceof InterruptedException) {
            // Restore the interrupt status so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
        // getMessage() can be null (e.g. an ExecutionException wrapping a message-less cause);
        // guard before matching so a null message doesn't turn into an NPE here.
        String message = e.getMessage();
        if (message != null && message.contains("Token not found in the cache")) {
            LOGGER.verbose("Token not found in the MSAL cache.");
            return null;
        } else {
            throw LOGGER.logExceptionAsError(new ClientAuthenticationException(message, null, e));
        }
    }
}
/**
* Acquire a token from the currently logged in client.
*
* @param request the details of the token request
* @param account the account used to log in to acquire the last token
* @return An access token, or null if no token exists in the cache.
*/
@SuppressWarnings("deprecation")
// NOTE(review): the annotation above may be orphaned from a method elided in this copy
// (the preceding javadoc describes an "account" parameter this method does not have) — confirm.
/**
 * Synchronously acquires a token from Active Directory with a username and a password.
 *
 * @param request the details of the token request
 * @param username the username of the user
 * @param password the password of the user
 * @return the acquired {@link MsalToken}
 * @throws ClientAuthenticationException if token acquisition fails
 */
public MsalToken authenticateWithUsernamePassword(TokenRequestContext request,
String username, String password) {
    PublicClientApplication pc = getPublicClientInstance(request).getValue();
    UserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder =
        buildUsernamePasswordFlowParameters(request, username, password);
    try {
        return new MsalToken(pc.acquireToken(userNamePasswordParametersBuilder.build()).get());
    } catch (Exception e) {
        // NOTE(review): the troubleshooting URL literal below appears truncated in this copy
        // ("https:" with the "//…" tail missing) — restore the full link from upstream.
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Failed to acquire token with username and "
            + "password. To mitigate this issue, please refer to the troubleshooting guidelines "
            + "here at https:
            null, e));
    }
}
/**
 * Acquires a token from Active Directory with a device code challenge. Active Directory issues
 * a device code for login, and the user meets the challenge by authenticating in a browser on
 * the current or a different device. Blocks until the challenge is met or the code expires.
 *
 * @param request the details of the token request
 * @param deviceCodeConsumer the user provided closure that will consume the device code challenge
 * @return the acquired {@link MsalToken} once the device challenge is met
 * @throws ClientAuthenticationException if token acquisition fails or the device code expires
 */
public MsalToken authenticateWithDeviceCode(TokenRequestContext request,
Consumer<DeviceCodeInfo> deviceCodeConsumer) {
    PublicClientApplication publicClient = getPublicClientInstance(request).getValue();
    DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder deviceCodeParametersBuilder =
        buildDeviceCodeFlowParameters(request, deviceCodeConsumer);
    try {
        DeviceCodeFlowParameters deviceCodeParameters = deviceCodeParametersBuilder.build();
        return new MsalToken(publicClient.acquireToken(deviceCodeParameters).get());
    } catch (Exception e) {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Failed to acquire token with device code.", null, e));
    }
}
/**
 * Synchronously acquires a token from Active Directory by opening a browser and waiting for the
 * user to log in. The credential runs a minimal local HTTP server at the given port, so the
 * corresponding localhost redirect URI must be listed as a valid reply URL for the application.
 *
 * @param request the details of the token request
 * @param port the port on which the HTTP server is listening
 * @param redirectUrl the redirect URL to listen on and receive security code
 * @param loginHint the username suggestion to pre-fill the login page's username/email address field
 * @return the acquired {@link MsalToken}
 * @throws ClientAuthenticationException if interactive authentication fails
 */
public MsalToken authenticateWithBrowserInteraction(TokenRequestContext request, Integer port,
String redirectUrl, String loginHint) {
    // Precedence for the reply address: explicit port > explicit redirect URL > plain localhost.
    String redirect = port != null ? HTTP_LOCALHOST + ":" + port
        : redirectUrl != null ? redirectUrl
        : HTTP_LOCALHOST;
    URI redirectUri;
    try {
        redirectUri = new URI(redirect);
    } catch (URISyntaxException e) {
        throw LOGGER.logExceptionAsError(new RuntimeException(e));
    }
    InteractiveRequestParameters.InteractiveRequestParametersBuilder interactiveParametersBuilder =
        buildInteractiveRequestParameters(request, loginHint, redirectUri);
    PublicClientApplication publicClient = getPublicClientInstance(request).getValue();
    try {
        return new MsalToken(publicClient.acquireToken(interactiveParametersBuilder.build()).get());
    } catch (Exception e) {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
            "Failed to acquire token with Interactive Browser Authentication.", null, e));
    }
}
/**
 * Synchronously acquires a token by invoking the Azure CLI
 * ({@code az account get-access-token}).
 *
 * @param request the details of the token request
 * @return the acquired {@link AccessToken}
 * @throws CredentialUnavailableException if the Azure CLI is not available or not logged in
 */
public AccessToken authenticateWithAzureCli(TokenRequestContext request) {
    String resource = ScopeUtil.scopesToResource(request.getScopes());
    // Validate the resource before passing it to an external process.
    try {
        ScopeUtil.validateScope(resource);
    } catch (IllegalArgumentException ex) {
        throw LOGGER.logExceptionAsError(ex);
    }
    StringBuilder azCommand = new StringBuilder("az account get-access-token --output json --resource ");
    azCommand.append(resource);
    String tenant = IdentityUtil.resolveTenantId(tenantId, request, options);
    ValidationUtil.validateTenantIdCharacterRange(tenant, LOGGER);
    if (!CoreUtils.isNullOrEmpty(tenant) && !tenant.equals(IdentityUtil.DEFAULT_TENANT)) {
        azCommand.append(" --tenant ").append(tenant);
    }
    try {
        return getTokenFromAzureCLIAuthentication(azCommand);
    } catch (CredentialUnavailableException e) {
        throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, e);
    } catch (RuntimeException e) {
        throw LOGGER.logExceptionAsError(e);
    }
}
/**
 * Synchronously acquires a token by invoking the Azure Developer CLI
 * ({@code azd auth token}). (The previous javadoc claimed asynchronous/Publisher semantics;
 * this method blocks and returns the token directly.)
 *
 * @param request the details of the token request
 * @return the acquired {@link AccessToken}
 * @throws IllegalArgumentException if the request has no scopes or a scope is invalid
 * @throws CredentialUnavailableException if the Azure Developer CLI is unavailable
 */
public AccessToken authenticateWithAzureDeveloperCli(TokenRequestContext request) {
    StringBuilder azdCommand = new StringBuilder("azd auth token --output json --scope ");
    List<String> scopes = request.getScopes();
    if (scopes.isEmpty()) {
        throw LOGGER.logExceptionAsError(new IllegalArgumentException("Missing scope in request"));
    }
    // Validate every scope before handing it to an external process.
    scopes.forEach(scope -> {
        try {
            ScopeUtil.validateScope(scope);
        } catch (IllegalArgumentException ex) {
            throw LOGGER.logExceptionAsError(ex);
        }
    });
    azdCommand.append(String.join(" --scope ", scopes));
    String tenant = IdentityUtil.resolveTenantId(tenantId, request, options);
    ValidationUtil.validateTenantIdCharacterRange(tenant, LOGGER);
    if (!CoreUtils.isNullOrEmpty(tenant) && !tenant.equals(IdentityUtil.DEFAULT_TENANT)) {
        azdCommand.append(" --tenant-id ").append(tenant);
    }
    try {
        return getTokenFromAzureDeveloperCLIAuthentication(azdCommand);
    } catch (RuntimeException e) {
        throw (e instanceof CredentialUnavailableException
            ? LoggingUtil.logCredentialUnavailableException(LOGGER, options, (CredentialUnavailableException) e)
            : LOGGER.logExceptionAsError(e));
    }
}
/**
 * Synchronously acquires a token with the On-Behalf-Of flow, exchanging the user assertion
 * carried by the request for a downstream token.
 * (The previous javadoc described "Azure PowerShell" authentication — a copy-paste error;
 * this method performs OBO via the confidential client.)
 *
 * @param request the details of the token request
 * @return the acquired {@link AccessToken}
 * @throws ClientAuthenticationException if On-Behalf-Of token acquisition fails
 */
public AccessToken authenticateWithOBO(TokenRequestContext request) {
    ConfidentialClientApplication cc = getConfidentialClientInstance(request).getValue();
    try {
        return new MsalToken(cc.acquireToken(buildOBOFlowParameters(request)).get());
    } catch (Exception e) {
        throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Failed to acquire token with On Behalf Of Authentication.", null, e));
    }
}
/**
 * Exchanges the cached client assertion for an access token.
 *
 * @param request the details of the token request
 * @return the acquired {@link AccessToken}
 * @throws UncheckedIOException if the token exchange fails with an I/O error
 */
public AccessToken authenticateWithExchangeTokenSync(TokenRequestContext request) {
    try {
        return authenticateWithExchangeTokenHelper(request, clientAssertionAccessor.getValue());
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/**
 * Returns an MSAL app token provider that exchanges the configured client assertion for an
 * access token via {@link #authenticateWithExchangeTokenSync}.
 * NOTE(review): presumably wired into the workload-identity confidential client — confirm
 * against the client-construction code.
 */
Function<AppTokenProviderParameters, CompletableFuture<TokenProviderResult>> getWorkloadIdentityTokenProvider() {
    return appTokenProviderParameters -> {
        // Translate MSAL's provider parameters into our TokenRequestContext.
        TokenRequestContext trc = new TokenRequestContext()
            .setScopes(new ArrayList<>(appTokenProviderParameters.scopes))
            .setClaims(appTokenProviderParameters.claims)
            .setTenantId(appTokenProviderParameters.tenantId);
        AccessToken accessToken = authenticateWithExchangeTokenSync(trc);
        Supplier<TokenProviderResult> tokenProviderResultSupplier = () -> {
            TokenProviderResult result = new TokenProviderResult();
            result.setAccessToken(accessToken.getToken());
            result.setTenantId(trc.getTenantId());
            result.setExpiresInSeconds(accessToken.getExpiresAt().toEpochSecond());
            return result;
        };
        // Use the configured executor when present; otherwise the default async pool.
        return options.getExecutorService() != null
            ? CompletableFuture.supplyAsync(tokenProviderResultSupplier, options.getExecutorService())
            : CompletableFuture.supplyAsync(tokenProviderResultSupplier);
    };
}
/**
 * Synchronously acquires a token with the workload identity confidential client.
 *
 * @param request the details of the token request
 * @return the acquired {@link AccessToken}
 * @throws CredentialUnavailableException if token acquisition fails for any reason
 */
public AccessToken authenticateWithWorkloadIdentityConfidentialClient(TokenRequestContext request) {
    ConfidentialClientApplication confidentialClient =
        workloadIdentityConfidentialClientApplicationAccessor.getValue();
    try {
        ClientCredentialParameters.ClientCredentialParametersBuilder builder =
            ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
                .tenant(IdentityUtil
                    .resolveTenantId(tenantId, request, options));
        return new MsalToken(confidentialClient.acquireToken(builder.build()).get());
    } catch (Exception e) {
        // The previous message said "Managed Identity" — copy-pasted from the managed identity
        // variant above; name the credential that actually failed.
        throw new CredentialUnavailableException("Workload Identity authentication is not available.", e);
    }
}
/**
 * Get the configured identity client options.
 *
 * @return the client options.
 */
public IdentityClientOptions getIdentityClientOptions() {
    return options;
}

// NOTE(review): always returns null — presumably this reactive override is unused by the
// synchronous client, which has its own blocking managed-identity methods; confirm no
// caller subscribes to this Mono on the sync path.
@Override
Mono<AccessToken> getTokenFromTargetManagedIdentity(TokenRequestContext tokenRequestContext) {
    return null;
}
} |
Now we need to check for null here too. | private void ensureClusterTableIsUpdated() {
try {
if (0 == engine.get().getStatus(newContext().getCairoSecurityContext(), new Path(), clusterTable.name)) {
}
} catch (Exception e) {
clusterTable.repair(e);
}
} | if (0 == engine.get().getStatus(newContext().getCairoSecurityContext(), new Path(), clusterTable.name)) { | private void ensureClusterTableIsUpdated() {
try {
if (0 == engine().getStatus(newContext().getCairoSecurityContext(), new Path(), clusterTable.name)) {
}
} catch (Exception e) {
clusterTable.repair(e);
}
} | class QuestMetricsDb extends AbstractComponent implements MetricsDb {
private static final Logger log = Logger.getLogger(QuestMetricsDb.class.getName());
private final Table nodeTable;
private final Table clusterTable;
private final Clock clock;
private final String dataDir;
private final AtomicReference<CairoEngine> engine = new AtomicReference<>();
private final ThreadLocal<SqlCompiler> sqlCompiler;
private final AtomicInteger nullRecords = new AtomicInteger();
@Inject
public QuestMetricsDb() {
    this(Defaults.getDefaults().underVespaHome("var/db/vespa/autoscaling"), Clock.systemUTC());
}

/**
 * Creates a metrics db storing QuestDb data under the given directory.
 *
 * @param dataDir directory to store data in; replaced by a local "data" dir when it points
 *                into a non-existent Vespa home (e.g. when running outside an installation)
 * @param clock   clock used to timestamp reads and expire old data (injectable for testing)
 */
public QuestMetricsDb(String dataDir, Clock clock) {
    this.clock = clock;
    if (dataDir.startsWith(Defaults.getDefaults().vespaHome())
        && ! new File(Defaults.getDefaults().vespaHome()).exists())
        dataDir = "data"; // running outside a Vespa installation
    // Hand QuestDb an empty log configuration file through the "out" system property.
    // NOTE(review): presumably this silences QuestDb's default logging — confirm.
    String logConfig = dataDir + "/quest-log.conf";
    IOUtils.createDirectory(logConfig);
    IOUtils.writeFile(new File(logConfig), new byte[0]);
    System.setProperty("out", logConfig);
    this.dataDir = dataDir;
    engine.set(new CairoEngine(new DefaultCairoConfiguration(dataDir)));
    // SqlCompiler is not shared across threads; one instance per thread.
    sqlCompiler = ThreadLocal.withInitial(() -> new SqlCompiler(engine.get()));
    nodeTable = new Table(dataDir, "metrics", clock);
    clusterTable = new Table(dataDir, "clusterMetrics", clock);
    ensureTablesExist();
}
/** Returns the clock this db timestamps and expires data with. */
@Override
public Clock clock() { return clock; }
/**
 * Writes the given node metric snapshots, repairing the table and retrying once if
 * QuestDb reports a corrupted commit offset.
 */
@Override
public void addNodeMetrics(Collection<Pair<String, NodeMetricSnapshot>> snapshots) {
    try {
        addNodeMetricsBody(snapshots);
    }
    catch (CairoException e) {
        // CairoException.getMessage() may be null; guard before matching on it to avoid
        // masking the original failure with an NPE. Other Cairo errors are (deliberately?)
        // swallowed here, matching the previous behavior.
        if (e.getMessage() != null && e.getMessage().contains("Cannot read offset")) {
            nodeTable.repair(e);
            addNodeMetricsBody(snapshots);
        }
    }
}
/**
 * Writes the given node metric snapshots to the node table.
 * Snapshots too old to be written in timestamp order are silently discarded.
 */
private void addNodeMetricsBody(Collection<Pair<String, NodeMetricSnapshot>> snapshots) {
    synchronized (nodeTable.writeLock) {
        try (TableWriter writer = nodeTable.getWriter()) {
            for (var snapshot : snapshots) {
                // Clamp slightly-old timestamps forward and discard older ones,
                // since rows must be appended in non-decreasing timestamp order.
                Optional<Long> atMillis = nodeTable.adjustOrDiscard(snapshot.getSecond().at());
                if (atMillis.isEmpty()) continue;
                TableWriter.Row row = writer.newRow(atMillis.get() * 1000); // ms -> µs (reads divide by 1000 again)
                // Column indices follow the schema in createNodeTable:
                row.putStr(0, snapshot.getFirst()); // hostname
                // column 1 ("at") is the designated timestamp, set by newRow above
                row.putFloat(2, (float) snapshot.getSecond().load().cpu());
                row.putFloat(3, (float) snapshot.getSecond().load().memory());
                row.putFloat(4, (float) snapshot.getSecond().load().disk());
                row.putLong(5, snapshot.getSecond().generation());
                row.putBool(6, snapshot.getSecond().inService());
                row.putBool(7, snapshot.getSecond().stable());
                row.putFloat(8, (float) snapshot.getSecond().queryRate());
                row.append();
            }
            writer.commit();
        }
    }
}
/**
 * Writes the given cluster metric snapshots, repairing the table and retrying once if
 * QuestDb reports a corrupted commit offset.
 */
@Override
public void addClusterMetrics(ApplicationId application, Map<ClusterSpec.Id, ClusterMetricSnapshot> snapshots) {
    try {
        addClusterMetricsBody(application, snapshots);
    }
    catch (CairoException e) {
        // CairoException.getMessage() may be null; guard before matching on it to avoid
        // masking the original failure with an NPE.
        if (e.getMessage() != null && e.getMessage().contains("Cannot read offset")) {
            clusterTable.repair(e);
            addClusterMetricsBody(application, snapshots);
        }
    }
}
/**
 * Writes the given cluster metric snapshots to the cluster table.
 * Snapshots too old to be written in timestamp order are silently discarded.
 */
private void addClusterMetricsBody(ApplicationId applicationId, Map<ClusterSpec.Id, ClusterMetricSnapshot> snapshots) {
    synchronized (clusterTable.writeLock) {
        try (TableWriter writer = clusterTable.getWriter()) {
            for (var snapshot : snapshots.entrySet()) {
                // Clamp slightly-old timestamps forward and discard older ones.
                Optional<Long> atMillis = clusterTable.adjustOrDiscard(snapshot.getValue().at());
                if (atMillis.isEmpty()) continue;
                TableWriter.Row row = writer.newRow(atMillis.get() * 1000); // ms -> µs
                // Column indices follow the schema in createClusterTable:
                row.putStr(0, applicationId.serializedForm());
                row.putStr(1, snapshot.getKey().value());
                // column 2 ("at") is the designated timestamp, set by newRow above
                row.putFloat(3, (float) snapshot.getValue().queryRate());
                row.putFloat(4, (float) snapshot.getValue().writeRate());
                row.append();
            }
            writer.commit();
        }
    }
}
/** Returns one timeseries per host, covering the given period back from now. */
@Override
public List<NodeTimeseries> getNodeTimeseries(Duration period, Set<String> hostnames) {
    try {
        var snapshotsByHost = getNodeSnapshots(clock.instant().minus(period), hostnames, newContext());
        List<NodeTimeseries> timeseries = new ArrayList<>();
        for (var entry : snapshotsByHost.entrySet())
            timeseries.add(new NodeTimeseries(entry.getKey(), entry.getValue()));
        return timeseries;
    }
    catch (SqlException e) {
        throw new IllegalStateException("Could not read node timeseries data in Quest stored in " + dataDir, e);
    }
}
/** Returns the timeseries of the given cluster of the given application. */
@Override
public ClusterTimeseries getClusterTimeseries(ApplicationId applicationId, ClusterSpec.Id clusterId) {
    try {
        return getClusterSnapshots(applicationId, clusterId);
    }
    catch (SqlException e) {
        String message = "Could not read cluster timeseries data in Quest stored in " + dataDir;
        throw new IllegalStateException(message, e);
    }
}
/** Returns how many null records reads have skipped since the last gc() (see getNodeSnapshots). */
public int getNullRecordsCount() { return nullRecords.get(); }

/** Resets the null-record counter and drops old partitions from both tables. */
@Override
public void gc() {
    nullRecords.set(0);
    nodeTable.gc();
    clusterTable.gc();
}

/** Component teardown: closes the underlying QuestDb engine. */
@Override
public void deconstruct() { close(); }
/**
 * Closes the QuestDb engine. Takes both table write locks so no writer can race with the
 * shutdown: addNodeMetricsBody synchronizes on nodeTable.writeLock, which the previous
 * implementation did not take here. Clearing the engine reference makes later use fail
 * fast (see newContext) instead of operating on a closed engine. Idempotent.
 */
@Override
public void close() {
    synchronized (nodeTable.writeLock) {
        synchronized (clusterTable.writeLock) {
            CairoEngine myEngine = engine.getAndSet(null);
            if (myEngine != null) {
                myEngine.close();
            }
        }
    }
}
/** Creates each table if missing, otherwise runs its schema-update/repair path. */
private void ensureTablesExist() {
    if (nodeTable.exists())
        ensureNodeTableIsUpdated();
    else
        createNodeTable();
    if (clusterTable.exists())
        ensureClusterTableIsUpdated();
    else
        createClusterTable();
}

private void ensureNodeTableIsUpdated() {
    try {
        // NOTE(review): the try block is empty, so the catch/repair below is unreachable and
        // no schema migration happens — presumably ensureColumnExists calls were removed or
        // elided; confirm whether this method can be deleted or needs its migrations restored.
    } catch (Exception e) {
        nodeTable.repair(e);
    }
}
/**
 * Creates the node metrics table. The column order here defines the indices used by
 * addNodeMetricsBody and getNodeSnapshots; keep them in sync.
 */
private void createNodeTable() {
    try {
        issue("create table " + nodeTable.name +
              " (hostname string, at timestamp, cpu_util float, mem_total_util float, disk_util float," +
              " application_generation long, inService boolean, stable boolean, queries_rate float)" +
              " timestamp(at)" +
              "PARTITION BY DAY;",
              newContext());
    }
    catch (SqlException e) {
        throw new IllegalStateException("Could not create Quest db table '" + nodeTable.name + "'", e);
    }
}

/**
 * Creates the cluster metrics table. The column order here defines the indices used by
 * addClusterMetricsBody and getClusterSnapshots; keep them in sync.
 */
private void createClusterTable() {
    try {
        issue("create table " + clusterTable.name +
              " (application string, cluster string, at timestamp, queries_rate float, write_rate float)" +
              " timestamp(at)" +
              "PARTITION BY DAY;",
              newContext());
    }
    catch (SqlException e) {
        throw new IllegalStateException("Could not create Quest db table '" + clusterTable.name + "'", e);
    }
}
/**
 * Reads node snapshots between startTime and now, grouped by hostname.
 * Null records are counted in nullRecords and skipped.
 *
 * @param startTime earliest timestamp to include
 * @param hostnames hosts to include; empty means all hosts
 */
private ListMap<String, NodeMetricSnapshot> getNodeSnapshots(Instant startTime,
                                                             Set<String> hostnames,
                                                             SqlExecutionContext context) throws SqlException {
    DateTimeFormatter formatter = DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneId.of("UTC"));
    String from = formatter.format(startTime).substring(0, 19) + ".000000Z";
    String to = formatter.format(clock.instant()).substring(0, 19) + ".000000Z";
    String sql = "select * from " + nodeTable.name + " where at between('" + from + "', '" + to + "');";
    try (RecordCursorFactory factory = issue(sql, context).getRecordCursorFactory()) {
        ListMap<String, NodeMetricSnapshot> snapshots = new ListMap<>();
        try (RecordCursor cursor = factory.getCursor(context)) {
            // The record is a flyweight pointing at the cursor's current row.
            Record record = cursor.getRecord();
            while (cursor.hasNext()) {
                // Count and skip null records rather than failing the whole read.
                if (record == null || record.getStr(0) == null) {
                    nullRecords.incrementAndGet();
                    continue;
                }
                String hostname = record.getStr(0).toString();
                if (hostnames.isEmpty() || hostnames.contains(hostname)) {
                    // Column indices follow the schema in createNodeTable; timestamps are stored in µs.
                    snapshots.put(hostname,
                                  new NodeMetricSnapshot(Instant.ofEpochMilli(record.getTimestamp(1) / 1000),
                                                         new Load(record.getFloat(2),
                                                                  record.getFloat(3),
                                                                  record.getFloat(4)),
                                                         record.getLong(5),
                                                         record.getBool(6),
                                                         record.getBool(7),
                                                         record.getFloat(8)));
                }
            }
        }
        return snapshots;
    }
}
/**
 * Reads all snapshots of the given cluster of the given application.
 * Null records are counted in nullRecords and skipped, consistent with getNodeSnapshots;
 * previously a null application column here caused an NPE on toString().
 */
private ClusterTimeseries getClusterSnapshots(ApplicationId application, ClusterSpec.Id cluster) throws SqlException {
    String sql = "select * from " + clusterTable.name;
    var context = newContext();
    try (RecordCursorFactory factory = issue(sql, context).getRecordCursorFactory()) {
        List<ClusterMetricSnapshot> snapshots = new ArrayList<>();
        try (RecordCursor cursor = factory.getCursor(context)) {
            Record record = cursor.getRecord();
            while (cursor.hasNext()) {
                if (record == null || record.getStr(0) == null) {
                    nullRecords.incrementAndGet();
                    continue;
                }
                String applicationIdString = record.getStr(0).toString();
                if ( ! application.serializedForm().equals(applicationIdString)) continue;
                String clusterId = record.getStr(1).toString();
                if (cluster.value().equals(clusterId)) {
                    // Column indices follow the schema in createClusterTable; timestamps are stored in µs.
                    snapshots.add(new ClusterMetricSnapshot(Instant.ofEpochMilli(record.getTimestamp(2) / 1000),
                                                            record.getFloat(3),
                                                            record.getFloat(4)));
                }
            }
        }
        return new ClusterTimeseries(cluster, snapshots);
    }
}
/** Issues an SQL statement against the QuestDb engine, using this thread's SqlCompiler. */
private CompiledQuery issue(String sql, SqlExecutionContext context) throws SqlException {
    return sqlCompiler.get().compile(sql, context);
}
/**
 * Creates a fresh single-worker execution context.
 * close() clears the engine reference (engine.getAndSet(null)), so guard against a null
 * engine here and fail with a clear message instead of an NPE deep inside QuestDb.
 */
private SqlExecutionContext newContext() {
    CairoEngine currentEngine = engine.get();
    if (currentEngine == null)
        throw new IllegalStateException("Attempted to access QuestDb after calling close");
    return new SqlExecutionContextImpl(currentEngine, 1);
}
/** A QuestDb table, rooted in its own subdirectory of the data dir. */
private class Table {

    private final Object writeLock = new Object();
    private final String name;
    private final Clock clock;
    private final File dir;

    // Highest timestamp (ms) written so far; rows must be appended in non-decreasing order.
    private long highestTimestampAdded = 0;

    Table(String dataDir, String name, Clock clock) {
        this.name = name;
        this.clock = clock;
        this.dir = new File(dataDir, name);
        IOUtils.createDirectory(dir.getPath());
        // Remove any transaction scoreboard left by a previous run.
        // NOTE(review): presumably works around a QuestDb lock/restart issue — confirm.
        new File(dir + "/_txn_scoreboard").delete();
    }

    /** Returns whether this table exists in the engine. NOTE(review): NPEs if called after close(). */
    boolean exists() {
        return 0 == engine.get().getStatus(newContext().getCairoSecurityContext(), new Path(), name);
    }

    /** Returns a writer for this table; callers must hold writeLock and close the writer. */
    TableWriter getWriter() {
        return engine.get().getWriter(newContext().getCairoSecurityContext(), name);
    }

    /** Drops day partitions older than 4 days, unless that would drop every partition. */
    void gc() {
        synchronized (writeLock) {
            Instant oldestToKeep = clock.instant().minus(Duration.ofDays(4));
            SqlExecutionContext context = newContext();
            int partitions = 0;
            try {
                String[] dirEntries = dir.list();
                if (dirEntries == null) return; // dir is gone or unreadable — nothing to gc
                // Hoisted out of the loop: the formatter is loop-invariant.
                DateTimeFormatter formatter = DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneId.of("UTC"));
                List<String> removeList = new ArrayList<>();
                for (String dirEntry : dirEntries) {
                    File partitionDir = new File(dir, dirEntry);
                    if (!partitionDir.isDirectory()) continue;
                    if (dirEntry.length() < 10) continue; // not a yyyy-MM-dd partition dir; avoid substring/parse errors
                    partitions++;
                    Instant partitionDay = Instant.from(formatter.parse(dirEntry.substring(0, 10) + "T00:00:00"));
                    if (partitionDay.isBefore(oldestToKeep))
                        removeList.add(dirEntry);
                }
                // Only drop when at least one partition remains.
                // NOTE(review): presumably dropping all partitions is disallowed or undesirable — confirm.
                if (removeList.size() < partitions && !removeList.isEmpty()) {
                    issue("alter table " + name + " drop partition list " +
                          removeList.stream().map(dir -> "'" + dir + "'").collect(Collectors.joining(",")),
                          context);
                }
            } catch (SqlException e) {
                log.log(Level.WARNING, "Failed to gc old metrics data in " + dir + " table " + name, e);
            }
        }
    }

    /**
     * Repairs this db on corruption.
     *
     * @param e the exception indicating corruption
     */
    private void repair(Exception e) {
        log.log(Level.WARNING, "QuestDb seems corrupted, wiping data and starting over", e);
        IOUtils.recursiveDeleteDir(dir);
        IOUtils.createDirectory(dir.getPath());
        ensureTablesExist();
    }

    /** Adds the given column to this table unless it is already present. */
    void ensureColumnExists(String column, String columnType) throws SqlException {
        if (columnNames().contains(column)) return;
        issue("alter table " + name + " add column " + column + " " + columnType, newContext());
    }

    /**
     * Returns the (ms) timestamp to write the given instant at, or empty to discard it.
     * Timestamps must be non-decreasing: instants up to one minute older than the highest
     * timestamp written are clamped to it; anything older is discarded.
     */
    private Optional<Long> adjustOrDiscard(Instant at) {
        long timestamp = at.toEpochMilli();
        if (timestamp >= highestTimestampAdded) {
            highestTimestampAdded = timestamp;
            return Optional.of(timestamp);
        }
        if (timestamp >= highestTimestampAdded - 60 * 1000) return Optional.of(highestTimestampAdded);
        return Optional.empty();
    }

    /** Returns the names of the columns of this table. */
    private List<String> columnNames() throws SqlException {
        var context = newContext();
        List<String> columns = new ArrayList<>();
        try (RecordCursorFactory factory = issue("show columns from " + name, context).getRecordCursorFactory()) {
            try (RecordCursor cursor = factory.getCursor(context)) {
                Record record = cursor.getRecord();
                while (cursor.hasNext()) {
                    columns.add(record.getStr(0).toString());
                }
            }
        }
        return columns;
    }

}
} | class QuestMetricsDb extends AbstractComponent implements MetricsDb {
private static final Logger log = Logger.getLogger(QuestMetricsDb.class.getName());
private final Table nodeTable;
private final Table clusterTable;
private final Clock clock;
private final String dataDir;
private final CairoEngine engine;
private final ConcurrentResourcePool<SqlCompiler> sqlCompilerPool;
private final AtomicBoolean closed = new AtomicBoolean(false);
@Inject
public QuestMetricsDb() {
this(Defaults.getDefaults().underVespaHome("var/db/vespa/autoscaling"), Clock.systemUTC());
}
public QuestMetricsDb(String dataDir, Clock clock) {
this.clock = clock;
if (dataDir.startsWith(Defaults.getDefaults().vespaHome())
&& ! new File(Defaults.getDefaults().vespaHome()).exists())
dataDir = "data";
String logConfig = dataDir + "/quest-log.conf";
IOUtils.createDirectory(logConfig);
IOUtils.writeFile(new File(logConfig), new byte[0]);
System.setProperty("out", logConfig);
this.dataDir = dataDir;
engine = new CairoEngine(new DefaultCairoConfiguration(dataDir));
sqlCompilerPool = new ConcurrentResourcePool<>(() -> new SqlCompiler(engine()));
nodeTable = new Table(dataDir, "metrics", clock);
clusterTable = new Table(dataDir, "clusterMetrics", clock);
ensureTablesExist();
}
private CairoEngine engine() {
if (closed.get())
throw new IllegalStateException("Attempted to access QuestDb after calling close");
return engine;
}
@Override
public Clock clock() { return clock; }
@Override
public void addNodeMetrics(Collection<Pair<String, NodeMetricSnapshot>> snapshots) {
try {
addNodeMetricsBody(snapshots);
}
catch (CairoException e) {
if (e.getMessage().contains("Cannot read offset")) {
nodeTable.repair(e);
addNodeMetricsBody(snapshots);
}
}
}
private void addNodeMetricsBody(Collection<Pair<String, NodeMetricSnapshot>> snapshots) {
synchronized (nodeTable.writeLock) {
try (TableWriter writer = nodeTable.getWriter()) {
for (var snapshot : snapshots) {
Optional<Long> atMillis = nodeTable.adjustOrDiscard(snapshot.getSecond().at());
if (atMillis.isEmpty()) continue;
TableWriter.Row row = writer.newRow(atMillis.get() * 1000);
row.putStr(0, snapshot.getFirst());
row.putFloat(2, (float) snapshot.getSecond().load().cpu());
row.putFloat(3, (float) snapshot.getSecond().load().memory());
row.putFloat(4, (float) snapshot.getSecond().load().disk());
row.putLong(5, snapshot.getSecond().generation());
row.putBool(6, snapshot.getSecond().inService());
row.putBool(7, snapshot.getSecond().stable());
row.putFloat(8, (float) snapshot.getSecond().queryRate());
row.append();
}
writer.commit();
}
}
}
@Override
public void addClusterMetrics(ApplicationId application, Map<ClusterSpec.Id, ClusterMetricSnapshot> snapshots) {
try {
addClusterMetricsBody(application, snapshots);
}
catch (CairoException e) {
if (e.getMessage().contains("Cannot read offset")) {
clusterTable.repair(e);
addClusterMetricsBody(application, snapshots);
}
}
}
private void addClusterMetricsBody(ApplicationId applicationId, Map<ClusterSpec.Id, ClusterMetricSnapshot> snapshots) {
synchronized (clusterTable.writeLock) {
try (TableWriter writer = clusterTable.getWriter()) {
for (var snapshot : snapshots.entrySet()) {
Optional<Long> atMillis = clusterTable.adjustOrDiscard(snapshot.getValue().at());
if (atMillis.isEmpty()) continue;
TableWriter.Row row = writer.newRow(atMillis.get() * 1000);
row.putStr(0, applicationId.serializedForm());
row.putStr(1, snapshot.getKey().value());
row.putFloat(3, (float) snapshot.getValue().queryRate());
row.putFloat(4, (float) snapshot.getValue().writeRate());
row.append();
}
writer.commit();
}
}
}
@Override
public List<NodeTimeseries> getNodeTimeseries(Duration period, Set<String> hostnames) {
try {
var snapshots = getNodeSnapshots(clock.instant().minus(period), hostnames, newContext());
return snapshots.entrySet().stream()
.map(entry -> new NodeTimeseries(entry.getKey(), entry.getValue()))
.collect(Collectors.toList());
}
catch (SqlException e) {
throw new IllegalStateException("Could not read node timeseries data in Quest stored in " + dataDir, e);
}
}
@Override
public ClusterTimeseries getClusterTimeseries(ApplicationId applicationId, ClusterSpec.Id clusterId) {
try {
return getClusterSnapshots(applicationId, clusterId);
}
catch (SqlException e) {
throw new IllegalStateException("Could not read cluster timeseries data in Quest stored in " + dataDir, e);
}
}
@Override
public void gc() {
nodeTable.gc();
clusterTable.gc();
}
@Override
public void deconstruct() { close(); }
@Override
public void close() {
if (closed.getAndSet(true)) return;
synchronized (nodeTable.writeLock) {
synchronized (clusterTable.writeLock) {
for (SqlCompiler sqlCompiler : sqlCompilerPool)
sqlCompiler.close();
engine.close();
}
}
}
private void ensureTablesExist() {
if (nodeTable.exists())
ensureNodeTableIsUpdated();
else
createNodeTable();
if (clusterTable.exists())
ensureClusterTableIsUpdated();
else
createClusterTable();
}
private void ensureNodeTableIsUpdated() {
try {
} catch (Exception e) {
nodeTable.repair(e);
}
}
private void createNodeTable() {
try {
issue("create table " + nodeTable.name +
" (hostname string, at timestamp, cpu_util float, mem_total_util float, disk_util float," +
" application_generation long, inService boolean, stable boolean, queries_rate float)" +
" timestamp(at)" +
"PARTITION BY DAY;",
newContext());
}
catch (SqlException e) {
throw new IllegalStateException("Could not create Quest db table '" + nodeTable.name + "'", e);
}
}
private void createClusterTable() {
try {
issue("create table " + clusterTable.name +
" (application string, cluster string, at timestamp, queries_rate float, write_rate float)" +
" timestamp(at)" +
"PARTITION BY DAY;",
newContext());
}
catch (SqlException e) {
throw new IllegalStateException("Could not create Quest db table '" + clusterTable.name + "'", e);
}
}
private ListMap<String, NodeMetricSnapshot> getNodeSnapshots(Instant startTime,
Set<String> hostnames,
SqlExecutionContext context) throws SqlException {
DateTimeFormatter formatter = DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneId.of("UTC"));
String from = formatter.format(startTime).substring(0, 19) + ".000000Z";
String to = formatter.format(clock.instant()).substring(0, 19) + ".000000Z";
String sql = "select * from " + nodeTable.name + " where at between('" + from + "', '" + to + "');";
try (RecordCursorFactory factory = issue(sql, context).getRecordCursorFactory()) {
ListMap<String, NodeMetricSnapshot> snapshots = new ListMap<>();
try (RecordCursor cursor = factory.getCursor(context)) {
Record record = cursor.getRecord();
while (cursor.hasNext()) {
String hostname = record.getStr(0).toString();
if (hostnames.isEmpty() || hostnames.contains(hostname)) {
snapshots.put(hostname,
new NodeMetricSnapshot(Instant.ofEpochMilli(record.getTimestamp(1) / 1000),
new Load(record.getFloat(2),
record.getFloat(3),
record.getFloat(4)),
record.getLong(5),
record.getBool(6),
record.getBool(7),
record.getFloat(8)));
}
}
}
return snapshots;
}
}
/**
 * Builds the metric timeseries for one cluster of one application by scanning
 * every row of the cluster table and keeping only the matching rows.
 *
 * @param application the application whose rows to keep
 * @param cluster the cluster id whose rows to keep
 * @return a timeseries of the matching snapshots, in cursor order
 * @throws SqlException if the table scan fails
 */
private ClusterTimeseries getClusterSnapshots(ApplicationId application, ClusterSpec.Id cluster) throws SqlException {
String query = "select * from " + clusterTable.name;
var executionContext = newContext();
try (RecordCursorFactory cursorFactory = issue(query, executionContext).getRecordCursorFactory()) {
List<ClusterMetricSnapshot> matched = new ArrayList<>();
try (RecordCursor cursor = cursorFactory.getCursor(executionContext)) {
Record row = cursor.getRecord(); // obtained once; re-read for each row the cursor advances to
while (cursor.hasNext()) {
// Short-circuit: the cluster column is only read when the application matches.
boolean sameApplication = application.serializedForm().equals(row.getStr(0).toString());
if (sameApplication && cluster.value().equals(row.getStr(1).toString())) {
matched.add(new ClusterMetricSnapshot(Instant.ofEpochMilli(row.getTimestamp(2) / 1000),
row.getFloat(3),
row.getFloat(4)));
}
}
}
return new ClusterTimeseries(cluster, matched);
}
}
/**
 * Issues an SQL statement against the QuestDb engine, borrowing a compiler
 * from the pool for the duration of the call.
 */
private CompiledQuery issue(String sql, SqlExecutionContext context) throws SqlException {
SqlCompiler compiler = sqlCompilerPool.alloc();
try {
return compiler.compile(sql, context);
}
finally {
// Always hand the compiler back to the pool, even when compile(...) throws.
sqlCompilerPool.free(compiler);
}
}
/** Creates a fresh execution context for issuing statements against the engine. */
private SqlExecutionContext newContext() {
SqlExecutionContext context = new SqlExecutionContextImpl(engine(), 1);
return context;
}
/**
 * A QuestDb table: wraps the on-disk table directory and the engine handles for it,
 * and guards writes and garbage collection with a single write lock.
 */
private class Table {
private final Object writeLock = new Object();
private final String name;
private final Clock clock;
private final File dir;
// Highest timestamp accepted so far; used by adjustOrDiscard to keep inserts monotonic.
private long highestTimestampAdded = 0;
Table(String dataDir, String name, Clock clock) {
this.name = name;
this.clock = clock;
this.dir = new File(dataDir, name);
IOUtils.createDirectory(dir.getPath());
// NOTE(review): deleting a possibly stale _txn_scoreboard file on startup — presumably a leftover-lock workaround; confirm
new File(dir + "/_txn_scoreboard").delete();
}
// NOTE(review): status 0 presumably means "table exists" in the engine API — confirm
boolean exists() {
return 0 == engine().getStatus(newContext().getCairoSecurityContext(), new Path(), name);
}
TableWriter getWriter() {
return engine().getWriter(newContext().getCairoSecurityContext(), name);
}
// Drops day partitions older than 4 days, but never all of them at once.
void gc() {
synchronized (writeLock) {
Instant oldestToKeep = clock.instant().minus(Duration.ofDays(4));
SqlExecutionContext context = newContext();
int partitions = 0;
try {
List<String> removeList = new ArrayList<>();
for (String dirEntry : dir.list()) {
File partitionDir = new File(dir, dirEntry);
if (!partitionDir.isDirectory()) continue;
partitions++;
// Partition directory names start with the day, e.g. 'yyyy-MM-dd...'.
DateTimeFormatter formatter = DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneId.of("UTC"));
Instant partitionDay = Instant.from(formatter.parse(dirEntry.substring(0, 10) + "T00:00:00"));
if (partitionDay.isBefore(oldestToKeep))
removeList.add(dirEntry);
}
// Only drop when something remains afterwards (removeList.size() < partitions) and there is something to drop.
// Note: the lambda parameter below shadows the outer 'dir' field.
if (removeList.size() < partitions && !removeList.isEmpty()) {
issue("alter table " + name + " drop partition list " +
removeList.stream().map(dir -> "'" + dir + "'").collect(Collectors.joining(",")),
context);
}
} catch (SqlException e) {
// Best effort: gc failure is logged, not propagated.
log.log(Level.WARNING, "Failed to gc old metrics data in " + dir + " table " + name, e);
}
}
}
/**
 * Repairs this db on corruption by wiping the table directory and recreating the tables.
 *
 * @param e the exception indicating corruption
 */
private void repair(Exception e) {
log.log(Level.WARNING, "QuestDb seems corrupted, wiping data and starting over", e);
IOUtils.recursiveDeleteDir(dir);
IOUtils.createDirectory(dir.getPath());
ensureTablesExist();
}
// Adds the column if it is not already present; no-op otherwise.
void ensureColumnExists(String column, String columnType) throws SqlException {
if (columnNames().contains(column)) return;
issue("alter table " + name + " add column " + column + " " + columnType, newContext());
}
// Keeps inserted timestamps monotonically non-decreasing:
// newer-or-equal timestamps advance the high-water mark; timestamps up to one
// minute behind it are clamped to the mark; anything older is discarded.
private Optional<Long> adjustOrDiscard(Instant at) {
long timestamp = at.toEpochMilli();
if (timestamp >= highestTimestampAdded) {
highestTimestampAdded = timestamp;
return Optional.of(timestamp);
}
if (timestamp >= highestTimestampAdded - 60 * 1000) return Optional.of(highestTimestampAdded);
return Optional.empty();
}
// Returns the column names of this table by issuing 'show columns'.
private List<String> columnNames() throws SqlException {
var context = newContext();
List<String> columns = new ArrayList<>();
try (RecordCursorFactory factory = issue("show columns from " + name, context).getRecordCursorFactory()) {
try (RecordCursor cursor = factory.getCursor(context)) {
Record record = cursor.getRecord();
while (cursor.hasNext()) {
columns.add(record.getStr(0).toString());
}
}
}
return columns;
}
}
} |
Yes, `Mono.just` would work but `Mono.fromCallable` further defers the creation of `AccessToken`, so if the reactive stream is never subscribed the `AccessToken` is never instantiated. | public Mono<AccessToken> getToken(TokenRequestContext request) {
return Mono.fromCallable(() -> new AccessToken(encodedCredential, OffsetDateTime.MAX));
} | return Mono.fromCallable(() -> new AccessToken(encodedCredential, OffsetDateTime.MAX)); | public Mono<AccessToken> getToken(TokenRequestContext request) {
return Mono.fromCallable(() -> new AccessToken(encodedCredential, OffsetDateTime.MAX));
} | class BasicAuthenticationCredential implements TokenCredential {
/**
* Base64 encoded username-password credential.
*/
private final String encodedCredential;
/**
* Creates a basic authentication credential.
*
* @param username basic auth user name
* @param password basic auth password
*/
public BasicAuthenticationCredential(String username, String password) {
String credential = username + ":" + password;
// Pre-compute Base64("username:password") once; the final field is reused for every token request.
this.encodedCredential = Base64Util.encodeToString(credential.getBytes(StandardCharsets.UTF_8));
}
/**
* @throws RuntimeException If the UTF-8 encoding isn't supported.
*/
@Override
} | class BasicAuthenticationCredential implements TokenCredential {
/**
* Base64 encoded username-password credential.
*/
private final String encodedCredential;
/**
* Creates a basic authentication credential.
*
* @param username basic auth user name
* @param password basic auth password
*/
public BasicAuthenticationCredential(String username, String password) {
String credential = username + ":" + password;
this.encodedCredential = Base64Util.encodeToString(credential.getBytes(StandardCharsets.UTF_8));
}
/**
* @throws RuntimeException If the UTF-8 encoding isn't supported.
*/
@Override
} |
Both surrogate pairs have their sign bits at 1 so they can't be mistaken for the ascii we're replacing, so we're good. | private String doEscape(CharSequence value, int index, StringBuilder builder) {
int length = value.length();
while (++index < length) {
char c = value.charAt(index);
String replacement = replacements.get(c);
if (replacement != null) {
builder.append(replacement);
} else {
builder.append(c);
}
}
return builder.toString();
} | char c = value.charAt(index); | private String doEscape(CharSequence value, int index, StringBuilder builder) {
int length = value.length();
while (++index < length) {
char c = value.charAt(index);
String replacement = replacements.get(c);
if (replacement != null) {
builder.append(replacement);
} else {
builder.append(c);
}
}
return builder.toString();
} | class Escaper {
private final Map<Character, String> replacements;
/**
*
* @param replacements
*/
private Escaper(Map<Character, String> replacements) {
// Defensive copy so later mutation of the builder's map cannot leak into this instance.
this.replacements = replacements.isEmpty() ? Collections.emptyMap()
: new HashMap<>(replacements);
}
/**
*
* @param value
* @return an escaped value
*/
/**
 * Returns the value with all configured replacement characters substituted.
 * Scans for the first character that needs replacing; until one is found no
 * intermediate buffer is allocated, and the original text is returned as-is
 * when nothing needs escaping.
 *
 * @param value the text to escape; must not be null
 * @return the escaped text
 */
public String escape(CharSequence value) {
Objects.requireNonNull(value);
if (value.length() == 0) {
return value.toString();
}
for (int idx = 0; idx < value.length(); idx++) {
String mapped = replacements.get(value.charAt(idx));
if (mapped != null) {
// First hit: copy the clean prefix, append the replacement, and let doEscape finish the rest.
StringBuilder prefix = new StringBuilder(value.subSequence(0, idx)).append(mapped);
return doEscape(value, idx, prefix);
}
}
return value.toString();
}
/**
*
* @return a new builder instance
*/
public static Builder builder() {
// Builder's constructor is private; this factory is the only way to obtain one.
return new Builder();
}
/** Accumulates character-to-string replacements and builds an immutable {@link Escaper}. */
public static class Builder {
private final Map<Character, String> replacements;
private Builder() {
this.replacements = new HashMap<>();
}
/**
 * Registers a replacement for the given character; a later call with the same
 * character overwrites the earlier one.
 */
public Builder add(char c, String replacement) {
replacements.put(c, replacement);
return this;
}
// The Escaper constructor copies the map, so this builder may be reused afterwards.
public Escaper build() {
return new Escaper(replacements);
}
}
} | class Escaper {
private final Map<Character, String> replacements;
/**
*
* @param replacements
*/
private Escaper(Map<Character, String> replacements) {
this.replacements = replacements.isEmpty() ? Collections.emptyMap()
: new HashMap<>(replacements);
}
/**
*
* @param value
* @return an escaped value
*/
public String escape(CharSequence value) {
Objects.requireNonNull(value);
if (value.length() == 0) {
return value.toString();
}
for (int i = 0; i < value.length(); i++) {
String replacement = replacements.get(value.charAt(i));
if (replacement != null) {
return doEscape(value, i, new StringBuilder(value.subSequence(0, i)).append(replacement));
}
}
return value.toString();
}
/**
*
* @return a new builder instance
*/
public static Builder builder() {
return new Builder();
}
public static class Builder {
private final Map<Character, String> replacements;
private Builder() {
this.replacements = new HashMap<>();
}
public Builder add(char c, String replacement) {
replacements.put(c, replacement);
return this;
}
public Escaper build() {
return new Escaper(replacements);
}
}
} |
In the more general context, "this" in the presence of lambdas is tricky and could lead to bugs. I've probably had such case here as well before settling to the final code. | public Mono<Void> processChanges(ChangeFeedObserverContext context, List<CosmosItemProperties> docs) {
AutoCheckpointer self = this;
return self.observer.processChanges(context, docs)
.then(self.afterProcessChanges(context));
} | AutoCheckpointer self = this; | public Mono<Void> processChanges(ChangeFeedObserverContext context, List<CosmosItemProperties> docs) {
return this.observer.processChanges(context, docs)
.then(this.afterProcessChanges(context));
} | class AutoCheckpointer implements ChangeFeedObserver {
private final CheckpointFrequency checkpointFrequency;
private final ChangeFeedObserver observer;
private int processedDocCount;
private ZonedDateTime lastCheckpointTime;
/**
 * Wraps the given observer, checkpointing automatically per the given frequency.
 *
 * @param checkpointFrequency how often to checkpoint; must not be null
 * @param observer the observer to delegate change processing to; must not be null
 * @throws IllegalArgumentException if either argument is null
 */
public AutoCheckpointer(CheckpointFrequency checkpointFrequency, ChangeFeedObserver observer) {
// Braced guard clauses (instead of single-line ifs) per standard Java style.
if (checkpointFrequency == null) {
throw new IllegalArgumentException("checkpointFrequency");
}
if (observer == null) {
throw new IllegalArgumentException("observer");
}
this.checkpointFrequency = checkpointFrequency;
this.observer = observer;
this.lastCheckpointTime = ZonedDateTime.now(ZoneId.of("UTC"));
}
@Override
public void open(ChangeFeedObserverContext context) {
this.observer.open(context);
}
@Override
public void close(ChangeFeedObserverContext context, ChangeFeedObserverCloseReason reason) {
this.observer.close(context, reason);
}
@Override
/**
 * Bumps the processed-document counter and, when a checkpoint is due, checkpoints
 * and resets the counter and timer on success.
 *
 * Note: unlike anonymous inner classes, {@code this} inside a lambda refers to the
 * enclosing instance, so no {@code self = this} alias is needed.
 */
private Mono<Void> afterProcessChanges(ChangeFeedObserverContext context) {
this.processedDocCount++;
if (this.isCheckpointNeeded()) {
return context.checkpoint()
.doOnSuccess((Void) -> {
// Reset counters only after the checkpoint actually succeeded.
this.processedDocCount = 0;
this.lastCheckpointTime = ZonedDateTime.now(ZoneId.of("UTC"));
});
}
return Mono.empty();
}
/**
 * Decides whether a checkpoint is due: always when no frequency is configured,
 * when enough documents have been processed, or when enough time has elapsed
 * since the last checkpoint.
 */
private boolean isCheckpointNeeded() {
// No configured thresholds at all means checkpoint on every batch.
if (this.checkpointFrequency.getProcessedDocumentCount() == 0 && this.checkpointFrequency.getTimeInterval() == null) {
return true;
}
if (this.processedDocCount >= this.checkpointFrequency.getProcessedDocumentCount()) {
return true;
}
Duration delta = Duration.between(this.lastCheckpointTime, ZonedDateTime.now(ZoneId.of("UTC")));
// Idiom: return the comparison directly instead of if (...) return true; return false;
return delta.compareTo(this.checkpointFrequency.getTimeInterval()) >= 0;
}
} | class AutoCheckpointer implements ChangeFeedObserver {
private final CheckpointFrequency checkpointFrequency;
private final ChangeFeedObserver observer;
private volatile int processedDocCount;
private volatile ZonedDateTime lastCheckpointTime;
public AutoCheckpointer(CheckpointFrequency checkpointFrequency, ChangeFeedObserver observer) {
if (checkpointFrequency == null) {
throw new IllegalArgumentException("checkpointFrequency");
}
if (observer == null) {
throw new IllegalArgumentException("observer");
}
this.checkpointFrequency = checkpointFrequency;
this.observer = observer;
this.lastCheckpointTime = ZonedDateTime.now(ZoneId.of("UTC"));
}
@Override
public void open(ChangeFeedObserverContext context) {
this.observer.open(context);
}
@Override
public void close(ChangeFeedObserverContext context, ChangeFeedObserverCloseReason reason) {
this.observer.close(context, reason);
}
@Override
private Mono<Void> afterProcessChanges(ChangeFeedObserverContext context) {
this.processedDocCount ++;
if (this.isCheckpointNeeded()) {
return context.checkpoint()
.doOnSuccess((Void) -> {
this.processedDocCount = 0;
this.lastCheckpointTime = ZonedDateTime.now(ZoneId.of("UTC"));
});
}
return Mono.empty();
}
private boolean isCheckpointNeeded() {
if (this.checkpointFrequency.getProcessedDocumentCount() == 0 && this.checkpointFrequency.getTimeInterval() == null) {
return true;
}
if (this.processedDocCount >= this.checkpointFrequency.getProcessedDocumentCount()) {
return true;
}
Duration delta = Duration.between(this.lastCheckpointTime, ZonedDateTime.now(ZoneId.of("UTC")));
return delta.compareTo(this.checkpointFrequency.getTimeInterval()) >= 0;
}
} |
I didn't add a unit test as after the other fixes in master (#14052) this change is not strictly necessary (though I think it's still less error-prone to not update SQN unnecessarily). | public void onBuffer(Buffer buffer, int sequenceNumber, int backlog) throws IOException {
boolean recycleBuffer = true;
try {
if (expectedSequenceNumber != sequenceNumber) {
onError(new BufferReorderingException(expectedSequenceNumber, sequenceNumber));
return;
}
final boolean wasEmpty;
boolean firstPriorityEvent = false;
synchronized (receivedBuffers) {
if (isReleased.get()) {
return;
}
wasEmpty = receivedBuffers.isEmpty();
SequenceBuffer sequenceBuffer = new SequenceBuffer(buffer, sequenceNumber);
DataType dataType = buffer.getDataType();
if (dataType.hasPriority()) {
firstPriorityEvent = addPriorityBuffer(sequenceBuffer);
}
else {
receivedBuffers.add(sequenceBuffer);
if (dataType.requiresAnnouncement()) {
firstPriorityEvent = addPriorityBuffer(announce(sequenceBuffer));
}
}
channelStatePersister.checkForBarrier(sequenceBuffer.buffer).ifPresent(id -> {
lastBarrierSequenceNumber = sequenceBuffer.sequenceNumber;
lastBarrierId = id;
});
channelStatePersister.maybePersist(buffer);
++expectedSequenceNumber;
}
recycleBuffer = false;
if (firstPriorityEvent) {
notifyPriorityEvent(sequenceNumber);
}
if (wasEmpty) {
notifyChannelNonEmpty();
}
if (backlog >= 0) {
onSenderBacklog(backlog);
}
} finally {
if (recycleBuffer) {
buffer.recycleBuffer();
}
}
} | recycleBuffer = false; | public void onBuffer(Buffer buffer, int sequenceNumber, int backlog) throws IOException {
boolean recycleBuffer = true;
try {
if (expectedSequenceNumber != sequenceNumber) {
onError(new BufferReorderingException(expectedSequenceNumber, sequenceNumber));
return;
}
final boolean wasEmpty;
boolean firstPriorityEvent = false;
synchronized (receivedBuffers) {
if (isReleased.get()) {
return;
}
wasEmpty = receivedBuffers.isEmpty();
SequenceBuffer sequenceBuffer = new SequenceBuffer(buffer, sequenceNumber);
DataType dataType = buffer.getDataType();
if (dataType.hasPriority()) {
firstPriorityEvent = addPriorityBuffer(sequenceBuffer);
}
else {
receivedBuffers.add(sequenceBuffer);
if (dataType.requiresAnnouncement()) {
firstPriorityEvent = addPriorityBuffer(announce(sequenceBuffer));
}
}
channelStatePersister
.checkForBarrier(sequenceBuffer.buffer)
.filter(id -> id > lastBarrierId)
.ifPresent(id -> {
lastBarrierId = id;
lastBarrierSequenceNumber = sequenceBuffer.sequenceNumber;
});
channelStatePersister.maybePersist(buffer);
++expectedSequenceNumber;
}
recycleBuffer = false;
if (firstPriorityEvent) {
notifyPriorityEvent(sequenceNumber);
}
if (wasEmpty) {
notifyChannelNonEmpty();
}
if (backlog >= 0) {
onSenderBacklog(backlog);
}
} finally {
if (recycleBuffer) {
buffer.recycleBuffer();
}
}
} | class RemoteInputChannel extends InputChannel {
private static final int NONE = -1;
/** ID to distinguish this channel from other channels sharing the same TCP connection. */
private final InputChannelID id = new InputChannelID();
/** The connection to use to request the remote partition. */
private final ConnectionID connectionId;
/** The connection manager to use connect to the remote partition provider. */
private final ConnectionManager connectionManager;
/**
* The received buffers. Received buffers are enqueued by the network I/O thread and the queue
* is consumed by the receiving task thread.
*/
private final PrioritizedDeque<SequenceBuffer> receivedBuffers = new PrioritizedDeque<>();
/**
* Flag indicating whether this channel has been released. Either called by the receiving task
* thread or the task manager actor.
*/
private final AtomicBoolean isReleased = new AtomicBoolean();
/** Client to establish a (possibly shared) TCP connection and request the partition. */
private volatile PartitionRequestClient partitionRequestClient;
/**
* The next expected sequence number for the next buffer.
*/
private int expectedSequenceNumber = 0;
/** The initial number of exclusive buffers assigned to this channel. */
private final int initialCredit;
/** The number of available buffers that have not been announced to the producer yet. */
private final AtomicInteger unannouncedCredit = new AtomicInteger(0);
private final BufferManager bufferManager;
@GuardedBy("receivedBuffers")
private int lastBarrierSequenceNumber = NONE;
@GuardedBy("receivedBuffers")
private long lastBarrierId = NONE;
private final ChannelStatePersister channelStatePersister;
public RemoteInputChannel(
SingleInputGate inputGate,
int channelIndex,
ResultPartitionID partitionId,
ConnectionID connectionId,
ConnectionManager connectionManager,
int initialBackOff,
int maxBackoff,
int networkBuffersPerChannel,
Counter numBytesIn,
Counter numBuffersIn,
ChannelStateWriter stateWriter) {
super(inputGate, channelIndex, partitionId, initialBackOff, maxBackoff, numBytesIn, numBuffersIn);
this.initialCredit = networkBuffersPerChannel;
this.connectionId = checkNotNull(connectionId);
this.connectionManager = checkNotNull(connectionManager);
this.bufferManager = new BufferManager(inputGate.getMemorySegmentProvider(), this, 0);
this.channelStatePersister = new ChannelStatePersister(stateWriter, getChannelInfo());
}
@VisibleForTesting
void setExpectedSequenceNumber(int expectedSequenceNumber) {
this.expectedSequenceNumber = expectedSequenceNumber;
}
/**
* Setup includes assigning exclusive buffers to this input channel, and this method should be called only once
* after this input channel is created.
*/
@Override
void setup() throws IOException {
checkState(bufferManager.unsynchronizedGetAvailableExclusiveBuffers() == 0,
"Bug in input channel setup logic: exclusive buffers have already been set for this input channel.");
bufferManager.requestExclusiveBuffers(initialCredit);
}
/**
* Requests a remote subpartition.
*/
@VisibleForTesting
@Override
public void requestSubpartition(int subpartitionIndex) throws IOException, InterruptedException {
if (partitionRequestClient == null) {
try {
partitionRequestClient = connectionManager.createPartitionRequestClient(connectionId);
} catch (IOException e) {
throw new PartitionConnectionException(partitionId, e);
}
partitionRequestClient.requestSubpartition(partitionId, subpartitionIndex, this, 0);
}
}
/**
* Retriggers a remote subpartition request.
*/
void retriggerSubpartitionRequest(int subpartitionIndex) throws IOException {
checkPartitionRequestQueueInitialized();
if (increaseBackoff()) {
partitionRequestClient.requestSubpartition(
partitionId, subpartitionIndex, this, getCurrentBackoff());
} else {
failPartitionRequest();
}
}
// Polls the next received buffer (under the receivedBuffers lock) together with the
// data type of the buffer that follows it, so the caller learns about availability.
// Returns empty when nothing is queued; throws if the channel was already released.
@Override
Optional<BufferAndAvailability> getNextBuffer() throws IOException {
checkPartitionRequestQueueInitialized();
final SequenceBuffer next;
final DataType nextDataType;
synchronized (receivedBuffers) {
next = receivedBuffers.poll();
nextDataType = receivedBuffers.peek() != null ? receivedBuffers.peek().buffer.getDataType() : DataType.NONE;
}
if (next == null) {
// Distinguish "temporarily empty" from "released": the latter is a task cancellation.
if (isReleased.get()) {
throw new CancelTaskException("Queried for a buffer after channel has been released.");
}
return Optional.empty();
}
numBytesIn.inc(next.buffer.getSize());
numBuffersIn.inc();
return Optional.of(new BufferAndAvailability(next.buffer, nextDataType, 0, next.sequenceNumber));
}
@Override
void sendTaskEvent(TaskEvent event) throws IOException {
checkState(!isReleased.get(), "Tried to send task event to producer after channel has been released.");
checkPartitionRequestQueueInitialized();
partitionRequestClient.sendTaskEvent(partitionId, event, this);
}
@Override
public boolean isReleased() {
return isReleased.get();
}
/**
 * Releases all exclusive and floating buffers, closes the partition request client.
 * Idempotent: the compareAndSet ensures release happens at most once.
 */
@Override
void releaseAllResources() throws IOException {
if (isReleased.compareAndSet(false, true)) {
final ArrayDeque<Buffer> releasedBuffers;
// Snapshot and clear the queue under the lock; actually release outside it.
synchronized (receivedBuffers) {
releasedBuffers = receivedBuffers.stream().map(sb -> sb.buffer)
.collect(Collectors.toCollection(ArrayDeque::new));
receivedBuffers.clear();
}
bufferManager.releaseAllBuffers(releasedBuffers);
// The client is null when the partition request was never issued; then only
// close any open connections for this connection id.
if (partitionRequestClient != null) {
partitionRequestClient.close(this);
} else {
connectionManager.closeOpenChannelConnections(connectionId);
}
}
}
private void failPartitionRequest() {
setError(new PartitionNotFoundException(partitionId));
}
@Override
public String toString() {
return "RemoteInputChannel [" + partitionId + " at " + connectionId + "]";
}
/**
* Enqueue this input channel in the pipeline for notifying the producer of unannounced credit.
*/
private void notifyCreditAvailable() throws IOException {
checkPartitionRequestQueueInitialized();
partitionRequestClient.notifyCreditAvailable(this);
}
@VisibleForTesting
public int getNumberOfAvailableBuffers() {
return bufferManager.getNumberOfAvailableBuffers();
}
@VisibleForTesting
public int getNumberOfRequiredBuffers() {
return bufferManager.unsynchronizedGetNumberOfRequiredBuffers();
}
@VisibleForTesting
public int getSenderBacklog() {
return getNumberOfRequiredBuffers() - initialCredit;
}
@VisibleForTesting
boolean isWaitingForFloatingBuffers() {
return bufferManager.unsynchronizedIsWaitingForFloatingBuffers();
}
@VisibleForTesting
public Buffer getNextReceivedBuffer() {
final SequenceBuffer sequenceBuffer = receivedBuffers.poll();
return sequenceBuffer != null ? sequenceBuffer.buffer : null;
}
@VisibleForTesting
BufferManager getBufferManager() {
return bufferManager;
}
@VisibleForTesting
PartitionRequestClient getPartitionRequestClient() {
return partitionRequestClient;
}
/**
* The unannounced credit is increased by the given amount and might notify
* increased credit to the producer.
*/
@Override
public void notifyBufferAvailable(int numAvailableBuffers) throws IOException {
if (numAvailableBuffers > 0 && unannouncedCredit.getAndAdd(numAvailableBuffers) == 0) {
notifyCreditAvailable();
}
}
@Override
public void resumeConsumption() throws IOException {
checkState(!isReleased.get(), "Channel released.");
checkPartitionRequestQueueInitialized();
partitionRequestClient.resumeConsumption(this);
}
/**
* Gets the currently unannounced credit.
*
* @return Credit which was not announced to the sender yet.
*/
public int getUnannouncedCredit() {
return unannouncedCredit.get();
}
/**
* Gets the unannounced credit and resets it to <tt>0</tt> atomically.
*
* @return Credit which was not announced to the sender yet.
*/
public int getAndResetUnannouncedCredit() {
return unannouncedCredit.getAndSet(0);
}
/**
* Gets the current number of received buffers which have not been processed yet.
*
* @return Buffers queued for processing.
*/
public int getNumberOfQueuedBuffers() {
synchronized (receivedBuffers) {
return receivedBuffers.size();
}
}
@Override
public int unsynchronizedGetNumberOfQueuedBuffers() {
return Math.max(0, receivedBuffers.size());
}
public int unsynchronizedGetExclusiveBuffersUsed() {
return Math.max(0, initialCredit - bufferManager.unsynchronizedGetAvailableExclusiveBuffers());
}
public int unsynchronizedGetFloatingBuffersAvailable() {
return Math.max(0, bufferManager.unsynchronizedGetFloatingBuffersAvailable());
}
public InputChannelID getInputChannelId() {
return id;
}
public int getInitialCredit() {
return initialCredit;
}
public BufferProvider getBufferProvider() throws IOException {
if (isReleased.get()) {
return null;
}
return inputGate.getBufferProvider();
}
/**
* Requests buffer from input channel directly for receiving network data.
* It should always return an available buffer in credit-based mode unless
* the channel has been released.
*
* @return The available buffer.
*/
@Nullable
public Buffer requestBuffer() {
return bufferManager.requestBuffer();
}
/**
* Receives the backlog from the producer's buffer response. If the number of available
* buffers is less than backlog + initialCredit, it will request floating buffers from
* the buffer manager, and then notify unannounced credits to the producer.
*
* @param backlog The number of unsent buffers in the producer's sub partition.
*/
void onSenderBacklog(int backlog) throws IOException {
int numRequestedBuffers = bufferManager.requestFloatingBuffers(backlog + initialCredit);
// Only the transition of unannounced credit from zero to positive triggers a
// notification; later additions piggyback on the already-pending announcement.
if (numRequestedBuffers > 0 && unannouncedCredit.getAndAdd(numRequestedBuffers) == 0) {
notifyCreditAvailable();
}
}
/**
* @return {@code true} if this was first priority buffer added.
*/
private boolean addPriorityBuffer(SequenceBuffer sequenceBuffer) throws IOException {
receivedBuffers.addPriorityElement(sequenceBuffer);
channelStatePersister
.checkForBarrier(sequenceBuffer.buffer)
.filter(id -> id > lastBarrierId)
.ifPresent(id -> {
lastBarrierId = id;
lastBarrierSequenceNumber = sequenceBuffer.sequenceNumber;
});
return receivedBuffers.getNumPriorityElements() == 1;
}
private SequenceBuffer announce(SequenceBuffer sequenceBuffer) throws IOException {
checkState(!sequenceBuffer.buffer.isBuffer(), "Only a CheckpointBarrier can be announced but found %s", sequenceBuffer.buffer);
checkAnnouncedOnlyOnce(sequenceBuffer);
AbstractEvent event = EventSerializer.fromBuffer(
sequenceBuffer.buffer,
getClass().getClassLoader());
checkState(event instanceof CheckpointBarrier, "Only a CheckpointBarrier can be announced but found %s", sequenceBuffer.buffer);
CheckpointBarrier barrier = (CheckpointBarrier) event;
return new SequenceBuffer(
EventSerializer.toBuffer(new EventAnnouncement(barrier, sequenceBuffer.sequenceNumber), true),
sequenceBuffer.sequenceNumber);
}
private void checkAnnouncedOnlyOnce(SequenceBuffer sequenceBuffer) {
Iterator<SequenceBuffer> iterator = receivedBuffers.iterator();
int count = 0;
while (iterator.hasNext()) {
if (iterator.next().sequenceNumber == sequenceBuffer.sequenceNumber) {
count++;
}
}
checkState(
count == 1,
"Before enqueuing the announcement there should be exactly single occurrence of the buffer, but found [%d]",
count);
}
/**
* Spills all queued buffers on checkpoint start. If barrier has already been received (and reordered), spill only
* the overtaken buffers.
*/
public void checkpointStarted(CheckpointBarrier barrier) throws CheckpointException {
synchronized (receivedBuffers) {
channelStatePersister.startPersisting(
barrier.getId(),
getInflightBuffersUnsafe(barrier.getId()));
}
}
public void checkpointStopped(long checkpointId) {
synchronized (receivedBuffers) {
channelStatePersister.stopPersisting(checkpointId);
if (lastBarrierId == checkpointId) {
lastBarrierId = NONE;
lastBarrierSequenceNumber = NONE;
}
}
}
@VisibleForTesting
List<Buffer> getInflightBuffers(long checkpointId) throws CheckpointException {
synchronized (receivedBuffers) {
return getInflightBuffersUnsafe(checkpointId);
}
}
@Override
public void convertToPriorityEvent(int sequenceNumber) throws IOException {
boolean firstPriorityEvent;
synchronized (receivedBuffers) {
checkState(channelStatePersister.hasBarrierReceived());
int numPriorityElementsBeforeRemoval = receivedBuffers.getNumPriorityElements();
SequenceBuffer toPrioritize = receivedBuffers.getAndRemove(
sequenceBuffer -> sequenceBuffer.sequenceNumber == sequenceNumber);
checkState(lastBarrierSequenceNumber == sequenceNumber);
checkState(!toPrioritize.buffer.isBuffer());
checkState(
numPriorityElementsBeforeRemoval == receivedBuffers.getNumPriorityElements(),
"Attempted to convertToPriorityEvent an event [%s] that has already been prioritized [%s]",
toPrioritize,
numPriorityElementsBeforeRemoval);
firstPriorityEvent = addPriorityBuffer(toPrioritize);
}
if (firstPriorityEvent) {
notifyPriorityEvent(sequenceNumber);
}
}
/**
 * Returns the in-flight buffers to persist for the given checkpoint: skips the
 * priority elements at the head of the queue, then collects (retained) data buffers
 * until the first one that should not be spilled; events are skipped, not collected.
 * Caller must hold the receivedBuffers lock.
 *
 * @throws CheckpointException if the checkpoint was already subsumed by a newer barrier
 */
private List<Buffer> getInflightBuffersUnsafe(long checkpointId) throws CheckpointException {
assert Thread.holdsLock(receivedBuffers);
if (checkpointId < lastBarrierId) {
throw new CheckpointException(
String.format("Sequence number for checkpoint %d is not known (it was likely been overwritten by a newer checkpoint %d)", checkpointId, lastBarrierId),
CheckpointFailureReason.CHECKPOINT_SUBSUMED);
}
final List<Buffer> inflightBuffers = new ArrayList<>();
Iterator<SequenceBuffer> iterator = receivedBuffers.iterator();
// Skip the priority elements sitting at the head of the deque.
Iterators.advance(iterator, receivedBuffers.getNumPriorityElements());
while (iterator.hasNext()) {
SequenceBuffer sequenceBuffer = iterator.next();
if (sequenceBuffer.buffer.isBuffer()) {
if (shouldBeSpilled(sequenceBuffer.sequenceNumber)) {
// Retain: the persisted copy must outlive consumption of the queue.
inflightBuffers.add(sequenceBuffer.buffer.retainBuffer());
} else {
break;
}
}
}
return inflightBuffers;
}
/**
 * Returns whether the buffer with the given sequence number should be spilled for
 * the current checkpoint, i.e. whether it precedes the last received barrier.
 * If no barrier has been received yet ({@code lastBarrierSequenceNumber == NONE})
 * everything is spilled. Otherwise the comparison must tolerate the {@code int}
 * sequence number wrapping around, which the two "possibleOverflow" branches handle.
 */
private boolean shouldBeSpilled(int sequenceNumber) {
if (lastBarrierSequenceNumber == NONE) {
return true;
}
// Overflow detection below is only sound when the queue holds far fewer than
// Integer.MAX_VALUE / 2 buffers.
checkState(
receivedBuffers.size() < Integer.MAX_VALUE / 2,
"Too many buffers for sequenceNumber overflow detection code to work correctly");
boolean possibleOverflowAfterOvertaking = Integer.MAX_VALUE / 2 < lastBarrierSequenceNumber;
boolean possibleOverflowBeforeOvertaking = lastBarrierSequenceNumber < -Integer.MAX_VALUE / 2;
if (possibleOverflowAfterOvertaking) {
// Barrier is near the top of the int range: sequence numbers that wrapped to
// negative values are logically AFTER the barrier, so require > 0 as well.
return sequenceNumber < lastBarrierSequenceNumber && sequenceNumber > 0;
}
else if (possibleOverflowBeforeOvertaking) {
// Barrier is near the bottom of the int range: positive sequence numbers are
// from before the wrap-around and therefore precede the barrier.
return sequenceNumber < lastBarrierSequenceNumber || sequenceNumber > 0;
}
else {
return sequenceNumber < lastBarrierSequenceNumber;
}
}
// Handles a sender message that carries no data, only a sequence number and backlog:
// advances the expected sequence number (or reports reordering), then reacts to the
// backlog outside the lock.
public void onEmptyBuffer(int sequenceNumber, int backlog) throws IOException {
boolean success = false;
synchronized (receivedBuffers) {
if (!isReleased.get()) {
if (expectedSequenceNumber == sequenceNumber) {
expectedSequenceNumber++;
success = true;
} else {
onError(new BufferReorderingException(expectedSequenceNumber, sequenceNumber));
}
}
}
// A backlog < 0 means no backlog information was attached.
if (success && backlog >= 0) {
onSenderBacklog(backlog);
}
}
public void onFailedPartitionRequest() {
inputGate.triggerPartitionStateCheck(partitionId);
}
public void onError(Throwable cause) {
setError(cause);
}
private void checkPartitionRequestQueueInitialized() throws IOException {
checkError();
checkState(partitionRequestClient != null,
"Bug: partitionRequestClient is not initialized before processing data and no error is detected.");
}
/** Signals that a buffer arrived with a different sequence number than expected. */
private static class BufferReorderingException extends IOException {
private static final long serialVersionUID = -888282210356266816L;
private final int expectedSequenceNumber;
private final int actualSequenceNumber;
BufferReorderingException(int expectedSequenceNumber, int actualSequenceNumber) {
this.expectedSequenceNumber = expectedSequenceNumber;
this.actualSequenceNumber = actualSequenceNumber;
}
// Message is built lazily from the stored numbers rather than at construction time.
@Override
public String getMessage() {
return String.format("Buffer re-ordering: expected buffer with sequence number %d, but received %d.",
expectedSequenceNumber, actualSequenceNumber);
}
}
/** A received buffer paired with the sequence number it arrived under. */
private static final class SequenceBuffer {
final Buffer buffer;
final int sequenceNumber;
private SequenceBuffer(Buffer buffer, int sequenceNumber) {
this.buffer = buffer;
this.sequenceNumber = sequenceNumber;
}
@Override
public String toString() {
return String.format(
"SequenceBuffer(isEvent = %s, dataType = %s, sequenceNumber = %s)",
!buffer.isBuffer(),
buffer.getDataType(),
sequenceNumber);
}
}
} | class RemoteInputChannel extends InputChannel {
private static final int NONE = -1;
/** ID to distinguish this channel from other channels sharing the same TCP connection. */
private final InputChannelID id = new InputChannelID();
/** The connection to use to request the remote partition. */
private final ConnectionID connectionId;
/** The connection manager to use connect to the remote partition provider. */
private final ConnectionManager connectionManager;
/**
* The received buffers. Received buffers are enqueued by the network I/O thread and the queue
* is consumed by the receiving task thread.
*/
private final PrioritizedDeque<SequenceBuffer> receivedBuffers = new PrioritizedDeque<>();
/**
* Flag indicating whether this channel has been released. Either called by the receiving task
* thread or the task manager actor.
*/
private final AtomicBoolean isReleased = new AtomicBoolean();
/** Client to establish a (possibly shared) TCP connection and request the partition. */
private volatile PartitionRequestClient partitionRequestClient;
/**
* The next expected sequence number for the next buffer.
*/
private int expectedSequenceNumber = 0;
/** The initial number of exclusive buffers assigned to this channel. */
private final int initialCredit;
/** The number of available buffers that have not been announced to the producer yet. */
private final AtomicInteger unannouncedCredit = new AtomicInteger(0);
private final BufferManager bufferManager;
@GuardedBy("receivedBuffers")
private int lastBarrierSequenceNumber = NONE;
@GuardedBy("receivedBuffers")
private long lastBarrierId = NONE;
private final ChannelStatePersister channelStatePersister;
public RemoteInputChannel(
SingleInputGate inputGate,
int channelIndex,
ResultPartitionID partitionId,
ConnectionID connectionId,
ConnectionManager connectionManager,
int initialBackOff,
int maxBackoff,
int networkBuffersPerChannel,
Counter numBytesIn,
Counter numBuffersIn,
ChannelStateWriter stateWriter) {
super(inputGate, channelIndex, partitionId, initialBackOff, maxBackoff, numBytesIn, numBuffersIn);
this.initialCredit = networkBuffersPerChannel;
this.connectionId = checkNotNull(connectionId);
this.connectionManager = checkNotNull(connectionManager);
this.bufferManager = new BufferManager(inputGate.getMemorySegmentProvider(), this, 0);
this.channelStatePersister = new ChannelStatePersister(stateWriter, getChannelInfo());
}
@VisibleForTesting
void setExpectedSequenceNumber(int expectedSequenceNumber) {
this.expectedSequenceNumber = expectedSequenceNumber;
}
/**
* Setup includes assigning exclusive buffers to this input channel, and this method should be called only once
* after this input channel is created.
*/
@Override
void setup() throws IOException {
checkState(bufferManager.unsynchronizedGetAvailableExclusiveBuffers() == 0,
"Bug in input channel setup logic: exclusive buffers have already been set for this input channel.");
bufferManager.requestExclusiveBuffers(initialCredit);
}
/**
* Requests a remote subpartition.
*/
@VisibleForTesting
@Override
public void requestSubpartition(int subpartitionIndex) throws IOException, InterruptedException {
if (partitionRequestClient == null) {
try {
partitionRequestClient = connectionManager.createPartitionRequestClient(connectionId);
} catch (IOException e) {
throw new PartitionConnectionException(partitionId, e);
}
partitionRequestClient.requestSubpartition(partitionId, subpartitionIndex, this, 0);
}
}
/**
* Retriggers a remote subpartition request.
*/
void retriggerSubpartitionRequest(int subpartitionIndex) throws IOException {
checkPartitionRequestQueueInitialized();
if (increaseBackoff()) {
partitionRequestClient.requestSubpartition(
partitionId, subpartitionIndex, this, getCurrentBackoff());
} else {
failPartitionRequest();
}
}
@Override
Optional<BufferAndAvailability> getNextBuffer() throws IOException {
checkPartitionRequestQueueInitialized();
final SequenceBuffer next;
final DataType nextDataType;
synchronized (receivedBuffers) {
next = receivedBuffers.poll();
nextDataType = receivedBuffers.peek() != null ? receivedBuffers.peek().buffer.getDataType() : DataType.NONE;
}
if (next == null) {
if (isReleased.get()) {
throw new CancelTaskException("Queried for a buffer after channel has been released.");
}
return Optional.empty();
}
numBytesIn.inc(next.buffer.getSize());
numBuffersIn.inc();
return Optional.of(new BufferAndAvailability(next.buffer, nextDataType, 0, next.sequenceNumber));
}
@Override
void sendTaskEvent(TaskEvent event) throws IOException {
checkState(!isReleased.get(), "Tried to send task event to producer after channel has been released.");
checkPartitionRequestQueueInitialized();
partitionRequestClient.sendTaskEvent(partitionId, event, this);
}
@Override
public boolean isReleased() {
return isReleased.get();
}
/**
* Releases all exclusive and floating buffers, closes the partition request client.
*/
@Override
void releaseAllResources() throws IOException {
if (isReleased.compareAndSet(false, true)) {
final ArrayDeque<Buffer> releasedBuffers;
synchronized (receivedBuffers) {
releasedBuffers = receivedBuffers.stream().map(sb -> sb.buffer)
.collect(Collectors.toCollection(ArrayDeque::new));
receivedBuffers.clear();
}
bufferManager.releaseAllBuffers(releasedBuffers);
if (partitionRequestClient != null) {
partitionRequestClient.close(this);
} else {
connectionManager.closeOpenChannelConnections(connectionId);
}
}
}
private void failPartitionRequest() {
setError(new PartitionNotFoundException(partitionId));
}
@Override
public String toString() {
return "RemoteInputChannel [" + partitionId + " at " + connectionId + "]";
}
/**
* Enqueue this input channel in the pipeline for notifying the producer of unannounced credit.
*/
private void notifyCreditAvailable() throws IOException {
checkPartitionRequestQueueInitialized();
partitionRequestClient.notifyCreditAvailable(this);
}
@VisibleForTesting
public int getNumberOfAvailableBuffers() {
return bufferManager.getNumberOfAvailableBuffers();
}
@VisibleForTesting
public int getNumberOfRequiredBuffers() {
return bufferManager.unsynchronizedGetNumberOfRequiredBuffers();
}
@VisibleForTesting
public int getSenderBacklog() {
return getNumberOfRequiredBuffers() - initialCredit;
}
@VisibleForTesting
boolean isWaitingForFloatingBuffers() {
return bufferManager.unsynchronizedIsWaitingForFloatingBuffers();
}
@VisibleForTesting
public Buffer getNextReceivedBuffer() {
final SequenceBuffer sequenceBuffer = receivedBuffers.poll();
return sequenceBuffer != null ? sequenceBuffer.buffer : null;
}
@VisibleForTesting
BufferManager getBufferManager() {
return bufferManager;
}
@VisibleForTesting
PartitionRequestClient getPartitionRequestClient() {
return partitionRequestClient;
}
/**
* The unannounced credit is increased by the given amount and might notify
* increased credit to the producer.
*/
@Override
public void notifyBufferAvailable(int numAvailableBuffers) throws IOException {
if (numAvailableBuffers > 0 && unannouncedCredit.getAndAdd(numAvailableBuffers) == 0) {
notifyCreditAvailable();
}
}
@Override
public void resumeConsumption() throws IOException {
checkState(!isReleased.get(), "Channel released.");
checkPartitionRequestQueueInitialized();
partitionRequestClient.resumeConsumption(this);
}
/**
* Gets the currently unannounced credit.
*
* @return Credit which was not announced to the sender yet.
*/
public int getUnannouncedCredit() {
return unannouncedCredit.get();
}
/**
* Gets the unannounced credit and resets it to <tt>0</tt> atomically.
*
* @return Credit which was not announced to the sender yet.
*/
public int getAndResetUnannouncedCredit() {
return unannouncedCredit.getAndSet(0);
}
/**
* Gets the current number of received buffers which have not been processed yet.
*
* @return Buffers queued for processing.
*/
public int getNumberOfQueuedBuffers() {
synchronized (receivedBuffers) {
return receivedBuffers.size();
}
}
@Override
public int unsynchronizedGetNumberOfQueuedBuffers() {
return Math.max(0, receivedBuffers.size());
}
public int unsynchronizedGetExclusiveBuffersUsed() {
return Math.max(0, initialCredit - bufferManager.unsynchronizedGetAvailableExclusiveBuffers());
}
public int unsynchronizedGetFloatingBuffersAvailable() {
return Math.max(0, bufferManager.unsynchronizedGetFloatingBuffersAvailable());
}
public InputChannelID getInputChannelId() {
return id;
}
public int getInitialCredit() {
return initialCredit;
}
public BufferProvider getBufferProvider() throws IOException {
if (isReleased.get()) {
return null;
}
return inputGate.getBufferProvider();
}
/**
* Requests buffer from input channel directly for receiving network data.
* It should always return an available buffer in credit-based mode unless
* the channel has been released.
*
* @return The available buffer.
*/
@Nullable
public Buffer requestBuffer() {
return bufferManager.requestBuffer();
}
/**
* Receives the backlog from the producer's buffer response. If the number of available
* buffers is less than backlog + initialCredit, it will request floating buffers from
* the buffer manager, and then notify unannounced credits to the producer.
*
* @param backlog The number of unsent buffers in the producer's sub partition.
*/
void onSenderBacklog(int backlog) throws IOException {
int numRequestedBuffers = bufferManager.requestFloatingBuffers(backlog + initialCredit);
if (numRequestedBuffers > 0 && unannouncedCredit.getAndAdd(numRequestedBuffers) == 0) {
notifyCreditAvailable();
}
}
/**
* @return {@code true} if this was first priority buffer added.
*/
private boolean addPriorityBuffer(SequenceBuffer sequenceBuffer) {
receivedBuffers.addPriorityElement(sequenceBuffer);
return receivedBuffers.getNumPriorityElements() == 1;
}
private SequenceBuffer announce(SequenceBuffer sequenceBuffer) throws IOException {
checkState(!sequenceBuffer.buffer.isBuffer(), "Only a CheckpointBarrier can be announced but found %s", sequenceBuffer.buffer);
checkAnnouncedOnlyOnce(sequenceBuffer);
AbstractEvent event = EventSerializer.fromBuffer(
sequenceBuffer.buffer,
getClass().getClassLoader());
checkState(event instanceof CheckpointBarrier, "Only a CheckpointBarrier can be announced but found %s", sequenceBuffer.buffer);
CheckpointBarrier barrier = (CheckpointBarrier) event;
return new SequenceBuffer(
EventSerializer.toBuffer(new EventAnnouncement(barrier, sequenceBuffer.sequenceNumber), true),
sequenceBuffer.sequenceNumber);
}
private void checkAnnouncedOnlyOnce(SequenceBuffer sequenceBuffer) {
Iterator<SequenceBuffer> iterator = receivedBuffers.iterator();
int count = 0;
while (iterator.hasNext()) {
if (iterator.next().sequenceNumber == sequenceBuffer.sequenceNumber) {
count++;
}
}
checkState(
count == 1,
"Before enqueuing the announcement there should be exactly single occurrence of the buffer, but found [%d]",
count);
}
/**
* Spills all queued buffers on checkpoint start. If barrier has already been received (and reordered), spill only
* the overtaken buffers.
*/
public void checkpointStarted(CheckpointBarrier barrier) throws CheckpointException {
synchronized (receivedBuffers) {
channelStatePersister.startPersisting(
barrier.getId(),
getInflightBuffersUnsafe(barrier.getId()));
}
}
public void checkpointStopped(long checkpointId) {
synchronized (receivedBuffers) {
channelStatePersister.stopPersisting(checkpointId);
if (lastBarrierId == checkpointId) {
lastBarrierId = NONE;
lastBarrierSequenceNumber = NONE;
}
}
}
@VisibleForTesting
List<Buffer> getInflightBuffers(long checkpointId) throws CheckpointException {
synchronized (receivedBuffers) {
return getInflightBuffersUnsafe(checkpointId);
}
}
@Override
public void convertToPriorityEvent(int sequenceNumber) throws IOException {
boolean firstPriorityEvent;
synchronized (receivedBuffers) {
checkState(channelStatePersister.hasBarrierReceived());
int numPriorityElementsBeforeRemoval = receivedBuffers.getNumPriorityElements();
SequenceBuffer toPrioritize = receivedBuffers.getAndRemove(
sequenceBuffer -> sequenceBuffer.sequenceNumber == sequenceNumber);
checkState(lastBarrierSequenceNumber == sequenceNumber);
checkState(!toPrioritize.buffer.isBuffer());
checkState(
numPriorityElementsBeforeRemoval == receivedBuffers.getNumPriorityElements(),
"Attempted to convertToPriorityEvent an event [%s] that has already been prioritized [%s]",
toPrioritize,
numPriorityElementsBeforeRemoval);
AbstractEvent e = EventSerializer.fromBuffer(toPrioritize.buffer, this.getClass().getClassLoader());
toPrioritize.buffer.setReaderIndex(0);
toPrioritize = new SequenceBuffer(EventSerializer.toBuffer(e, true), toPrioritize.sequenceNumber);
firstPriorityEvent = addPriorityBuffer(toPrioritize);
}
if (firstPriorityEvent) {
notifyPriorityEventForce();
}
}
private void notifyPriorityEventForce() {
inputGate.notifyPriorityEventForce(this);
}
/**
* Returns a list of buffers, checking the first n non-priority buffers, and skipping all events.
*/
private List<Buffer> getInflightBuffersUnsafe(long checkpointId) throws CheckpointException {
assert Thread.holdsLock(receivedBuffers);
if (checkpointId < lastBarrierId) {
throw new CheckpointException(
String.format("Sequence number for checkpoint %d is not known (it was likely been overwritten by a newer checkpoint %d)", checkpointId, lastBarrierId),
CheckpointFailureReason.CHECKPOINT_SUBSUMED);
}
final List<Buffer> inflightBuffers = new ArrayList<>();
Iterator<SequenceBuffer> iterator = receivedBuffers.iterator();
Iterators.advance(iterator, receivedBuffers.getNumPriorityElements());
while (iterator.hasNext()) {
SequenceBuffer sequenceBuffer = iterator.next();
if (sequenceBuffer.buffer.isBuffer()) {
if (shouldBeSpilled(sequenceBuffer.sequenceNumber)) {
inflightBuffers.add(sequenceBuffer.buffer.retainBuffer());
} else {
break;
}
}
}
return inflightBuffers;
}
/**
* @return if given {@param sequenceNumber} should be spilled given {@link
* We might not have yet received {@link CheckpointBarrier} and we might need to spill everything.
* If we have already received it, there is a bit nasty corner case of {@link SequenceBuffer
* overflowing that needs to be handled as well.
*/
private boolean shouldBeSpilled(int sequenceNumber) {
if (lastBarrierSequenceNumber == NONE) {
return true;
}
checkState(
receivedBuffers.size() < Integer.MAX_VALUE / 2,
"Too many buffers for sequenceNumber overflow detection code to work correctly");
boolean possibleOverflowAfterOvertaking = Integer.MAX_VALUE / 2 < lastBarrierSequenceNumber;
boolean possibleOverflowBeforeOvertaking = lastBarrierSequenceNumber < -Integer.MAX_VALUE / 2;
if (possibleOverflowAfterOvertaking) {
return sequenceNumber < lastBarrierSequenceNumber && sequenceNumber > 0;
}
else if (possibleOverflowBeforeOvertaking) {
return sequenceNumber < lastBarrierSequenceNumber || sequenceNumber > 0;
}
else {
return sequenceNumber < lastBarrierSequenceNumber;
}
}
public void onEmptyBuffer(int sequenceNumber, int backlog) throws IOException {
boolean success = false;
synchronized (receivedBuffers) {
if (!isReleased.get()) {
if (expectedSequenceNumber == sequenceNumber) {
expectedSequenceNumber++;
success = true;
} else {
onError(new BufferReorderingException(expectedSequenceNumber, sequenceNumber));
}
}
}
if (success && backlog >= 0) {
onSenderBacklog(backlog);
}
}
public void onFailedPartitionRequest() {
inputGate.triggerPartitionStateCheck(partitionId);
}
public void onError(Throwable cause) {
setError(cause);
}
private void checkPartitionRequestQueueInitialized() throws IOException {
checkError();
checkState(partitionRequestClient != null,
"Bug: partitionRequestClient is not initialized before processing data and no error is detected.");
}
private static class BufferReorderingException extends IOException {
private static final long serialVersionUID = -888282210356266816L;
private final int expectedSequenceNumber;
private final int actualSequenceNumber;
BufferReorderingException(int expectedSequenceNumber, int actualSequenceNumber) {
this.expectedSequenceNumber = expectedSequenceNumber;
this.actualSequenceNumber = actualSequenceNumber;
}
@Override
public String getMessage() {
return String.format("Buffer re-ordering: expected buffer with sequence number %d, but received %d.",
expectedSequenceNumber, actualSequenceNumber);
}
}
private static final class SequenceBuffer {
final Buffer buffer;
final int sequenceNumber;
private SequenceBuffer(Buffer buffer, int sequenceNumber) {
this.buffer = buffer;
this.sequenceNumber = sequenceNumber;
}
@Override
public String toString() {
return String.format(
"SequenceBuffer(isEvent = %s, dataType = %s, sequenceNumber = %s)",
!buffer.isBuffer(),
buffer.getDataType(),
sequenceNumber);
}
}
} |
Do we want to keep the `catch` and log the testing error? | public void intercept(IMethodInvocation invocation) throws Throwable {
String testName = TestNameProvider.getTestName(invocation.getIteration());
System.out.printf("%s is starting", testName);
long startTimestamp = System.currentTimeMillis();
try {
invocation.proceed();
} finally {
long duration = System.currentTimeMillis() - startTimestamp;
System.out.printf("%s finished and took %d ms", testName, duration);
}
} | } finally { | public void intercept(IMethodInvocation invocation) throws Throwable {
String testName = TestNameProvider.getTestName(invocation.getIteration());
System.out.printf("%s is starting%n", testName);
LOGGER.info("{} is starting", testName);
long startTimestamp = System.currentTimeMillis();
try {
invocation.proceed();
} finally {
long duration = System.currentTimeMillis() - startTimestamp;
System.out.printf("%s finished and took %d ms%n", testName, duration);
LOGGER.info("{} finished and took {} ms", testName, duration);
}
} | class TestHeaderIterationInterceptor implements IMethodInterceptor {
@Override
} | class TestHeaderIterationInterceptor implements IMethodInterceptor {
@Override
} |
The base class' ChangeFeedStartFromInternal.populatePropertyBag is a no-op as is its baseclass JsonSerializable's populatePropertyBag method. | public void populatePropertyBag() {
super.populatePropertyBag();
synchronized(this) {
setProperty(
this,
Constants.Properties.CHANGE_FEED_START_FROM_TYPE,
ChangeFeedStartFromTypes.NOW);
}
} | super.populatePropertyBag(); | public void populatePropertyBag() {
super.populatePropertyBag();
synchronized(this) {
setProperty(
this,
Constants.Properties.CHANGE_FEED_START_FROM_TYPE,
ChangeFeedStartFromTypes.NOW);
}
} | class ChangeFeedStartFromNowImpl extends ChangeFeedStartFromInternal {
public ChangeFeedStartFromNowImpl() {
super();
}
@Override
@Override
public boolean supportsFullFidelityRetention() {
return true;
}
@Override
public void populateRequest(RxDocumentServiceRequest request) {
checkNotNull(request, "Argument 'request' must not be null.");
request.getHeaders().put(
HttpConstants.HttpHeaders.IF_NONE_MATCH,
HttpConstants.HeaderValues.IF_NONE_MATCH_ALL);
}
} | class ChangeFeedStartFromNowImpl extends ChangeFeedStartFromInternal {
public ChangeFeedStartFromNowImpl() {
super();
}
@Override
@Override
public boolean supportsFullFidelityRetention() {
return true;
}
@Override
public void populateRequest(RxDocumentServiceRequest request) {
checkNotNull(request, "Argument 'request' must not be null.");
request.getHeaders().put(
HttpConstants.HttpHeaders.IF_NONE_MATCH,
HttpConstants.HeaderValues.IF_NONE_MATCH_ALL);
}
} |
The merge function itself now verifies this. | public PCollection<Row> expand(PCollectionList<Row> inputs) {
checkArgument(
inputs.size() == 2,
"Wrong number of arguments to %s: %s",
beamRelNode.getClass().getSimpleName(),
inputs);
PCollection<Row> leftRows = inputs.get(0);
PCollection<Row> rightRows = inputs.get(1);
WindowFn leftWindow = leftRows.getWindowingStrategy().getWindowFn();
WindowFn rightWindow = rightRows.getWindowingStrategy().getWindowFn();
if (!leftWindow.isCompatible(rightWindow)) {
throw new IllegalArgumentException(
"inputs of "
+ opType
+ " have different window strategy: "
+ leftWindow
+ " VS "
+ rightWindow);
}
final String lhsTag = "lhs";
final String rhsTag = "rhs";
PCollection<Row> joined =
PCollectionTuple.of(lhsTag, leftRows, rhsTag, rightRows)
.apply("CoGroup", CoGroup.join(By.fieldNames("*")));
return joined
.apply(
"FilterResults",
ParDo.of(
new BeamSetOperatorsTransforms.SetOperatorFilteringDoFn(
lhsTag, rhsTag, opType, all)))
.setRowSchema(joined.getSchema().getField("key").getType().getRowSchema());
} | + rightWindow); | public PCollection<Row> expand(PCollectionList<Row> inputs) {
checkArgument(
inputs.size() == 2,
"Wrong number of arguments to %s: %s",
beamRelNode.getClass().getSimpleName(),
inputs);
PCollection<Row> leftRows = inputs.get(0);
PCollection<Row> rightRows = inputs.get(1);
WindowFn leftWindow = leftRows.getWindowingStrategy().getWindowFn();
WindowFn rightWindow = rightRows.getWindowingStrategy().getWindowFn();
if (!leftWindow.isCompatible(rightWindow)) {
throw new IllegalArgumentException(
"inputs of "
+ opType
+ " have different window strategy: "
+ leftWindow
+ " VS "
+ rightWindow);
}
final String lhsTag = "lhs";
final String rhsTag = "rhs";
PCollection<Row> joined =
PCollectionTuple.of(lhsTag, leftRows, rhsTag, rightRows)
.apply("CoGroup", CoGroup.join(By.fieldNames("*")));
return joined
.apply(
"FilterResults",
ParDo.of(
new BeamSetOperatorsTransforms.SetOperatorFilteringDoFn(
lhsTag, rhsTag, opType, all)))
.setRowSchema(joined.getSchema().getField("key").getType().getRowSchema());
} | class BeamSetOperatorRelBase extends PTransform<PCollectionList<Row>, PCollection<Row>> {
/** Set operator type. */
public enum OpType implements Serializable {
UNION,
INTERSECT,
MINUS
}
private BeamRelNode beamRelNode;
private boolean all;
private OpType opType;
public BeamSetOperatorRelBase(BeamRelNode beamRelNode, OpType opType, boolean all) {
this.beamRelNode = beamRelNode;
this.opType = opType;
this.all = all;
}
@Override
} | class BeamSetOperatorRelBase extends PTransform<PCollectionList<Row>, PCollection<Row>> {
/** Set operator type. */
public enum OpType implements Serializable {
UNION,
INTERSECT,
MINUS
}
private BeamRelNode beamRelNode;
private boolean all;
private OpType opType;
public BeamSetOperatorRelBase(BeamRelNode beamRelNode, OpType opType, boolean all) {
this.beamRelNode = beamRelNode;
this.opType = opType;
this.all = all;
}
@Override
} |
Do we need this one? we get the referredType in `public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {` | private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) {
int listExprSize = 0;
if (arrayType.state != BArrayState.OPEN) {
for (BLangExpression expr : listConstructor.exprs) {
if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
listExprSize++;
continue;
}
BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
BType spreadOpType = checkExpr(spreadOpExpr, this.env);
spreadOpType = Types.getReferredType(spreadOpType);
switch (spreadOpType.tag) {
case TypeTags.ARRAY:
int arraySize = ((BArrayType) spreadOpType).size;
if (arraySize >= 0) {
listExprSize += arraySize;
continue;
}
dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
return symTable.semanticError;
case TypeTags.TUPLE:
BTupleType tType = (BTupleType) spreadOpType;
if (isFixedLengthTuple(tType)) {
listExprSize += tType.tupleTypes.size();
continue;
}
dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
return symTable.semanticError;
}
}
}
BType eType = arrayType.eType;
if (arrayType.state == BArrayState.INFERRED) {
arrayType.size = listExprSize;
arrayType.state = BArrayState.CLOSED;
} else if (arrayType.state != BArrayState.OPEN && arrayType.size != listExprSize) {
if (arrayType.size < listExprSize) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size,
listExprSize);
return symTable.semanticError;
}
if (!types.hasFillerValue(eType)) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType);
return symTable.semanticError;
}
}
boolean errored = false;
for (BLangExpression expr : listConstructor.exprs) {
if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
errored |= exprIncompatible(eType, expr);
continue;
}
BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
BType spreadOpType = checkExpr(spreadOpExpr, this.env);
BType spreadOpReferredType = Types.getReferredType(spreadOpType);
switch (spreadOpReferredType.tag) {
case TypeTags.ARRAY:
BType spreadOpeType = ((BArrayType) spreadOpReferredType).eType;
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpeType, eType)) {
return symTable.semanticError;
}
break;
case TypeTags.TUPLE:
BTupleType spreadOpTuple = (BTupleType) spreadOpReferredType;
List<BType> tupleTypes = spreadOpTuple.tupleTypes;
for (BType tupleMemberType : tupleTypes) {
if (types.typeIncompatible(spreadOpExpr.pos, tupleMemberType, eType)) {
return symTable.semanticError;
}
}
if (!isFixedLengthTuple(spreadOpTuple)) {
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.restType, eType)) {
return symTable.semanticError;
}
}
break;
default:
dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_LIST_SPREAD_OP, spreadOpType);
return symTable.semanticError;
}
}
return errored ? symTable.semanticError : arrayType;
} | spreadOpType = Types.getReferredType(spreadOpType); | private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) {
int listExprSize = 0;
if (arrayType.state != BArrayState.OPEN) {
for (BLangExpression expr : listConstructor.exprs) {
if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
listExprSize++;
continue;
}
BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
BType spreadOpType = checkExpr(spreadOpExpr, this.env);
spreadOpType = Types.getReferredType(spreadOpType);
switch (spreadOpType.tag) {
case TypeTags.ARRAY:
int arraySize = ((BArrayType) spreadOpType).size;
if (arraySize >= 0) {
listExprSize += arraySize;
continue;
}
dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
return symTable.semanticError;
case TypeTags.TUPLE:
BTupleType tType = (BTupleType) spreadOpType;
if (isFixedLengthTuple(tType)) {
listExprSize += tType.tupleTypes.size();
continue;
}
dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
return symTable.semanticError;
}
}
}
BType eType = arrayType.eType;
if (arrayType.state == BArrayState.INFERRED) {
arrayType.size = listExprSize;
arrayType.state = BArrayState.CLOSED;
} else if (arrayType.state != BArrayState.OPEN && arrayType.size != listExprSize) {
if (arrayType.size < listExprSize) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size,
listExprSize);
return symTable.semanticError;
}
if (!types.hasFillerValue(eType)) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType);
return symTable.semanticError;
}
}
boolean errored = false;
for (BLangExpression expr : listConstructor.exprs) {
if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
errored |= exprIncompatible(eType, expr);
continue;
}
BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
BType spreadOpType = checkExpr(spreadOpExpr, this.env);
BType spreadOpReferredType = Types.getReferredType(spreadOpType);
switch (spreadOpReferredType.tag) {
case TypeTags.ARRAY:
BType spreadOpeType = ((BArrayType) spreadOpReferredType).eType;
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpeType, eType)) {
return symTable.semanticError;
}
break;
case TypeTags.TUPLE:
BTupleType spreadOpTuple = (BTupleType) spreadOpReferredType;
List<BType> tupleTypes = spreadOpTuple.tupleTypes;
for (BType tupleMemberType : tupleTypes) {
if (types.typeIncompatible(spreadOpExpr.pos, tupleMemberType, eType)) {
return symTable.semanticError;
}
}
if (!isFixedLengthTuple(spreadOpTuple)) {
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.restType, eType)) {
return symTable.semanticError;
}
}
break;
default:
dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_LIST_SPREAD_OP, spreadOpType);
return symTable.semanticError;
}
}
return errored ? symTable.semanticError : arrayType;
} | class TypeChecker extends BLangNodeVisitor {
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>();
private static Set<String> listLengthModifierFunctions = new HashSet<>();
private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>();
private static final String LIST_LANG_LIB = "lang.array";
private static final String MAP_LANG_LIB = "lang.map";
private static final String TABLE_LANG_LIB = "lang.table";
private static final String VALUE_LANG_LIB = "lang.value";
private static final String XML_LANG_LIB = "lang.xml";
private static final String FUNCTION_NAME_PUSH = "push";
private static final String FUNCTION_NAME_POP = "pop";
private static final String FUNCTION_NAME_SHIFT = "shift";
private static final String FUNCTION_NAME_UNSHIFT = "unshift";
private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType";
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private NodeCloner nodeCloner;
private Types types;
private BLangDiagnosticLog dlog;
private SymbolEnv env;
private boolean isTypeChecked;
private TypeNarrower typeNarrower;
private TypeParamAnalyzer typeParamAnalyzer;
private BLangAnonymousModelHelper anonymousModelHelper;
private SemanticAnalyzer semanticAnalyzer;
private Unifier unifier;
private boolean nonErrorLoggingCheck = false;
private int letCount = 0;
private Stack<SymbolEnv> queryEnvs, prevEnvs;
private Stack<BLangNode> queryFinalClauses;
private boolean checkWithinQueryExpr = false;
private BLangMissingNodesHelper missingNodesHelper;
private boolean breakToParallelQueryEnv = false;
/**
* Expected types or inherited types.
*/
private BType expType;
private BType resultType;
private DiagnosticCode diagCode;
static {
listLengthModifierFunctions.add(FUNCTION_NAME_PUSH);
listLengthModifierFunctions.add(FUNCTION_NAME_POP);
listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT);
listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT);
modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeAll");
add("setLength");
add("reverse");
add("sort");
add("pop");
add("push");
add("shift");
add("unshift");
}});
modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{
add("put");
add("add");
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{
add("mergeJson");
}});
modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{
add("setName");
add("setChildren");
add("strip");
}});
}
public static TypeChecker getInstance(CompilerContext context) {
TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY);
if (typeChecker == null) {
typeChecker = new TypeChecker(context);
}
return typeChecker;
}
public TypeChecker(CompilerContext context) {
context.put(TYPE_CHECKER_KEY, this);
this.names = Names.getInstance(context);
this.symTable = SymbolTable.getInstance(context);
this.symbolEnter = SymbolEnter.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.nodeCloner = NodeCloner.getInstance(context);
this.types = Types.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.typeNarrower = TypeNarrower.getInstance(context);
this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
this.queryFinalClauses = new Stack<>();
this.queryEnvs = new Stack<>();
this.prevEnvs = new Stack<>();
this.unifier = new Unifier();
}
    /**
     * Type-checks {@code expr} with no particular expected type ({@code noType}).
     */
    public BType checkExpr(BLangExpression expr, SymbolEnv env) {
        return checkExpr(expr, env, symTable.noType);
    }
    /**
     * Type-checks {@code expr} against {@code expType}, reporting mismatches with
     * the default {@code INCOMPATIBLE_TYPES} diagnostic.
     */
    public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
        return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    }
    /**
     * Type-checks {@code expr} in {@code env} against {@code expType}, logging
     * {@code diagCode} on a mismatch. The checker's mutable state (env, expType,
     * diagCode) is saved and restored around the visit so nested checks are safe.
     * Already-checked expressions return their cached type immediately.
     */
    public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
        if (expr.typeChecked) {
            return expr.getBType();
        }
        // Check against an intersection's effective type rather than the intersection itself.
        if (expType.tag == TypeTags.INTERSECTION) {
            expType = ((BIntersectionType) expType).effectiveType;
        }
        SymbolEnv prevEnv = this.env;
        BType preExpType = this.expType;
        DiagnosticCode preDiagCode = this.diagCode;
        this.env = env;
        this.diagCode = diagCode;
        this.expType = expType;
        this.isTypeChecked = true;
        // The expected type may also be a type *reference* to an intersection.
        BType referredExpType = Types.getReferredType(expType);
        if (referredExpType.tag == TypeTags.INTERSECTION) {
            expType = ((BIntersectionType) referredExpType).effectiveType;
        }
        expr.expectedType = expType;
        expr.accept(this);
        // Normalize an intersection result to its effective type as well.
        BType resultRefType = Types.getReferredType(resultType);
        if (resultRefType.tag == TypeTags.INTERSECTION) {
            resultType = ((BIntersectionType) resultRefType).effectiveType;
        }
        expr.setTypeCheckedType(resultType);
        expr.typeChecked = isTypeChecked;
        this.env = prevEnv;
        this.expType = preExpType;
        this.diagCode = preDiagCode;
        validateAndSetExprExpectedType(expr);
        return resultType;
    }
    // Runs semantic analysis on the node, but only outside speculative
    // (non-error-logging) checks where the analysis would emit duplicate errors.
    private void analyzeObjectConstructor(BLangNode node, SymbolEnv env) {
        if (!nonErrorLoggingCheck) {
            semanticAnalyzer.analyzeNode(node, env);
        }
    }
private void validateAndSetExprExpectedType(BLangExpression expr) {
if (resultType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null &&
Types.getReferredType(expr.expectedType).tag == TypeTags.MAP
&& Types.getReferredType(expr.getBType()).tag == TypeTags.RECORD) {
return;
}
expr.expectedType = resultType;
}
public void visit(BLangLiteral literalExpr) {
BType literalType = setLiteralValueAndGetType(literalExpr, expType);
if (literalType == symTable.semanticError || literalExpr.isFiniteContext) {
return;
}
resultType = types.checkType(literalExpr, literalType, expType);
}
    @Override
    public void visit(BLangXMLElementAccess xmlElementAccess) {
        // `x.<name>` element access: operand must be xml, result is an xml element sequence.
        checkXMLNamespacePrefixes(xmlElementAccess.filters);
        checkExpr(xmlElementAccess.expr, env, symTable.xmlType);
        resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType);
    }
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
checkXMLNamespacePrefixes(xmlNavigation.filters);
if (xmlNavigation.childIndex != null) {
checkExpr(xmlNavigation.childIndex, env, symTable.intType);
}
BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType);
if (Types.getReferredType(exprType).tag == TypeTags.UNION) {
dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS,
xmlNavigation.expr.getBType());
}
BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN
? symTable.xmlType : symTable.xmlElementSeqType;
types.checkType(xmlNavigation, actualType, expType);
if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
resultType = symTable.xmlType;
} else {
resultType = symTable.xmlElementSeqType;
}
}
private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) {
for (BLangXMLElementFilter filter : filters) {
if (!filter.namespace.isEmpty()) {
Name nsName = names.fromString(filter.namespace);
BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName);
filter.namespaceSymbol = nsSymbol;
if (nsSymbol == symTable.notFoundSymbol) {
dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName);
}
}
}
}
private int getPreferredMemberTypeTag(BFiniteType finiteType) {
for (BLangExpression valueExpr : finiteType.getValueSpace()) {
int typeTag = Types.getReferredType(valueExpr.getBType()).tag;
if (typeTag > TypeTags.DECIMAL) {
continue;
}
for (int i = TypeTags.INT; i <= TypeTags.DECIMAL; i++) {
if (typeTag == i) {
return i;
}
}
}
return TypeTags.NONE;
}
    /**
     * Matches an integer literal against a finite type's value space, preferring
     * int, then byte, then the fixed-width int subtypes (Signed32 .. Unsigned8).
     * Returns {@code noType} when no member matches.
     */
    private BType getFiniteTypeMatchWithIntType(BLangLiteral literalExpr, BFiniteType finiteType) {
        if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
            setLiteralValueForFiniteType(literalExpr, symTable.intType);
            return symTable.intType;
        } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
            setLiteralValueForFiniteType(literalExpr, symTable.byteType);
            return symTable.byteType;
        } else {
            // Relies on SIGNED32_INT..UNSIGNED8_INT being consecutive tag values.
            for (int tag = TypeTags.SIGNED32_INT; tag <= TypeTags.UNSIGNED8_INT; tag++) {
                if (literalAssignableToFiniteType(literalExpr, finiteType, tag)) {
                    setLiteralValueForFiniteType(literalExpr, symTable.getTypeFromTag(tag));
                    return symTable.getTypeFromTag(tag);
                }
            }
        }
        return symTable.noType;
    }
    /**
     * Resolves the type of an int literal against a finite type: first via the int
     * subtypes, then via the finite type's preferred numeric member tag.
     * Falls back to int when nothing in the value space matches.
     */
    private BType getFiniteTypeMatchWithIntLiteral(BLangLiteral literalExpr, BFiniteType finiteType,
                                                   Object literalValue) {
        BType intLiteralType = getFiniteTypeMatchWithIntType(literalExpr, finiteType);
        if (intLiteralType != symTable.noType) {
            return intLiteralType;
        }
        int typeTag = getPreferredMemberTypeTag(finiteType);
        if (typeTag == TypeTags.NONE) {
            return symTable.intType;
        }
        if (literalAssignableToFiniteType(literalExpr, finiteType, typeTag)) {
            BType type = symTable.getTypeFromTag(typeTag);
            setLiteralValueForFiniteType(literalExpr, type);
            // Keep the literal's textual value for the float/decimal representation.
            literalExpr.value = String.valueOf(literalValue);
            return type;
        }
        return symTable.intType;
    }
    /**
     * Resolves the type of an integer literal against the expected type: direct
     * int subtypes, implicit float/decimal conversion, finite-type matching, and
     * union member matching (in member order). Defaults to int.
     */
    private BType getIntegerLiteralType(BLangLiteral literalExpr, Object literalValue, BType expType) {
        BType expectedType = Types.getReferredType(expType);
        if (expectedType.tag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(expectedType.tag)) {
            return getIntLiteralType(expType, literalValue);
        } else if (expectedType.tag == TypeTags.FLOAT) {
            // An int literal in a float context becomes a float value.
            literalExpr.value = ((Long) literalValue).doubleValue();
            return symTable.floatType;
        } else if (expectedType.tag == TypeTags.DECIMAL) {
            literalExpr.value = String.valueOf(literalValue);
            return symTable.decimalType;
        } else if (expectedType.tag == TypeTags.FINITE) {
            BFiniteType finiteType = (BFiniteType) expectedType;
            return getFiniteTypeMatchWithIntLiteral(literalExpr, finiteType, literalValue);
        } else if (expectedType.tag == TypeTags.UNION) {
            // First pass: a member that is an exact int-subtype match, or a
            // member (json/anydata/any) that accepts plain int.
            for (BType memType : types.getAllTypes(expectedType, true)) {
                BType memberRefType = Types.getReferredType(memType);
                if (TypeTags.isIntegerTypeTag(memberRefType.tag) || memberRefType.tag == TypeTags.BYTE) {
                    BType intLiteralType = getIntLiteralType(memType, literalValue);
                    if (intLiteralType == memberRefType) {
                        return intLiteralType;
                    }
                } else if (memberRefType.tag == TypeTags.JSON || memberRefType.tag == TypeTags.ANYDATA ||
                        memberRefType.tag == TypeTags.ANY) {
                    return symTable.intType;
                }
            }
            // Second pass: finite members holding int values, then byte values,
            // then float/decimal members.
            BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expectedType, symTable.intType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
            BType finiteTypeMatchingByte = getFiniteTypeWithValuesOfSingleType((BUnionType) expectedType,
                    symTable.byteType);
            if (finiteTypeMatchingByte != symTable.semanticError) {
                finiteType = finiteTypeMatchingByte;
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
            Set<BType> memberTypes = ((BUnionType) expectedType).getMemberTypes();
            return getTypeMatchingFloatOrDecimal(finiteType, memberTypes, literalExpr, (BUnionType) expectedType);
        }
        return symTable.intType;
    }
    /**
     * Types a numeric literal carrying an explicit float discriminator (e.g. {@code 1.0f}):
     * validates the value's float range, then matches finite/union expected types,
     * defaulting to float.
     */
    private BType getTypeOfLiteralWithFloatDiscriminator(BLangLiteral literalExpr, Object literalValue, BType expType) {
        String numericLiteral = NumericLiteralSupport.stripDiscriminator(String.valueOf(literalValue));
        if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
            resultType = symTable.semanticError;
            return resultType;
        }
        literalExpr.value = Double.parseDouble(numericLiteral);
        BType referredType = Types.getReferredType(expType);
        if (referredType.tag == TypeTags.FINITE) {
            BFiniteType finiteType = (BFiniteType) referredType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                setLiteralValueForFiniteType(literalExpr, symTable.floatType);
                return symTable.floatType;
            }
        } else if (referredType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) referredType;
            BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType);
            if (unionMember != symTable.noType) {
                return unionMember;
            }
        }
        return symTable.floatType;
    }
private BType getTypeOfLiteralWithDecimalDiscriminator(BLangLiteral literalExpr, Object literalValue,
BType expType) {
literalExpr.value = NumericLiteralSupport.stripDiscriminator(String.valueOf(literalValue));
BType referredType = Types.getReferredType(expType);
if (referredType.tag == TypeTags.FINITE) {
BFiniteType finiteType = (BFiniteType) referredType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
setLiteralValueForFiniteType(literalExpr, symTable.decimalType);
return symTable.decimalType;
}
} else if (referredType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) expType;
BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType);
if (unionMember != symTable.noType) {
return unionMember;
}
}
return symTable.decimalType;
}
    /**
     * Types an undiscriminated decimal floating-point literal (e.g. {@code 1.0}):
     * float in a float context, decimal in a decimal context, finite/union matching
     * with float preferred over decimal, defaulting to float. Float candidates are
     * range-validated before use.
     */
    private BType getTypeOfDecimalFloatingPointLiteral(BLangLiteral literalExpr, Object literalValue, BType expType) {
        BType expectedType = Types.getReferredType(expType);
        String numericLiteral = String.valueOf(literalValue);
        if (expectedType.tag == TypeTags.DECIMAL) {
            return symTable.decimalType;
        } else if (expectedType.tag == TypeTags.FLOAT) {
            if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
                resultType = symTable.semanticError;
                return resultType;
            }
            return symTable.floatType;
        } else if (expectedType.tag == TypeTags.FINITE) {
            // Relies on FLOAT and DECIMAL being consecutive tag values.
            BFiniteType finiteType = (BFiniteType) expectedType;
            for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
                if (literalAssignableToFiniteType(literalExpr, finiteType, tag)) {
                    BType valueType = setLiteralValueAndGetType(literalExpr, symTable.getTypeFromTag(tag));
                    setLiteralValueForFiniteType(literalExpr, valueType);
                    return valueType;
                }
            }
        } else if (expectedType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) expectedType;
            for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
                BType unionMember =
                        getAndSetAssignableUnionMember(literalExpr, unionType, symTable.getTypeFromTag(tag));
                if (unionMember == symTable.floatType && !types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
                    resultType = symTable.semanticError;
                    return resultType;
                } else if (unionMember != symTable.noType) {
                    return unionMember;
                }
            }
        }
        if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
            resultType = symTable.semanticError;
            return resultType;
        }
        return symTable.floatType;
    }
    /**
     * Types a hexadecimal floating-point literal (e.g. {@code 0x1.8p3}): validates
     * the float range, then matches finite/union expected types, defaulting to float.
     */
    private BType getTypeOfHexFloatingPointLiteral(BLangLiteral literalExpr, Object literalValue, BType expType) {
        String numericLiteral = String.valueOf(literalValue);
        if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
            resultType = symTable.semanticError;
            return resultType;
        }
        literalExpr.value = Double.parseDouble(numericLiteral);
        BType referredType = Types.getReferredType(expType);
        if (referredType.tag == TypeTags.FINITE) {
            BFiniteType finiteType = (BFiniteType) referredType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                setLiteralValueForFiniteType(literalExpr, symTable.floatType);
                return symTable.floatType;
            }
        } else if (referredType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) referredType;
            BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType);
            if (unionMember != symTable.noType) {
                return unionMember;
            }
        }
        return symTable.floatType;
    }
    /**
     * Determines the type of a literal in the context of {@code expType}, possibly
     * rewriting {@code literalExpr.value} (e.g. int-to-float conversion) and marking
     * finite-type matches via {@code setLiteralValueForFiniteType}. Numeric literals
     * are dispatched by kind; other literals are matched against char-string,
     * finite, and union expected types.
     */
    public BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) {
        Object literalValue = literalExpr.value;
        BType expectedType = Types.getReferredType(expType);
        if (literalExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
            NodeKind kind = ((BLangNumericLiteral) literalExpr).kind;
            if (kind == NodeKind.INTEGER_LITERAL) {
                return getIntegerLiteralType(literalExpr, literalValue, expectedType);
            } else if (kind == NodeKind.DECIMAL_FLOATING_POINT_LITERAL) {
                // An explicit `f`/`d` suffix fixes the basic type up front.
                if (NumericLiteralSupport.isFloatDiscriminated(literalExpr.originalValue)) {
                    return getTypeOfLiteralWithFloatDiscriminator(literalExpr, literalValue, expectedType);
                } else if (NumericLiteralSupport.isDecimalDiscriminated(literalExpr.originalValue)) {
                    return getTypeOfLiteralWithDecimalDiscriminator(literalExpr, literalValue, expectedType);
                } else {
                    return getTypeOfDecimalFloatingPointLiteral(literalExpr, literalValue, expectedType);
                }
            } else {
                return getTypeOfHexFloatingPointLiteral(literalExpr, literalValue, expectedType);
            }
        }
        // Non-numeric literal: start from the literal's own basic type.
        BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag);
        if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) {
            // Single-character strings may narrow to string:Char.
            if (expectedType.tag == TypeTags.CHAR_STRING) {
                return symTable.charStringType;
            }
            if (expectedType.tag == TypeTags.UNION) {
                Set<BType> memberTypes = new HashSet<>(types.getAllTypes(expectedType, true));
                for (BType memType : memberTypes) {
                    memType = Types.getReferredType(memType);
                    if (TypeTags.isStringTypeTag(memType.tag)) {
                        return setLiteralValueAndGetType(literalExpr, memType);
                    } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA ||
                            memType.tag == TypeTags.ANY) {
                        return setLiteralValueAndGetType(literalExpr, symTable.charStringType);
                    } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType,
                            literalExpr)) {
                        setLiteralValueForFiniteType(literalExpr, symTable.charStringType);
                        return literalType;
                    }
                }
            }
            boolean foundMember = types.isAssignableToFiniteType(expectedType, literalExpr);
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        } else {
            // Non-char literal: check finite and union-of-finite expected types.
            if (expectedType.tag == TypeTags.FINITE) {
                boolean foundMember = types.isAssignableToFiniteType(expectedType, literalExpr);
                if (foundMember) {
                    setLiteralValueForFiniteType(literalExpr, literalType);
                    return literalType;
                }
            } else if (expectedType.tag == TypeTags.UNION) {
                BUnionType unionType = (BUnionType) expectedType;
                boolean foundMember = types.getAllTypes(unionType, true)
                        .stream()
                        .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
                if (foundMember) {
                    setLiteralValueForFiniteType(literalExpr, literalType);
                    return literalType;
                }
            }
        }
        // base16/base64 literals are byte arrays.
        if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) {
            literalType = new BArrayType(symTable.byteType);
        }
        return literalType;
    }
    /**
     * Last-resort matching of an int literal against float/decimal union members:
     * for each tag (float first, then decimal) try a direct member match, then a
     * finite member holding values of that type. Defaults to int.
     * NOTE(review): {@code finiteType} doubles as loop state — once a finite match
     * is found it stops being semanticError, which suppresses the decimal pass.
     */
    private BType getTypeMatchingFloatOrDecimal(BType finiteType, Set<BType> memberTypes,
                                                BLangLiteral literalExpr, BUnionType expType) {
        for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
            if (finiteType == symTable.semanticError) {
                BType type = symTable.getTypeFromTag(tag);
                for (BType memType : memberTypes) {
                    if (memType.tag == tag) {
                        return setLiteralValueAndGetType(literalExpr, type);
                    }
                }
                finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, type);
                if (finiteType != symTable.semanticError) {
                    BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                    if (literalExpr.isFiniteContext) {
                        return setType;
                    }
                }
            }
        }
        return symTable.intType;
    }
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
List<BType> members = types.getAllTypes(expType, true);
Set<BType> memberTypes = new HashSet<>();
members.forEach(member -> memberTypes.addAll(members));
if (memberTypes.stream()
.anyMatch(memType -> memType.tag == desiredType.tag
|| memType.tag == TypeTags.JSON
|| memType.tag == TypeTags.ANYDATA
|| memType.tag == TypeTags.ANY)) {
return desiredType;
}
BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, desiredType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
return symTable.noType;
}
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
int targetMemberTypeTag) {
for (BLangExpression valueExpr : finiteType.getValueSpace()) {
if (valueExpr.getBType().tag == targetMemberTypeTag &&
types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) {
return true;
}
}
return false;
}
    // Records that the literal matched a finite-type member: adds an implicit cast
    // to the checker's current expected type, sets resultType, and flags the
    // literal so callers skip the ordinary type check.
    private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
        types.setImplicitCastExpr(literalExpr, type, this.expType);
        this.resultType = type;
        literalExpr.isFiniteContext = true;
    }
    /**
     * Builds a finite type containing only the values — drawn from the union's
     * finite members — whose type tag matches {@code matchType}. Returns
     * semanticError when the union has no finite members or no matching values.
     */
    private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
        List<BFiniteType> finiteTypeMembers = types.getAllTypes(unionType, true).stream()
                .filter(memType -> memType.tag == TypeTags.FINITE)
                .map(memFiniteType -> (BFiniteType) memFiniteType)
                .collect(Collectors.toList());
        if (finiteTypeMembers.isEmpty()) {
            return symTable.semanticError;
        }
        int tag = matchType.tag;
        Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
        for (BFiniteType finiteType : finiteTypeMembers) {
            // NOTE(review): this intermediate HashSet iterates in hash order before
            // feeding the LinkedHashSet, so the original value-space ordering is not
            // preserved — confirm whether that is intentional before simplifying.
            Set<BLangExpression> set = new HashSet<>();
            for (BLangExpression expression : finiteType.getValueSpace()) {
                if (expression.getBType().tag == tag) {
                    set.add(expression);
                }
            }
            matchedValueSpace.addAll(set);
        }
        if (matchedValueSpace.isEmpty()) {
            return symTable.semanticError;
        }
        return new BFiniteType(null, matchedValueSpace);
    }
    /**
     * Maps an integer literal to the expected int subtype when the value fits its
     * range; type references are unwrapped recursively. Falls back to int when the
     * value is out of range or the expected type is not an int subtype.
     */
    private BType getIntLiteralType(BType expType, Object literalValue) {
        switch (expType.tag) {
            case TypeTags.INT:
                return symTable.intType;
            case TypeTags.BYTE:
                if (types.isByteLiteralValue((Long) literalValue)) {
                    return symTable.byteType;
                }
                break;
            case TypeTags.SIGNED32_INT:
                if (types.isSigned32LiteralValue((Long) literalValue)) {
                    return symTable.signed32IntType;
                }
                break;
            case TypeTags.SIGNED16_INT:
                if (types.isSigned16LiteralValue((Long) literalValue)) {
                    return symTable.signed16IntType;
                }
                break;
            case TypeTags.SIGNED8_INT:
                if (types.isSigned8LiteralValue((Long) literalValue)) {
                    return symTable.signed8IntType;
                }
                break;
            case TypeTags.UNSIGNED32_INT:
                if (types.isUnsigned32LiteralValue((Long) literalValue)) {
                    return symTable.unsigned32IntType;
                }
                break;
            case TypeTags.UNSIGNED16_INT:
                if (types.isUnsigned16LiteralValue((Long) literalValue)) {
                    return symTable.unsigned16IntType;
                }
                break;
            case TypeTags.UNSIGNED8_INT:
                if (types.isUnsigned8LiteralValue((Long) literalValue)) {
                    return symTable.unsigned8IntType;
                }
                break;
            case TypeTags.TYPEREFDESC:
                return getIntLiteralType(Types.getReferredType(expType), literalValue);
            default:
        }
        return symTable.intType;
    }
@Override
public void visit(BLangListConstructorExpr listConstructor) {
if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.READONLY) {
BType inferredType = getInferredTupleType(listConstructor, expType);
resultType = inferredType == symTable.semanticError ?
symTable.semanticError : types.checkType(listConstructor, inferredType, expType);
return;
}
resultType = checkListConstructorCompatibility(expType, listConstructor);
}
    /**
     * Type-checks a table constructor. Three contextual cases: no usable context
     * (infer the row type), an expected table type (check rows against its
     * constraint), and a union expected type (speculatively check against each
     * member, requiring exactly one match).
     */
    @Override
    public void visit(BLangTableConstructorExpr tableConstructorExpr) {
        // Case 1: no contextual table type — infer the inherent member type from the rows.
        if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) {
            List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env);
            for (BType memType : memTypes) {
                if (memType == symTable.semanticError) {
                    resultType = symTable.semanticError;
                    return;
                }
            }
            // An empty constructor gives nothing to infer from.
            if (tableConstructorExpr.recordLiteralList.size() == 0) {
                dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE);
                resultType = symTable.semanticError;
                return;
            }
            BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr);
            BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null);
            for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
                recordLiteral.setBType(inherentMemberType);
            }
            if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) {
                resultType = symTable.semanticError;
                return;
            }
            if (checkKeySpecifier(tableConstructorExpr, tableType)) {
                return;
            }
            resultType = tableType;
            return;
        }
        BType applicableExpType = Types.getReferredType(expType);
        applicableExpType = applicableExpType.tag == TypeTags.INTERSECTION ?
                ((BIntersectionType) applicableExpType).effectiveType : applicableExpType;
        // Case 2: expected type is a table — check each row against its constraint.
        if (applicableExpType.tag == TypeTags.TABLE) {
            List<BType> memTypes = new ArrayList<>();
            for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
                // Clone rows during speculative checks so diagnostics/types don't stick.
                BLangRecordLiteral clonedExpr = recordLiteral;
                if (this.nonErrorLoggingCheck) {
                    clonedExpr.cloneAttempt++;
                    clonedExpr = nodeCloner.cloneNode(recordLiteral);
                }
                BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint);
                if (recordType == symTable.semanticError) {
                    resultType = symTable.semanticError;
                    return;
                }
                memTypes.add(recordType);
            }
            BTableType expectedTableType = (BTableType) applicableExpType;
            if (expectedTableType.constraint.tag == TypeTags.MAP && expectedTableType.isTypeInlineDefined) {
                validateMapConstraintTable(applicableExpType);
                return;
            }
            if (!(validateKeySpecifierInTableConstructor((BTableType) applicableExpType,
                    tableConstructorExpr.recordLiteralList) &&
                    validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) {
                resultType = symTable.semanticError;
                return;
            }
            BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType),
                    null);
            if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) {
                tableType.flags |= Flags.READONLY;
            }
            if (checkKeySpecifier(tableConstructorExpr, tableType)) {
                return;
            }
            // Inherit the expected type's key field names when the constructor has none.
            if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) {
                tableType.fieldNameList = expectedTableType.fieldNameList;
            }
            resultType = tableType;
        } else if (applicableExpType.tag == TypeTags.UNION) {
            // Case 3: union — speculatively check against each member with diagnostics
            // muted, collecting distinct successful result types.
            boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
            this.nonErrorLoggingCheck = true;
            int errorCount = this.dlog.errorCount();
            this.dlog.mute();
            List<BType> matchingTypes = new ArrayList<>();
            BUnionType expectedType = (BUnionType) applicableExpType;
            for (BType memType : expectedType.getMemberTypes()) {
                dlog.resetErrorCount();
                BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr;
                if (this.nonErrorLoggingCheck) {
                    tableConstructorExpr.cloneAttempt++;
                    clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr);
                }
                BType resultType = checkExpr(clonedTableExpr, env, memType);
                if (resultType != symTable.semanticError && dlog.errorCount() == 0 &&
                        isUniqueType(matchingTypes, resultType)) {
                    matchingTypes.add(resultType);
                }
            }
            this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
            this.dlog.setErrorCount(errorCount);
            if (!prevNonErrorLoggingCheck) {
                this.dlog.unmute();
            }
            if (matchingTypes.isEmpty()) {
                BLangTableConstructorExpr exprToLog = tableConstructorExpr;
                if (this.nonErrorLoggingCheck) {
                    tableConstructorExpr.cloneAttempt++;
                    exprToLog = nodeCloner.cloneNode(tableConstructorExpr);
                }
                dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                        getInferredTableType(exprToLog));
            } else if (matchingTypes.size() != 1) {
                dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                        expType);
            } else {
                // Exactly one member matched: re-check for real against that member.
                resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0));
                return;
            }
            resultType = symTable.semanticError;
        } else {
            resultType = symTable.semanticError;
        }
    }
private BType getInferredTableType(BLangTableConstructorExpr exprToLog) {
List<BType> memTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env);
for (BType memType : memTypes) {
if (memType == symTable.semanticError) {
return symTable.semanticError;
}
}
return new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, exprToLog), null);
}
private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) {
if (tableConstructorExpr.tableKeySpecifier != null) {
if (!(validateTableKeyValue(getTableKeyNameList(tableConstructorExpr.
tableKeySpecifier), tableConstructorExpr.recordLiteralList))) {
resultType = symTable.semanticError;
return true;
}
tableType.fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
}
return false;
}
    /**
     * Infers the member type of a table from the row types: builds a minimal union
     * by adding only row types not already assignable to the union so far.
     * Falls back to the expected table's constraint when there are no rows.
     */
    private BType inferTableMemberType(List<BType> memTypes, BType expType) {
        if (memTypes.isEmpty()) {
            return ((BTableType) expType).constraint;
        }
        LinkedHashSet<BType> result = new LinkedHashSet<>();
        result.add(memTypes.get(0));
        BUnionType unionType = BUnionType.create(null, result);
        for (int i = 1; i < memTypes.size(); i++) {
            BType source = memTypes.get(i);
            if (!types.isAssignable(source, unionType)) {
                result.add(source);
                // Recreate the union so the next assignability check sees the new member.
                unionType = BUnionType.create(null, result);
            }
        }
        // A single-member union collapses back to the lone row type.
        if (unionType.getMemberTypes().size() == 1) {
            return memTypes.get(0);
        }
        return unionType;
    }
    /**
     * Infers an inherent record type for a table constructor from its (record-typed)
     * row types: collects every field name across rows, unions the field's types
     * (including rest-field types from open rows that lack the field), and marks
     * each resulting field optional/required/readonly as appropriate.
     */
    private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) {
        BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
        List<String> keySpecifierFieldNames = new ArrayList<>();
        List<BType> restFieldTypes = new ArrayList<>();
        if (keySpecifier != null) {
            for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) {
                keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value);
            }
        }
        // Group every row's fields by field name, and collect rest-field types of open rows.
        LinkedHashMap<String, List<BField>> fieldNameToFields = new LinkedHashMap<>();
        for (BType memType : memTypes) {
            BRecordType member = (BRecordType) memType;
            for (Map.Entry<String, BField> entry : member.fields.entrySet()) {
                String key = entry.getKey();
                BField field = entry.getValue();
                if (fieldNameToFields.containsKey(key)) {
                    fieldNameToFields.get(key).add(field);
                } else {
                    fieldNameToFields.put(key, new ArrayList<>() {{
                        add(field);
                    }});
                }
            }
            if (!member.sealed) {
                restFieldTypes.add(member.restFieldType);
            }
        }
        LinkedHashSet<BField> inferredFields = new LinkedHashSet<>();
        int memTypesSize = memTypes.size();
        for (Map.Entry<String, List<BField>> entry : fieldNameToFields.entrySet()) {
            String fieldName = entry.getKey();
            List<BField> fields = entry.getValue();
            List<BType> types = new ArrayList<>();
            for (BField field : fields) {
                types.add(field.getType());
            }
            // Open rows without this field could still hold it via their rest field.
            for (BType memType : memTypes) {
                BRecordType bMemType = (BRecordType) memType;
                if (bMemType.sealed || bMemType.fields.containsKey(fieldName)) {
                    continue;
                }
                BType restFieldType = bMemType.restFieldType;
                types.add(restFieldType);
            }
            BField resultantField = createFieldWithType(fields.get(0), types);
            // Optional when any row marks it optional or some row lacks it entirely.
            boolean isOptional = hasOptionalFields(fields) || fields.size() != memTypesSize;
            if (isOptional) {
                resultantField.symbol.flags = Flags.OPTIONAL;
            } else if (keySpecifierFieldNames.contains(fieldName)) {
                // Key fields must be required and readonly.
                resultantField.symbol.flags = Flags.REQUIRED | Flags.READONLY;
            } else {
                resultantField.symbol.flags = Flags.REQUIRED;
            }
            inferredFields.add(resultantField);
        }
        return createTableConstraintRecordType(inferredFields, restFieldTypes, tableConstructorExpr.pos);
    }
    /**
     * Create a new {@code BField} out of existing {@code BField}, while changing its type.
     * The new type is derived from the given list of bTypes.
     *
     * @param field - existing {@code BField} whose name, position, and symbol metadata are copied
     * @param bTypes - list of bTypes to union into the new field's type
     * @return a {@code BField} with a freshly created symbol carrying the resultant type
     */
    private BField createFieldWithType(BField field, List<BType> bTypes) {
        BType resultantType = getResultantType(bTypes);
        // Clone the symbol rather than mutating the original field's symbol in place.
        BVarSymbol originalSymbol = field.symbol;
        BVarSymbol fieldSymbol = new BVarSymbol(originalSymbol.flags, originalSymbol.name, originalSymbol.pkgID,
                resultantType, originalSymbol.owner, originalSymbol.pos, VIRTUAL);
        return new BField(field.name, field.pos, fieldSymbol);
    }
    /**
     * Get the resultant type from a {@code List<BType>}.
     * Nested unions (including unions behind type references) are flattened first.
     *
     * @param bTypes bType list (size > 0)
     * @return {@code BUnionType} if effective members in list is > 1. {@code BType} Otherwise.
     */
    private BType getResultantType(List<BType> bTypes) {
        // Deduplicate before flattening so repeated identical types collapse.
        LinkedHashSet<BType> bTypeSet = new LinkedHashSet<>(bTypes);
        List<BType> flattenBTypes = new ArrayList<>(bTypes.size());
        addFlattenMemberTypes(flattenBTypes, bTypeSet);
        return getRepresentativeBroadType(flattenBTypes);
    }
private void addFlattenMemberTypes(List<BType> flattenBTypes, LinkedHashSet<BType> bTypes) {
for (BType memberType : bTypes) {
BType bType;
switch (memberType.tag) {
case TypeTags.UNION:
addFlattenMemberTypes(flattenBTypes, ((BUnionType) memberType).getMemberTypes());
continue;
case TypeTags.TYPEREFDESC:
BType constraint = Types.getReferredType(memberType);
if (constraint.tag == TypeTags.UNION) {
addFlattenMemberTypes(flattenBTypes, ((BUnionType) constraint).getMemberTypes());
continue;
}
bType = constraint;
break;
default:
bType = memberType;
break;
}
flattenBTypes.add(bType);
}
}
private boolean hasOptionalFields(List<BField> fields) {
for (BField field : fields) {
if (field.symbol.getFlags().contains(Flag.OPTIONAL)) {
return true;
}
}
return false;
}
    /**
     * Materializes an anonymous record type (symbol, scope entries, type node, and
     * init function) holding the inferred table fields. The record is sealed when
     * no open rows contributed rest-field types; otherwise its rest field is the
     * union of those types.
     */
    private BRecordType createTableConstraintRecordType(Set<BField> inferredFields, List<BType> restFieldTypes,
                                                        Location pos) {
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);
        // Register each field in the record symbol's scope before wiring the type.
        for (BField field : inferredFields) {
            recordSymbol.scope.define(field.name, field.symbol);
        }
        BRecordType recordType = new BRecordType(recordSymbol);
        recordType.fields = inferredFields.stream().collect(getFieldCollector());
        recordSymbol.type = recordType;
        recordType.tsymbol = recordSymbol;
        // Build the AST-side definition and init function expected by later phases.
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                pos);
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                names, symTable);
        TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, env);
        if (restFieldTypes.isEmpty()) {
            recordType.sealed = true;
            recordType.restFieldType = symTable.noType;
        } else {
            recordType.restFieldType = getResultantType(restFieldTypes);
        }
        return recordType;
    }
    // Collector mapping fields by name into a LinkedHashMap (preserving insertion
    // order); duplicate field names indicate a caller bug and fail fast.
    private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() {
        BinaryOperator<BField> mergeFunc = (u, v) -> {
            throw new IllegalStateException(String.format("Duplicate key %s", u));
        };
        return Collectors.toMap(field -> field.name.value, Function.identity(), mergeFunc, LinkedHashMap::new);
    }
private boolean validateTableType(BTableType tableType) {
BType constraint = Types.getReferredType(tableType.constraint);
if (tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType)) {
dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
resultType = symTable.semanticError;
return false;
}
return true;
}
private boolean validateKeySpecifierInTableConstructor(BTableType tableType,
List<BLangRecordLiteral> recordLiterals) {
List<String> fieldNameList = tableType.fieldNameList;
if (fieldNameList != null) {
return validateTableKeyValue(fieldNameList, recordLiterals);
}
return true;
}
    /**
     * Checks that every row supplies a constant-expression value for every key
     * field. Reports the first offending row/field and returns false.
     */
    private boolean validateTableKeyValue(List<String> keySpecifierFieldNames,
                                          List<BLangRecordLiteral> recordLiterals) {
        for (String fieldName : keySpecifierFieldNames) {
            for (BLangRecordLiteral recordLiteral : recordLiterals) {
                BLangExpression recordKeyValueField = getRecordKeyValueField(recordLiteral, fieldName);
                if (recordKeyValueField != null && isConstExpression(recordKeyValueField)) {
                    continue;
                }
                dlog.error(recordLiteral.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName);
                resultType = symTable.semanticError;
                return false;
            }
        }
        return true;
    }
    /**
     * Returns true when the expression kind is acceptable as a constant expression
     * for a table key value: literals and composite/operator forms are accepted
     * structurally, variable references only when they resolve to constants, and
     * group expressions by their inner expression.
     */
    private boolean isConstExpression(BLangExpression expression) {
        switch(expression.getKind()) {
            case LITERAL:
            case NUMERIC_LITERAL:
            case STRING_TEMPLATE_LITERAL:
            case XML_ELEMENT_LITERAL:
            case XML_TEXT_LITERAL:
            case LIST_CONSTRUCTOR_EXPR:
            case TABLE_CONSTRUCTOR_EXPR:
            case RECORD_LITERAL_EXPR:
            case TYPE_CONVERSION_EXPR:
            case UNARY_EXPR:
            case BINARY_EXPR:
            case TYPE_TEST_EXPR:
            case TERNARY_EXPR:
                return true;
            case SIMPLE_VARIABLE_REF:
                // Only references to declared constants qualify.
                return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
            case GROUP_EXPR:
                return isConstExpression(((BLangGroupExpr) expression).expression);
            default:
                return false;
        }
    }
/**
 * Finds the expression providing the value for {@code fieldName} in a record literal, for
 * table key-value validation.
 * <p>
 * For a key-value field, returns its value expression; for a var-name field, returns the
 * field node itself; for a spread-operator field whose record type contains the field,
 * returns the enclosing record literal (whose kind, RECORD_LITERAL_EXPR, is treated as
 * constant by {@code isConstExpression}).
 *
 * @param recordLiteral the record literal to search
 * @param fieldName     the field name being looked up
 * @return the matching expression, or {@code null} if the field is not present
 */
private BLangExpression getRecordKeyValueField(BLangRecordLiteral recordLiteral,
                                               String fieldName) {
for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) {
if (recordField.isKeyValueField()) {
BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField =
(BLangRecordLiteral.BLangRecordKeyValueField) recordField;
if (fieldName.equals(recordKeyValueField.key.toString())) {
return recordKeyValueField.valueExpr;
}
} else if (recordField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
if (fieldName.equals(((BLangRecordVarNameField) recordField).variableName.value)) {
return (BLangRecordLiteral.BLangRecordVarNameField) recordField;
}
} else if (recordField.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField =
(BLangRecordLiteral.BLangRecordSpreadOperatorField) recordField;
BType spreadOpExprType = Types.getReferredType(spreadOperatorField.expr.getBType());
// Only record-typed spread expressions can contribute named fields here.
if (spreadOpExprType.tag != TypeTags.RECORD) {
continue;
}
BRecordType recordType = (BRecordType) spreadOpExprType;
for (BField recField : recordType.fields.values()) {
if (fieldName.equals(recField.name.value)) {
// Field is supplied via the spread: return the literal itself as the marker.
return recordLiteral;
}
}
}
}
return null;
}
/**
 * Validates key-specifier field names against the table constraint type. Each named field
 * must exist in the constraint, be {@code readonly}, be required, and be {@code anydata}.
 * On the first violation an error is logged, {@code resultType} is set to the
 * semantic-error type, and validation stops.
 *
 * @param fieldNameList key-specifier field names
 * @param constraint    the table row (constraint) type
 * @param pos           location used for diagnostics
 * @return {@code true} when every key field passes all four checks
 */
public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint,
                                    Location pos) {
    for (String name : fieldNameList) {
        BField keyField = types.getTableConstraintField(constraint, name);
        if (keyField == null) {
            dlog.error(pos, DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, name, constraint);
        } else if (!Symbols.isFlagOn(keyField.symbol.flags, Flags.READONLY)) {
            dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, name);
        } else if (!Symbols.isFlagOn(keyField.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, name);
        } else if (!types.isAssignable(keyField.type, symTable.anydataType)) {
            dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, name, constraint);
        } else {
            // All checks passed for this field.
            continue;
        }
        resultType = symTable.semanticError;
        return false;
    }
    return true;
}
/**
 * Validates a table constructor expression against its table type:
 * (1) the explicit key specifier (if any) against the constraint and against the type's
 * own field-name list, and (2) the key type constraint (e.g. {@code table<T> key<K>})
 * against the key specifier's field types.
 *
 * @param tableConstructorExpr the table constructor being checked
 * @param tableType            the expected table type
 * @return {@code true} if the constructor is compatible; on failure, logs an error and
 *         sets {@code resultType} to the semantic-error type
 */
private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr,
                                             BTableType tableType) {
BType constraintType = Types.getReferredType(tableType.constraint);
List<String> fieldNameList = new ArrayList<>();
boolean isKeySpecifierEmpty = tableConstructorExpr.tableKeySpecifier == null;
if (!isKeySpecifierEmpty) {
fieldNameList.addAll(getTableKeyNameList(tableConstructorExpr.tableKeySpecifier));
// Only validate against the constraint when the type itself has no field-name list;
// otherwise the specifier must match the type's list exactly (checked next).
if (tableType.fieldNameList == null &&
!validateKeySpecifier(fieldNameList,
constraintType.tag != TypeTags.INTERSECTION ? constraintType :
((BIntersectionType) constraintType).effectiveType,
tableConstructorExpr.tableKeySpecifier.pos)) {
return false;
}
if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) {
dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH,
tableType.fieldNameList.toString(), fieldNameList.toString());
resultType = symTable.semanticError;
return false;
}
}
BType keyTypeConstraint = tableType.keyTypeConstraint;
if (keyTypeConstraint != null) {
keyTypeConstraint = Types.getReferredType(keyTypeConstraint);
// Flatten the key constraint into the list of expected member types.
List<BType> memberTypes = new ArrayList<>();
switch (keyTypeConstraint.tag) {
case TypeTags.TUPLE:
for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) {
memberTypes.add((BType) type);
}
break;
case TypeTags.RECORD:
// For a record key constraint, take the types of the fields named in the specifier.
Map<String, BField> fieldList = ((BRecordType) keyTypeConstraint).getFields();
memberTypes = fieldList.entrySet().stream()
.filter(e -> fieldNameList.contains(e.getKey())).map(entry -> entry.getValue().type)
.collect(Collectors.toList());
if (memberTypes.isEmpty()) {
memberTypes.add(keyTypeConstraint);
}
break;
default:
memberTypes.add(keyTypeConstraint);
}
// key<never> with no specifier is trivially satisfied.
if (isKeySpecifierEmpty && keyTypeConstraint.tag == TypeTags.NEVER) {
return true;
}
if (isKeySpecifierEmpty ||
tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) {
if (isKeySpecifierEmpty) {
dlog.error(tableConstructorExpr.pos,
DiagnosticErrorCode.KEY_SPECIFIER_EMPTY_FOR_PROVIDED_KEY_CONSTRAINT, memberTypes);
} else {
dlog.error(tableConstructorExpr.pos,
DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT,
memberTypes, tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList);
}
resultType = symTable.semanticError;
return false;
}
// Pairwise: each specifier field's type must be assignable to the constraint member type.
List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier.
fieldNameIdentifierList;
int index = 0;
for (IdentifierNode identifier : fieldNameIdentifierList) {
BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) {
dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
fieldNameIdentifierList.toString(), memberTypes.toString());
resultType = symTable.semanticError;
return false;
}
index++;
}
}
return true;
}
/**
 * Rejects key specifiers / key constraints on tables constrained by a map type: a
 * map-constrained table cannot declare keys (outside of lang-lib code). Sets
 * {@code resultType} to the semantic-error type on failure, otherwise to the given type.
 *
 * @param expType the expected table type (may be {@code null})
 */
public void validateMapConstraintTable(BType expType) {
    if (expType != null) {
        BTableType tableType = (BTableType) expType;
        boolean hasKeySpec = tableType.fieldNameList != null || tableType.keyTypeConstraint != null;
        // Lang-lib modules are exempt from this restriction.
        if (hasKeySpec && !expType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) {
            dlog.error(tableType.keyPos,
                    DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = expType;
}
/**
 * Extracts the key field names from a table key specifier, preserving order.
 *
 * @param tableKeySpecifier the key specifier node
 * @return the field names in declaration order
 */
private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
    return tableKeySpecifier.fieldNameIdentifierList.stream()
            .map(identifier -> ((BLangIdentifier) identifier).value)
            .collect(Collectors.toList());
}
/**
 * Builds the key type for a table from its key field names: the single field's type when
 * there is one key field, otherwise a tuple of the field types in order.
 *
 * @param fieldNames     key field names (may be {@code null})
 * @param constraintType the table row type the fields belong to
 * @return the key type, or the semantic-error type if names are absent or unresolvable
 */
private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
    if (fieldNames == null) {
        return symTable.semanticError;
    }
    List<BType> keyTypes = new ArrayList<>(fieldNames.size());
    for (String name : fieldNames) {
        BField constraintField = types.getTableConstraintField(constraintType, name);
        if (constraintField == null) {
            // A named key field that is not present in the constraint invalidates the key.
            return symTable.semanticError;
        }
        keyTypes.add(constraintField.type);
    }
    return keyTypes.size() == 1 ? keyTypes.get(0) : new BTupleType(keyTypes);
}
/**
 * Checks a list constructor expression against an expected type. Unions are handled by
 * silently trying each member type (diagnostics muted, error count restored) and requiring
 * exactly one compatible member; type refs and intersections are unwrapped; concrete types
 * are dispatched to the array/tuple/readonly/typedesc paths.
 *
 * @param bType           the expected type
 * @param listConstructor the list constructor expression
 * @return the resolved compatible type, or the semantic-error type
 */
private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) {
int tag = bType.tag;
if (tag == TypeTags.UNION) {
// Probe each union member without emitting diagnostics; restore logging state after.
boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
int errorCount = this.dlog.errorCount();
this.nonErrorLoggingCheck = true;
this.dlog.mute();
List<BType> compatibleTypes = new ArrayList<>();
boolean erroredExpType = false;
for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
if (memberType == symTable.semanticError) {
if (!erroredExpType) {
erroredExpType = true;
}
continue;
}
BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType);
if (listCompatibleMemType == symTable.semanticError) {
continue;
}
dlog.resetErrorCount();
BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor);
// A member counts as compatible only if the check succeeded with zero new errors
// and it is not a duplicate of an already-found compatible type.
if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
isUniqueType(compatibleTypes, memCompatibiltyType)) {
compatibleTypes.add(memCompatibiltyType);
}
}
this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
this.dlog.setErrorCount(errorCount);
if (!prevNonErrorLoggingCheck) {
this.dlog.unmute();
}
if (compatibleTypes.isEmpty()) {
// No member matched: re-check a clone (to avoid mutating the original node) so a
// proper incompatibility error can be reported against the inferred tuple type.
BLangListConstructorExpr exprToLog = listConstructor;
if (this.nonErrorLoggingCheck) {
listConstructor.cloneAttempt++;
exprToLog = nodeCloner.cloneNode(listConstructor);
}
BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType);
if (!erroredExpType && inferredTupleType != symTable.semanticError) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType);
}
return symTable.semanticError;
} else if (compatibleTypes.size() != 1) {
// More than one member matched: ambiguous.
dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
expType);
return symTable.semanticError;
}
// Exactly one match: re-run with diagnostics enabled against that member.
return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor);
}
if (tag == TypeTags.TYPEREFDESC) {
return checkListConstructorCompatibility(Types.getReferredType(bType), listConstructor);
}
if (tag == TypeTags.INTERSECTION) {
return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor);
}
BType possibleType = getListConstructorCompatibleNonUnionType(bType);
switch (possibleType.tag) {
case TypeTags.ARRAY:
return checkArrayType(listConstructor, (BArrayType) possibleType);
case TypeTags.TUPLE:
return checkTupleType(listConstructor, (BTupleType) possibleType);
case TypeTags.READONLY:
return checkReadOnlyListType(listConstructor);
case TypeTags.TYPEDESC:
// typedesc expected: collect each member's resolved type; the constructor denotes
// a typedesc of a single type or of a tuple of the member types.
listConstructor.isTypedescExpr = true;
List<BType> actualTypes = new ArrayList<>();
for (BLangExpression expr : listConstructor.exprs) {
if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
BType spreadOpExprType = checkExpr(spreadOpExpr, env, symTable.noType);
actualTypes.addAll(getListConstSpreadOpMemberTypes(expr.pos, spreadOpExprType));
continue;
}
BType resultType = checkExpr(expr, env, symTable.noType);
if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
} else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
} else {
actualTypes.add(resultType);
}
}
if (actualTypes.size() == 1) {
listConstructor.typedescType = actualTypes.get(0);
} else {
listConstructor.typedescType = new BTupleType(actualTypes);
}
return new BTypedescType(listConstructor.typedescType, null);
}
// Incompatible expected type: log against the inferred tuple type (on a clone when muted).
BLangListConstructorExpr exprToLog = listConstructor;
if (this.nonErrorLoggingCheck) {
listConstructor.cloneAttempt++;
exprToLog = nodeCloner.cloneNode(listConstructor);
}
if (bType == symTable.semanticError) {
getInferredTupleType(exprToLog, symTable.semanticError);
} else {
dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType,
getInferredTupleType(exprToLog, symTable.noType));
}
return symTable.semanticError;
}
/**
 * Expands a list-constructor spread operand into the member types it contributes: the
 * tuple member types for a fixed-length tuple, or {@code size} copies of the element type
 * for a closed array. Any other operand type cannot be expanded; an error is logged and a
 * single semantic-error type is returned.
 *
 * @param spreadMemberPos  diagnostic location of the spread operand
 * @param spreadOpExprType the (possibly referred) type of the spread expression
 * @return the contributed member types
 */
private List<BType> getListConstSpreadOpMemberTypes(Location spreadMemberPos, BType spreadOpExprType) {
    BType referredType = Types.getReferredType(spreadOpExprType);
    List<BType> memberTypes = new ArrayList<>();
    if (referredType.tag == TypeTags.TUPLE && isFixedLengthTuple((BTupleType) referredType)) {
        memberTypes.addAll(((BTupleType) referredType).tupleTypes);
        return memberTypes;
    }
    if (referredType.tag == TypeTags.ARRAY && ((BArrayType) referredType).state == BArrayState.CLOSED) {
        BArrayType arrayType = (BArrayType) referredType;
        // A closed array of size n contributes n members of the element type.
        for (int i = 0; i < arrayType.size; i++) {
            memberTypes.add(arrayType.eType);
        }
        return memberTypes;
    }
    // Open arrays / non-fixed tuples / other types have no statically known member count.
    dlog.error(spreadMemberPos, DiagnosticErrorCode.CANNOT_INFER_TYPE_FROM_SPREAD_OP);
    memberTypes.add(symTable.semanticError);
    return memberTypes;
}
/**
 * Maps a non-union expected type to the concrete list-shaped type a list constructor can
 * be checked against: array/tuple/readonly/typedesc pass through; json/anydata/any map to
 * the corresponding array type (made immutable when the expected type is readonly);
 * intersections and type refs are unwrapped. Anything else yields the semantic-error type.
 *
 * @param type the non-union expected type
 * @return the list-compatible type, or the semantic-error type
 */
private BType getListConstructorCompatibleNonUnionType(BType type) {
switch (type.tag) {
case TypeTags.ARRAY:
case TypeTags.TUPLE:
case TypeTags.READONLY:
case TypeTags.TYPEDESC:
return type;
case TypeTags.JSON:
// Readonly json expects an immutable json[] clone.
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType,
env, symTable, anonymousModelHelper, names);
case TypeTags.ANYDATA:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType,
env, symTable, anonymousModelHelper, names);
case TypeTags.ANY:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env,
symTable, anonymousModelHelper, names);
case TypeTags.INTERSECTION:
return ((BIntersectionType) type).effectiveType;
case TypeTags.TYPEREFDESC:
return Types.getReferredType(type);
}
return symTable.semanticError;
}
/**
 * Checks a list constructor against an expected tuple type.
 * <p>
 * Phase 1 (fixed-length tuples only): computes the effective member count of the
 * constructor — counting spread operands by their statically known lengths — and verifies
 * it against the tuple's member count (missing trailing members must have filler values).
 * Phase 2: walks the constructor members in order, matching each against the next
 * non-rest member type and then the rest type, expanding spread operands member-by-member.
 *
 * @param listConstructor the list constructor expression
 * @param tupleType       the expected tuple type
 * @return {@code tupleType} on success, otherwise the semantic-error type
 */
private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) {
List<BLangExpression> exprs = listConstructor.exprs;
List<BType> memberTypes = tupleType.tupleTypes;
int memberTypeSize = memberTypes.size();
BType restType = tupleType.restType;
if (isFixedLengthTuple(tupleType)) {
// Phase 1: length check. Spread operands must themselves be fixed-length.
int listExprSize = 0;
for (BLangExpression expr : exprs) {
if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
listExprSize++;
continue;
}
BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
BType spreadOpType = checkExpr(spreadOpExpr, this.env);
spreadOpType = Types.getReferredType(spreadOpType);
switch (spreadOpType.tag) {
case TypeTags.ARRAY:
int arraySize = ((BArrayType) spreadOpType).size;
if (arraySize >= 0) {
listExprSize += arraySize;
continue;
}
dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
return symTable.semanticError;
case TypeTags.TUPLE:
BTupleType tType = (BTupleType) spreadOpType;
if (isFixedLengthTuple(tType)) {
listExprSize += tType.tupleTypes.size();
continue;
}
dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
return symTable.semanticError;
}
}
if (listExprSize < memberTypeSize) {
// Missing trailing members are allowed only if each has a filler value.
for (int i = listExprSize; i < memberTypeSize; i++) {
if (!types.hasFillerValue(memberTypes.get(i))) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE,
memberTypes.get(i));
return symTable.semanticError;
}
}
} else if (listExprSize > memberTypeSize) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.TUPLE_AND_EXPRESSION_SIZE_DOES_NOT_MATCH);
return symTable.semanticError;
}
}
// Phase 2: per-member type check. nonRestTypeIndex tracks the next unmatched tuple member.
boolean errored = false;
int nonRestTypeIndex = 0;
for (BLangExpression expr : exprs) {
int remainNonRestCount = memberTypeSize - nonRestTypeIndex;
if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
if (remainNonRestCount > 0) {
errored |= exprIncompatible(memberTypes.get(nonRestTypeIndex), expr);
nonRestTypeIndex++;
} else {
// All fixed members consumed: check against the rest type.
errored |= exprIncompatible(restType, expr);
}
continue;
}
BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
BType spreadOpType = checkExpr(spreadOpExpr, this.env);
BType spreadOpReferredType = Types.getReferredType(spreadOpType);
switch (spreadOpReferredType.tag) {
case TypeTags.ARRAY:
BArrayType spreadOpArray = (BArrayType) spreadOpReferredType;
if (spreadOpArray.state == BArrayState.CLOSED) {
// Closed array: element type must match each remaining fixed member it fills,
// and the rest type for any overflow.
for (int i = 0; i < spreadOpArray.size && nonRestTypeIndex < memberTypeSize;
i++, nonRestTypeIndex++) {
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType,
memberTypes.get(nonRestTypeIndex))) {
return symTable.semanticError;
}
}
if (remainNonRestCount < spreadOpArray.size) {
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType, restType)) {
return symTable.semanticError;
}
}
continue;
}
// Open array cannot fill fixed members (unknown length).
if (remainNonRestCount > 0) {
dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_MEMBER_EXPECTED,
memberTypes.get(nonRestTypeIndex));
return symTable.semanticError;
}
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType, restType)) {
return symTable.semanticError;
}
break;
case TypeTags.TUPLE:
BTupleType spreadOpTuple = (BTupleType) spreadOpReferredType;
int spreadOpMemberTypeSize = spreadOpTuple.tupleTypes.size();
if (isFixedLengthTuple(spreadOpTuple)) {
// Fixed tuple: match members pairwise, then overflow against the rest type.
for (int i = 0; i < spreadOpMemberTypeSize && nonRestTypeIndex < memberTypeSize;
i++, nonRestTypeIndex++) {
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i),
memberTypes.get(nonRestTypeIndex))) {
return symTable.semanticError;
}
}
for (int i = remainNonRestCount; i < spreadOpMemberTypeSize; i++) {
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i), restType)) {
return symTable.semanticError;
}
}
continue;
}
// Non-fixed tuple must at least cover all remaining fixed members with its
// fixed part; its fixed overflow and rest type are checked against restType.
if (spreadOpMemberTypeSize < remainNonRestCount) {
dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_MEMBER_EXPECTED,
memberTypes.get(nonRestTypeIndex + spreadOpMemberTypeSize));
return symTable.semanticError;
}
for (int i = 0; nonRestTypeIndex < memberTypeSize; i++, nonRestTypeIndex++) {
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i),
memberTypes.get(nonRestTypeIndex))) {
return symTable.semanticError;
}
}
for (int i = nonRestTypeIndex; i < spreadOpMemberTypeSize; i++) {
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i), restType)) {
return symTable.semanticError;
}
}
if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.restType, restType)) {
return symTable.semanticError;
}
break;
default:
dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_LIST_SPREAD_OP, spreadOpType);
return symTable.semanticError;
}
}
// Unfilled trailing fixed members must have filler values.
while (nonRestTypeIndex < memberTypeSize) {
if (!types.hasFillerValue(memberTypes.get(nonRestTypeIndex))) {
dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE,
memberTypes.get(nonRestTypeIndex));
return symTable.semanticError;
}
nonRestTypeIndex++;
}
return errored ? symTable.semanticError : tupleType;
}
/**
 * Returns {@code true} when a tuple type has a statically fixed length: it either has no
 * rest type, or its rest type can never hold a value (never, or a structure with a
 * required never member).
 *
 * @param bTupleType the tuple type to inspect
 * @return whether the tuple's length is fixed
 */
private boolean isFixedLengthTuple(BTupleType bTupleType) {
    BType rest = bTupleType.restType;
    if (rest == null) {
        return true;
    }
    return types.isNeverTypeOrStructureTypeWithARequiredNeverMember(rest);
}
/**
 * Checks a list constructor against the {@code readonly} type. In normal (logging) mode
 * the tuple type is inferred and checked against readonly; in silent probe mode each
 * member expression (unwrapping spread operands) is checked for readonly compatibility.
 *
 * @param listConstructor the list constructor expression
 * @return the resolved type, or the semantic-error type
 */
private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) {
    if (this.nonErrorLoggingCheck) {
        // Silent probe: validate each member individually without inferring a tuple type.
        for (BLangExpression memberExpr : listConstructor.exprs) {
            BLangExpression exprToCheck = memberExpr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP
                    ? ((BLangListConstructorSpreadOpExpr) memberExpr).expr
                    : memberExpr;
            if (exprIncompatible(symTable.readonlyType, exprToCheck)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }
    BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType);
    return inferredType == symTable.semanticError
            ? symTable.semanticError
            : types.checkType(listConstructor, inferredType, symTable.readonlyType);
}
/**
 * Type-checks an expression against an expected type and reports whether it is
 * incompatible. Already-checked expressions are judged by their recorded type; in silent
 * probe mode a clone is checked so the original node is not mutated.
 *
 * @param eType the expected type
 * @param expr  the expression to check
 * @return {@code true} if the expression is incompatible with {@code eType}
 */
private boolean exprIncompatible(BType eType, BLangExpression expr) {
    if (expr.typeChecked) {
        return expr.getBType() == symTable.semanticError;
    }
    BLangExpression exprToCheck;
    if (this.nonErrorLoggingCheck) {
        // Probe mode: check a clone so repeated probes don't corrupt the node.
        expr.cloneAttempt++;
        exprToCheck = nodeCloner.cloneNode(expr);
    } else {
        exprToCheck = expr;
    }
    return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError;
}
/**
 * Type-checks each expression in the list with no expected type.
 *
 * @param exprs expressions to check
 * @param env   environment to check them in
 * @return the resolved type of each expression (spread operands expanded)
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) {
return checkExprList(exprs, env, symTable.noType);
}
/**
 * Type-checks each expression in the list against the given expected type, temporarily
 * swapping in the provided environment and expected type and restoring the previous ones
 * afterwards. Spread operands are expanded into their contributed member types.
 *
 * @param exprs   expressions to check
 * @param env     environment to check them in
 * @param expType expected type for each expression
 * @return the resolved types, in order (spread operands may contribute several)
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
List<BType> types = new ArrayList<>();
// Save and swap the checker's current env/expType; restored before returning.
SymbolEnv prevEnv = this.env;
BType preExpType = this.expType;
this.env = env;
this.expType = expType;
for (BLangExpression e : exprs) {
if (e.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) e).expr;
BType spreadOpExprType = checkExpr(spreadOpExpr, env, expType);
types.addAll(getListConstSpreadOpMemberTypes(e.pos, spreadOpExprType));
continue;
}
// checkExpr sets this.resultType as a side effect; collect it per expression.
checkExpr(e, this.env, expType);
types.add(resultType);
}
this.env = prevEnv;
this.expType = preExpType;
return types;
}
/**
 * Infers a tuple type for a list constructor from its member expression types. If any
 * member fails to type-check, the semantic-error type is returned. When the expected type
 * is {@code readonly}, the inferred tuple is flagged readonly.
 *
 * @param listConstructor the list constructor expression
 * @param expType         the expected type driving member checking
 * @return the inferred tuple type, or the semantic-error type
 */
private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) {
    List<BType> memberTypes = checkExprList(listConstructor.exprs, env, expType);
    for (BType memberType : memberTypes) {
        if (memberType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }
    BTupleType inferredTuple = new BTupleType(memberTypes);
    if (expType.tag == TypeTags.READONLY) {
        inferredTuple.flags |= Flags.READONLY;
    }
    return inferredTuple;
}
/**
 * Type-checks a mapping (record literal) constructor. With no expected type (or readonly)
 * an anonymous record type is inferred first; object expected types are rejected outright.
 * The result is the effective mapping type after compatibility checking.
 *
 * @param recordLiteral the mapping constructor expression
 */
public void visit(BLangRecordLiteral recordLiteral) {
int expTypeTag = Types.getReferredType(expType).tag;
if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.READONLY) {
// No usable expected type: infer an anonymous record type from the literal itself.
expType = defineInferredRecordType(recordLiteral, expType);
} else if (expTypeTag == TypeTags.OBJECT) {
dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType);
resultType = symTable.semanticError;
return;
}
resultType = getEffectiveMappingType(recordLiteral,
checkMappingConstructorCompatibility(expType, recordLiteral));
}
/**
 * Narrows the applicable mapping type when the constructor marks individual fields
 * {@code readonly}. Builds a fresh anonymous record type in which those fields are
 * required+readonly, copying the remaining fields (record case) or using the map
 * constraint as the rest type (map case). Returns the applicable type unchanged when no
 * field-level readonly narrowing is needed.
 *
 * @param recordLiteral         the mapping constructor expression
 * @param applicableMappingType the type the constructor was found compatible with
 * @return the effective (possibly narrowed) mapping type
 */
private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) {
BType refType = Types.getReferredType(applicableMappingType);
// Nothing to narrow on error, or when the whole record type is already readonly.
if (applicableMappingType == symTable.semanticError ||
(refType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags,
Flags.READONLY))) {
return applicableMappingType;
}
// Collect fields the constructor marks `readonly` that are not already readonly in the
// applicable type. Spread fields cannot carry the readonly marker.
Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>();
LinkedHashMap<String, BField> applicableTypeFields =
refType.tag == TypeTags.RECORD ? ((BRecordType) refType).fields :
new LinkedHashMap<>();
for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
continue;
}
String name;
if (field.isKeyValueField()) {
BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field;
if (!keyValueField.readonly) {
continue;
}
BLangExpression keyExpr = keyValueField.key.expr;
if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
name = ((BLangSimpleVarRef) keyExpr).variableName.value;
} else {
name = (String) ((BLangLiteral) keyExpr).value;
}
} else {
BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
if (!varNameField.readonly) {
continue;
}
name = varNameField.variableName.value;
}
if (applicableTypeFields.containsKey(name) &&
Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) {
continue;
}
readOnlyFields.put(name, field);
}
if (readOnlyFields.isEmpty()) {
return applicableMappingType;
}
// Build a fresh anonymous record type: readonly-marked fields become required+readonly,
// typed from their checked value expressions.
PackageID pkgID = env.enclPkg.symbol.pkgID;
Location pos = recordLiteral.pos;
BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);
LinkedHashMap<String, BField> newFields = new LinkedHashMap<>();
for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) {
RecordLiteralNode.RecordField field = readOnlyEntry.getValue();
String key = readOnlyEntry.getKey();
Name fieldName = names.fromString(key);
BType readOnlyFieldType;
if (field.isKeyValueField()) {
readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType();
} else {
// Has to be a varname field since spread fields were skipped above.
readOnlyFieldType = ((BLangRecordVarNameField) field).getBType();
}
BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
add(Flag.REQUIRED);
add(Flag.READONLY);
}}), fieldName, pkgID, readOnlyFieldType, recordSymbol,
((BLangNode) field).pos, VIRTUAL);
newFields.put(key, new BField(fieldName, null, fieldSymbol));
recordSymbol.scope.define(fieldName, fieldSymbol);
}
BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags);
if (refType.tag == TypeTags.MAP) {
// map<T> becomes an open record with rest type T.
recordType.sealed = false;
recordType.restFieldType = ((BMapType) refType).constraint;
} else {
// Record case: copy over the fields not overridden by readonly narrowing, and track
// whether every field ends up readonly (then the whole record can be readonly).
BRecordType applicableRecordType = (BRecordType) refType;
boolean allReadOnlyFields = true;
for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) {
String fieldName = origEntry.getKey();
BField field = origEntry.getValue();
if (readOnlyFields.containsKey(fieldName)) {
continue;
}
BVarSymbol origFieldSymbol = field.symbol;
long origFieldFlags = origFieldSymbol.flags;
if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) {
allReadOnlyFields = false;
}
BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID,
origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL);
newFields.put(fieldName, new BField(field.name, null, fieldSymbol));
recordSymbol.scope.define(field.name, fieldSymbol);
}
recordType.sealed = applicableRecordType.sealed;
recordType.restFieldType = applicableRecordType.restFieldType;
if (recordType.sealed && allReadOnlyFields) {
recordType.flags |= Flags.READONLY;
recordType.tsymbol.flags |= Flags.READONLY;
}
}
recordType.fields = newFields;
recordSymbol.type = recordType;
recordType.tsymbol = recordSymbol;
// Materialize the anonymous record type as a type definition with an init function.
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
pos);
recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
names, symTable);
TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, env);
if (refType.tag == TypeTags.RECORD) {
// Keep a type reference back to the original record type on the new type node.
BRecordType applicableRecordType = (BRecordType) refType;
BTypeSymbol applicableRecordTypeSymbol = applicableRecordType.tsymbol;
BLangUserDefinedType origTypeRef = new BLangUserDefinedType(
ASTBuilderUtil.createIdentifier(
pos,
TypeDefBuilderHelper.getPackageAlias(env, pos.lineRange().filePath(),
applicableRecordTypeSymbol.pkgID)),
ASTBuilderUtil.createIdentifier(pos, applicableRecordTypeSymbol.name.value));
origTypeRef.pos = pos;
origTypeRef.setBType(applicableRecordType);
recordTypeNode.typeRefs.add(origTypeRef);
} else if (refType.tag == TypeTags.MAP) {
recordLiteral.expectedType = applicableMappingType;
}
return recordType;
}
/**
 * Checks a mapping constructor against an expected type. Unions are resolved by silently
 * trying each member (diagnostics muted, error count restored) and requiring exactly one
 * compatible member; type refs and intersections are unwrapped; concrete map/record/
 * readonly types are validated field-by-field.
 *
 * @param bType              the expected type
 * @param mappingConstructor the mapping constructor expression
 * @return the resolved compatible type, or the semantic-error type
 */
private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) {
int tag = bType.tag;
if (tag == TypeTags.UNION) {
// Probe each union member without emitting diagnostics; restore logging state after.
boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
this.nonErrorLoggingCheck = true;
int errorCount = this.dlog.errorCount();
this.dlog.mute();
List<BType> compatibleTypes = new ArrayList<>();
boolean erroredExpType = false;
for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
if (memberType == symTable.semanticError) {
if (!erroredExpType) {
erroredExpType = true;
}
continue;
}
BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType);
if (listCompatibleMemType == symTable.semanticError) {
continue;
}
dlog.resetErrorCount();
BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType,
mappingConstructor);
// A member counts only if the check succeeded with zero new errors and is unique.
if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
isUniqueType(compatibleTypes, memCompatibiltyType)) {
compatibleTypes.add(memCompatibiltyType);
}
}
this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
dlog.setErrorCount(errorCount);
if (!prevNonErrorLoggingCheck) {
this.dlog.unmute();
}
if (compatibleTypes.isEmpty()) {
if (!erroredExpType) {
reportIncompatibleMappingConstructorError(mappingConstructor, bType);
}
// Still validate fields so nested expressions get checked (against semanticError).
validateSpecifiedFields(mappingConstructor, symTable.semanticError);
return symTable.semanticError;
} else if (compatibleTypes.size() != 1) {
dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType);
validateSpecifiedFields(mappingConstructor, symTable.semanticError);
return symTable.semanticError;
}
// Exactly one match: re-run with diagnostics enabled against that member.
return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor);
}
if (tag == TypeTags.TYPEREFDESC) {
// Preserve the type reference in the result unless it wraps a union/intersection.
BType refType = Types.getReferredType(bType);
BType compatibleType = checkMappingConstructorCompatibility(refType, mappingConstructor);
return (refType.tag != TypeTags.UNION && refType.tag != TypeTags.INTERSECTION) ? bType : compatibleType;
}
if (tag == TypeTags.INTERSECTION) {
return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor);
}
BType possibleType = getMappingConstructorCompatibleNonUnionType(bType);
switch (possibleType.tag) {
case TypeTags.MAP:
return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType :
symTable.semanticError;
case TypeTags.RECORD:
// Records additionally require all required fields to be present.
boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType);
boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType,
mappingConstructor.fields,
mappingConstructor.pos);
return isSpecifiedFieldsValid && hasAllRequiredFields ? possibleType : symTable.semanticError;
case TypeTags.READONLY:
return checkReadOnlyMappingType(mappingConstructor);
}
reportIncompatibleMappingConstructorError(mappingConstructor, bType);
validateSpecifiedFields(mappingConstructor, symTable.semanticError);
return symTable.semanticError;
}
/**
 * Checks a mapping constructor against the {@code readonly} type. In normal (logging)
 * mode an anonymous record type is inferred and checked; in silent probe mode each field's
 * contributing expression is checked for readonly compatibility directly.
 *
 * @param mappingConstructor the mapping constructor expression
 * @return the resolved type, or the semantic-error type
 */
private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) {
    if (!this.nonErrorLoggingCheck) {
        BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType);
        return inferredType == symTable.semanticError
                ? symTable.semanticError
                : checkMappingConstructorCompatibility(inferredType, mappingConstructor);
    }
    // Silent probe: pick the expression each field contributes and check it individually.
    for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
        BLangExpression exprToCheck;
        if (field.isKeyValueField()) {
            exprToCheck = ((BLangRecordKeyValueField) field).valueExpr;
        } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
        } else {
            exprToCheck = (BLangRecordVarNameField) field;
        }
        if (exprIncompatible(symTable.readonlyType, exprToCheck)) {
            return symTable.semanticError;
        }
    }
    return symTable.readonlyType;
}
/**
 * Maps a non-union expected type to the concrete mapping-shaped type a mapping constructor
 * can be checked against: map/record/readonly pass through; json/anydata/any map to the
 * corresponding map type (made immutable when the expected type is readonly);
 * intersections and type refs are unwrapped. Anything else yields the semantic-error type.
 *
 * @param type the non-union expected type
 * @return the mapping-compatible type, or the semantic-error type
 */
private BType getMappingConstructorCompatibleNonUnionType(BType type) {
switch (type.tag) {
case TypeTags.MAP:
case TypeTags.RECORD:
case TypeTags.READONLY:
return type;
case TypeTags.JSON:
// Readonly json expects an immutable map<json> clone.
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env,
symTable, anonymousModelHelper, names);
case TypeTags.ANYDATA:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType,
env, symTable, anonymousModelHelper, names);
case TypeTags.ANY:
return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapType :
ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env,
symTable, anonymousModelHelper, names);
case TypeTags.INTERSECTION:
return ((BIntersectionType) type).effectiveType;
case TypeTags.TYPEREFDESC:
return Types.getReferredType(type);
}
return symTable.semanticError;
}
/**
 * Returns {@code true} when a mapping constructor can in principle be used with the given
 * type, i.e. the type (after resolving type references) is a record or a map.
 *
 * @param type the candidate type
 * @return whether the referred type is a record or map type
 */
private boolean isMappingConstructorCompatibleType(BType type) {
    // Resolve the type reference once instead of twice.
    int tag = Types.getReferredType(type).tag;
    return tag == TypeTags.RECORD || tag == TypeTags.MAP;
}
/**
 * Reports the most helpful error for a mapping constructor that matched no expected type.
 * For a two-member union of a record and nil, re-validates against the record so field
 * errors are precise; otherwise reports either a generic incompatibility (when some union
 * member is mapping-compatible) or "no compatible mapping type found".
 *
 * @param mappingConstructorExpr the failing mapping constructor
 * @param expType                the expected type it failed against
 */
private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) {
if (expType == symTable.semanticError) {
return;
}
if (expType.tag != TypeTags.UNION) {
dlog.error(mappingConstructorExpr.pos,
DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
return;
}
BUnionType unionType = (BUnionType) expType;
BType[] memberTypes = types.getAllTypes(unionType, true).toArray(new BType[0]);
// Special-case `T?` where T is a record: validate against T directly for better errors.
if (memberTypes.length == 2) {
BRecordType recType = null;
if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) {
recType = (BRecordType) memberTypes[0];
} else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) {
recType = (BRecordType) memberTypes[1];
}
if (recType != null) {
validateSpecifiedFields(mappingConstructorExpr, recType);
validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos);
return;
}
}
for (BType bType : memberTypes) {
if (isMappingConstructorCompatibleType(bType)) {
dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR,
unionType);
return;
}
}
dlog.error(mappingConstructorExpr.pos,
DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType);
}
/**
 * Checks every field of a mapping constructor against the possible mapping type. All
 * fields are checked even after a failure so each gets its own diagnostic.
 *
 * @param mappingConstructor the mapping constructor expression
 * @param possibleType       the candidate mapping type (may be the semantic-error type)
 * @return {@code true} when every field type-checked successfully
 */
private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) {
    // Hoisted out of the loop: the referred type is invariant across fields.
    BType referredPossibleType = Types.getReferredType(possibleType);
    boolean isFieldsValid = true;
    for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
        BType checkedType = checkMappingField(field, referredPossibleType);
        if (checkedType == symTable.semanticError) {
            isFieldsValid = false;
        }
    }
    return isFieldsValid;
}
/**
 * Verifies that every required field of the record type is supplied by the mapping
 * constructor. Required fields whose type is `never` (or a structure with a required `never`
 * member) are exempt since they can never be given a value. Logs one error per missing field.
 *
 * @param type            the target record type
 * @param specifiedFields the fields supplied in the mapping constructor
 * @param pos             position used for the missing-field diagnostics
 * @return true only if no required field is missing
 */
private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields,
                                       Location pos) {
    HashSet<String> specFieldNames = getFieldNames(specifiedFields);
    boolean hasAllRequiredFields = true;
    for (BField field : type.fields.values()) {
        String fieldName = field.name.value;
        if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)
                && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
            // Unconditional assignment; the previous `if (hasAllRequiredFields)` guard was redundant.
            hasAllRequiredFields = false;
        }
    }
    return hasAllRequiredFields;
}
/**
 * Collects the statically-known field names supplied by a mapping constructor: key-value
 * fields with non-computed keys, var-name fields, and the required field names contributed
 * by spread-operator fields.
 *
 * @param specifiedFields the fields of the mapping constructor
 * @return the set of resolvable field names
 */
private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) {
    HashSet<String> collectedNames = new HashSet<>();
    for (RecordLiteralNode.RecordField specifiedField : specifiedFields) {
        if (specifiedField.isKeyValueField()) {
            String keyName = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField);
            // Computed keys (and non-literal keys) yield null and contribute no name.
            if (keyName != null) {
                collectedNames.add(keyName);
            }
        } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            collectedNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField));
        } else {
            // Spread-operator field: pull in the required field names of the spread type.
            collectedNames.addAll(getSpreadOpFieldRequiredFieldNames(
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField));
        }
    }
    return collectedNames;
}
/**
 * Resolves the statically-known name of a key-value field. Computed keys and key
 * expressions that are neither simple variable references nor literals resolve to null.
 *
 * @param field the key-value field
 * @return the field name, or null if it cannot be determined statically
 */
private String getKeyValueFieldName(BLangRecordKeyValueField field) {
    BLangRecordKey key = field.key;
    if (key.computedKey) {
        return null;
    }
    BLangExpression keyExpr = key.expr;
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) keyExpr).variableName.value;
    }
    if (keyKind == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) keyExpr).value;
    }
    return null;
}
/**
 * Returns the field name of a var-name field (e.g. `{name}`), which is simply the
 * variable's name.
 *
 * @param field the var-name field
 * @return the variable name used as the field name
 */
private String getVarNameFieldName(BLangRecordVarNameField field) {
    return field.variableName.value;
}
/**
 * Returns the names of the non-optional (required) fields contributed by a spread-operator
 * field. Type-checks the spread expression as a side effect; non-record spread types
 * contribute no names.
 *
 * @param field the spread-operator field
 * @return the required field names of the spread record type, or an empty list
 */
private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) {
    BType spreadType = Types.getReferredType(checkExpr(field.expr, env));
    if (spreadType.tag != TypeTags.RECORD) {
        return Collections.emptyList();
    }
    List<String> requiredFieldNames = new ArrayList<>();
    for (BField recordField : ((BRecordType) spreadType).getFields().values()) {
        boolean optional = Symbols.isOptional(recordField.symbol);
        if (!optional) {
            requiredFieldNames.add(recordField.name.value);
        }
    }
    return requiredFieldNames;
}
/**
 * Type-checks a worker `flush` expression. If a worker name is given, verifies the worker
 * exists and binds its symbol. The expression's static type is `error?`.
 */
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    if (workerFlushExpr.workerIdentifier != null) {
        String workerName = workerFlushExpr.workerIdentifier.getValue();
        if (!this.workerExists(this.env, workerName)) {
            this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
        } else {
            // Bind the worker's symbol when it resolves in the main symbol space.
            BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName));
            if (symbol != symTable.notFoundSymbol) {
                workerFlushExpr.workerSymbol = symbol;
            }
        }
    }
    // `flush` evaluates to (error | ()).
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(workerFlushExpr, actualType, expType);
}
/**
 * Type-checks a synchronous send (`->>`) to a worker: resolves the receiving worker's symbol,
 * checks that the sent value is cloneable, and verifies the worker exists. The expression's
 * result type follows the expected type (nil when no expectation).
 */
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier));
    if (symTable.notFoundSymbol.equals(symbol)) {
        syncSendExpr.workerType = symTable.semanticError;
    } else {
        syncSendExpr.workerType = symbol.type;
        syncSendExpr.workerSymbol = symbol;
    }
    // Retain the environment for later phases (e.g. code analysis / desugaring).
    syncSendExpr.env = this.env;
    checkExpr(syncSendExpr.expr, this.env);
    // Only values assignable to lang.value:Cloneable may cross worker boundaries.
    if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND,
                syncSendExpr.expr.getBType());
    }
    String workerName = syncSendExpr.workerIdentifier.getValue();
    if (!this.workerExists(this.env, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
    }
    syncSendExpr.expectedType = expType;
    // Without an expected type the sync-send evaluates to nil; otherwise it takes the expectation.
    resultType = expType == symTable.noType ? symTable.nilType : expType;
}
/**
 * Type-checks a worker receive (`<-`): resolves the sending worker's symbol and takes the
 * expected type as the receive's type. A receive used where no type is expected is invalid.
 */
@Override
public void visit(BLangWorkerReceive workerReceiveExpr) {
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier));
    workerReceiveExpr.env = this.env;
    if (symTable.notFoundSymbol.equals(symbol)) {
        workerReceiveExpr.workerType = symTable.semanticError;
    } else {
        workerReceiveExpr.workerType = symbol.type;
        workerReceiveExpr.workerSymbol = symbol;
    }
    // A receive cannot infer its own type; it must appear in a typed context.
    if (symTable.noType == this.expType) {
        this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
    }
    workerReceiveExpr.setBType(this.expType);
    resultType = this.expType;
}
/**
 * Reports whether a worker with the given name is visible in the given environment.
 * The default worker always exists; any other name must resolve to a worker-derived
 * future-typed symbol.
 *
 * @param env        the environment to resolve in
 * @param workerName the worker name to look up
 * @return true if the worker exists
 */
private boolean workerExists(SymbolEnv env, String workerName) {
    if (workerName.equals(DEFAULT_WORKER_NAME)) {
        return true;
    }
    BSymbol resolved = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName));
    if (resolved == this.symTable.notFoundSymbol) {
        return false;
    }
    // A worker is represented by a future-typed symbol flagged as worker-derived.
    return resolved.type.tag == TypeTags.FUTURE && ((BFutureType) resolved.type).workerDerivative;
}
/**
 * Type-checks a constant reference: resolves the constant's symbol from its package and
 * name, attaches an implicit cast toward the expected type if needed, and yields the
 * reference's own type.
 */
@Override
public void visit(BLangConstRef constRef) {
    constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env,
            names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName));
    types.setImplicitCastExpr(constRef, constRef.getBType(), expType);
    resultType = constRef.getBType();
}
/**
 * Type-checks a simple variable reference. Handles the `_` (ignore) name, package-qualified
 * references (including XMLNS prefixes), variables, type definitions, and constants. The
 * branch order matters: XMLNS is checked before the general package lookup, and constants
 * may take their broad type or narrow literal type depending on the expected type.
 */
public void visit(BLangSimpleVarRef varRefExpr) {
    BType actualType = symTable.semanticError;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    // `_` is a write-only sink: synthesize a virtual symbol of type any and return.
    if (varName == Names.IGNORE) {
        varRefExpr.setBType(this.symTable.anyType);
        varRefExpr.symbol = new BVarSymbol(0, true, varName,
                names.originalNameFromIdNode(varRefExpr.variableName),
                env.enclPkg.symbol.pkgID, varRefExpr.getBType(), env.scope.owner,
                varRefExpr.pos, VIRTUAL);
        resultType = varRefExpr.getBType();
        return;
    }
    Name compUnitName = getCurrentCompUnit(varRefExpr);
    varRefExpr.pkgSymbol =
            symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName);
    if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) {
        varRefExpr.symbol = symTable.notFoundSymbol;
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias);
    }
    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // A namespace prefix used as a value evaluates to its URI string.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env,
                names.fromIdNode(varRefExpr.pkgAlias), varName);
        // Fall back to attached-function lookup when inside a type's scope.
        if (symbol == symTable.notFoundSymbol && env.enclType != null) {
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                    env.enclType.getBType().tsymbol);
        }
        if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSelfReferences(varRefExpr.pos, env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            // Variables referenced across function boundaries must be captured as closures.
            markAndRegisterClosureVariable(symbol, varRefExpr.pos, env);
        } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) {
            // A type name used as a value is a typedesc.
            actualType = symbol.type.tag == TypeTags.TYPEDESC ? symbol.type : new BTypedescType(symbol.type, null);
            varRefExpr.symbol = symbol;
        } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) symbol;
            varRefExpr.symbol = constSymbol;
            BType symbolType = symbol.type;
            BType expectedType = Types.getReferredType(expType);
            // When a finite type is expected (directly or via a union member the constant fits),
            // use the constant's declared type; otherwise use its narrower literal type.
            if (symbolType != symTable.noType && expectedType.tag == TypeTags.FINITE ||
                    (expectedType.tag == TypeTags.UNION && types.getAllTypes(expectedType, true).stream()
                            .anyMatch(memType -> memType.tag == TypeTags.FINITE &&
                                    types.isAssignable(symbolType, memType)))) {
                actualType = symbolType;
            } else {
                actualType = constSymbol.literalType;
            }
            // Constants are read-only; any assignment context is an error.
            if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) {
                actualType = symTable.semanticError;
                dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE);
            }
        } else {
            varRefExpr.symbol = symbol;
            logUndefinedSymbolError(varRefExpr.pos, varName.value);
        }
    }
    // NOTE(review): this early return leaves resultType unset for the closed-array case —
    // presumably callers treat the logged error as terminal; confirm before changing.
    if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
        return;
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Type-checks a record binding pattern used as a variable reference (destructuring
 * assignment), e.g. {@code {a, b, ...rest} = expr}. Builds an anonymous record type
 * describing the left-hand side and yields it as the result type; the rest parameter's
 * type determines the record's rest-field type.
 */
@Override
public void visit(BLangRecordVarRef varRefExpr) {
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    // Synthesize an anonymous record symbol to model the shape of the binding pattern.
    String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID);
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName),
                                                                env.enclPkg.symbol.pkgID, null, env.scope.owner,
                                                                varRefExpr.pos, SOURCE);
    symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env);
    boolean unresolvedReference = false;
    for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
        BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference;
        bLangVarReference.isLValue = true;
        checkExpr(recordRefField.variableReference, env);
        if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol ||
                !isValidVariableReference(recordRefField.variableReference)) {
            unresolvedReference = true;
            continue;
        }
        BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol;
        BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
                                  new BVarSymbol(0, names.fromIdNode(recordRefField.variableName),
                                                 names.originalNameFromIdNode(recordRefField.variableName),
                                                 env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol,
                                                 varRefExpr.pos, SOURCE));
        fields.put(field.name.value, field);
    }
    BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
    if (restParam != null) {
        checkExpr(restParam, env);
        // Bug fix: accumulate into the flag instead of overwriting it, so an unresolved
        // field reference detected above is not masked by a valid rest parameter.
        unresolvedReference = unresolvedReference || !isValidVariableReference(restParam);
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    BRecordType bRecordType = new BRecordType(recordSymbol);
    bRecordType.fields = fields;
    recordSymbol.type = bRecordType;
    varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, recordSymbol.getOriginalName(),
                                       env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos,
                                       SOURCE);
    if (restParam == null) {
        // No rest binding: the pattern matches exactly the listed fields.
        bRecordType.sealed = true;
        bRecordType.restFieldType = symTable.noType;
    } else if (restParam.getBType() == symTable.semanticError) {
        bRecordType.restFieldType = symTable.mapType;
    } else {
        // The rest-field type is derived from the rest binding's own type:
        // record -> its rest-field type, map -> its constraint, otherwise the type as-is.
        BType restFieldType;
        if (restParam.getBType().tag == TypeTags.RECORD) {
            restFieldType = ((BRecordType) restParam.getBType()).restFieldType;
        } else if (restParam.getBType().tag == TypeTags.MAP) {
            restFieldType = ((BMapType) restParam.getBType()).constraint;
        } else {
            restFieldType = restParam.getBType();
        }
        bRecordType.restFieldType = restFieldType;
    }
    resultType = bRecordType;
}
/**
 * Type-checks an error binding pattern used as a variable reference (error destructuring).
 * Validates the message (string), cause (error?), detail bindings, and rest binding, then
 * yields an error type whose detail type is derived from the rest binding's type.
 */
@Override
public void visit(BLangErrorVarRef varRefExpr) {
    // With an explicit error type reference, delegate to indirect-error handling.
    if (varRefExpr.typeNode != null) {
        BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env);
        varRefExpr.setBType(bType);
        checkIndirectErrorVarRef(varRefExpr);
        resultType = bType;
        return;
    }
    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, env);
        // The message binding must be able to hold a string.
        if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) {
            dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType,
                    varRefExpr.message.getBType());
        }
    }
    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, env);
        // The cause binding must be able to hold error?.
        if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) {
            dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType,
                    varRefExpr.cause.getBType());
        }
    }
    boolean unresolvedReference = false;
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
        refItem.isLValue = true;
        checkExpr(refItem, env);
        if (!isValidVariableReference(refItem)) {
            unresolvedReference = true;
            continue;
        }
        // Field/index accesses are not valid binding targets inside an error pattern.
        if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN,
                    refItem);
            unresolvedReference = true;
            continue;
        }
        if (refItem.symbol == null) {
            unresolvedReference = true;
        }
    }
    if (varRefExpr.restVar != null) {
        varRefExpr.restVar.isLValue = true;
        if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            checkExpr(varRefExpr.restVar, env);
            unresolvedReference = unresolvedReference
                    || varRefExpr.restVar.symbol == null
                    || !isValidVariableReference(varRefExpr.restVar);
        }
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    // Derive the detail rest-field type from the rest binding; `_` and no rest both mean
    // "anything" (anydata & readonly).
    BType errorRefRestFieldType;
    if (varRefExpr.restVar == null) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        errorRefRestFieldType = varRefExpr.restVar.getBType();
    } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) {
        errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint;
    } else {
        dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                varRefExpr.restVar.getBType(), symTable.detailType);
        resultType = symTable.semanticError;
        return;
    }
    BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly
            ? symTable.errorType.detailType
            : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC);
    resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType);
}
/**
 * Type-checks the parts of an error binding pattern that carries an explicit error type
 * reference: each detail binding is checked against its own expression type, and the rest,
 * message, and cause bindings are checked as lvalues where applicable.
 *
 * @param varRefExpr the error variable reference with a type reference
 */
private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) {
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        // Check the bound expression first, then the named arg against that type.
        checkExpr(detailItem.expr, env);
        checkExpr(detailItem, env, detailItem.expr.getBType());
    }
    if (varRefExpr.restVar != null) {
        checkExpr(varRefExpr.restVar, env);
    }
    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, env);
    }
    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, env);
    }
}
/**
 * Type-checks a tuple binding pattern used as a variable reference (list destructuring),
 * e.g. {@code [a, b, ...rest] = expr}. Builds a tuple type from the member references; a
 * rest binding must be an array or tuple, and supplies the tuple's rest type.
 */
@Override
public void visit(BLangTupleVarRef varRefExpr) {
    List<BType> results = new ArrayList<>();
    for (int i = 0; i < varRefExpr.expressions.size(); i++) {
        ((BLangVariableReference) varRefExpr.expressions.get(i)).isLValue = true;
        results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType));
    }
    BTupleType actualType = new BTupleType(results);
    if (varRefExpr.restParam != null) {
        BLangExpression restExpr = (BLangExpression) varRefExpr.restParam;
        ((BLangVariableReference) restExpr).isLValue = true;
        BType checkedType = checkExpr(restExpr, env, symTable.noType);
        // Only array or tuple types can absorb the remaining members.
        if (!(checkedType.tag == TypeTags.ARRAY || checkedType.tag == TypeTags.TUPLE)) {
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType);
            resultType = symTable.semanticError;
            return;
        }
        if (checkedType.tag == TypeTags.ARRAY) {
            actualType.restType = ((BArrayType) checkedType).eType;
        } else {
            actualType.restType = checkedType;
        }
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Recursively checks whether a (possibly multidimensional) array type has at least one
 * dimension whose size is inferred (open sealed).
 *
 * @param arrayType array to check if open sealed
 * @return true if at least one dimension is open sealed
 */
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    // Walk down the element-type chain iteratively instead of recursing.
    BArrayType current = arrayType;
    while (true) {
        if (current.state == BArrayState.INFERRED) {
            return true;
        }
        if (current.eType.tag != TypeTags.ARRAY) {
            return false;
        }
        current = (BArrayType) current.eType;
    }
}
/**
 * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the
 * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the
 * enclosing invokable node's environment, which are outside of the scope of a lambda function.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    // NOTE(review): assumes env.enclEnv is non-null here — presumably guaranteed by callers
    // walking within an enclosing invokable; confirm before reusing elsewhere.
    if (env.enclEnv.node == null) {
        return env;
    }
    NodeKind kind = env.enclEnv.node.getKind();
    // Arrow expressions and on-fail clauses capture from their directly enclosing environment.
    if (kind == NodeKind.ARROW_EXPR || kind == NodeKind.ON_FAIL) {
        return env.enclEnv;
    }
    // For class definitions, skip over the class scope to its enclosing environment.
    if (kind == NodeKind.CLASS_DEFN) {
        return env.enclEnv.enclEnv;
    }
    // Keep ascending while still inside the same enclosing invokable.
    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
    }
    return env;
}
/**
 * Finds the environment enclosing the given record type node, used to look up closure
 * variables referenced from within the record (e.g. in default values). Stops at arrow
 * expressions, on-fail clauses, and class definitions; otherwise ascends while the
 * environment still belongs to the same record type node.
 *
 * @param env            the starting environment
 * @param recordTypeNode the record type node being analyzed
 * @return the environment from which closure lookup should start
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) {
    if (env.enclEnv.node != null) {
        NodeKind enclosingKind = env.enclEnv.node.getKind();
        boolean stopsHere = enclosingKind == NodeKind.ARROW_EXPR
                || enclosingKind == NodeKind.ON_FAIL
                || enclosingKind == NodeKind.CLASS_DEFN;
        if (stopsHere) {
            return env.enclEnv;
        }
    }
    boolean stillInsideRecord = env.enclType != null && env.enclType == recordTypeNode;
    return stillInsideRecord ? findEnclosingInvokableEnv(env.enclEnv, recordTypeNode) : env;
}
/**
 * Reports whether the given symbol corresponds to one of the listed function parameters,
 * matching by name and type tag.
 *
 * @param symbol the symbol to look for
 * @param params the function's parameter list
 * @return true if a parameter with the same name and type tag exists
 */
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    for (BLangSimpleVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
/**
 * Type-checks a namespace-prefixed field access (XML attribute access); delegates to the
 * shared field-access logic with the namespace-prefixed flag set.
 */
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    checkFieldBasedAccess(nsPrefixedFieldBasedAccess, true);
}
/**
 * Type-checks an ordinary (non-namespace-prefixed) field access; delegates to the shared
 * field-access logic.
 */
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    checkFieldBasedAccess(fieldAccessExpr, false);
}
/**
 * Shared logic for field-based access (`expr.field` and `expr?.field`). Propagates lvalue
 * flags into the container expression, validates namespace-prefixed access is only used on
 * XML, rejects optional access on the left-hand side, and enforces read-only update rules
 * for assignments.
 *
 * @param fieldAccessExpr the field access expression
 * @param isNsPrefixed    true for namespace-prefixed (XML attribute) access
 */
private void checkFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr, boolean isNsPrefixed) {
    markLeafNode(fieldAccessExpr);
    BLangExpression containerExpression = fieldAccessExpr.expr;
    // Propagate lvalue-ness into the container so nested accesses check correctly.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                fieldAccessExpr.isCompoundAssignmentLValue;
    }
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env));
    // ns:field access is only meaningful on XML values.
    if (isNsPrefixed && !isXmlAccess(fieldAccessExpr)) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION);
        resultType = symTable.semanticError;
        return;
    }
    BType actualType;
    if (fieldAccessExpr.optionalFieldAccess) {
        // `?.` cannot appear on the left-hand side of an assignment.
        if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
            resultType = symTable.semanticError;
            return;
        }
        actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
                names.fromIdNode(fieldAccessExpr.field));
    } else {
        actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));
        if (actualType != symTable.semanticError &&
                (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) {
            // Read-only values cannot be updated, except object fields inside the object's init.
            if (isAllReadonlyTypes(varRefType)) {
                if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) {
                    dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                            varRefType);
                    resultType = symTable.semanticError;
                    return;
                }
            } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) &&
                    isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) {
                dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                        fieldAccessExpr.field.value, varRefType);
                resultType = symTable.semanticError;
                return;
            }
        }
    }
    resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
/**
 * Reports whether the given type is entirely read-only: a non-union type must carry the
 * READONLY flag, and a union is read-only only when every member type is.
 *
 * @param type the type to inspect
 * @return true if the type (and all union members, recursively) is read-only
 */
private boolean isAllReadonlyTypes(BType type) {
    if (type.tag == TypeTags.UNION) {
        // A single mutable member makes the whole union mutable.
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isAllReadonlyTypes(memberType)) {
                return false;
            }
        }
        return true;
    }
    return Symbols.isFlagOn(type.flags, Flags.READONLY);
}
/**
 * Reports whether the current enclosing invokable is the object's own `init` method, in
 * which case read-only object fields may still be assigned.
 *
 * @param type an object type (the caller guarantees the OBJECT tag)
 * @return true if checking is currently inside this object type's initializer
 */
private boolean isInitializationInInit(BType type) {
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) ((BObjectType) type).tsymbol;
    BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
    if (env.enclInvokable == null || initializerFunc == null) {
        return false;
    }
    return env.enclInvokable.symbol == initializerFunc.symbol;
}
/**
 * Reports whether assigning to the named field would violate read-only-ness. For a record:
 * the whole record being read-only, the specific field being read-only, or (for an unknown
 * field name) the record being sealed all make the update invalid. For a union, the update
 * is invalid only if it is invalid for every member.
 *
 * @param type      the container type (record or union of records; callers guarantee this —
 *                  any other type would hit the union cast below)
 * @param fieldName the field being assigned
 * @return true if the update is invalid for the given type
 */
private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) {
    if (Types.getReferredType(type).tag == TypeTags.RECORD) {
        if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
            return true;
        }
        BRecordType recordType = (BRecordType) Types.getReferredType(type);
        for (BField field : recordType.fields.values()) {
            if (!field.name.value.equals(fieldName)) {
                continue;
            }
            // Found the field: the update is invalid exactly when the field is read-only.
            return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
        }
        // Field not declared: only valid as a rest field, i.e. when the record is not sealed.
        return recordType.sealed;
    }
    // NOTE(review): non-record falls through to a BUnionType cast — presumably guaranteed
    // by the isSubTypeOfBaseType(..., RECORD) guard at the call sites; confirm before reuse.
    boolean allInvalidUpdates = true;
    for (BType memberType : ((BUnionType) Types.getReferredType(type)).getMemberTypes()) {
        if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) {
            allInvalidUpdates = false;
        }
    }
    return allInvalidUpdates;
}
/**
 * Reports whether a field access operates on an XML value: either the container's type is
 * xml / xml element directly, or the container is itself a lax field access whose union
 * type includes xml or xml element.
 *
 * @param fieldAccessExpr the field access to classify
 * @return true if this is an XML access
 */
private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression expr = fieldAccessExpr.expr;
    BType exprType = Types.getReferredType(expr.getBType());
    int tag = exprType.tag;
    if (tag == TypeTags.XML || tag == TypeTags.XML_ELEMENT) {
        return true;
    }
    // Lax chained access: a prior lax field access may have produced a union containing xml.
    if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR
            || !hasLaxOriginalType((BLangFieldBasedAccess) expr)
            || tag != TypeTags.UNION) {
        return false;
    }
    Set<BType> memberTypes = ((BUnionType) exprType).getMemberTypes();
    return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType);
}
/**
 * Type-checks an index-based access (`expr[index]`). Rejects member access on typedesc
 * expressions, propagates lvalue flags into the container, validates multi-key access is
 * only used on tables, and enforces read-only update rules for assignments. Lvalue
 * accesses take the actual type directly instead of checking against the expected type.
 */
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
    markLeafNode(indexBasedAccessExpr);
    BLangExpression containerExpression = indexBasedAccessExpr.expr;
    // `T[i]` where T is a typedesc expression is not member access.
    if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                ((BLangTypedescExpr) containerExpression).typeNode);
        resultType = symTable.semanticError;
        return;
    }
    // Propagate lvalue-ness into the container so nested accesses check correctly.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                indexBasedAccessExpr.isCompoundAssignmentLValue;
    }
    // A container already typed as string is not re-checked (avoids re-deriving its type).
    boolean isStringValue = containerExpression.getBType() != null
            && Types.getReferredType(containerExpression.getBType()).tag == TypeTags.STRING;
    if (!isStringValue) {
        checkExpr(containerExpression, this.env, symTable.noType);
    }
    // Multi-key member access (`t[k1, k2]`) is only defined for tables.
    if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY &&
            Types.getReferredType(containerExpression.getBType()).tag != TypeTags.TABLE) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED,
                containerExpression.getBType());
        resultType = symTable.semanticError;
        return;
    }
    BType actualType = checkIndexAccessExpr(indexBasedAccessExpr);
    BType exprType = containerExpression.getBType();
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    // Assignments through the access must respect read-only-ness of the container/field.
    if (actualType != symTable.semanticError &&
            (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) {
        if (isAllReadonlyTypes(exprType)) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                    exprType);
            resultType = symTable.semanticError;
            return;
        } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && isConstExpr(indexExpr) &&
                isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                    getConstFieldName(indexExpr), exprType);
            resultType = symTable.semanticError;
            return;
        }
    }
    // Lvalue accesses bypass the expected-type check; the actual type is taken as-is.
    if (indexBasedAccessExpr.isLValue) {
        indexBasedAccessExpr.originalType = actualType;
        indexBasedAccessExpr.setBType(actualType);
        resultType = actualType;
        return;
    }
    this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
}
/**
 * Type-checks an invocation. A bare invocation (no attached expression) is a plain
 * function call; otherwise the attached expression is checked first and dispatch happens
 * on its type (object method, record field function pointer, lang-lib function, ...).
 */
public void visit(BLangInvocation iExpr) {
    // No attached expression: a plain function call.
    if (iExpr.expr == null) {
        checkFunctionInvocationExpr(iExpr);
        return;
    }
    if (invalidModuleAliasUsage(iExpr)) {
        return;
    }
    checkExpr(iExpr.expr, this.env, symTable.noType);
    visitInvocation(iExpr, iExpr.expr.getBType());
}
/**
 * Dispatches an attached invocation based on the receiver's type: object method call,
 * record-field function pointer, undefined function (for untyped receivers), unwrapping of
 * type references and intersections (recursing on the effective type), or a lang-lib
 * function for any other type. Semantic-error receivers are silently skipped.
 *
 * @param iExpr      the invocation
 * @param varRefType the receiver's type
 */
private void visitInvocation(BLangInvocation iExpr, BType varRefType) {
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
            checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType);
            break;
        case TypeTags.RECORD:
            checkFieldFunctionPointer(iExpr, this.env);
            break;
        case TypeTags.NONE:
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name);
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap the type reference and retry.
            visitInvocation(iExpr, Types.getReferredType(varRefType));
            break;
        case TypeTags.INTERSECTION:
            // Dispatch on the intersection's effective type.
            visitInvocation(iExpr, ((BIntersectionType) varRefType).effectiveType);
            break;
        case TypeTags.SEMANTIC_ERROR:
            break;
        default:
            checkInLangLib(iExpr, varRefType);
    }
}
/**
 * Type-checks an error constructor expression (`error(...)` / `error SomeError(...)`).
 * Candidate error types are gathered from the explicit type reference or the expected
 * type; the named args are speculatively checked (silently) as a record literal against
 * the candidates' detail types to pick the matching candidate. If no candidate matches,
 * falls back to a single candidate or `error`, validates detail args against the chosen
 * detail type (map constraint or record fields incl. required-field presence), and finally
 * checks assignability to the expected type.
 */
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef;
    if (userProvidedTypeRef != null) {
        symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR);
    }
    validateErrorConstructorPositionalArgs(errorConstructorExpr);
    // Gather candidate error types and their (referred) detail types.
    List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr);
    List<BType> errorDetailTypes = new ArrayList<>(expandedCandidates.size());
    for (BType expandedCandidate : expandedCandidates) {
        BType detailType = ((BErrorType) Types.getReferredType(expandedCandidate)).detailType;
        errorDetailTypes.add(Types.getReferredType(detailType));
    }
    BType detailCandidate;
    if (errorDetailTypes.size() == 1) {
        detailCandidate = errorDetailTypes.get(0);
    } else {
        detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes));
    }
    // Silently infer which candidate's detail type the named args fit.
    BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr);
    BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env);
    int index = errorDetailTypes.indexOf(inferredDetailType);
    BType selectedCandidate = index < 0 ? symTable.semanticError : expandedCandidates.get(index);
    // Fast path: an unambiguous candidate consistent with any explicit type reference.
    if (selectedCandidate != symTable.semanticError
            && (userProvidedTypeRef == null
            || Types.getReferredType(userProvidedTypeRef.getBType()) == Types.getReferredType(selectedCandidate))) {
        checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType);
        resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType,
                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
        return;
    }
    if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) {
        dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType);
    }
    // Fall back to the explicit reference, the single candidate, or plain `error`.
    boolean validTypeRefFound = false;
    BErrorType errorType;
    if (userProvidedTypeRef != null
            && Types.getReferredType(userProvidedTypeRef.getBType()).tag == TypeTags.ERROR) {
        errorType = (BErrorType) Types.getReferredType(userProvidedTypeRef.getBType());
        validTypeRefFound = true;
    } else if (expandedCandidates.size() == 1) {
        errorType = (BErrorType) Types.getReferredType(expandedCandidates.get(0));
    } else {
        errorType = symTable.errorType;
    }
    List<BLangNamedArgsExpression> namedArgs =
            checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType);
    BType detailType = errorType.detailType;
    if (Types.getReferredType(detailType).tag == TypeTags.MAP) {
        // Map detail type: every named arg must fit the map constraint.
        BType errorDetailTypeConstraint = ((BMapType) Types.getReferredType(detailType)).constraint;
        for (BLangNamedArgsExpression namedArgExpr: namedArgs) {
            if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) {
                dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                        namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType());
            }
        }
    } else if (Types.getReferredType(detailType).tag == TypeTags.RECORD) {
        // Record detail type: match named args to fields and track required fields.
        BRecordType targetErrorDetailRec = (BRecordType) Types.getReferredType(errorType.detailType);
        LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream()
                .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED)
                .map(f -> f.name.value)
                .collect(Collectors.toCollection(LinkedList::new));
        LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields;
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            BField field = targetFields.get(namedArg.name.value);
            Location pos = namedArg.pos;
            if (field == null) {
                if (targetErrorDetailRec.sealed) {
                    dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC,
                            namedArg.name, targetErrorDetailRec);
                } else if (targetFields.isEmpty()
                        && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE,
                            namedArg.name, targetErrorDetailRec);
                }
            } else {
                missingRequiredFields.remove(namedArg.name.value);
                if (!types.isAssignable(namedArg.expr.getBType(), field.type)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                            namedArg.name, field.type, namedArg.expr.getBType());
                }
            }
        }
        for (String requiredField : missingRequiredFields) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField);
        }
    }
    if (userProvidedTypeRef != null) {
        errorConstructorExpr.setBType(Types.getReferredType(userProvidedTypeRef.getBType()));
    } else {
        errorConstructorExpr.setBType(errorType);
    }
    // Final assignability check against the expected type.
    BType resolvedType = errorConstructorExpr.getBType();
    if (resolvedType != symTable.semanticError && expType != symTable.noType &&
            !types.isAssignable(resolvedType, expType)) {
        if (validTypeRefFound) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    expType, userProvidedTypeRef);
        } else {
            dlog.error(errorConstructorExpr.pos,
                    DiagnosticErrorCode.ERROR_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
        }
        resultType = symTable.semanticError;
        return;
    }
    resultType = resolvedType;
}
/**
 * Validates the positional arguments of an error constructor: the first must be a string
 * (the error message) and the optional second must be error? (the cause).
 *
 * @param errorConstructorExpr the error constructor being checked
 */
private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) {
    if (errorConstructorExpr.positionalArgs.isEmpty()) {
        return;
    }
    // First positional arg: the error message, a string.
    checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType);
    // Second positional arg (if present): the cause, error or nil.
    boolean hasCauseArg = errorConstructorExpr.positionalArgs.size() > 1;
    if (hasCauseArg) {
        checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType);
    }
}
/**
 * Type-checks an expression without emitting diagnostics: errors are muted during the
 * check and the error count is restored afterwards. Used for speculative checks (e.g.
 * inferring an error constructor's detail type).
 *
 * @param expr    the expression to check
 * @param expType the expected type to check against
 * @param env     the environment to check in
 * @return the expression's checked type (semanticError if it did not fit)
 */
private BType checkExprSilent(BLangExpression expr, BType expType, SymbolEnv env) {
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int errorCount = this.dlog.errorCount();
    this.dlog.mute();
    try {
        return checkExpr(expr, env, expType);
    } finally {
        // Restore logging state even if checkExpr throws; previously an exception here
        // would leave the dlog muted and nonErrorLoggingCheck set for the rest of analysis.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        dlog.setErrorCount(errorCount);
        // Only unmute if we were not already inside an outer silent check.
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
    }
}
/**
 * Builds a synthetic record literal from an error constructor's named args, used to
 * speculatively type-check the detail mapping against candidate detail types.
 *
 * @param errorConstructorExpr the error constructor whose named args are converted
 * @return a record literal with one key-value field per named arg
 */
private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) {
        // The key is a string literal carrying the named arg's name.
        BLangLiteral keyLiteral = new BLangLiteral();
        keyLiteral.value = namedArg.getName().value;
        keyLiteral.setBType(symTable.stringType);
        BLangRecordKeyValueField keyValueField =
                (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
        keyValueField.key = new BLangRecordKey(keyLiteral);
        keyValueField.valueExpr = (BLangExpression) namedArg.getExpression();
        recordLiteral.fields.add(keyValueField);
    }
    return recordLiteral;
}
/**
 * Determines the candidate error types for an error constructor. With an explicit type
 * reference, that type is the sole candidate (falling back to `error` and logging if the
 * reference is not an error type). Without one, candidates are derived from the expected
 * type: the expected error type itself, or the error members of an expected union.
 *
 * @param errorConstructorExpr the error constructor
 * @return a non-empty list of candidate error types
 */
private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef;
    if (errorTypeRef == null) {
        // Infer candidates from the expected type.
        if (Types.getReferredType(expType).tag == TypeTags.ERROR) {
            return List.of(expType);
        } else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) {
            return expandExpectedErrorTypes(expType);
        }
    } else {
        // Explicit type reference: it must refer to an error type.
        BType errorType = Types.getReferredType(errorTypeRef.getBType());
        if (errorType.tag != TypeTags.ERROR) {
            // Avoid a duplicate diagnostic when the reference already failed to resolve.
            if (errorType.tag != TypeTags.SEMANTIC_ERROR) {
                dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef);
                errorConstructorExpr.errorTypeRef.setBType(symTable.semanticError);
            }
        } else {
            return List.of(errorTypeRef.getBType());
        }
    }
    // Fallback: plain `error`.
    return List.of(symTable.errorType);
}
/**
 * Expands an expected type into the list of error types it can accept.
 * Union members assignable to {@code error} are collected individually;
 * intersections contribute their effective type.
 *
 * @param candidateType the expected type to expand
 * @return the error-typed candidates found (possibly empty)
 */
private List<BType> expandExpectedErrorTypes(BType candidateType) {
    List<BType> expandedCandidates = new ArrayList<>();
    BType referredType = Types.getReferredType(candidateType);
    if (referredType.tag == TypeTags.UNION) {
        // Collect each error-typed member, unwrapping intersections to their effective type.
        for (BType member : ((BUnionType) referredType).getMemberTypes()) {
            BType referredMember = Types.getReferredType(member);
            if (!types.isAssignable(referredMember, symTable.errorType)) {
                continue;
            }
            expandedCandidates.add(referredMember.tag == TypeTags.INTERSECTION
                    ? ((BIntersectionType) referredMember).effectiveType
                    : referredMember);
        }
    } else if (types.isAssignable(candidateType, symTable.errorType)) {
        expandedCandidates.add(referredType.tag == TypeTags.INTERSECTION
                ? ((BIntersectionType) referredType).effectiveType
                : candidateType);
    }
    return expandedCandidates;
}
/**
 * Type-checks an action invocation such as {@code client->method(...)}.
 * Without an attached expression this is a plain function call.
 */
public void visit(BLangInvocation.BLangActionInvocation aInv) {
    // No receiver expression: treat as an ordinary function invocation.
    if (aInv.expr == null) {
        checkFunctionInvocationExpr(aInv);
        return;
    }
    if (invalidModuleAliasUsage(aInv)) {
        return;
    }
    // Type-check the receiver first, then validate the action against its type.
    checkExpr(aInv.expr, this.env, symTable.noType);
    checkActionInvocation(aInv, aInv.expr.getBType());
}
/**
 * Dispatches an action invocation based on the receiver's type, reporting an
 * error for receivers that cannot take an action call.
 *
 * @param aInv the action invocation node
 * @param type the (possibly referenced) receiver type
 */
private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BType type) {
    int tag = type.tag;
    if (tag == TypeTags.OBJECT) {
        checkActionInvocation(aInv, (BObjectType) type);
    } else if (tag == TypeTags.RECORD) {
        // Record receivers only support function-pointer fields.
        checkFieldFunctionPointer(aInv, this.env);
    } else if (tag == TypeTags.NONE) {
        dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name);
        resultType = symTable.semanticError;
    } else if (tag == TypeTags.TYPEREFDESC) {
        // Unwrap the type reference and retry.
        checkActionInvocation(aInv, Types.getReferredType(type));
    } else {
        // Covers SEMANTIC_ERROR and every other non-invocable receiver type.
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, type);
        resultType = symTable.semanticError;
    }
}
/**
 * Reports an error if the invocation carries a module alias, which is not
 * permitted in this position.
 *
 * @return true when an illegal alias was present (error already logged)
 */
private boolean invalidModuleAliasUsage(BLangInvocation invocation) {
    if (names.fromIdNode(invocation.pkgAlias) == Names.EMPTY) {
        return false;
    }
    dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE);
    return true;
}
/**
 * Type-checks a let expression: creates a dedicated scope, analyzes the let-var
 * declarations in order, then checks the body against the expected type.
 */
public void visit(BLangLetExpression letExpression) {
    // Each let expression gets its own scope symbol with a unique generated name.
    Name letName = new Name(String.format("$let_symbol_%d$", letCount++));
    BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())), letName,
            env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner, letExpression.pos);
    letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol);
    // Declarations are analyzed in order so later ones can see earlier bindings.
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env);
    }
    BType bodyType = checkExpr(letExpression.expr, letExpression.env, this.expType);
    types.checkType(letExpression, bodyType, this.expType);
}
/**
 * Resolves and validates a lang-lib method invocation on the given receiver type.
 * Reports undefined functions, illegal readonly-value updates, and illegal
 * list-size-changing calls.
 */
private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, Types.getReferredType(varRefType));
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        // No matching lang-lib function exists for this receiver type.
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value,
                iExpr.expr.getBType());
        resultType = symTable.semanticError;
        return;
    }
    // Check readonly-update violations first; only if that passes, validate size changes.
    if (!checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) {
        checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType);
    }
}
/**
 * Reports an error when a modifier lang-lib function is invoked on a readonly value.
 * The {@code mergeJson}/{@code strip} special cases mirror which receiver kinds those
 * functions actually mutate.
 *
 * @return true if an invalid update was found and reported
 */
private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType,
                                                 BSymbol langLibMethodSymbol) {
    if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) {
        return false;
    }
    String packageId = langLibMethodSymbol.pkgID.name.value;
    String funcName = langLibMethodSymbol.name.value;
    // Only functions registered as value-modifiers for this module can be invalid here.
    if (!modifierFunctions.containsKey(packageId) || !modifierFunctions.get(packageId).contains(funcName)) {
        return false;
    }
    int referredTag = Types.getReferredType(varRefType).tag;
    // mergeJson is only a mutation when the receiver is a map value.
    if (funcName.equals("mergeJson") && referredTag != TypeTags.MAP) {
        return false;
    }
    // strip on xml values does not update the receiver.
    if (funcName.equals("strip") && TypeTags.isXMLTypeTag(referredTag)) {
        return false;
    }
    dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
    resultType = symTable.semanticError;
    return true;
}
/**
 * Returns true when the given list type has a fixed length: a closed array,
 * a tuple without a rest type, or a union in which every member is fixed-length.
 */
private boolean isFixedLengthList(BType type) {
    int tag = type.tag;
    if (tag == TypeTags.ARRAY) {
        // Only arrays whose length is not open are fixed.
        return ((BArrayType) type).state != BArrayState.OPEN;
    }
    if (tag == TypeTags.TUPLE) {
        // A tuple without a rest type cannot grow or shrink.
        return ((BTupleType) type).restType == null;
    }
    if (tag == TypeTags.UNION) {
        // A union is fixed-length only when every member is.
        return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isFixedLengthList);
    }
    return false;
}
/**
 * Rejects list-length-modifying lang-lib calls (push/pop/shift/...) on receivers
 * whose shape cannot change: fixed-length lists, and shift() on tuples whose
 * fixed members differ from the rest type.
 */
private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) {
    String invocationName = iExpr.name.getValue();
    if (!listLengthModifierFunctions.contains(invocationName)) {
        return;
    }
    // Length-changing calls are illegal on fixed-length lists.
    if (isFixedLengthList(varRefType)) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName,
                varRefType);
        resultType = symTable.semanticError;
        return;
    }
    // shift() would change the shape of a tuple whose members differ from its rest type.
    if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName,
                varRefType);
        resultType = symTable.semanticError;
    }
}
/**
 * Returns true when a shift() call targets a tuple (or a union made solely of tuples)
 * whose fixed member types differ from the rest type, i.e. shifting would break the
 * tuple's shape.
 */
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
    // Only shift() can change a tuple's shape; every other function is fine here.
    if (invocationName.compareTo(FUNCTION_NAME_SHIFT) != 0) {
        return false;
    }
    if (varRefType.tag == TypeTags.TUPLE) {
        return hasDifferentTypeThanRest((BTupleType) varRefType);
    }
    if (varRefType.tag == TypeTags.UNION) {
        // Every member must be a tuple whose fixed members differ from its rest type.
        for (BType member : ((BUnionType) varRefType).getMemberTypes()) {
            if (member.tag != TypeTags.TUPLE || !hasDifferentTypeThanRest((BTupleType) member)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
/**
 * Returns true when the tuple has a rest type and at least one fixed member type
 * is not the same type as that rest type.
 */
private boolean hasDifferentTypeThanRest(BTupleType tupleType) {
    BType restType = tupleType.restType;
    if (restType == null) {
        return false;
    }
    return tupleType.getTupleTypes().stream()
            .anyMatch(member -> !types.isSameType(restType, member));
}
/**
 * Checks an invocation of the form {@code rec.f(...)} where {@code f} is expected to
 * be a function-pointer field of the receiver's (record/struct) type.
 * <p>
 * When the field is missing or is not a function, falls back to looking up a
 * lang-lib method of the same name before reporting an error.
 *
 * @param iExpr the invocation expression
 * @param env   the symbol environment to check in
 * @return true when the invocation was resolved as a function-pointer field call
 */
private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) {
    BType type = checkExpr(iExpr.expr, env);
    BLangIdentifier invocationIdentifier = iExpr.name;
    if (type == symTable.semanticError) {
        return false;
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier),
            Types.getReferredType(type).tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        // No such field: maybe it is a lang-lib method; otherwise report undefined field.
        checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD,
                invocationIdentifier, type);
        return false;
    }
    if (fieldSymbol.kind != SymbolKind.FUNCTION) {
        // Field exists but is not invocable; again try the lang-lib fallback first.
        checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD,
                fieldSymbol.type);
        return false;
    }
    // Resolved: record the symbol, set the return type, and validate the arguments.
    iExpr.symbol = fieldSymbol;
    iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType);
    checkInvocationParamAndReturnType(iExpr);
    iExpr.functionPointerInvocation = true;
    return true;
}
/**
 * Falls back to a lang-lib method lookup; when none exists, logs the supplied
 * diagnostic, otherwise validates the call against readonly-update rules.
 *
 * @param iExpr      the invocation being resolved
 * @param varRefType the receiver type
 * @param pos        position for the diagnostic when the method is missing
 * @param errCode    diagnostic to report when no lang-lib method exists
 * @param diagMsgArgs arguments for the diagnostic message
 */
private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos,
                                        DiagnosticErrorCode errCode, Object... diagMsgArgs) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
    if (langLibMethodSymbol != symTable.notFoundSymbol) {
        // A lang-lib method exists; it may still be an illegal readonly-value update.
        checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol);
        return;
    }
    dlog.error(pos, errCode, diagMsgArgs);
    resultType = symTable.semanticError;
}
@Override
public void visit(BLangObjectConstructorExpression objectCtorExpression) {
    // Type-checks an `object { ... }` constructor expression: (re)defines the synthetic
    // class, propagates type-ids from the expected type, analyzes the class body, and
    // finally checks the implicit `new` invocation.
    BLangClassDefinition classNode = objectCtorExpression.classNode;
    classNode.oceEnvData.capturedClosureEnv = env;
    BLangClassDefinition originalClass = classNode.oceEnvData.originalClass;
    if (originalClass.cloneRef != null && !objectCtorExpression.defined) {
        // First visit: define the cloned class so repeated checks do not redefine it.
        classNode = (BLangClassDefinition) originalClass.cloneRef;
        symbolEnter.defineClassDefinition(classNode, env);
        objectCtorExpression.defined = true;
    }
    BObjectType objectType;
    if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
        objectType = (BObjectType) objectCtorExpression.classNode.getBType();
        if (Types.getReferredType(objectCtorExpression.expectedType).tag == TypeTags.OBJECT) {
            // Inherit the expected object type's type-id set directly.
            BObjectType expObjType = (BObjectType) types
                    .getReferredType(objectCtorExpression.expectedType);
            objectType.typeIdSet = expObjType.typeIdSet;
        } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) {
            // Otherwise the expected type must determine a single definite type-id set.
            if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
                dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
                        objectCtorExpression.expectedType);
                resultType = symTable.semanticError;
                return;
            }
        }
    }
    BLangTypeInit cIExpr = objectCtorExpression.typeInit;
    BType actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
    if (actualType == symTable.semanticError) {
        resultType = symTable.semanticError;
        return;
    }
    BObjectType actualObjectType = (BObjectType) actualType;
    List<BLangType> typeRefs = classNode.typeRefs;
    SymbolEnv typeDefEnv = SymbolEnv.createObjectConstructorObjectEnv(classNode, env);
    classNode.oceEnvData.typeInit = objectCtorExpression.typeInit;
    // NOTE(review): dlog is unmuted both before and after analyzing the class body;
    // the first unmute looks redundant (or a mute was intended) — confirm upstream.
    dlog.unmute();
    if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) {
        // Expected type is readonly: the whole constructed object must be immutable.
        handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, false);
    } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
            Flags.READONLY)) {
        // The first included type reference is readonly.
        handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, true);
    } else {
        semanticAnalyzer.analyzeNode(classNode, typeDefEnv);
    }
    dlog.unmute();
    markConstructedObjectIsolatedness(actualObjectType);
    if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
        // The class declares init(): bind it and validate the constructor arguments.
        cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
        checkInvocationParam(cIExpr.initInvocation);
        cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
    } else {
        // No init(): arguments are only valid if there are none.
        if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) {
            return;
        }
    }
    if (cIExpr.initInvocation.getBType() == null) {
        cIExpr.initInvocation.setBType(symTable.nilType);
    }
    // new T(...) has type T (or T|E when init can return an error E).
    BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType());
    resultType = types.checkType(cIExpr, actualTypeInitType, expType);
}
/**
 * Returns true when the given type determines at most one object type-id set,
 * i.e. an object constructor checked against it has a definite identity.
 * Collected sets are left in {@code typeIdSets} for the caller.
 */
private boolean isDefiniteObjectType(BType bType, Set<BTypeIdSet> typeIdSets) {
    BType type = Types.getReferredType(bType);
    int tag = type.tag;
    if (tag != TypeTags.OBJECT && tag != TypeTags.UNION) {
        return false;
    }
    // Gather the type-id sets of every reachable object type; more than one
    // distinct set means the object type is ambiguous.
    if (!collectObjectTypeIds(type, typeIdSets, new HashSet<>())) {
        return false;
    }
    return typeIdSets.size() <= 1;
}
/**
 * Recursively collects the type-id sets of object types reachable through unions.
 *
 * @param visitedTypes unions already expanded, to guard against cyclic type graphs
 * @return false when a non-object, non-union type is encountered
 */
private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) {
    switch (type.tag) {
        case TypeTags.OBJECT:
            typeIdSets.add(((BObjectType) type).typeIdSet);
            return true;
        case TypeTags.UNION:
            if (!visitedTypes.add(type)) {
                // This union was already expanded; stop the recursion here.
                return true;
            }
            for (BType member : ((BUnionType) type).getMemberTypes()) {
                if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Loads the type-id set implied by {@code type} into {@code objectType}.
 *
 * @param type       the expected type whose type-id set is to be adopted
 * @param objectType the constructed object type to update
 * @return false when {@code type} does not determine a single definite object type
 */
private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) {
    Set<BTypeIdSet> typeIdSets = new HashSet<>();
    if (!isDefiniteObjectType(type, typeIdSets)) {
        return false;
    }
    // isDefiniteObjectType guarantees at most one set was collected, so the previous
    // iterator/hasNext dance was dead code: either the set is empty or has exactly one element.
    if (typeIdSets.isEmpty()) {
        objectType.typeIdSet = BTypeIdSet.emptySet();
    } else {
        objectType.typeIdSet = typeIdSets.iterator().next();
    }
    return true;
}
/**
 * Type-checks a {@code new ...} expression. The behavior depends on the resolved
 * (or expected) type: object types bind and validate the init function, stream
 * types validate the optional iterator argument and completion type, and unions
 * try to pick the single object member whose init matches the arguments.
 */
public void visit(BLangTypeInit cIExpr) {
    // `new` without a type is not inferable from `any`, and records are never `new`-able.
    if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) {
        dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType);
        resultType = symTable.semanticError;
        return;
    }
    BType actualType;
    if (cIExpr.userDefinedType != null) {
        // Explicit `new T(...)`.
        actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
    } else {
        // Implicit `new (...)`: infer the type from the expected type.
        actualType = expType;
    }
    if (actualType == symTable.semanticError) {
        resultType = symTable.semanticError;
        return;
    }
    actualType = Types.getReferredType(actualType);
    if (actualType.tag == TypeTags.INTERSECTION) {
        // e.g. `T & readonly` — construct the effective type.
        actualType = ((BIntersectionType) actualType).effectiveType;
    }
    switch (actualType.tag) {
        case TypeTags.OBJECT:
            BObjectType actualObjectType = (BObjectType) actualType;
            // Only classes are instantiable; abstract object types are not.
            if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                        actualType.tsymbol);
                // Still type-check the arguments so nested errors are reported.
                cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
                resultType = symTable.semanticError;
                return;
            }
            if (actualObjectType.classDef != null && actualObjectType.classDef.flagSet.contains(Flag.OBJECT_CTOR)) {
                // Object-constructor-expression classes track their own type-init node.
                if (cIExpr.initInvocation != null && actualObjectType.classDef.oceEnvData.typeInit != null) {
                    actualObjectType.classDef.oceEnvData.typeInit = cIExpr;
                }
                markConstructedObjectIsolatedness(actualObjectType);
            }
            if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
                // Bind the declared init() and validate the constructor arguments.
                cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
                checkInvocationParam(cIExpr.initInvocation);
                cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
            } else {
                // No init(): arguments are only valid if there are none.
                if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) {
                    return;
                }
            }
            break;
        case TypeTags.STREAM:
            // `new stream<T, E>(iterator?)` takes at most one argument.
            if (cIExpr.initInvocation.argExprs.size() > 1) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation);
                resultType = symTable.semanticError;
                return;
            }
            BStreamType actualStreamType = (BStreamType) actualType;
            if (actualStreamType.completionType != null) {
                // The completion type must be () or contain an error type.
                BType completionType = actualStreamType.completionType;
                if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString());
                    resultType = symTable.semanticError;
                    return;
                }
            }
            if (!cIExpr.initInvocation.argExprs.isEmpty()) {
                // Validate the supplied iterator object against the stream's element type.
                BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0);
                BType constructType = checkExpr(iteratorExpr, env, symTable.noType);
                BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType);
                if (constructType.tag != TypeTags.OBJECT) {
                    dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                            expectedNextReturnType, constructType);
                    resultType = symTable.semanticError;
                    return;
                }
                BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType,
                        BLangCompilerConstants.CLOSE_FUNC);
                if (closeFunc != null) {
                    // Iterator with close(): must satisfy the closeable-iterator abstraction.
                    BType closeableIteratorType = symTable.langQueryModuleSymbol.scope
                            .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type;
                    if (!types.isAssignable(constructType, closeableIteratorType)) {
                        dlog.error(iteratorExpr.pos,
                                DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR,
                                expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                } else {
                    // Plain iterator: must satisfy the basic iterator abstraction.
                    BType iteratorType = symTable.langQueryModuleSymbol.scope
                            .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type;
                    if (!types.isAssignable(constructType, iteratorType)) {
                        dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                }
                // The iterator's next() return type must match record {| T value; |}|C.
                BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType);
                if (nextReturnType != null) {
                    types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType,
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                } else {
                    dlog.error(constructType.tsymbol.getPosition(),
                            DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType);
                }
            }
            if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType,
                        actualType);
                resultType = symTable.semanticError;
                return;
            }
            resultType = actualType;
            return;
        case TypeTags.UNION:
            // Pick the union member(s) whose init() signature matches the arguments.
            List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType);
            BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType);
            cIExpr.initInvocation.setBType(symTable.nilType);
            if (matchedType.tag == TypeTags.OBJECT) {
                if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
                    cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
                    checkInvocationParam(cIExpr.initInvocation);
                    cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                    actualType = matchedType;
                    // Fall through to the common return-type computation below.
                    break;
                } else {
                    if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) {
                        return;
                    }
                }
            }
            types.checkType(cIExpr, matchedType, expType);
            cIExpr.setBType(matchedType);
            resultType = matchedType;
            return;
        default:
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
            resultType = symTable.semanticError;
            return;
    }
    if (cIExpr.initInvocation.getBType() == null) {
        cIExpr.initInvocation.setBType(symTable.nilType);
    }
    // new T(...) has type T (or T|E when init can return an error E).
    BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType());
    resultType = types.checkType(cIExpr, actualTypeInitType, expType);
}
/**
 * Builds the expected return type of a stream iterator's {@code next()} method:
 * {@code record {| T value; |} | C} where {@code T} is the stream's constraint and
 * {@code C} its completion type.
 *
 * @param pos        position used for the generated (virtual) symbols
 * @param streamType the stream whose constraint/completion types are used
 * @return the union of the generated value record and the completion member types
 */
private BUnionType createNextReturnType(Location pos, BStreamType streamType) {
    // Closed record with the single field `value` of the stream's constraint type.
    BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS);
    recordType.restFieldType = symTable.noType;
    recordType.sealed = true;
    Name fieldName = Names.VALUE;
    BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC,
            fieldName, env.enclPkg.packageID,
            streamType.constraint, env.scope.owner, pos, VIRTUAL));
    field.type = streamType.constraint;
    recordType.fields.put(field.name.value, field);
    recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID,
            recordType, env.scope.owner, pos, VIRTUAL);
    recordType.tsymbol.scope = new Scope(env.scope.owner);
    recordType.tsymbol.scope.define(fieldName, field.symbol);
    // Union the record with every member of the completion type.
    LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
    retTypeMembers.add(recordType);
    retTypeMembers.addAll(types.getAllTypes(streamType.completionType, false));
    BUnionType unionType = BUnionType.create(null);
    unionType.addAll(retTypeMembers);
    unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
            env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL);
    return unionType;
}
/**
 * Validates a `new T(args)` call for an object type without a declared init method:
 * supplying any argument is an error in that case.
 *
 * @return true when the invocation is valid (no args, or an init exists)
 */
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) {
    boolean hasArgs = !cIExpr.initInvocation.argExprs.isEmpty();
    if (hasArgs && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) {
        dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL,
                cIExpr.initInvocation.name.value);
        // Still type-check the arguments so nested errors are reported.
        for (BLangExpression argExpr : cIExpr.initInvocation.argExprs) {
            checkExpr(argExpr, env, symTable.noType);
        }
        resultType = symTable.semanticError;
        return false;
    }
    return true;
}
/**
 * Computes the type of a `new` expression from the object type and its init()
 * return type: T when init returns (), and T|E (nil removed) when init may
 * return an error union.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
    if (initRetType.tag == TypeTags.NIL) {
        // init returning () yields the object type itself.
        return objType;
    }
    if (initRetType.tag == TypeTags.UNION) {
        // Replace the nil member with the object type: new T() is T|E.
        LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
        memberTypes.add(objType);
        memberTypes.addAll(((BUnionType) initRetType).getMemberTypes());
        memberTypes.remove(symTable.nilType);
        BUnionType ctorReturnType = BUnionType.create(null, memberTypes);
        ctorReturnType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0,
                Names.EMPTY, env.enclPkg.symbol.pkgID, ctorReturnType,
                env.scope.owner, symTable.builtinPos, VIRTUAL);
        return ctorReturnType;
    }
    return symTable.semanticError;
}
/**
 * Finds the object members of the expected union type whose init function signature
 * matches the `new` expression's arguments.
 * <p>
 * When the union contains exactly one object member (counting intersection members
 * with an object effective type), that member is returned without checking the
 * arguments against its init signature.
 *
 * @param cIExpr       the `new` expression
 * @param lhsUnionType the expected union type on the left-hand side
 * @return the matching object member types (possibly empty)
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
    // First pass: count object members so a unique one can be returned unconditionally.
    int objectCount = 0;
    for (BType type : lhsUnionType.getMemberTypes()) {
        BType memberType = Types.getReferredType(type);
        int tag = memberType.tag;
        if (tag == TypeTags.OBJECT) {
            objectCount++;
            continue;
        }
        if (tag != TypeTags.INTERSECTION) {
            continue;
        }
        // Intersections count when their effective type is an object.
        if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) {
            objectCount++;
        }
    }
    boolean containsSingleObject = objectCount == 1;
    // Second pass: validate each object member and match the init signature.
    List<BType> matchingLhsMemberTypes = new ArrayList<>();
    for (BType type : lhsUnionType.getMemberTypes()) {
        BType memberType = Types.getReferredType(type);
        if (memberType.tag != TypeTags.OBJECT) {
            continue;
        }
        if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
            // Abstract object members cannot be instantiated.
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                    lhsUnionType.tsymbol);
        }
        if (containsSingleObject) {
            return Collections.singletonList(memberType);
        }
        BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc;
        if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) {
            matchingLhsMemberTypes.add(memberType);
        }
    }
    return matchingLhsMemberTypes;
}
/**
 * Selects the single matching union member for a `new` expression.
 * Zero matches means the object type cannot be inferred; more than one is ambiguous.
 *
 * @return the matched member's type, or the semantic-error type after reporting
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
    int matchCount = matchingLhsMembers.size();
    if (matchCount == 1) {
        return matchingLhsMembers.get(0).tsymbol.type;
    }
    dlog.error(cIExpr.pos,
            matchCount == 0
                    ? DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS
                    : DiagnosticErrorCode.AMBIGUOUS_TYPES,
            lhsUnion);
    resultType = symTable.semanticError;
    return symTable.semanticError;
}
/**
 * Checks whether the given invocation arguments are compatible with the signature
 * of {@code function} (a candidate object init method).
 * <p>
 * Positional arguments are matched by index (overflow goes to the rest parameter),
 * named arguments by name among the remaining parameters; every non-defaultable
 * parameter must end up supplied.
 *
 * @param invocationArguments the `new` expression's arguments
 * @param function            the candidate init function, or null when none is declared
 * @return true when the arguments fit the candidate's signature
 */
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
    invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType));
    if (function == null) {
        // No init function: only a zero-argument call can match.
        return invocationArguments.isEmpty();
    }
    if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
        return true;
    }
    // Split the arguments into positional and named.
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    List<BLangExpression> positionalArgs = new ArrayList<>();
    for (BLangExpression argument : invocationArguments) {
        if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            namedArgs.add((BLangNamedArgsExpression) argument);
        } else {
            positionalArgs.add(argument);
        }
    }
    List<BVarSymbol> requiredParams = function.symbol.params.stream()
            .filter(param -> !param.isDefaultable)
            .collect(Collectors.toList());
    // Fewer arguments than required parameters can never match.
    if (requiredParams.size() > invocationArguments.size()) {
        return false;
    }
    List<BVarSymbol> defaultableParams = function.symbol.params.stream()
            .filter(param -> param.isDefaultable)
            .collect(Collectors.toList());
    int givenRequiredParamCount = 0;
    for (int i = 0; i < positionalArgs.size(); i++) {
        if (function.symbol.params.size() > i) {
            givenRequiredParamCount++;
            BVarSymbol functionParam = function.symbol.params.get(i);
            if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) {
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
            continue;
        }
        if (function.symbol.restParam != null) {
            // Extra positional args are absorbed element-wise by the rest parameter.
            BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
            if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) {
                return false;
            }
            continue;
        }
        // More positional args than parameters and no rest parameter.
        return false;
    }
    for (BLangNamedArgsExpression namedArg : namedArgs) {
        boolean foundNamedArg = false;
        List<BVarSymbol> params = function.symbol.params;
        for (int i = givenRequiredParamCount; i < params.size(); i++) {
            BVarSymbol functionParam = params.get(i);
            if (!namedArg.name.value.equals(functionParam.name.value)) {
                continue;
            }
            foundNamedArg = true;
            BType namedArgExprType = checkExpr(namedArg.expr, env);
            // Fix: the argument type must be assignable TO the parameter type
            // (source -> target), matching the positional-argument check above;
            // the operands were previously reversed.
            if (!types.isAssignable(namedArgExprType, functionParam.type)) {
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
        }
        if (!foundNamedArg) {
            return false;
        }
    }
    // Every required parameter must have been supplied.
    return requiredParams.isEmpty();
}
/**
 * Type-checks a wait-for-all expression ({@code wait {a: f1, b: f2}}) and inserts
 * an implicit cast to the expected type when checking succeeded.
 */
public void visit(BLangWaitForAllExpr waitForAllExpr) {
    setResultTypeForWaitForAllExpr(waitForAllExpr, expType);
    waitForAllExpr.setBType(resultType);
    boolean checkedOk = resultType != null && resultType != symTable.semanticError;
    if (checkedOk) {
        types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType);
    }
}
/**
 * Infers the result type of a wait-for-all expression from the expected type:
 * records are checked field-wise; maps and unconstrained contexts produce a
 * {@code map<T>} whose constraint unions the member result types.
 *
 * @param waitForAllExpr the wait-for-all expression
 * @param expType        the expected type (possibly a type reference)
 */
private void setResultTypeForWaitForAllExpr(BLangWaitForAllExpr waitForAllExpr, BType expType) {
    switch (expType.tag) {
        case TypeTags.RECORD:
            checkTypesForRecords(waitForAllExpr);
            break;
        case TypeTags.MAP:
            // Members must satisfy the map's constraint; the result is a fresh map type.
            checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint);
            resultType = buildMapTypeForWaitForAllExpr(waitForAllExpr);
            break;
        case TypeTags.NONE:
        case TypeTags.ANY:
            // No usable constraint: infer map<T> purely from the member types.
            checkTypesForMap(waitForAllExpr, expType);
            resultType = buildMapTypeForWaitForAllExpr(waitForAllExpr);
            break;
        case TypeTags.TYPEREFDESC:
            setResultTypeForWaitForAllExpr(waitForAllExpr, Types.getReferredType(expType));
            break;
        default:
            dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos));
            resultType = symTable.semanticError;
            break;
    }
}

/**
 * Builds {@code map<T>} for a wait-for-all expression: T is the single member type,
 * or the union of all member types. (Extracted: the MAP and NONE/ANY cases above
 * previously duplicated this construction verbatim.)
 */
private BMapType buildMapTypeForWaitForAllExpr(BLangWaitForAllExpr waitForAllExpr) {
    LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
    BType constraint = memberTypes.size() == 1
            ? memberTypes.iterator().next()
            : BUnionType.create(null, memberTypes);
    return new BMapType(TypeTags.MAP, constraint, symTable.mapType.tsymbol);
}
/**
 * Builds the record type that a wait-for-all expression would naturally produce,
 * used in diagnostics when the expected type is incompatible: one field per
 * key-value pair, typed with the future's constraint where applicable.
 *
 * @param waitExpr the wait-for-all expression
 * @param pos      position used for the generated (virtual) record symbol
 * @return a sealed anonymous record type mirroring the wait expression's shape
 */
private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr,
                                                Location pos) {
    BRecordType retType = new BRecordType(null, Flags.ANONYMOUS);
    List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs;
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BLangIdentifier fieldName;
        // `{key}` shorthand resolves via the key; `{key: varRef}` via the referenced name.
        if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
            fieldName = keyVal.key;
        } else {
            fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName;
        }
        BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName));
        // A future<T> member contributes T; anything else contributes its own type.
        BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type;
        BField field = new BField(names.fromIdNode(keyVal.key), null,
                new BVarSymbol(0, names.fromIdNode(keyVal.key),
                        names.originalNameFromIdNode(keyVal.key), env.enclPkg.packageID,
                        fieldType, null, keyVal.pos, VIRTUAL));
        retType.fields.put(field.name.value, field);
    }
    retType.restFieldType = symTable.noType;
    retType.sealed = true;
    retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null,
            pos, VIRTUAL);
    return retType;
}
/**
 * Collects the distinct result types of a wait-for-all expression's members,
 * unwrapping {@code future<T>} members to their constraint {@code T}.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BLangExpression memberExpr = keyVal.keyExpr != null ? keyVal.keyExpr : keyVal.valueExpr;
        BType memberExprType = memberExpr.getBType();
        memberTypes.add(memberExprType.tag == TypeTags.FUTURE
                ? ((BFutureType) memberExprType).constraint
                : memberExprType);
    }
    return memberTypes;
}
/**
 * Checks every key-value pair of a wait-for-all expression against a single
 * expected member type (the map constraint).
 */
private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : waitForAllExpr.keyValuePairs) {
        checkWaitKeyValExpr(keyVal, expType);
    }
}
/**
 * Validates a wait-for-all expression against an expected record type: each pair is
 * checked against the matching field's type, unknown fields are rejected on sealed
 * records (or checked against the rest-field type otherwise), and missing required
 * fields are reported.
 *
 * @param waitExpr the wait-for-all expression being checked
 */
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
    List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
    // Hoisted: the original recomputed this cast five times; expType is stable here.
    BRecordType expRecordType = (BRecordType) Types.getReferredType(expType);
    Map<String, BField> lhsFields = expRecordType.fields;
    // A sealed record cannot accept more pairs than it has fields.
    if (expRecordType.sealed && rhsFields.size() > lhsFields.size()) {
        dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                getWaitForAllExprReturnType(waitExpr, waitExpr.pos));
        resultType = symTable.semanticError;
        return;
    }
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
        String key = keyVal.key.value;
        if (!lhsFields.containsKey(key)) {
            if (expRecordType.sealed) {
                // Unknown field on a closed record.
                dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
                resultType = symTable.semanticError;
            } else {
                // Open record: the extra field must satisfy the rest-field type.
                checkWaitKeyValExpr(keyVal, expRecordType.restFieldType);
            }
        } else {
            checkWaitKeyValExpr(keyVal, lhsFields.get(key).type);
            keyVal.keySymbol = lhsFields.get(key).symbol;
        }
    }
    checkMissingReqFieldsForWait(expRecordType, rhsFields, waitExpr.pos);
    if (symTable.semanticError != resultType) {
        resultType = expType;
    }
}
/**
 * Reports an error for every required field of {@code type} that has no matching
 * key among the wait expression's key-value pairs.
 */
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
                                          Location pos) {
    for (BField field : type.fields.values()) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            continue;
        }
        boolean hasField = keyValPairs.stream()
                .anyMatch(keyVal -> field.name.value.equals(keyVal.key.value));
        if (!hasField) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Checks a single wait-for-all key-value pair against an expected member type:
 * the member expression must be a {@code future<type>}, after which the eventual
 * (error-widened) type is recorded on the expression.
 *
 * @param keyVal the key-value pair ({@code {k}} shorthand or {@code {k: expr}})
 * @param type   the expected constraint type for this member
 */
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) {
    BLangExpression expr;
    if (keyVal.keyExpr != null) {
        // `{k}` shorthand: resolve the key identifier itself as the member expression.
        BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode
                (((BLangSimpleVarRef) keyVal.keyExpr).variableName));
        keyVal.keyExpr.setBType(symbol.type);
        expr = keyVal.keyExpr;
    } else {
        expr = keyVal.valueExpr;
    }
    // The member must be a future whose constraint is the expected type.
    BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null);
    checkExpr(expr, env, futureType);
    setEventualTypeForExpression(expr, type);
}
/**
 * Widens a wait member's future constraint to {@code T|error}, since waiting on a
 * non-worker future can surface an error. Reports an error when the widened type
 * is no longer assignable to the expected type.
 *
 * @param expression          the member expression (skipped for plain worker refs)
 * @param currentExpectedType the expected member type from the surrounding context
 */
private void setEventualTypeForExpression(BLangExpression expression,
                                          BType currentExpectedType) {
    if (expression == null) {
        return;
    }
    // Direct worker references complete without an extra error case.
    if (isSimpleWorkerReference(expression)) {
        return;
    }
    // NOTE(review): unchecked cast — assumes the expression was checked against a
    // future type by the caller (checkWaitKeyValExpr does so); confirm all call sites.
    BFutureType futureType = (BFutureType) expression.expectedType;
    BType currentType = futureType.constraint;
    if (types.containsErrorType(currentType)) {
        // Already contains an error member; nothing to widen.
        return;
    }
    BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType);
    if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) &&
            !types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR,
                currentExpectedType, eventualType, expression);
    }
    futureType.constraint = eventualType;
}
/**
 * Widens the result type of a single wait expression to {@code T|error} when the
 * waited expression is not a plain worker reference, validating the widened type
 * against the expected future constraint.
 */
private void setEventualTypeForWaitExpression(BLangExpression expression,
                                              Location pos) {
    // Nothing to widen if checking already failed or an error type is present.
    if (resultType == symTable.semanticError || types.containsErrorType(resultType)) {
        return;
    }
    if (isSimpleWorkerReference(expression)) {
        return;
    }
    BType expectedConstraint = ((BFutureType) expType).constraint;
    BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
    if (expectedConstraint.tag == TypeTags.NONE || expectedConstraint.tag == TypeTags.NIL) {
        // No meaningful expectation: just adopt the widened type.
        resultType = eventualType;
        return;
    }
    if (!types.isAssignable(eventualType, expectedConstraint)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, expectedConstraint,
                eventualType, expression);
        resultType = symTable.semanticError;
        return;
    }
    if (resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) resultType).constraint = eventualType;
    } else {
        resultType = eventualType;
    }
}
/**
 * Widens the result type of an alternate wait expression ({@code wait f1 | f2}) to
 * {@code T|error} when at least one alternative references a non-worker future,
 * validating the widened type against the expected future constraint.
 *
 * @param expression the wait expression's operand (only binary exprs are widened)
 * @param pos        position for diagnostics
 */
private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) {
    // Note: containsErrorType was previously re-checked in a second, redundant `if`
    // immediately after this guard; the duplicate has been removed.
    if (resultType == symTable.semanticError
            || expression.getKind() != NodeKind.BINARY_EXPR
            || types.containsErrorType(resultType)) {
        return;
    }
    // Only widen when some alternative is not a plain worker reference.
    if (!isReferencingNonWorker((BLangBinaryExpr) expression)) {
        return;
    }
    BType currentExpectedType = ((BFutureType) expType).constraint;
    BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        // No meaningful expectation: just adopt the widened type.
        resultType = eventualType;
        return;
    }
    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        resultType = symTable.semanticError;
        return;
    }
    if (resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) resultType).constraint = eventualType;
    } else {
        resultType = eventualType;
    }
}
/**
 * Returns {@code true} if the given expression is a plain variable reference
 * that resolves to a worker in the current environment.
 *
 * @param expression expression to inspect
 * @return whether the expression directly references a worker
 */
private boolean isSimpleWorkerReference(BLangExpression expression) {
    if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression;
    // An unresolved reference cannot be a worker.
    if (simpleVarRef.symbol == null) {
        return false;
    }
    // Idiom fix: return the predicate result directly instead of if/return-true/return-false.
    return workerExists(env, simpleVarRef.variableName.value);
}
/**
 * Returns {@code true} if either side of the alternate-wait binary expression
 * references something other than a worker.
 *
 * @param binaryExpr the alternate-wait binary expression
 * @return whether any operand is a non-worker reference
 */
private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) {
    // Short-circuits exactly like the original lhs-then-rhs check.
    return isReferencingNonWorker(binaryExpr.lhsExpr) || isReferencingNonWorker(binaryExpr.rhsExpr);
}
/**
 * Returns {@code true} if the expression references something other than a
 * worker. Binary expressions are checked recursively on both operands; simple
 * variable references are looked up in the current worker scope; everything
 * else counts as a non-worker reference.
 *
 * <p>Fix: guard against a {@code null} symbol on unresolved references —
 * the sibling {@code isSimpleWorkerReference} already performs this check,
 * and without it this method throws an NPE at {@code getName()}.
 *
 * @param expression expression to inspect
 * @return whether the expression references a non-worker
 */
private boolean isReferencingNonWorker(BLangExpression expression) {
    if (expression.getKind() == NodeKind.BINARY_EXPR) {
        return isReferencingNonWorker((BLangBinaryExpr) expression);
    } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression;
        BSymbol varRefSymbol = simpleVarRef.symbol;
        if (varRefSymbol == null) {
            // Unresolved reference: treat as non-worker instead of throwing NPE.
            return true;
        }
        String varRefSymbolName = varRefSymbol.getName().value;
        if (workerExists(env, varRefSymbolName)) {
            return false;
        }
    }
    return true;
}
/**
 * Type-checks a ternary conditional {@code cond ? thenExpr : elseExpr}.
 * The condition is checked against boolean; each branch is checked in an
 * environment narrowed by the condition's truth/falsity.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
    BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);
    // Narrow types in the then-branch assuming the condition holds.
    SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env);
    BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType);
    // Narrow types in the else-branch assuming the condition fails.
    SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env, false);
    BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType);
    if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
            elseType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // No expected type: infer the broader of the two branch types,
        // or their union when neither subsumes the other.
        if (types.isAssignable(elseType, thenType)) {
            resultType = thenType;
        } else if (types.isAssignable(thenType, elseType)) {
            resultType = elseType;
        } else {
            resultType = BUnionType.create(null, thenType, elseType);
        }
    } else {
        // With an expected type, both branches were already checked against it.
        resultType = expType;
    }
}
/**
 * Type-checks a {@code wait} expression. The expected type is wrapped into a
 * future so the waited expression checks against {@code future<expType>}; the
 * result is then unwrapped back to the constraint type.
 */
public void visit(BLangWaitExpr waitExpr) {
    // Wrap the expected type: the waited-on expression must be a future of it.
    expType = new BFutureType(TypeTags.FUTURE, expType, null);
    checkExpr(waitExpr.getExpression(), env, expType);
    if (resultType.tag == TypeTags.UNION) {
        // Unwrap each future member to its constraint; collapse singleton unions.
        LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
        if (memberTypes.size() == 1) {
            resultType = memberTypes.toArray(new BType[0])[0];
        } else {
            resultType = BUnionType.create(null, memberTypes);
        }
    } else if (resultType != symTable.semanticError) {
        // Single future: the wait result is the future's constraint.
        resultType = ((BFutureType) resultType).constraint;
    }
    BLangExpression waitFutureExpression = waitExpr.getExpression();
    // Alternate waits (f1 | f2) are binary expressions; others are single waits.
    if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) {
        setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos);
    } else {
        setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos);
    }
    waitExpr.setBType(resultType);
    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint);
    }
}
/**
 * Accumulates the member types of a union into the given set, unwrapping
 * future members to their constraint types.
 *
 * @param unionType   union whose members are collected
 * @param memberTypes accumulator set (mutated and returned)
 * @return the accumulator set
 */
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
    for (BType member : unionType.getMemberTypes()) {
        BType collected = member.tag == TypeTags.FUTURE ? ((BFutureType) member).constraint : member;
        memberTypes.add(collected);
    }
    return memberTypes;
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    // Type-checks `trap expr`: the result type is the expression's type
    // unioned with error, since trap converts panics into error values.
    boolean firstVisit = trapExpr.expr.getBType() == null;
    BType actualType;
    BType exprType = checkExpr(trapExpr.expr, env, expType);
    boolean definedWithVar = expType == symTable.noType;
    if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        // Worker receives are checked in two passes: the first visit defers
        // typing; the second reuses the types recorded on the nodes.
        if (firstVisit) {
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = trapExpr.getBType();
            exprType = trapExpr.expr.getBType();
        }
    }
    if (expType == symTable.semanticError || exprType == symTable.semanticError) {
        actualType = symTable.semanticError;
    } else {
        // Flatten a union expression type, then add error to form the trap result.
        LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
        if (exprType.tag == TypeTags.UNION) {
            resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
        } else {
            resultTypes.add(exprType);
        }
        resultTypes.add(symTable.errorType);
        actualType = BUnionType.create(null, resultTypes);
    }
    resultType = types.checkType(trapExpr, actualType, expType);
    if (definedWithVar && resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType);
    }
}
/**
 * Type-checks a binary expression. Handles the special alternate-wait form
 * ({@code future | future}), decimal literal contexts, type narrowing for
 * {@code &&}/{@code ||} right operands, XML concatenation, and finally
 * resolves the operator symbol through a cascade of resolver lookups.
 */
public void visit(BLangBinaryExpr binaryExpr) {
    // `f1 | f2` inside a wait: both sides are futures; result is their union.
    if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
        BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType);
        BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType);
        if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        resultType = BUnionType.create(null, lhsResultType, rhsResultType);
        return;
    }
    // Pre-check operands against decimal for arithmetic in a decimal context.
    checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr);
    SymbolEnv rhsExprEnv;
    BType lhsType;
    BType referredExpType = Types.getReferredType(binaryExpr.expectedType);
    // In a float/decimal (or optional float/decimal) context, try checking the
    // operand against the expected type first, falling back to inference.
    if (referredExpType.tag == TypeTags.FLOAT || referredExpType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(referredExpType)) {
        lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr);
    } else {
        lhsType = checkExpr(binaryExpr.lhsExpr, env);
    }
    // For && / ||, the RHS sees types narrowed by the LHS outcome.
    if (binaryExpr.opKind == OperatorKind.AND) {
        rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
    } else if (binaryExpr.opKind == OperatorKind.OR) {
        rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
    } else {
        rhsExprEnv = env;
    }
    BType rhsType;
    if (referredExpType.tag == TypeTags.FLOAT || referredExpType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(referredExpType)) {
        rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr);
    } else {
        rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv);
    }
    BType actualType = symTable.semanticError;
    switch (binaryExpr.opKind) {
        case ADD:
            // xml + xml concatenation produces an xml of the union of constituents.
            BType leftConstituent = getXMLConstituents(lhsType);
            BType rightConstituent = getXMLConstituents(rhsType);
            if (leftConstituent != null && rightConstituent != null) {
                actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null);
                break;
            }
            // Intentional fall-through: non-XML addition uses operator resolution.
        default:
            if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
                // Resolver cascade: basic operators, then shift, bitwise,
                // arithmetic, equality, comparison, and range operators.
                BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryBitwiseOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                            binaryExpr, env);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getRangeOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind,
                            lhsType, rhsType);
                } else {
                    binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
                    actualType = opSymbol.type.getReturnType();
                }
            }
    }
    resultType = types.checkType(binaryExpr, actualType, expType);
}
/**
 * Returns {@code true} if the expected type is a nullable union that contains
 * a float or decimal member (e.g. {@code float?} / {@code decimal?}).
 *
 * @param expectedType type to inspect
 * @return whether the type is an optional float/decimal union
 */
private boolean isOptionalFloatOrDecimal(BType expectedType) {
    // Guard clause: only nullable unions qualify (the ANY comparison mirrors
    // the original condition and can never exclude a UNION-tagged type).
    if (expectedType.tag != TypeTags.UNION || !expectedType.isNullable() || expectedType.tag == TypeTags.ANY) {
        return false;
    }
    for (BType memberType : ((BUnionType) expectedType).getMemberTypes()) {
        if (memberType.tag == TypeTags.FLOAT || memberType.tag == TypeTags.DECIMAL) {
            return true;
        }
    }
    return false;
}
/**
 * Speculatively checks a clone of {@code expr} against the binary expression's
 * expected type with diagnostics muted; if that succeeds without errors, the
 * real expression is checked against the expected type, otherwise it is
 * checked with an inferred (no) type.
 *
 * @param expr       operand to check
 * @param env        environment to check in
 * @param binaryExpr enclosing binary expression providing the expected type
 * @return the resolved type of the operand
 */
private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) {
    // Mute diagnostics for the speculative pass; remember prior state so
    // nested speculative checks restore correctly.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    // Clone so the trial pass does not pollute the real AST node.
    expr.cloneAttempt++;
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType);
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    // Only unmute if we were not already inside a muted (speculative) check.
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    if (errorCount == 0 && exprCompatibleType != symTable.semanticError) {
        return checkExpr(expr, env, binaryExpr.expectedType);
    } else {
        return checkExpr(expr, env);
    }
}
/**
 * Walks up the enclosing-environment chain to the environment whose node is
 * {@code node}, then returns a clone of the environment just above it; falls
 * back to a fresh environment for {@code node} when none exists.
 *
 * @param env  starting environment
 * @param node node to locate in the chain
 * @return clone of the enclosing environment, or a new one
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
    SymbolEnv current = env;
    while (current != null && current.node != node) {
        current = current.enclEnv;
    }
    if (current == null || current.enclEnv == null) {
        return new SymbolEnv(node, null);
    }
    return current.enclEnv.createClone();
}
/**
 * Produces a cloned environment positioned after the given join node: the
 * chain is cloned, walked to {@code node}, and its enclosing environment is
 * re-anchored before the last input (from/join) clause.
 *
 * @param env  starting environment
 * @param node join node to locate
 * @return cloned environment rooted after the join node
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
    // Clone first so the caller's environment chain is never mutated.
    SymbolEnv clone = env.createClone();
    while (clone != null && clone.node != node) {
        clone = clone.enclEnv;
    }
    if (clone != null) {
        // Re-anchor the enclosing env just before the last from/join clause.
        clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv));
    } else {
        // Node not found in the chain: start a fresh environment for it.
        clone = new SymbolEnv(node, null);
    }
    return clone;
}
/**
 * Finds the nearest enclosing FROM or JOIN clause node in the environment
 * chain, or {@code null} when there is none.
 *
 * @param env starting environment
 * @return the nearest input-clause node, or {@code null}
 */
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        NodeKind kind = current.node.getKind();
        if (kind == NodeKind.FROM || kind == NodeKind.JOIN) {
            return current.node;
        }
    }
    return null;
}
/**
 * Type-checks a {@code transactional} expression, which always evaluates to
 * a boolean.
 */
public void visit(BLangTransactionalExpr transactionalExpr) {
    BType actualType = symTable.booleanType;
    resultType = types.checkType(transactionalExpr, actualType, expType);
}
/**
 * Type-checks a {@code commit} expression, whose result is {@code error?}
 * (i.e. {@code error|()}).
 */
public void visit(BLangCommitExpr commitExpr) {
    BType commitResultType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(commitExpr, commitResultType, expType);
}
/**
 * Returns the constituent type of an XML type: the constraint for an xml
 * sequence type, the type itself for a non-sequence xml subtype, and
 * {@code null} for anything that is not XML.
 *
 * @param bType type to inspect (type references are unwrapped)
 * @return the XML constituent type, or {@code null}
 */
private BType getXMLConstituents(BType bType) {
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.XML) {
        return ((BXMLType) type).constraint;
    }
    if (TypeTags.isXMLNonSequenceType(type.tag)) {
        return type;
    }
    return null;
}
/**
 * When the expected type is decimal and the operator is one of the four
 * arithmetic operators, pre-checks both operands against decimal so numeric
 * literals are typed as decimal.
 *
 * @param binaryExpr binary expression whose operands may need decimal typing
 */
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
    if (expType.tag != TypeTags.DECIMAL) {
        return;
    }
    OperatorKind op = binaryExpr.opKind;
    boolean isArithmeticOp = op == OperatorKind.ADD || op == OperatorKind.SUB
            || op == OperatorKind.MUL || op == OperatorKind.DIV;
    if (isArithmeticOp) {
        checkExpr(binaryExpr.lhsExpr, env, expType);
        checkExpr(binaryExpr.rhsExpr, env, expType);
    }
}
/**
 * Type-checks an elvis expression {@code lhs ?: rhs}: the LHS must be a
 * nullable union; its non-nil portion is combined with the RHS type.
 */
public void visit(BLangElvisExpr elvisExpr) {
    BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
    BType actualType = symTable.semanticError;
    if (lhsType != symTable.semanticError) {
        if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
            // Strip nil from the LHS union; that is the type when LHS is non-nil.
            BUnionType unionType = (BUnionType) lhsType;
            LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
                    .filter(type -> type.tag != TypeTags.NIL)
                    .collect(Collectors.toCollection(LinkedHashSet::new));
            if (memberTypes.size() == 1) {
                actualType = memberTypes.toArray(new BType[0])[0];
            } else {
                actualType = BUnionType.create(null, memberTypes);
            }
        } else {
            // Elvis only makes sense on a nullable LHS.
            dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS,
                    lhsType);
        }
    }
    BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
    BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // No expected type: pick the broader side, or union when incomparable.
        if (types.isAssignable(rhsReturnType, lhsReturnType)) {
            resultType = lhsReturnType;
        } else if (types.isAssignable(lhsReturnType, rhsReturnType)) {
            resultType = rhsReturnType;
        } else {
            resultType = BUnionType.create(null, lhsReturnType, rhsReturnType);
        }
    } else {
        resultType = expType;
    }
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized expression has exactly the type of its inner expression.
    resultType = checkExpr(groupExpr.expression, env, expType);
}
/**
 * Type-checks a type-descriptor expression: resolves the referenced type
 * (once) and wraps it in a {@code typedesc} type unless it already is one or
 * failed to resolve.
 */
public void visit(BLangTypedescExpr accessExpr) {
    if (accessExpr.resolvedType == null) {
        accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    }
    BType resolvedType = accessExpr.resolvedType;
    int tag = resolvedType.tag;
    // Already a typedesc, or unresolved (NONE): use the resolved type as-is.
    final BType actualType = (tag == TypeTags.TYPEDESC || tag == TypeTags.NONE)
            ? resolvedType
            : new BTypedescType(resolvedType, null);
    resultType = types.checkType(accessExpr, actualType, expType);
}
/**
 * Type-checks a unary expression. {@code untaint} preserves the operand type,
 * {@code typeof} yields a typedesc of it, and the remaining operators are
 * resolved through the symbol resolver.
 */
public void visit(BLangUnaryExpr unaryExpr) {
    BType exprType;
    BType actualType = symTable.semanticError;
    if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
        // untaint: type passes through unchanged.
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = exprType;
        }
    } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) {
        // typeof: result is a typedesc of the operand's type.
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = new BTypedescType(exprType, null);
        }
    } else {
        // In a decimal context, +/- operands are checked against decimal so
        // numeric literals get typed as decimal.
        boolean decimalAddNegate = expType.tag == TypeTags.DECIMAL &&
                (OperatorKind.ADD.equals(unaryExpr.operator) || OperatorKind.SUB.equals(unaryExpr.operator));
        exprType = decimalAddNegate ? checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            // Resolve the operator; fall back to the type-set based lookup.
            BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.operator, exprType);
            if (symbol == symTable.notFoundSymbol) {
                symbol = symResolver.getUnaryOpsForTypeSets(unaryExpr.operator, exprType);
            }
            if (symbol == symTable.notFoundSymbol) {
                dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES,
                        unaryExpr.operator, exprType);
            } else {
                unaryExpr.opSymbol = (BOperatorSymbol) symbol;
                actualType = symbol.type.getReturnType();
            }
        }
    }
    resultType = types.checkType(unaryExpr, actualType, expType);
}
/**
 * Type-checks a type-cast expression {@code <T>expr}. Annotations attached to
 * the cast are analyzed; the inner expression is first checked speculatively
 * (muted) against the target type, then for real against either the target or
 * an inferred type; finally castability is validated.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
    BType actualType = symTable.semanticError;
    for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) {
        annAttachment.attachPoints.add(AttachPoint.Point.TYPE);
        semanticAnalyzer.analyzeNode(annAttachment, this.env);
    }
    BLangExpression expr = conversionExpr.expr;
    if (conversionExpr.typeNode == null) {
        // No target type: this is an annotation-only cast; just check the expr.
        if (!conversionExpr.annAttachments.isEmpty()) {
            resultType = checkExpr(expr, env, this.expType);
        }
        return;
    }
    BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos,
            symResolver.resolveTypeNode(conversionExpr.typeNode, env));
    conversionExpr.targetType = targetType;
    // Speculative pass with diagnostics muted; restore logging state afterwards.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType);
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    // Check against the target when the trial succeeded or inference is required;
    // otherwise infer the expression's own type.
    if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) {
        checkExpr(expr, env, targetType);
    } else {
        checkExpr(expr, env, symTable.noType);
    }
    BType exprType = expr.getBType();
    if (types.isTypeCastable(expr, exprType, targetType, this.env)) {
        actualType = targetType;
    } else if (exprType != symTable.semanticError && exprType != symTable.noType) {
        dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType);
    }
    resultType = types.checkType(conversionExpr, actualType, this.expType);
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Type-checks a lambda: its type is the enclosed function's type, and it
    // captures a clone of the current environment for closure resolution.
    // Fix: removed the unused local `BLangFunction function`.
    bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType());
    bLangLambdaFunction.capturedClosureEnv = env.createClone();
    symResolver.checkRedeclaredSymbols(bLangLambdaFunction);
    // Only register for desugaring during real (non-speculative) checking.
    if (!this.nonErrorLoggingCheck) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType);
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Type-checks an arrow function, whose parameter and return types are
    // inferred entirely from the expected (LHS) function type.
    BType expectedType = Types.getReferredType(expType);
    if (expectedType.tag == TypeTags.UNION) {
        // A union expectation is usable only if it has exactly one invokable member.
        BUnionType unionType = (BUnionType) expectedType;
        BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE)
                .collect(Collectors.collectingAndThen(Collectors.toList(), list -> {
                            if (list.size() != 1) {
                                return null;
                            }
                            return list.get(0);
                        }
                ));
        if (invokableType != null) {
            expectedType = invokableType;
        }
    }
    // `function` (ANY_FUNCTION) carries no signature to infer from.
    if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
        resultType = symTable.semanticError;
        return;
    }
    BInvokableType expectedInvocation = (BInvokableType) expectedType;
    populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
    bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType));
    // No declared return type: adopt the body expression's inferred type.
    if (expectedInvocation.retType.tag == TypeTags.NONE) {
        expectedInvocation.retType = bLangArrowFunction.body.expr.getBType();
    }
    for (BLangSimpleVariable simpleVariable : bLangArrowFunction.params) {
        if (simpleVariable.symbol != null) {
            symResolver.checkForUniqueSymbol(simpleVariable.pos, env, simpleVariable.symbol);
        }
    }
    resultType = bLangArrowFunction.funcType = expectedInvocation;
}
/**
 * Type-checks an XML qualified name. Marks {@code xmlns} attributes as
 * namespace declarations, rejects the reserved {@code xmlns} prefix elsewhere,
 * and resolves a non-empty prefix to its namespace symbol (possibly via a
 * package-level string constant).
 *
 * <p>Fix: removed the unreachable {@code prefix.isEmpty() && notFound} branch
 * that followed the early return for empty prefixes — past that return the
 * prefix is always non-empty, so the later {@code !prefix.isEmpty()} guard
 * was also redundant.
 */
public void visit(BLangXMLQName bLangXMLQName) {
    String prefix = bLangXMLQName.prefix.value;
    resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
    // `xmlns="..."` default namespace declaration on an attribute.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
            && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // `xmlns:foo="..."` prefixed namespace declaration on an attribute.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // The `xmlns` prefix is reserved outside namespace declarations.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // Unprefixed names need no namespace resolution.
    if (bLangXMLQName.prefix.value.isEmpty()) {
        return;
    }
    BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix));
    // Prefix is non-empty here (empty prefixes returned above).
    if (xmlnsSymbol == symTable.notFoundSymbol) {
        logUndefinedSymbolError(bLangXMLQName.pos, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // The prefix may name a package whose constant defines the namespace.
    if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) {
        xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value,
                (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos);
    }
    if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) {
        resultType = symTable.semanticError;
        return;
    }
    bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
    bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI;
}
/**
 * Resolves an XML namespace symbol from a package-level string constant whose
 * value has the expanded-QName form {@code "{namespaceURI}localName"}.
 * Returns {@code null} (after logging) when the constant is missing, is not a
 * string, or does not contain a {@code {...}} URI section.
 *
 * @param localname local name to look up as a constant in the package
 * @param prefix    the prefix used in the source (for error messages)
 * @param pkgSymbol package to search
 * @param pos       position used for error reporting
 * @return the namespace symbol, or {@code null} on failure
 */
private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix,
                                                 BPackageSymbol pkgSymbol, Location pos) {
    BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env,
            names.fromString(localname), SymTag.CONSTANT);
    if (constSymbol == symTable.notFoundSymbol) {
        // Suppress the error for parser-generated missing nodes.
        if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) {
            dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname);
        }
        return null;
    }
    BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol;
    if (constantSymbol.literalType.tag != TypeTags.STRING) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType);
        return null;
    }
    String constVal = (String) constantSymbol.value.value;
    int s = constVal.indexOf('{');
    int e = constVal.lastIndexOf('}');
    // Requires a non-empty "{...}" URI section.
    if (e > s + 1) {
        pkgSymbol.isUsed = true;
        String nsURI = constVal.substring(s + 1, e);
        // NOTE(review): substring(e) starts AT the '}' so the symbol name
        // includes it (e.g. "}local"); substring(e + 1) would give just the
        // local part — confirm whether the brace is intentional.
        String local = constVal.substring(e);
        return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos,
                SOURCE);
    }
    dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname);
    return null;
}
/**
 * Type-checks an XML attribute: both the qualified name and the value must be
 * strings, and the attribute is defined in the enclosing scope.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
    BLangXMLQName attributeName = (BLangXMLQName) bLangXMLAttribute.name;
    checkExpr(attributeName, xmlAttributeEnv, symTable.stringType);
    // An unprefixed attribute carries no namespace.
    if (attributeName.prefix.value.isEmpty()) {
        attributeName.namespaceURI = null;
    }
    checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType);
    symbolEnter.defineNode(bLangXMLAttribute, env);
}
/**
 * Type-checks an XML element literal: namespace-declaration attributes are
 * checked first (so later lookups see them), then regular attributes, then
 * the in-scope namespaces are recorded, tags validated, and children merged.
 */
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
    // Track which prefixes the element actually uses so only those
    // namespaces are recorded on the literal.
    Set<String> usedPrefixes = new HashSet<>();
    BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix;
    if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) {
        usedPrefixes.add(elemNamePrefix.value);
    }
    // First pass: xmlns attributes, so they are defined before other lookups.
    for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) {
        if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) {
            BLangXMLQuotedString value = attribute.value;
            // Namespace URIs must be static strings, not interpolations.
            if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) {
                dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION);
            }
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
        BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix;
        if (prefix != null && !prefix.value.isEmpty()) {
            usedPrefixes.add(prefix.value);
        }
    }
    // Second pass: all non-namespace attributes.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
    Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
    if (namespaces.containsKey(defaultNs)) {
        bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
    }
    // Record only namespaces whose prefixes the element/attributes use.
    for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) {
        if (usedPrefixes.contains(nsEntry.getKey().value)) {
            bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue());
        }
    }
    validateTags(bLangXMLElementLiteral, xmlElementEnv);
    // Merge adjacent children of the same XML kind (e.g. consecutive text).
    bLangXMLElementLiteral.modifiedChildren =
            concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType,
            this.expType);
    // A readonly result makes the whole literal (and children) immutable.
    if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) {
        markChildrenAsImmutable(bLangXMLElementLiteral);
    }
}
/**
 * Returns {@code true} if the attribute declares a namespace: either
 * {@code xmlns="..."} (no prefix, localname {@code xmlns}) or
 * {@code xmlns:foo="..."} (prefix {@code xmlns}).
 *
 * @param attribute attribute to inspect
 * @return whether it is a namespace declaration
 */
private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) {
    BLangXMLQName attrName = (BLangXMLQName) attribute.name;
    String prefix = attrName.prefix.value;
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        return true;
    }
    return prefix.isEmpty() && attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
}
/**
 * Maps an XML child literal node kind to its XML subtype: element, text, or
 * processing-instruction literals map to their respective types; anything
 * else is treated as a comment literal.
 *
 * @param childXMLExpressions child literal expression
 * @return the corresponding XML subtype
 */
public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) {
    switch (childXMLExpressions.getKind()) {
        case XML_ELEMENT_LITERAL:
            return symTable.xmlElementType;
        case XML_TEXT_LITERAL:
            return symTable.xmlTextType;
        case XML_PI_LITERAL:
            return symTable.xmlPIType;
        default:
            // Remaining literal kind is the comment literal.
            return symTable.xmlCommentType;
    }
}
/**
 * Enters speculative-checking mode: marks the checker as non-error-logging
 * and mutes the diagnostic log. Pair with {@link #unMuteErrorLog}.
 */
public void muteErrorLog() {
    this.nonErrorLoggingCheck = true;
    this.dlog.mute();
}
/**
 * Leaves speculative-checking mode: restores the previous logging flag and
 * error count, and unmutes the log only if the enclosing context was not
 * itself speculative.
 *
 * @param prevNonErrorLoggingCheck logging flag captured before muting
 * @param errorCount               error count captured before muting
 */
public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) {
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(errorCount);
    // Keep muted when nested inside another speculative check.
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
}
/**
 * Wraps an XML subtype into the corresponding sequence type
 * ({@code xml<element>}, {@code xml<comment>}, {@code xml<pi>}); text and any
 * other subtype map to the plain text type.
 *
 * @param xmlSubType XML subtype to wrap
 * @return the sequence type for the subtype
 */
public BType getXMLSequenceType(BType xmlSubType) {
    if (xmlSubType.tag == TypeTags.XML_ELEMENT) {
        return new BXMLType(symTable.xmlElementType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_COMMENT) {
        return new BXMLType(symTable.xmlCommentType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_PI) {
        return new BXMLType(symTable.xmlPIType, null);
    }
    // XML text (and any remaining subtype) maps to the text type itself.
    return symTable.xmlTextType;
}
/**
 * Type-checks an XML sequence literal. The expected type must be xml,
 * xml:Text, a union of such, or absent; each item is checked and the
 * resulting sequence type determined from the item types.
 */
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
    BType expType = Types.getReferredType(this.expType);
    // Only xml, xml:Text, unions of those, or no expectation are valid here.
    if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT
            && expType != symTable.noType) {
        dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType,
                "XML Sequence");
        resultType = symTable.semanticError;
        return;
    }
    // Collect the distinct item types appearing in the sequence.
    List<BType> xmlTypesInSequence = new ArrayList<>();
    for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) {
        resultType = checkExpr(expressionItem, env, this.expType);
        if (!xmlTypesInSequence.contains(resultType)) {
            xmlTypesInSequence.add(resultType);
        }
    }
    if (expType.tag == TypeTags.XML || expType == symTable.noType) {
        // Homogeneous sequence: use the precise sequence type; otherwise xml.
        if (xmlTypesInSequence.size() == 1) {
            resultType = getXMLSequenceType(xmlTypesInSequence.get(0));
            return;
        }
        resultType = symTable.xmlType;
        return;
    }
    if (expType.tag == TypeTags.XML_TEXT) {
        resultType = symTable.xmlTextType;
        return;
    }
    // Union expectation: every member must be an xml or xml:Text type.
    for (BType item : ((BUnionType) expType).getMemberTypes()) {
        item = Types.getReferredType(item);
        if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) {
            dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    expType, symTable.xmlType);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = symTable.xmlType;
}
/**
 * Type-checks an XML text literal. An empty single-fragment literal has the
 * {@code xml:never} (empty sequence) type; otherwise it is {@code xml:Text}.
 */
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
    List<BLangExpression> literalValues = bLangXMLTextLiteral.textFragments;
    checkStringTemplateExprs(literalValues);
    BLangExpression xmlExpression = literalValues.get(0);
    // A lone empty string fragment denotes the empty xml sequence.
    if (literalValues.size() == 1 && xmlExpression.getKind() == NodeKind.LITERAL &&
            ((String) ((BLangLiteral) xmlExpression).value).isEmpty()) {
        resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlNeverType, expType);
        return;
    }
    resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType);
}
/**
 * Type-checks an XML comment literal: its fragments must be string-template
 * compatible, and the result is {@code xml:Comment} checked against the
 * expected type.
 */
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
    checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments);
    if (expType != symTable.noType) {
        resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType,
                this.expType);
        return;
    }
    // No expectation: just type the literal as xml:Comment.
    resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType);
}
/**
 * Type-checks an XML processing-instruction literal: target must be a string,
 * data fragments string-template compatible; the result is {@code xml:PI}
 * checked against the expected type.
 */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments);
    if (expType != symTable.noType) {
        resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType,
                this.expType);
        return;
    }
    // No expectation: just type the literal as xml:PI.
    resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType);
}
/**
 * Type-checks a quoted XML attribute value: fragments must be string-template
 * compatible, and the value's type is string.
 */
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments);
    resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}
/**
 * Type-checks a string template literal: each interpolated expression must be
 * string-template compatible, and the literal's type is string.
 */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    checkStringTemplateExprs(stringTemplateLiteral.exprs);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    // Type-checks a raw template literal against a raw-template object type:
    // the literal's strings and insertions are validated against the object's
    // `strings` and `insertions` field types.
    BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType);
    if (type == symTable.semanticError) {
        resultType = type;
        return;
    }
    // At this point the type is a valid raw-template object type.
    BObjectType literalType = (BObjectType) Types.getReferredType(type);
    BType stringsType = literalType.fields.get("strings").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    BType insertionsType = literalType.fields.get("insertions").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    resultType = type;
}
/**
 * Determines the object type for a raw template literal from the expected
 * type, validating that it is a non-class raw-template-compatible abstract
 * object with at most the two standard fields and no methods.
 *
 * @param rawTemplateLiteral literal being checked (for error positions)
 * @param expType            expected type
 * @return the validated object type, or {@code semanticError}
 */
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
    // No usable expectation: fall back to the generic RawTemplate type.
    if (expType == symTable.noType || containsAnyType(expType)) {
        return symTable.rawTemplateType;
    }
    BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
    BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
            DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE);
    if (type == symTable.semanticError) {
        return type;
    }
    // Raw templates cannot be assigned to class types, only abstract objects.
    if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
        return symTable.semanticError;
    }
    BObjectType litObjType = (BObjectType) Types.getReferredType(type);
    BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;
    // Only the `strings` and `insertions` fields are permitted.
    if (litObjType.fields.size() > 2) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType);
        type = symTable.semanticError;
    }
    if (!objTSymbol.attachedFuncs.isEmpty()) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType);
        type = symTable.semanticError;
    }
    return type;
}
/**
 * Checks the raw template's string/insertion expressions against the list
 * (array or tuple) type of the corresponding object field.
 *
 * @param exprs     expressions from the literal
 * @param fieldType declared field type (array or tuple, possibly behind a
 *                  reference/intersection)
 * @param code      diagnostic code for a size mismatch
 * @param pos       position used for error reporting
 * @return {@code true} if any expression failed type checking
 * @throws IllegalStateException if the field type is not a list type
 */
private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType,
                                         DiagnosticCode code, Location pos) {
    BType listType = Types.getReferredType(fieldType);
    // Unwrap an intersection (e.g. readonly & T) to its effective type.
    listType = listType.tag != TypeTags.INTERSECTION ? listType :
            ((BIntersectionType) listType).effectiveType;
    boolean errored = false;
    if (listType.tag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) listType;
        if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) {
            // NOTE(review): an error is logged but `false` (no error) is
            // returned — the tuple branch below does the same; confirm this
            // is the intended contract.
            dlog.error(pos, code, arrayType.size, exprs.size());
            return false;
        }
        for (BLangExpression expr : exprs) {
            errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored;
        }
    } else if (listType.tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) listType;
        final int size = exprs.size();
        final int requiredItems = tupleType.tupleTypes.size();
        // Too few members, or too many without a rest type, is a size error.
        if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
            dlog.error(pos, code, requiredItems, size);
            return false;
        }
        int i;
        List<BType> memberTypes = tupleType.tupleTypes;
        for (i = 0; i < requiredItems; i++) {
            errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored;
        }
        // Remaining expressions check against the tuple's rest type.
        if (size > requiredItems) {
            for (; i < size; i++) {
                errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored;
            }
        }
    } else {
        throw new IllegalStateException("Expected a list type, but found: " + listType);
    }
    return errored;
}
/**
 * Returns {@code true} if the type (after unwrapping references) is the
 * {@code any} type or is a union containing {@code any} as a member.
 *
 * @param bType type to inspect
 * @return whether the type contains {@code any}
 */
private boolean containsAnyType(BType bType) {
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().contains(symTable.anyType);
    }
    return type == symTable.anyType;
}
/**
 * Picks the single raw-template-compatible member out of a union expected
 * type. Non-union types pass through; zero compatible members fall back to
 * the expected type itself; more than one is ambiguous and an error.
 *
 * @param bType expected type
 * @param pos   position used for the ambiguity error
 * @return the compatible type, the input, or {@code semanticError}
 */
private BType getCompatibleRawTemplateType(BType bType, Location pos) {
    BType expType = Types.getReferredType(bType);
    if (expType.tag != TypeTags.UNION) {
        return bType;
    }
    BUnionType unionType = (BUnionType) expType;
    List<BType> compatibleTypes = new ArrayList<>();
    for (BType type : unionType.getMemberTypes()) {
        if (types.isAssignable(type, symTable.rawTemplateType)) {
            compatibleTypes.add(type);
        }
    }
    // No compatible member: let the later check report the incompatibility.
    if (compatibleTypes.size() == 0) {
        return expType;
    }
    // Ambiguous: more than one member could accept the literal.
    if (compatibleTypes.size() > 1) {
        dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
                expType);
        return symTable.semanticError;
    }
    return compatibleTypes.get(0);
}
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
    // A rest argument (`...expr`) takes the type of its inner expression.
    resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
/**
 * Type-checks an inferred typedesc default ({@code <>}): the expected type
 * must be a typedesc, which then becomes the result type.
 */
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
    BType referredType = Types.getReferredType(expType);
    if (referredType.tag == TypeTags.TYPEDESC) {
        resultType = referredType;
        return;
    }
    dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc);
    resultType = symTable.semanticError;
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // Check the wrapped expression against the expected type and mirror its type onto
    // the named-arg wrapper node so later phases see a typed argument.
    resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
    bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType());
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Check the matched expression inside a fresh block env so pattern variables
    // do not leak into the surrounding scope.
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv);
    bLangMatchExpression.patternClauses.forEach(patternClause -> {
        // '_'-suffixed variables are ignore bindings and are not defined as symbols.
        if (!patternClause.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(patternClause.variable, matchExprEnv);
        }
        checkExpr(patternClause.expr, matchExprEnv, expType);
        patternClause.variable.setBType(
                symResolver.resolveTypeNode(patternClause.variable.typeNode, matchExprEnv));
    });
    // The expression's type is the union of all the pattern-clause result types.
    LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
    BType actualType;
    if (matchExprTypes.contains(symTable.semanticError)) {
        actualType = symTable.semanticError;
    } else if (matchExprTypes.size() == 1) {
        actualType = matchExprTypes.iterator().next();
    } else {
        actualType = BUnionType.create(null, matchExprTypes);
    }
    resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // Record whether this `check` occurs inside a query before delegating to the
    // shared check/checkpanic handling; getErrorType consults this flag later.
    checkWithinQueryExpr = isWithinQuery();
    visitCheckAndCheckPanicExpr(checkedExpr);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // `checkpanic` shares all of its type-checking logic with `check`.
    visitCheckAndCheckPanicExpr(checkedExpr);
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Seed prevEnvs with the current env when this is the outermost query;
    // remember that we pushed so we can pop symmetrically at the end.
    boolean cleanPrevEnvs = false;
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
        cleanPrevEnvs = true;
    }
    // For parallel-query checking, clauses resolve against the enclosing env
    // rather than the current one.
    if (breakToParallelQueryEnv) {
        queryEnvs.push(prevEnvs.peek());
    } else {
        queryEnvs.push(env);
    }
    queryFinalClauses.push(queryExpr.getSelectClause());
    List<BLangNode> clauses = queryExpr.getQueryClauses();
    // The first clause is always the from-clause; its collection drives error-type inference.
    BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection();
    clauses.forEach(clause -> clause.accept(this));
    BType actualType = resolveQueryType(queryEnvs.peek(), ((BLangSelectClause) queryFinalClauses.peek()).expression,
            collectionNode.getBType(), expType, queryExpr);
    actualType = (actualType == symTable.semanticError) ? actualType :
            types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    // Unwind the stacks in the reverse order of the pushes above.
    queryFinalClauses.pop();
    queryEnvs.pop();
    if (cleanPrevEnvs) {
        prevEnvs.pop();
    }
    if (actualType.tag == TypeTags.TABLE) {
        BTableType tableType = (BTableType) actualType;
        tableType.constraintPos = queryExpr.pos;
        tableType.isTypeInlineDefined = true;
        if (!validateTableType(tableType)) {
            resultType = symTable.semanticError;
            return;
        }
    }
    checkWithinQueryExpr = false;
    resultType = actualType;
}
/**
 * Returns true while a query construct is being type-checked; both stacks are
 * populated only for the duration of query checking.
 */
private boolean isWithinQuery() {
    boolean hasQueryEnv = !queryEnvs.isEmpty();
    boolean hasFinalClause = !queryFinalClauses.isEmpty();
    return hasQueryEnv && hasFinalClause;
}
/**
 * Resolves the result type of a query expression by checking the select expression
 * against each non-error, non-nil candidate in the target type, then combining the
 * resolved constructed type (array/table/stream/string/xml) with the error type
 * inferred from the queried collection. Returns semanticError when no candidate
 * matches or when multiple candidates match ambiguously.
 */
private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
BType targetType, BLangQueryExpr queryExpr) {
    // Error and nil components of the target type are handled separately (via getErrorType),
    // so strip them from the candidate list here.
    List<BType> resultTypes = types.getAllTypes(targetType, true).stream()
            .filter(t -> !types.isAssignable(t, symTable.errorType))
            .filter(t -> !types.isAssignable(t, symTable.nilType))
            .collect(Collectors.toList());
    if (resultTypes.isEmpty()) {
        resultTypes.add(symTable.noType);
    }
    BType actualType = symTable.semanticError;
    List<BType> selectTypes = new ArrayList<>();
    List<BType> resolvedTypes = new ArrayList<>();
    BType selectType, resolvedType;
    for (BType type : resultTypes) {
        // For each candidate, check the select expression against the candidate's
        // element/constraint type and record what the constructed query type would be.
        switch (type.tag) {
            case TypeTags.ARRAY:
                selectType = checkExpr(selectExp, env, ((BArrayType) type).eType);
                resolvedType = new BArrayType(selectType);
                break;
            case TypeTags.TABLE:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint,
                        true, true));
                resolvedType = symTable.tableType;
                break;
            case TypeTags.STREAM:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint,
                        true, true));
                resolvedType = symTable.streamType;
                break;
            case TypeTags.STRING:
            case TypeTags.XML:
                selectType = checkExpr(selectExp, env, type);
                resolvedType = selectType;
                break;
            case TypeTags.NONE:
            default:
                // No contextual type: derive the query type from the collection being queried.
                selectType = checkExpr(selectExp, env, type);
                resolvedType = getNonContextualQueryType(selectType, collectionType);
                break;
        }
        if (selectType != symTable.semanticError) {
            if (resolvedType.tag == TypeTags.STREAM) {
                queryExpr.isStream = true;
            }
            if (resolvedType.tag == TypeTags.TABLE) {
                queryExpr.isTable = true;
            }
            selectTypes.add(selectType);
            resolvedTypes.add(resolvedType);
        }
    }
    if (selectTypes.size() == 1) {
        // Exactly one candidate matched: build the final type, folding in the
        // completion/error type derived from the collection.
        BType errorType = getErrorType(collectionType, queryExpr);
        selectType = selectTypes.get(0);
        if (queryExpr.isStream) {
            return new BStreamType(TypeTags.STREAM, selectType, errorType, null);
        } else if (queryExpr.isTable) {
            actualType = getQueryTableType(queryExpr, selectType);
        } else {
            actualType = resolvedTypes.get(0);
        }
        if (errorType != null && errorType.tag != TypeTags.NIL) {
            return BUnionType.create(null, actualType, errorType);
        } else {
            return actualType;
        }
    } else if (selectTypes.size() > 1) {
        // Multiple candidates matched: the query type is ambiguous.
        dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes);
        return actualType;
    } else {
        // No candidate matched; semanticError was already produced by checkExpr.
        return actualType;
    }
}
/**
 * Builds the table type produced by a query expression. When a key specifier is
 * present, the key fields are validated, the constraint is marked read-only where
 * applicable, and the result is unioned with error (key conflicts can fail at runtime).
 */
private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) {
    final BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    if (queryExpr.fieldNameIdentifierList.isEmpty()) {
        return tableType;
    }
    validateKeySpecifier(queryExpr.fieldNameIdentifierList, constraintType);
    markReadOnlyForConstraintType(constraintType);
    List<String> fieldNames = new ArrayList<>();
    for (IdentifierNode identifier : queryExpr.fieldNameIdentifierList) {
        fieldNames.add(((BLangIdentifier) identifier).value);
    }
    tableType.fieldNameList = fieldNames;
    return BUnionType.create(null, tableType, symTable.errorType);
}
/**
 * Validates that every field named in a table key specifier exists in the row
 * constraint type; existing fields are forced read-only as required for key fields.
 */
private void validateKeySpecifier(List<IdentifierNode> fieldList, BType constraintType) {
    for (IdentifierNode fieldIdentifier : fieldList) {
        BField matchingField = types.getTableConstraintField(constraintType, fieldIdentifier.getValue());
        if (matchingField == null) {
            dlog.error(fieldIdentifier.getPosition(), DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER,
                    fieldIdentifier.getValue(), constraintType);
            continue;
        }
        if (!Symbols.isFlagOn(matchingField.symbol.flags, Flags.READONLY)) {
            matchingField.symbol.flags |= Flags.READONLY;
        }
    }
}
/**
 * Marks a sealed record constraint type itself as read-only, but only when every
 * one of its fields already carries the READONLY flag. Non-record constraints and
 * records with any mutable field are left untouched.
 */
private void markReadOnlyForConstraintType(BType constraintType) {
    if (constraintType.tag != TypeTags.RECORD) {
        return;
    }
    BRecordType recordType = (BRecordType) constraintType;
    for (BField field : recordType.fields.values()) {
        boolean fieldIsReadOnly = Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
        if (!fieldIsReadOnly) {
            // A single mutable field means the record cannot be read-only as a whole.
            return;
        }
    }
    if (recordType.sealed) {
        recordType.flags |= Flags.READONLY;
        recordType.tsymbol.flags |= Flags.READONLY;
    }
}
/**
 * Infers the error (completion) type a query over the given collection can produce.
 * For streams this is the stream's completion type; for iterable objects and other
 * iterable values it is derived from the error components of the iterator's `next()`
 * return type. Returns null when no error component applies.
 */
private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) {
    if (collectionType.tag == TypeTags.SEMANTIC_ERROR) {
        return null;
    }
    BType returnType = null, errorType = null;
    switch (collectionType.tag) {
        case TypeTags.STREAM:
            // A stream carries its completion type directly.
            errorType = ((BStreamType) collectionType).completionType;
            break;
        case TypeTags.OBJECT:
            returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType);
            break;
        default:
            // Look up the lang-lib iterator function for the collection; a value
            // with no iterator cannot contribute an error type.
            BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType,
                    names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC), env);
            if (itrSymbol == this.symTable.notFoundSymbol) {
                return null;
            }
            BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol;
            returnType = types.getResultTypeOfNextInvocation(
                    (BObjectType) Types.getReferredType(invokableSymbol.retType));
    }
    List<BType> errorTypes = new ArrayList<>();
    if (returnType != null) {
        // Collect only the error components of the iterator's next() return type.
        types.getAllTypes(returnType, true).stream()
                .filter(t -> types.isAssignable(t, symTable.errorType))
                .forEach(errorTypes::add);
    }
    if (checkWithinQueryExpr && queryExpr.isStream) {
        // A checked stream query inside another query may also complete with an error.
        if (errorTypes.isEmpty()) {
            errorTypes.add(symTable.nilType);
        }
        errorTypes.add(symTable.errorType);
    }
    if (!errorTypes.isEmpty()) {
        if (errorTypes.size() == 1) {
            errorType = errorTypes.get(0);
        } else {
            errorType = BUnionType.create(null, errorTypes.toArray(new BType[0]));
        }
    }
    return errorType;
}
/**
 * Derives a query's result type from the type of the queried collection when there is
 * no contextually expected type: table input yields table, stream yields stream, xml
 * yields xml of the select type, string yields string, and anything else yields an
 * array of the select type.
 */
private BType getNonContextualQueryType(BType staticType, BType basicType) {
    switch (basicType.tag) {
        case TypeTags.TABLE:
            return symTable.tableType;
        case TypeTags.STREAM:
            return symTable.streamType;
        case TypeTags.XML:
            return new BXMLType(staticType, null);
        case TypeTags.STRING:
            return symTable.stringType;
        default:
            return new BArrayType(staticType);
    }
}
@Override
public void visit(BLangQueryAction queryAction) {
    // Maintain prevEnvs so nested query actions resolve against the correct outer env.
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
    } else {
        prevEnvs.push(prevEnvs.peek());
    }
    queryEnvs.push(prevEnvs.peek());
    BLangDoClause doClause = queryAction.getDoClause();
    queryFinalClauses.push(doClause);
    List<BLangNode> clauses = queryAction.getQueryClauses();
    clauses.forEach(clause -> clause.accept(this));
    // The do-clause body is a statement block, analyzed in a block env over the query env.
    semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek()));
    // A query action evaluates to error? (error|()).
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    // Unwind in reverse order of the pushes above.
    queryFinalClauses.pop();
    queryEnvs.pop();
    prevEnvs.pop();
}
@Override
public void visit(BLangFromClause fromClause) {
    // Collections in a from-clause are checked against the parallel query env;
    // save and restore the flag since from-clauses can nest.
    boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv;
    this.breakToParallelQueryEnv = true;
    // Replace the current query env with a narrowed env owned by this clause.
    SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop());
    fromClause.env = fromEnv;
    queryEnvs.push(fromEnv);
    checkExpr(fromClause.collection, queryEnvs.peek());
    // Derive the binding-pattern variable type from the collection, then define the variables.
    types.setInputClauseTypedBindingPatternType(fromClause);
    handleInputClauseVariables(fromClause, queryEnvs.peek());
    this.breakToParallelQueryEnv = prevBreakToParallelEnv;
}
@Override
public void visit(BLangJoinClause joinClause) {
    // Same env handling as the from-clause: collections resolve in the parallel env.
    boolean prevBreakEnv = this.breakToParallelQueryEnv;
    this.breakToParallelQueryEnv = true;
    SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop());
    joinClause.env = joinEnv;
    queryEnvs.push(joinEnv);
    checkExpr(joinClause.collection, queryEnvs.peek());
    types.setInputClauseTypedBindingPatternType(joinClause);
    handleInputClauseVariables(joinClause, queryEnvs.peek());
    // The optional on-clause is checked after the join variables are defined.
    if (joinClause.onClause != null) {
        ((BLangOnClause) joinClause.onClause).accept(this);
    }
    this.breakToParallelQueryEnv = prevBreakEnv;
}
@Override
public void visit(BLangLetClause letClause) {
    // Narrow the current query env for this clause, then define each let variable
    // in declaration order so later declarations may refer to earlier ones.
    SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop());
    letClause.env = letEnv;
    queryEnvs.push(letEnv);
    for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv);
    }
}
@Override
public void visit(BLangWhereClause whereClause) {
    // The where condition is a filter: check it as boolean and narrow the query env.
    whereClause.env = handleFilterClauses(whereClause.expression);
}
@Override
public void visit(BLangSelectClause selectClause) {
    // Replace the current query env with a narrowed env owned by the select clause;
    // the select expression itself is checked later by resolveQueryType.
    SymbolEnv selectEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop());
    selectClause.env = selectEnv;
    queryEnvs.push(selectEnv);
}
@Override
public void visit(BLangDoClause doClause) {
    // Replace the current query env with a narrowed env owned by the do-clause;
    // the body is analyzed later by visit(BLangQueryAction).
    SymbolEnv doEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop());
    doClause.env = doEnv;
    queryEnvs.push(doEnv);
}
@Override
public void visit(BLangOnConflictClause onConflictClause) {
    // The on-conflict expression must evaluate to an error value.
    BType conflictExprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType);
    boolean isErrorValue = types.isAssignable(conflictExprType, symTable.errorType);
    if (!isErrorValue) {
        dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED,
                symTable.errorType, conflictExprType);
    }
}
@Override
public void visit(BLangLimitClause limitClause) {
    // The limit expression must be assignable to int.
    BType limitExprType = checkExpr(limitClause.expression, queryEnvs.peek());
    boolean isIntValue = types.isAssignable(limitExprType, symTable.intType);
    if (!isIntValue) {
        dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.intType, limitExprType);
    }
}
@Override
public void visit(BLangOnClause onClause) {
    BType lhsType, rhsType;
    // Each side of `on lhs equals rhs` is checked in a different scope: the lhs
    // sees only variables defined before the join's input node, the rhs sees the
    // join's own variables.
    BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek());
    onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode);
    lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv);
    onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode);
    rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek());
    // The two equality operands must be assignment-compatible.
    if (!types.isAssignable(lhsType, rhsType)) {
        dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
    }
}
@Override
public void visit(BLangOrderByClause orderByClause) {
    // Order keys are checked in the current query env; each key must be an ordered type.
    orderByClause.env = queryEnvs.peek();
    for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
        BLangExpression orderKeyExpr = (BLangExpression) orderKeyNode.getOrderKey();
        BType orderKeyType = checkExpr(orderKeyExpr, orderByClause.env);
        if (!types.isOrderedType(orderKeyType, false)) {
            dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED);
        }
    }
}
@Override
public void visit(BLangDo doNode) {
    // Only the on-fail clause needs expression checking here; the do body itself
    // is handled by the semantic analyzer.
    if (doNode.onFailClause != null) {
        doNode.onFailClause.accept(this);
    }
}
public void visit(BLangOnFailClause onFailClause) {
    // Visit every statement of the on-fail body in order.
    onFailClause.body.stmts.forEach(statement -> statement.accept(this));
}
/**
 * Checks a filter condition (where-clause / join condition) as boolean and replaces
 * the current query env with one narrowed under the assumption the filter is true.
 * Returns the narrowed env.
 */
private SymbolEnv handleFilterClauses(BLangExpression filterExpression) {
    checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType);
    BType actualType = filterExpression.getBType();
    if (actualType.tag == TypeTags.TUPLE) {
        dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.booleanType, actualType);
    }
    // Narrow types for the downstream clauses based on the filter evaluating to true.
    SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, queryFinalClauses.peek(), queryEnvs.pop());
    queryEnvs.push(filterEnv);
    return filterEnv;
}
/**
 * Defines the binding variables of a from/join clause in the given env. `var`
 * declarations take the element type inferred from the collection; explicitly typed
 * declarations must be compatible with that inferred type or an error is logged.
 */
private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) {
    if (bLangInputClause.variableDefinitionNode == null) {
        // Not a variable-binding clause; nothing to define.
        return;
    }
    BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable();
    // `var` bindings simply adopt the inferred element type.
    if (bLangInputClause.isDeclaredWithVar) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv);
    if (types.isAssignable(bLangInputClause.varType, typeNodeType)) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    // Suppress the cascading error when the declared type itself failed to resolve.
    if (typeNodeType != symTable.semanticError) {
        dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                bLangInputClause.varType, typeNodeType);
    }
    // Still define the variable (with the declared type) so later references resolve.
    semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv);
}
/**
 * Shared type-checking for `check` and `checkpanic` expressions: checks the inner
 * expression against a candidate type that includes an error component, partitions
 * the resulting type into error and non-error components, records the error list on
 * the node, and sets the non-error union as the result type. Warns when the inner
 * expression can never produce an error.
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) {
    String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
    BLangExpression exprWithCheckingKeyword = checkedExpr.expr;
    // A null type on the inner expression means this is the first visit to this node.
    boolean firstVisit = exprWithCheckingKeyword.getBType() == null;
    BType checkExprCandidateType;
    if (expType == symTable.noType) {
        checkExprCandidateType = symTable.noType;
    } else {
        // Probe (via a muted clone check) whether the expression fits the expected type;
        // on failure, widen the candidate with error so checking can proceed.
        BType exprType = getCandidateType(checkedExpr, expType);
        if (exprType == symTable.semanticError) {
            checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType);
        } else {
            checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(expType);
        }
    }
    // For lax access under `check`, rewrite as `expr.ensureType(T)` to get the expected type.
    if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) {
        rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType);
    }
    BType exprType = checkExpr(checkedExpr.expr, env, checkExprCandidateType);
    if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        // Worker receives are resolved over two visits; defer typing on the first one.
        if (firstVisit) {
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = checkedExpr.getBType();
            exprType = checkedExpr.expr.getBType();
        }
    }
    boolean isErrorType = types.isAssignable(Types.getReferredType(exprType), symTable.errorType);
    if (Types.getReferredType(exprType).tag != TypeTags.UNION && !isErrorType) {
        // Non-union, non-error operand: `check` is redundant (warn), except readonly,
        // which still intersects with error.
        if (exprType.tag == TypeTags.READONLY) {
            checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{
                add(symTable.errorType);
            }};
            resultType = symTable.anyAndReadonly;
            return;
        } else if (exprType != symTable.semanticError) {
            dlog.warning(checkedExpr.expr.pos,
                    DiagnosticWarningCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS,
                    operatorType);
        }
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // Partition the operand's member types into error and non-error components.
    List<BType> errorTypes = new ArrayList<>();
    List<BType> nonErrorTypes = new ArrayList<>();
    if (!isErrorType) {
        for (BType memberType : types.getAllTypes(exprType, true)) {
            if (memberType.tag == TypeTags.READONLY) {
                // readonly splits into its error and (any & readonly) parts.
                errorTypes.add(symTable.errorType);
                nonErrorTypes.add(symTable.anyAndReadonly);
                continue;
            }
            if (types.isAssignable(memberType, symTable.errorType)) {
                errorTypes.add(memberType);
                continue;
            }
            nonErrorTypes.add(memberType);
        }
    } else {
        errorTypes.add(exprType);
    }
    checkedExpr.equivalentErrorTypeList = errorTypes;
    if (errorTypes.isEmpty()) {
        // No error component at all: `check` serves no purpose here.
        dlog.warning(checkedExpr.expr.pos,
                DiagnosticWarningCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // The checked expression's type is the union of the non-error components
    // (never, when the operand is always an error).
    BType actualType;
    if (nonErrorTypes.size() == 0) {
        actualType = symTable.neverType;
    } else if (nonErrorTypes.size() == 1) {
        actualType = nonErrorTypes.get(0);
    } else {
        actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes));
    }
    resultType = types.checkType(checkedExpr, actualType, expType);
}
/**
 * Rewrites `check <laxExpr>` into `check <laxExpr>.ensureType(typedesc<expType>)`
 * when the candidate type of the checked expression is lax, so the runtime performs
 * the type conversion. No-op for non-lax candidates.
 */
private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) {
    BType rhsType = getCandidateType(checkedExpr, type);
    if (rhsType == symTable.semanticError) {
        // Retry the probe without a contextual type to get the expression's own type.
        rhsType = getCandidateType(checkedExpr, rhsType);
    }
    BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType);
    if (!types.isLax(candidateLaxType)) {
        return;
    }
    // Build the typedesc argument carrying the expected type.
    ArrayList<BLangExpression> argExprs = new ArrayList<>();
    BType typedescType = new BTypedescType(expType, null);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = expType;
    typedescExpr.setBType(typedescType);
    argExprs.add(typedescExpr);
    // Replace the checked expression with the lang-lib ensureType invocation.
    BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE,
            argExprs, checkedExpr.expr, checkedExpr.pos);
    invocation.symbol = symResolver.lookupLangLibMethod(type, names.fromString(invocation.name.value), env);
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    checkedExpr.expr = invocation;
}
/**
 * Candidate type used for the laxness check: for field-based access the nil
 * component is stripped (errors retained); any other expression keeps its type.
 */
private BType getCandidateLaxType(BLangNode expr, BType rhsType) {
    boolean isFieldAccess = expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR;
    return isFieldAccess ? types.getSafeType(rhsType, false, true) : rhsType;
}
/**
 * Speculatively type-checks a clone of the checked expression against the given
 * candidate type with diagnostics muted, returning the type the clone would get.
 * The diagnostic log's state (mute flag and error count) is restored before returning.
 */
private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) {
    // Save-and-set the non-error-logging state; restored symmetrically below.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    // Check a clone so the real node is left untouched for the actual check.
    checkedExpr.expr.cloneAttempt++;
    BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr);
    BType rhsType;
    if (checkExprCandidateType == symTable.semanticError) {
        rhsType = checkExpr(clone, env);
    } else {
        rhsType = checkExpr(clone, env, checkExprCandidateType);
    }
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(prevErrorCount);
    // Only unmute if this call was the one that muted (probes can nest).
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    return rhsType;
}
/**
 * Ensures a checked-expression candidate type has an error component: returns the
 * type unchanged if any member is already an error, otherwise unions it with error.
 */
private BType addDefaultErrorIfNoErrorComponentFound(BType type) {
    for (BType memberType : types.getAllTypes(type, false)) {
        if (types.isAssignable(memberType, symTable.errorType)) {
            // An error component already exists; nothing to add.
            return type;
        }
    }
    return BUnionType.create(null, type, symTable.errorType);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor's type is the type of its already-resolved service symbol.
    resultType = serviceConstructorExpr.serviceNode.symbol.type;
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    // Resolve the tested type node and check the tested expression; an `is` test
    // always evaluates to boolean regardless of the operand type.
    typeTestExpr.typeNode.setBType(symResolver.resolveTypeNode(typeTestExpr.typeNode, env));
    checkExpr(typeTestExpr.expr, env);
    resultType = types.checkType(typeTestExpr, symTable.booleanType, expType);
}
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // The accessed expression must be a typedesc; annotation access reads
    // annotations attached to a type.
    checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc);
    BType actualType = symTable.semanticError;
    Name pkgAlias = names.fromString(annotAccessExpr.pkgAlias.getValue());
    Name annotName = names.fromString(annotAccessExpr.annotationName.getValue());
    BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env, pkgAlias, annotName);
    if (symbol == this.symTable.notFoundSymbol) {
        this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION,
                annotAccessExpr.annotationName.getValue());
    } else {
        BAnnotationSymbol annotationSymbol = (BAnnotationSymbol) symbol;
        annotAccessExpr.annotationSymbol = annotationSymbol;
        // Annotations with no attached type behave as `true`; access yields T|().
        BType annotType = annotationSymbol.attachedType == null ? symTable.trueType
                : annotationSymbol.attachedType;
        actualType = BUnionType.create(null, annotType, symTable.nilType);
    }
    this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
}
/**
 * Returns true if the given expression is one of the reference kinds allowed on the
 * left-hand side of a binding pattern; otherwise logs an invalid-binding-pattern
 * error and returns false.
 */
private boolean isValidVariableReference(BLangExpression varRef) {
    switch (varRef.getKind()) {
        case SIMPLE_VARIABLE_REF:
        case RECORD_VARIABLE_REF:
        case TUPLE_VARIABLE_REF:
        case ERROR_VARIABLE_REF:
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
        case XML_ATTRIBUTE_ACCESS_EXPR:
            return true;
        default:
            dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType());
            return false;
    }
}
/**
 * Resolves the effective type when the target type is (or contains) `readonly`:
 * for selectively-immutable expected types the readonly component is replaced by
 * the immutable intersection of the expected type; otherwise the target is returned
 * unchanged.
 */
private BType getEffectiveReadOnlyType(Location pos, BType type) {
    BType origTargetType = Types.getReferredType(type);
    if (origTargetType == symTable.readonlyType) {
        // Inherently immutable (or not selectively immutable) expected types need no cloning.
        if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
            return origTargetType;
        }
        return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, expType, env, symTable,
                anonymousModelHelper, names, new HashSet<>());
    }
    if (origTargetType.tag != TypeTags.UNION) {
        return origTargetType;
    }
    // For a union target, split out the readonly member (if any) from the rest.
    boolean hasReadOnlyType = false;
    LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) {
        if (memberType == symTable.readonlyType) {
            hasReadOnlyType = true;
            continue;
        }
        nonReadOnlyTypes.add(memberType);
    }
    if (!hasReadOnlyType) {
        return origTargetType;
    }
    if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
        return origTargetType;
    }
    // Rebuild the union with the readonly member replaced by the immutable intersection.
    BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes);
    nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types, expType, env, symTable,
            anonymousModelHelper, names, new HashSet<>()));
    return nonReadOnlyUnion;
}
/**
 * Checks an arrow function's body expression against the expected return type,
 * with the parameters defined in a dedicated arrow-function env. Returns the
 * checked type of the body expression.
 */
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    for (BLangSimpleVariable param : bLangArrowFunction.params) {
        symbolEnter.defineNode(param, arrowFunctionEnv);
    }
    return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType);
}
/**
 * Assigns the expected parameter types (from the contextual function type) to the
 * arrow function's parameters. On an arity mismatch, logs an error and marks every
 * parameter with the semantic-error type.
 */
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
    int expectedParamCount = paramTypes.size();
    int actualParamCount = bLangArrowFunction.params.size();
    if (expectedParamCount != actualParamCount) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                expectedParamCount, actualParamCount);
        resultType = symTable.semanticError;
        bLangArrowFunction.params.forEach(param -> param.setBType(symTable.semanticError));
        return;
    }
    for (int index = 0; index < actualParamCount; index++) {
        BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(index);
        BType paramType = paramTypes.get(index);
        // Synthesize a type node for the parameter so it carries an explicit type downstream.
        BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        valueTypeNode.setTypeKind(paramType.getKind());
        valueTypeNode.pos = symTable.builtinPos;
        paramIdentifier.setTypeNode(valueTypeNode);
        paramIdentifier.setBType(paramType);
    }
}
/**
 * Reports an error when a variable's initializer refers to the variable being
 * defined (env.enclVarSym is the symbol currently being initialized).
 */
public void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) {
    if (env.enclVarSym == varSymbol) {
        dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name);
    }
}
/**
 * Produces a list containing {@code count} copies of the semantic-error type,
 * used as placeholder result types when checking fails.
 */
public List<BType> getListWithErrorTypes(int count) {
    List<BType> errorTypes = new ArrayList<>(count);
    int remaining = count;
    while (remaining-- > 0) {
        errorTypes.add(symTable.semanticError);
    }
    return errorTypes;
}
/**
 * Resolves and type-checks a function invocation: resolves the package prefix,
 * looks the callee up in the main and constructor symbol spaces (accepting
 * function-typed variables as function pointers), rejects remote/resource
 * functions called with plain syntax, and finally checks arguments and return type.
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
    Name funcName = names.fromIdNode(iExpr.name);
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    BSymbol funcSymbol = symTable.notFoundSymbol;
    BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr));
    if (pkgSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias);
    } else {
        if (funcSymbol == symTable.notFoundSymbol) {
            // First try the main symbol space: a variable of function type is callable.
            BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
            if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
                funcSymbol = symbol;
            }
            // Lang-lib (root package) symbols are matched by variable name as well.
            if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) &&
                    (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
                funcSymbol = symbol;
            }
        }
        if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) {
            // Fall back to the constructor space (e.g. error constructors sharing a type name).
            BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
            funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol;
        }
    }
    if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
        // Suppress the undefined-function error for parser-inserted missing nodes,
        // but still check the arguments so nested errors surface.
        if (!missingNodesHelper.isMissingNode(funcName)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName);
        }
        iExpr.argExprs.forEach(arg -> checkExpr(arg, env));
        resultType = symTable.semanticError;
        return;
    }
    if (isFunctionPointer(funcSymbol)) {
        iExpr.functionPointerInvocation = true;
        // A function-pointer callee may need to be captured as a closure variable.
        markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID);
    if (langLibPackageID) {
        // Lang-lib calls get their own invocation env.
        this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
    if (langLibPackageID && !iExpr.argExprs.isEmpty()) {
        // Lang-lib functions must not mutate an immutable receiver (the first argument).
        checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol);
    }
}
/**
 * Determines whether a referenced symbol must be captured as a closure variable
 * and registers it on the capturing construct (lambda, arrow function, or
 * object-constructor class). Walks outward through the enclosing envs until the
 * owning scope is found or a package-level owner makes capture unnecessary.
 */
protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) {
    BLangInvokableNode encInvokable = env.enclInvokable;
    BLangNode bLangNode = env.node;
    // Package-owned symbols referenced outside lambdas/object-ctors need no capture.
    if ((symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
            bLangNode.getKind() != NodeKind.ARROW_EXPR && bLangNode.getKind() != NodeKind.EXPR_FUNCTION_BODY &&
            encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA) &&
            !encInvokable.flagSet.contains(Flag.OBJECT_CTOR)) {
        return;
    }
    if (!symbol.closure) {
        // Try registering on the immediately enclosing lambda/arrow/record scope first.
        if (searchClosureVariableInExpressions(symbol, pos, env, encInvokable, bLangNode)) {
            return;
        }
    }
    BLangNode node = bLangNode;
    if (isObjectCtorClass(node)) {
        BLangClassDefinition classDef = (BLangClassDefinition) node;
        OCEDynamicEnvironmentData oceData = classDef.oceEnvData;
        BLangFunction currentFunc = (BLangFunction) encInvokable;
        // In a non-attached function of an object-ctor, a symbol resolvable in the
        // captured closure env (and not package-owned) is an object-ctor closure.
        if ((currentFunc != null) && !currentFunc.attachedFunction &&
                !(currentFunc.symbol.receiverSymbol == symbol)) {
            BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(oceData.capturedClosureEnv, symbol.name,
                    SymTag.VARIABLE);
            if (resolvedSymbol != symTable.notFoundSymbol && !resolvedSymbol.closure) {
                if (resolvedSymbol.owner.getKind() != SymbolKind.PACKAGE) {
                    updateObjectCtorClosureSymbols(pos, currentFunc, resolvedSymbol, classDef);
                    return;
                }
            }
        }
    }
    // Walk the enclosing envs outward looking for the scope that must capture the symbol.
    SymbolEnv cEnv = env;
    while (node != null) {
        if (node.getKind() == NodeKind.FUNCTION) {
            BLangFunction function = (BLangFunction) node;
            // A plain (non-ctor, non-attached) function boundary ends the search.
            if (!function.flagSet.contains(Flag.OBJECT_CTOR) && !function.flagSet.contains(Flag.ATTACHED)) {
                break;
            }
        }
        if (!symbol.closure) {
            if (searchClosureVariableInExpressions(symbol, pos, env, encInvokable, node)) {
                return;
            }
        }
        if (isObjectCtorClass(node)) {
            BLangFunction currentFunction = (BLangFunction) encInvokable;
            // `self` of an attached function is never a closure variable.
            if ((currentFunction != null) && currentFunction.attachedFunction &&
                    (currentFunction.symbol.receiverSymbol == symbol)) {
                return;
            }
            SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
            BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                    SymTag.VARIABLE);
            BLangClassDefinition classDef = (BLangClassDefinition) node;
            if (resolvedSymbol != symTable.notFoundSymbol) {
                if (resolvedSymbol.owner.getKind() == SymbolKind.PACKAGE) {
                    // Package-level symbols are globally reachable; no capture needed.
                    break;
                }
                updateObjectCtorClosureSymbols(pos, currentFunction, resolvedSymbol, classDef);
                return;
            }
            break;
        }
        SymbolEnv enclEnv = cEnv.enclEnv;
        if (enclEnv == null) {
            break;
        }
        cEnv = enclEnv;
        node = cEnv.node;
    }
}
/**
 * Returns true if the node is a class definition generated from an
 * object-constructor expression (carries the OBJECT_CTOR flag).
 */
private boolean isObjectCtorClass(BLangNode node) {
    if (node.getKind() != NodeKind.CLASS_DEFN) {
        return false;
    }
    return ((BLangClassDefinition) node).flagSet.contains(Flag.OBJECT_CTOR);
}
/**
 * Attempts to register the symbol as a closure variable on the nearest capturing
 * construct: an enclosing lambda, an arrow expression, or a record-type default
 * value scope. Returns true when the symbol was registered.
 */
private boolean searchClosureVariableInExpressions(BSymbol symbol, Location pos, SymbolEnv env,
BLangInvokableNode encInvokable, BLangNode bLangNode) {
    // Case 1: referenced inside a lambda, and not one of the lambda's own parameters.
    if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA)
            && !isFunctionArgument(symbol, encInvokable.requiredParams)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol =
                symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
            return true;
        }
    }
    // Case 2: referenced inside an arrow expression, and not one of its parameters.
    if (bLangNode.getKind() == NodeKind.ARROW_EXPR
            && !isFunctionArgument(symbol, ((BLangArrowFunction) bLangNode).params)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol =
                symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol) {
            resolvedSymbol.closure = true;
            ((BLangArrowFunction) bLangNode).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
            return true;
        }
    }
    // Case 3: referenced inside a record type's default-value expressions.
    if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType);
        BSymbol resolvedSymbol =
                symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null &&
                !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
            return true;
        }
    }
    return false;
}
/**
 * Records a resolved symbol as a closure capture of an object-constructor class:
 * marks the class and symbol, registers the capture on the current function, files
 * the symbol under function-parameter vs block captures, and propagates the capture
 * to enclosing object-ctor classes.
 */
private void updateObjectCtorClosureSymbols(Location pos, BLangFunction currentFunction, BSymbol resolvedSymbol,
BLangClassDefinition classDef) {
    classDef.hasClosureVars = true;
    resolvedSymbol.closure = true;
    if (currentFunction != null) {
        currentFunction.closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
    }
    OCEDynamicEnvironmentData oceEnvData = classDef.oceEnvData;
    // Parameters (including the rest param) of the current function are tracked
    // separately from block-scoped captures.
    if (currentFunction != null && (currentFunction.symbol.params.contains(resolvedSymbol)
            || (currentFunction.symbol.restParam == resolvedSymbol))) {
        oceEnvData.closureFuncSymbols.add(resolvedSymbol);
    } else {
        oceEnvData.closureBlockSymbols.add(resolvedSymbol);
    }
    updateProceedingClasses(env.enclEnv, oceEnvData, classDef);
}
/**
 * Propagates closure-capture information from an object-constructor class to every
 * enclosing class definition, stopping at the package level. Each distinct enclosing
 * class is flagged as having closure vars and inherits the captured symbols.
 */
private void updateProceedingClasses(SymbolEnv envArg, OCEDynamicEnvironmentData oceEnvData,
BLangClassDefinition origClassDef) {
    for (SymbolEnv currentEnv = envArg; currentEnv != null; currentEnv = currentEnv.enclEnv) {
        BLangNode node = currentEnv.node;
        if (node.getKind() == NodeKind.PACKAGE) {
            // Reached module scope; nothing further encloses the class.
            return;
        }
        if (node.getKind() != NodeKind.CLASS_DEFN) {
            continue;
        }
        BLangClassDefinition enclosingClass = (BLangClassDefinition) node;
        if (enclosingClass == origClassDef) {
            continue;
        }
        enclosingClass.hasClosureVars = true;
        OCEDynamicEnvironmentData parentOceData = enclosingClass.oceEnvData;
        oceEnvData.parents.push(enclosingClass);
        parentOceData.closureFuncSymbols.addAll(oceEnvData.closureFuncSymbols);
        parentOceData.closureBlockSymbols.addAll(oceEnvData.closureBlockSymbols);
    }
}
private boolean isNotFunction(BSymbol funcSymbol) {
if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
|| (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
return false;
}
if (isFunctionPointer(funcSymbol)) {
return false;
}
return true;
}
private boolean isFunctionPointer(BSymbol funcSymbol) {
if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
return false;
}
return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE
&& funcSymbol.kind == SymbolKind.FUNCTION
&& !Symbols.isNative(funcSymbol);
}
private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr,
BType expectedType) {
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(errorConstructorExpr.namedArgs.size());
for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) {
BType target = checkErrCtrTargetTypeAndSetSymbol(namedArgsExpression, expectedType);
BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression);
BType type = checkExpr(clone, env, target);
if (type == symTable.semanticError) {
checkExpr(namedArgsExpression, env);
} else {
checkExpr(namedArgsExpression, env, target);
}
namedArgs.add(namedArgsExpression);
}
return namedArgs;
}
private BType checkErrCtrTargetTypeAndSetSymbol(BLangNamedArgsExpression namedArgsExpression, BType expectedType) {
BType type = Types.getReferredType(expectedType);
if (type == symTable.semanticError) {
return symTable.semanticError;
}
if (type.tag == TypeTags.MAP) {
return ((BMapType) type).constraint;
}
if (type.tag != TypeTags.RECORD) {
return symTable.semanticError;
}
BRecordType recordType = (BRecordType) type;
BField targetField = recordType.fields.get(namedArgsExpression.name.value);
if (targetField != null) {
namedArgsExpression.varSymbol = targetField.symbol;
return targetField.type;
}
if (!recordType.sealed && !recordType.fields.isEmpty()) {
dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name,
recordType);
}
return recordType.sealed ? symTable.noType : recordType.restFieldType;
}
    /**
     * Type-checks a method invocation on an object-typed expression ({@code obj.foo(...)}).
     * Resolves the attached method (or an invocable object field used as a function pointer, or a lang-lib
     * method as a last resort), then validates init/remote/resource invocation restrictions before checking
     * arguments and return type. Errors are reported via {@code dlog} and may set {@code resultType}.
     */
    private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) {
        // Service methods may only be invoked through `self`.
        if (objectType.getKind() == TypeKind.SERVICE &&
                !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION);
            return;
        }
        // Attached methods are stored under a mangled "<type>.<method>" name.
        Name funcName =
                names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
        BSymbol funcSymbol =
                symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
        if (funcSymbol == symTable.notFoundSymbol) {
            // Fall back to an object field of function type, invoked as a function pointer.
            BSymbol invocableField = symResolver.resolveInvocableObjectField(
                    iExpr.pos, env, names.fromIdNode(iExpr.name), (BObjectTypeSymbol) objectType.tsymbol);
            if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
                funcSymbol = invocableField;
                iExpr.functionPointerInvocation = true;
            }
        }
        if (funcSymbol == symTable.notFoundSymbol || Types.getReferredType(funcSymbol.type).tag != TypeTags.INVOKABLE) {
            // Last resort: a lang-lib method applicable to the object type.
            if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) {
                dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value,
                        objectType);
                resultType = symTable.semanticError;
                return;
            }
        } else {
            iExpr.symbol = funcSymbol;
        }
        // The user-defined init method is only callable via `self`.
        if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) &&
                !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION);
        }
        // Remote methods require action-invocation syntax; resource methods are not directly invocable.
        if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
        }
        if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
        }
        checkInvocationParamAndReturnType(iExpr);
    }
    /**
     * Type-checks an action invocation ({@code obj->foo(...)} or {@code start obj.foo(...)}) against an
     * object (client/endpoint) type. Resolves the remote method (or an invocable field, or a lang-lib
     * method), validates remote/async invocation rules, then checks arguments and return type.
     * Errors go through {@code dlog}; failures set {@code resultType} to {@code semanticError}.
     */
    private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) {
        // Action syntax (`->`) is only valid on endpoint-tagged symbols (unless async).
        if (checkInvalidActionInvocation(aInv)) {
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, aInv.expr.getBType());
            this.resultType = symTable.semanticError;
            aInv.symbol = symTable.notFoundSymbol;
            return;
        }
        // Remote methods are stored under a mangled "<type>.<method>" name.
        Name remoteMethodQName = names
                .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value));
        Name actionName = names.fromIdNode(aInv.name);
        BSymbol remoteFuncSymbol = symResolver.resolveObjectMethod(aInv.pos, env,
                remoteMethodQName, (BObjectTypeSymbol) Types.getReferredType(expType).tsymbol);
        if (remoteFuncSymbol == symTable.notFoundSymbol) {
            // Fall back to an object field of function type, invoked as a function pointer.
            BSymbol invocableField = symResolver.resolveInvocableObjectField(
                    aInv.pos, env, names.fromIdNode(aInv.name), (BObjectTypeSymbol) expType.tsymbol);
            if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
                remoteFuncSymbol = invocableField;
                aInv.functionPointerInvocation = true;
            }
        }
        if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) {
            dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType);
            resultType = symTable.semanticError;
            return;
        }
        // Non-remote methods must use method-call syntax unless invoked asynchronously (`start`).
        if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) {
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName);
            this.resultType = symTable.semanticError;
            return;
        }
        // A client remote method whose return contains a required `never` member cannot be called.
        if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) &&
                Symbols.isFlagOn(expType.flags, Flags.CLIENT) &&
                types.isNeverTypeOrStructureTypeWithARequiredNeverMember
                        ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) {
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL);
        }
        aInv.symbol = remoteFuncSymbol;
        checkInvocationParamAndReturnType(aInv);
    }
private boolean checkInvalidActionInvocation(BLangInvocation.BLangActionInvocation aInv) {
return aInv.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
(((((BLangSimpleVarRef) aInv.expr).symbol.tag & SymTag.ENDPOINT) !=
SymTag.ENDPOINT) && !aInv.async);
}
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) {
return getLangLibMethod(iExpr, bType) != symTable.notFoundSymbol;
}
    /**
     * Looks up a lang-lib method for {@code bType} matching the invocation name. If found, marks the
     * invocation as a lang-lib invocation, prepends the receiver expression as the first argument
     * (lang-lib methods take the receiver explicitly), and type-checks the call inside a dedicated
     * invocation environment before restoring the previous one.
     *
     * @return the resolved lang-lib method symbol, or {@code symTable.notFoundSymbol} if none exists
     */
    private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) {
        Name funcName = names.fromString(iExpr.name.value);
        BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName, env);
        if (funcSymbol == symTable.notFoundSymbol) {
            return symTable.notFoundSymbol;
        }
        iExpr.symbol = funcSymbol;
        iExpr.langLibInvocation = true;
        // Save and swap the environment; restored after argument checking below.
        SymbolEnv enclEnv = this.env;
        this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
        // The receiver becomes the first positional argument of the lang-lib function.
        iExpr.argExprs.add(0, iExpr.expr);
        checkInvocationParamAndReturnType(iExpr);
        this.env = enclEnv;
        return funcSymbol;
    }
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
BType actualType = checkInvocationParam(iExpr);
resultType = types.checkType(iExpr, actualType, this.expType);
}
private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams,
Set<String> requiredParamNames) {
if (openIncRecordParams.size() != 1) {
return null;
}
LinkedHashMap<String, BField> fields =
((BRecordType) Types.getReferredType(openIncRecordParams.get(0).type)).fields;
for (String paramName : requiredParamNames) {
if (!fields.containsKey(paramName)) {
return null;
}
}
return openIncRecordParams.get(0);
}
    /**
     * Scans the invokable's parameters, collecting (into the {@code incRecordParams} output list) the
     * individual fields of included-record parameters, and gathering the set of all required parameter
     * names. Included-record parameters whose fields are all {@code never}-typed but that have an open
     * rest field are candidates for accepting additional named arguments.
     *
     * @param incRecordParams output parameter — populated with field symbols of included-record params
     * @return the single open included-record parameter that may accept extra named args, or null
     */
    private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol,
                                                                   List<BVarSymbol> incRecordParams) {
        Set<String> requiredParamNames = new HashSet<>();
        List<BVarSymbol> openIncRecordParams = new ArrayList<>();
        for (BVarSymbol paramSymbol : invokableSymbol.params) {
            BType paramType = Types.getReferredType(paramSymbol.type);
            if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) &&
                    paramType.getKind() == TypeKind.RECORD) {
                // Fields typed `never` are disallowed fields; only real fields become named-arg targets.
                boolean recordWithDisallowFieldsOnly = true;
                LinkedHashMap<String, BField> fields = ((BRecordType) paramType).fields;
                for (String fieldName : fields.keySet()) {
                    BField field = fields.get(fieldName);
                    if (field.symbol.type.tag != TypeTags.NEVER) {
                        recordWithDisallowFieldsOnly = false;
                        incRecordParams.add(field.symbol);
                        requiredParamNames.add(fieldName);
                    }
                }
                // An all-`never` record with an open rest field can absorb arbitrary named args.
                if (recordWithDisallowFieldsOnly && ((BRecordType) paramType).restFieldType != symTable.noType) {
                    openIncRecordParams.add(paramSymbol);
                }
            } else {
                requiredParamNames.add(paramSymbol.name.value);
            }
        }
        return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames);
    }
    /**
     * Partitions the invocation's argument expressions into required (positional/named) args and rest
     * args, validating argument ordering rules (no positional or rest args after named args) and basic
     * arity for named args, then delegates the per-argument type checking to
     * {@link #checkInvocationArgs}.
     *
     * @return the invocation's return type, or {@code semanticError}/{@code noType} on failure
     */
    private BType checkInvocationParam(BLangInvocation iExpr) {
        if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
            return symTable.semanticError;
        }
        BType invocableType = Types.getReferredType(iExpr.symbol.type);
        if (invocableType.tag != TypeTags.INVOKABLE) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
            return symTable.noType;
        }
        BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
        List<BType> paramTypes = ((BInvokableType) invocableType).getParameterTypes();
        List<BVarSymbol> incRecordParams = new ArrayList<>();
        BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
                incRecordParams);
        int parameterCountForPositionalArgs = paramTypes.size();
        // Named args can additionally target included-record fields.
        int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
        iExpr.requiredArgs = new ArrayList<>();
        // An included-record param with at least one real (non-`never`) field cannot itself be
        // targeted by a named arg, so it is excluded from the named-arg budget.
        for (BVarSymbol symbol : invokableSymbol.params) {
            if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
                    Types.getReferredType(symbol.type).tag != TypeTags.RECORD) {
                continue;
            }
            LinkedHashMap<String, BField> fields =
                    ((BRecordType) Types.getReferredType(symbol.type)).fields;
            if (fields.isEmpty()) {
                continue;
            }
            for (String field : fields.keySet()) {
                if (Types.getReferredType(fields.get(field).type).tag != TypeTags.NEVER) {
                    parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
                    break;
                }
            }
        }
        int i = 0;
        BLangExpression vararg = null;
        boolean foundNamedArg = false;
        // Partition args: positional -> requiredArgs/restArgs, named -> requiredArgs, `...x` -> vararg.
        for (BLangExpression expr : iExpr.argExprs) {
            switch (expr.getKind()) {
                case NAMED_ARGS_EXPR:
                    foundNamedArg = true;
                    if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) {
                        iExpr.requiredArgs.add(expr);
                    } else {
                        dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
                    }
                    i++;
                    break;
                case REST_ARGS_EXPR:
                    if (foundNamedArg) {
                        dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
                        continue;
                    }
                    vararg = expr;
                    break;
                default: // positional arg
                    if (foundNamedArg) {
                        dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
                    }
                    if (i < parameterCountForPositionalArgs) {
                        iExpr.requiredArgs.add(expr);
                    } else {
                        iExpr.restArgs.add(expr);
                    }
                    i++;
                    break;
            }
        }
        return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
                incRecordParamAllowAdditionalFields);
    }
    /**
     * Type-checks the partitioned arguments of an invocation against the invokable's parameters:
     * matches positional and named args to parameters, reports missing required parameters and excess
     * arguments, handles a spread (`...x`) vararg — including synthesizing a tuple/record type when the
     * vararg must also cover remaining non-rest parameters — checks rest args against the rest param's
     * array/tuple type, and finally derives the return type (unifying parameterized returns and wrapping
     * in a future for async action invocations).
     *
     * @return the invocation's return type, or {@code semanticError} on failure
     */
    private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg,
                                      List<BVarSymbol> incRecordParams,
                                      BVarSymbol incRecordParamAllowAdditionalFields) {
        BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
        BInvokableType bInvokableType = (BInvokableType) Types.getReferredType(invokableSymbol.type);
        BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol;
        List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params);
        List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
        List<BVarSymbol> valueProvidedParams = new ArrayList<>();
        int nonRestArgCount = nonRestArgs.size();
        // Params still awaiting a value; entries are removed as args are matched below.
        List<BVarSymbol> requiredParams = new ArrayList<>(nonRestParams.size() + nonRestArgCount);
        List<BVarSymbol> requiredIncRecordParams = new ArrayList<>(incRecordParams.size() + nonRestArgCount);
        for (BVarSymbol nonRestParam : nonRestParams) {
            if (nonRestParam.isDefaultable) {
                continue;
            }
            requiredParams.add(nonRestParam);
        }
        for (BVarSymbol incRecordParam : incRecordParams) {
            if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) {
                requiredIncRecordParams.add(incRecordParam);
            }
        }
        int i = 0;
        for (; i < nonRestArgCount; i++) {
            BLangExpression arg = nonRestArgs.get(i);
            // Lang-lib case: the receiver was prepended as the first arg and is already type-checked;
            // only re-verify it against the first parameter type (with special char-string handling).
            if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
                BType expectedType = paramTypes.get(i);
                BType actualType = arg.getBType();
                if (Types.getReferredType(expectedType) == symTable.charStringType) {
                    arg.cloneAttempt++;
                    BLangExpression clonedArg = nodeCloner.cloneNode(arg);
                    BType argType = checkExprSilent(clonedArg, expectedType, env);
                    if (argType != symTable.semanticError) {
                        actualType = argType;
                    }
                }
                types.checkType(arg.pos, actualType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                types.setImplicitCastExpr(arg, arg.getBType(), expectedType);
            }
            if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
                // Positional arg: matched to the parameter at the same index.
                if (i < nonRestParams.size()) {
                    BVarSymbol param = nonRestParams.get(i);
                    checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation);
                    valueProvidedParams.add(param);
                    requiredParams.remove(param);
                    continue;
                }
                break;
            }
            if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
                BLangIdentifier argName = ((NamedArgNode) arg).getName();
                BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr,
                        nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields);
                if (varSym == null) {
                    dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName);
                    break;
                }
                requiredParams.remove(varSym);
                requiredIncRecordParams.remove(varSym);
                if (valueProvidedParams.contains(varSym)) {
                    dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
                    continue;
                }
                checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation);
                ((BLangNamedArgsExpression) arg).varSymbol = varSym;
                valueProvidedParams.add(varSym);
            }
        }
        BVarSymbol restParam = invokableTypeSymbol.restParam;
        boolean errored = false;
        // Without a vararg, any still-unmatched non-included required param is missing.
        if (!requiredParams.isEmpty() && vararg == null) {
            for (BVarSymbol requiredParam : requiredParams) {
                if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) {
                    dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name,
                            iExpr.name.value);
                    errored = true;
                }
            }
        }
        // Report required included-record fields whose owning record param is itself still unmatched.
        if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) {
            for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) {
                for (BVarSymbol requiredParam : requiredParams) {
                    if (Types.getReferredType(requiredParam.type) ==
                            Types.getReferredType(requiredIncRecordParam.owner.type)) {
                        dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER,
                                requiredIncRecordParam.name, iExpr.name.value);
                        errored = true;
                    }
                }
            }
        }
        if (restParam == null &&
                (!iExpr.restArgs.isEmpty() ||
                        (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
            errored = true;
        }
        if (errored) {
            return symTable.semanticError;
        }
        BType listTypeRestArg = restParam == null ? null : restParam.type;
        BRecordType mappingTypeRestArg = null;
        // A vararg that must also fill remaining non-rest params: build a synthetic tuple type
        // (positional view) and a synthetic record type (named view) describing those params.
        if (vararg != null && nonRestArgs.size() < nonRestParams.size()) {
            PackageID pkgID = env.enclPkg.symbol.pkgID;
            List<BType> tupleMemberTypes = new ArrayList<>();
            BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL);
            mappingTypeRestArg = new BRecordType(recordSymbol);
            LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
            BType tupleRestType = null;
            BVarSymbol fieldSymbol;
            for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) {
                BType paramType = paramTypes.get(j);
                BVarSymbol nonRestParam = nonRestParams.get(j);
                Name paramName = nonRestParam.name;
                tupleMemberTypes.add(paramType);
                boolean required = requiredParams.contains(nonRestParam);
                fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
                    add(required ? Flag.REQUIRED : Flag.OPTIONAL); }}), paramName,
                        nonRestParam.getOriginalName(), pkgID, paramType, recordSymbol,
                        symTable.builtinPos, VIRTUAL);
                fields.put(paramName.value, new BField(paramName, null, fieldSymbol));
            }
            // The rest param's element/member types extend the synthetic tuple.
            if (listTypeRestArg != null) {
                if (listTypeRestArg.tag == TypeTags.ARRAY) {
                    tupleRestType = ((BArrayType) listTypeRestArg).eType;
                } else if (listTypeRestArg.tag == TypeTags.TUPLE) {
                    BTupleType restTupleType = (BTupleType) listTypeRestArg;
                    tupleMemberTypes.addAll(restTupleType.tupleTypes);
                    if (restTupleType.restType != null) {
                        tupleRestType = restTupleType.restType;
                    }
                }
            }
            BTupleType tupleType = new BTupleType(tupleMemberTypes);
            tupleType.restType = tupleRestType;
            listTypeRestArg = tupleType;
            mappingTypeRestArg.sealed = true;
            mappingTypeRestArg.restFieldType = symTable.noType;
            mappingTypeRestArg.fields = fields;
            recordSymbol.type = mappingTypeRestArg;
            mappingTypeRestArg.tsymbol = recordSymbol;
        }
        if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
            return symTable.semanticError;
        }
        BType restType = null;
        if (vararg != null && !iExpr.restArgs.isEmpty()) {
            // Both rest args and a trailing vararg: all must fit the rest param's element type.
            BType elementType = ((BArrayType) listTypeRestArg).eType;
            for (BLangExpression restArg : iExpr.restArgs) {
                checkTypeParamExpr(restArg, this.env, elementType, true);
            }
            checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
            iExpr.restArgs.add(vararg);
            restType = this.resultType;
        } else if (vararg != null) {
            iExpr.restArgs.add(vararg);
            if (mappingTypeRestArg != null) {
                // Vararg may be either a list (tuple view) or a mapping (record view) of the params.
                LinkedHashSet<BType> restTypes = new LinkedHashSet<>();
                restTypes.add(listTypeRestArg);
                restTypes.add(mappingTypeRestArg);
                BType actualType = BUnionType.create(null, restTypes);
                checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation);
            } else {
                checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
            }
            restType = this.resultType;
        } else if (!iExpr.restArgs.isEmpty()) {
            if (listTypeRestArg.tag == TypeTags.ARRAY) {
                BType elementType = ((BArrayType) listTypeRestArg).eType;
                for (BLangExpression restArg : iExpr.restArgs) {
                    checkTypeParamExpr(restArg, this.env, elementType, true);
                    if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                        restType = this.resultType;
                    }
                }
            } else {
                // Tuple-typed rest param: match members positionally, overflow goes to its rest type.
                BTupleType tupleType = (BTupleType) listTypeRestArg;
                List<BType> tupleMemberTypes = tupleType.tupleTypes;
                BType tupleRestType = tupleType.restType;
                int tupleMemCount = tupleMemberTypes.size();
                for (int j = 0; j < iExpr.restArgs.size(); j++) {
                    BLangExpression restArg = iExpr.restArgs.get(j);
                    BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType;
                    checkTypeParamExpr(restArg, this.env, memType, true);
                    if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                        restType = this.resultType;
                    }
                }
            }
        }
        BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType());
        long invokableSymbolFlags = invokableSymbol.flags;
        // Unify parameterized returns of interface/native functions with the expected type.
        if (restType != symTable.semanticError && (Symbols.isFlagOn(invokableSymbolFlags, Flags.INTERFACE)
                || Symbols.isFlagOn(invokableSymbolFlags, Flags.NATIVE)) &&
                Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) {
            retType = unifier.build(retType, expType, iExpr, types, symTable, dlog);
        }
        // lang-lib `sort` gets additional ordered-type validation of its arguments.
        boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID);
        String sortFuncName = "sort";
        if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) {
            checkArrayLibSortFuncArgs(iExpr);
        }
        if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) {
            return this.generateFutureType(invokableSymbol, retType);
        } else {
            return retType;
        }
    }
    /**
     * Validates the arguments of a lang-lib {@code sort} invocation: the array being sorted must be of
     * an ordered type when no key function is given (or the key function is {@code ()}), and when a key
     * function is given its return type must be an ordered type.
     */
    private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) {
        // Note: arg 0 is the receiver array (prepended for lang-lib calls); arg 2, if present, is the key function.
        if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
            dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
                    iExpr.argExprs.get(0).getBType());
        }
        if (iExpr.argExprs.size() != 3) {
            return;
        }
        BLangExpression keyFunction = iExpr.argExprs.get(2);
        BType keyFunctionType = keyFunction.getBType();
        if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) {
            return;
        }
        // A nil key function means the members themselves must be orderable.
        if (keyFunctionType.tag == TypeTags.NIL) {
            if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
                dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
                        iExpr.argExprs.get(0).getBType());
            }
            return;
        }
        Location pos;
        BType returnType;
        // Extract the key function's return type from whichever expression form it takes.
        if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            pos = keyFunction.pos;
            returnType = keyFunction.getBType().getReturnType();
        } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) {
            BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction);
            pos = arrowFunction.body.expr.pos;
            returnType = arrowFunction.body.expr.getBType();
            if (returnType.tag == TypeTags.SEMANTIC_ERROR) {
                return;
            }
        } else {
            BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction;
            pos = keyLambdaFunction.function.pos;
            returnType = keyLambdaFunction.function.getBType().getReturnType();
        }
        if (!types.isOrderedType(returnType, false)) {
            dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType);
        }
    }
private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr,
List<BVarSymbol> nonRestParams,
List<BVarSymbol> incRecordParams,
BVarSymbol incRecordParamAllowAdditionalFields) {
for (BVarSymbol nonRestParam : nonRestParams) {
if (nonRestParam.getName().value.equals(argName.value)) {
return nonRestParam;
}
}
for (BVarSymbol incRecordParam : incRecordParams) {
if (incRecordParam.getName().value.equals(argName.value)) {
return incRecordParam;
}
}
if (incRecordParamAllowAdditionalFields != null) {
BRecordType incRecordType =
(BRecordType) Types.getReferredType(incRecordParamAllowAdditionalFields.type);
checkExpr(expr, env, incRecordType.restFieldType);
if (!incRecordType.fields.containsKey(argName.value)) {
return new BVarSymbol(0, names.fromIdNode(argName), names.originalNameFromIdNode(argName),
null, symTable.noType, null, argName.pos, VIRTUAL);
}
}
return null;
}
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
boolean isWorkerStart = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX);
return new BFutureType(TypeTags.FUTURE, retType, null, isWorkerStart);
}
    // Convenience overload: reports type-parameter diagnostics at the argument's own position.
    private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType,
                                    boolean inferTypeForNumericLiteral) {
        checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral);
    }
    /**
     * Type-checks an argument in a context that may involve type parameters. When the environment does
     * not require type-parameter resolution this is a plain expression check. Otherwise, expressions
     * whose type should be inferred (see {@link #requireTypeInference}) are checked against the bound
     * type of the expected type-parameter and the inferred type is matched back; other expressions are
     * checked directly and their resulting type is matched against the expected type parameter.
     */
    private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType,
                                    boolean inferTypeForNumericLiteral) {
        if (typeParamAnalyzer.notRequireTypeParams(env)) {
            checkExpr(arg, env, expectedType);
            return;
        }
        if (requireTypeInference(arg, inferTypeForNumericLiteral)) {
            // Check against the type-param's bound type, then propagate the inferred type back.
            BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env);
            BType inferredType = checkExpr(arg, env, expType);
            typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType);
            types.checkType(arg.pos, inferredType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            return;
        }
        checkExpr(arg, env, expectedType);
        typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType);
    }
private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) {
switch (expr.getKind()) {
case GROUP_EXPR:
return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral);
case ARROW_EXPR:
case LIST_CONSTRUCTOR_EXPR:
case RECORD_LITERAL_EXPR:
return true;
case ELVIS_EXPR:
case TERNARY_EXPR:
case NUMERIC_LITERAL:
return inferTypeForNumericLiteral;
default:
return false;
}
}
    /**
     * Type-checks one field of a mapping constructor against the target mapping type (record or map).
     * Handles key-value fields, variable-name fields, and spread-operator fields; applies readonly
     * field rules; and finally checks the field's value expression against the determined field type.
     *
     * @return the checked type of the field's value, or {@code semanticError}/{@code noType} for
     *         spread fields and failures
     */
    private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) {
        BType fieldType = symTable.semanticError;
        boolean keyValueField = field.isKeyValueField();
        boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;
        boolean readOnlyConstructorField = false;
        String fieldName = null;
        Location pos = null;
        BLangExpression valueExpr = null;
        if (keyValueField) {
            valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
        } else if (!spreadOpField) {
            // A variable-name field (`{x}`) is itself the value expression.
            valueExpr = (BLangRecordVarNameField) field;
        }
        switch (mappingType.tag) {
            case TypeTags.RECORD:
                if (keyValueField) {
                    BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                    BLangRecordKey key = keyValField.key;
                    TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey,
                            (BRecordType) mappingType);
                    fieldType = typeSymbolPair.determinedType;
                    key.fieldSymbol = typeSymbolPair.fieldSymbol;
                    readOnlyConstructorField = keyValField.readonly;
                    pos = key.expr.pos;
                    fieldName = getKeyValueFieldName(keyValField);
                } else if (spreadOpField) {
                    BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                    checkExpr(spreadExpr, this.env);
                    BType spreadExprType = Types.getReferredType(spreadExpr.getBType());
                    // Spreading a map into a record: its constraint must fit every possible field type.
                    if (spreadExprType.tag == TypeTags.MAP) {
                        return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
                                getAllFieldType((BRecordType) mappingType),
                                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                    }
                    if (spreadExprType.tag != TypeTags.RECORD) {
                        dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                                spreadExprType);
                        return symTable.semanticError;
                    }
                    // Spreading a record: every field of the source must be assignable to the
                    // corresponding target field (or the target's rest type).
                    boolean errored = false;
                    for (BField bField : ((BRecordType) spreadExprType).fields.values()) {
                        BType specFieldType = bField.type;
                        BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, this.env, bField.name,
                                mappingType.tsymbol);
                        BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name,
                                (BRecordType) mappingType);
                        if (expectedFieldType != symTable.semanticError &&
                                !types.isAssignable(specFieldType, expectedFieldType)) {
                            dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
                                    expectedFieldType, bField.name, specFieldType);
                            if (!errored) {
                                errored = true;
                            }
                        }
                    }
                    return errored ? symTable.semanticError : symTable.noType;
                } else {
                    BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                    TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false,
                            (BRecordType) mappingType);
                    fieldType = typeSymbolPair.determinedType;
                    readOnlyConstructorField = varNameField.readonly;
                    pos = varNameField.pos;
                    fieldName = getVarNameFieldName(varNameField);
                }
                break;
            case TypeTags.MAP:
                if (spreadOpField) {
                    BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                    BType spreadOpType = checkExpr(spreadExp, this.env);
                    BType spreadOpMemberType = checkSpreadFieldWithMapType(spreadOpType);
                    if (spreadOpMemberType.tag == symTable.semanticError.tag) {
                        dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                                spreadOpType);
                        return symTable.semanticError;
                    }
                    return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                }
                boolean validMapKey;
                if (keyValueField) {
                    BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                    BLangRecordKey key = keyValField.key;
                    validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey);
                    readOnlyConstructorField = keyValField.readonly;
                    pos = key.pos;
                    fieldName = getKeyValueFieldName(keyValField);
                } else {
                    BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                    validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false);
                    readOnlyConstructorField = varNameField.readonly;
                    pos = varNameField.pos;
                    fieldName = getVarNameFieldName(varNameField);
                }
                fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
                break;
        }
        // A `readonly` field needs an immutable field type; selectively-immutable types are wrapped
        // in an immutable intersection, inherently-immutable types pass through, others are errors.
        if (readOnlyConstructorField) {
            if (types.isSelectivelyImmutableType(fieldType)) {
                fieldType =
                        ImmutableTypeCloner.getImmutableIntersectionType(pos, types, fieldType, env, symTable,
                                anonymousModelHelper, names, new HashSet<>());
            } else if (!types.isInherentlyImmutableType(fieldType)) {
                dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
                fieldType = symTable.semanticError;
            }
        }
        if (spreadOpField) {
            valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
        }
        BLangExpression exprToCheck = valueExpr;
        if (this.nonErrorLoggingCheck) {
            // Silent mode: check a clone so the real node is not mutated by a speculative check.
            exprToCheck = nodeCloner.cloneNode(valueExpr);
        } else {
            ((BLangNode) field).setBType(fieldType);
        }
        return checkExpr(exprToCheck, this.env, fieldType);
    }
private BType checkSpreadFieldWithMapType(BType spreadOpType) {
switch (spreadOpType.tag) {
case TypeTags.RECORD:
List<BType> types = new ArrayList<>();
BRecordType recordType = (BRecordType) spreadOpType;
for (BField recField : recordType.fields.values()) {
types.add(recField.type);
}
if (!recordType.sealed) {
types.add(recordType.restFieldType);
}
return getRepresentativeBroadType(types);
case TypeTags.MAP:
return ((BMapType) spreadOpType).constraint;
case TypeTags.TYPEREFDESC:
return checkSpreadFieldWithMapType(Types.getReferredType(spreadOpType));
default:
return symTable.semanticError;
}
}
    /**
     * Determines the field type (and field symbol, when statically known) for a record-constructor key.
     * A computed key ({@code [expr]}) must be a string and yields the union of all possible field types;
     * an identifier or string-literal key is resolved against the record's declared fields (falling back
     * to the rest-field type for open records). Other key forms are errors.
     */
    private TypeSymbolPair checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey,
                                                     BRecordType recordType) {
        Name fieldName;
        if (computedKey) {
            checkExpr(keyExpr, this.env, symTable.stringType);
            if (keyExpr.getBType() == symTable.semanticError) {
                return new TypeSymbolPair(null, symTable.semanticError);
            }
            // The key is only known at runtime, so the value may be any field's type (or the rest type).
            LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream()
                    .map(field -> field.type)
                    .collect(Collectors.toCollection(LinkedHashSet::new));
            if (recordType.restFieldType.tag != TypeTags.NONE) {
                fieldTypes.add(recordType.restFieldType);
            }
            return new TypeSymbolPair(null, BUnionType.create(null, fieldTypes));
        } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
            fieldName = names.fromIdNode(varRef.variableName);
        } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) {
            fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
        } else {
            dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
            return new TypeSymbolPair(null, symTable.semanticError);
        }
        BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env, fieldName, recordType.tsymbol);
        BType type = checkRecordLiteralKeyByName(keyExpr.pos, fieldSymbol, fieldName, recordType);
        return new TypeSymbolPair(fieldSymbol instanceof BVarSymbol ? (BVarSymbol) fieldSymbol : null, type);
    }
private BType checkRecordLiteralKeyByName(Location location, BSymbol fieldSymbol, Name key,
BRecordType recordType) {
if (fieldSymbol != symTable.notFoundSymbol) {
return fieldSymbol.type;
}
if (recordType.sealed) {
dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key,
recordType.tsymbol.type.getKind().typeName(), recordType);
return symTable.semanticError;
}
return recordType.restFieldType;
}
private BType getAllFieldType(BRecordType recordType) {
LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
for (BField field : recordType.fields.values()) {
possibleTypes.add(field.type);
}
BType restFieldType = recordType.restFieldType;
if (restFieldType != null && restFieldType != symTable.noType) {
possibleTypes.add(restFieldType);
}
return BUnionType.create(null, possibleTypes);
}
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) {
if (computedKey) {
checkExpr(keyExpr, this.env, symTable.stringType);
if (keyExpr.getBType() == symTable.semanticError) {
return false;
}
return true;
} else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF ||
(keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING)) {
return true;
}
dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
return false;
}
private BType addNilForNillableAccessType(BType actualType) {
if (actualType.isNullable()) {
return actualType;
}
return BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves an access to a required (non-optional) record field. On success the
 * resolved symbol is stored on the access expression.
 *
 * @return the field type, or semanticError when the field is missing or optional
 */
private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);

    // Only a resolved, non-optional field qualifies as a required-field access.
    if (resolved == symTable.notFoundSymbol || Symbols.isOptional(resolved)) {
        return symTable.semanticError;
    }

    varReferExpr.symbol = resolved;
    return resolved.type;
}
/**
 * Resolves an access to an optional record field. On success the resolved symbol
 * is stored on the access expression.
 *
 * @return the field type, or semanticError when the field is missing or not optional
 */
private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);

    // The field must exist and be declared optional.
    if (resolved == symTable.notFoundSymbol || !Symbols.isOptional(resolved)) {
        return symTable.semanticError;
    }

    varReferExpr.symbol = resolved;
    return resolved.type;
}
/**
 * Resolves an access that can only be satisfied by the record's rest field: the
 * name must not be a declared field, and the record must be open.
 *
 * @return the rest-field type, or semanticError otherwise
 */
private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                         BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);

    // A declared field never goes through the rest type, and a sealed record has no rest type.
    if (resolved != symTable.notFoundSymbol || recordType.sealed) {
        return symTable.semanticError;
    }

    return recordType.restFieldType;
}
/**
 * Resolves a field access on an object type: first as a plain field, then as an
 * attached function of the object. Sets the resolved symbol on the access expression.
 *
 * @param bLangFieldBasedAccess the field access expression being checked
 * @param fieldName             the accessed field name
 * @param objectType            the object type the access is performed on
 * @return the field/function type, or semanticError if neither resolves (error logged)
 */
private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess,
                                     Name fieldName, BObjectType objectType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos,
            this.env, fieldName, objectType.tsymbol);

    if (fieldSymbol != symTable.notFoundSymbol) {
        // Plain field found on the object.
        bLangFieldBasedAccess.symbol = fieldSymbol;
        return fieldSymbol.type;
    }

    // Not a field: retry as an attached function using the mangled attached-function name.
    Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value,
            fieldName.value));
    fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol);

    if (fieldSymbol == symTable.notFoundSymbol) {
        dlog.error(bLangFieldBasedAccess.field.pos,
                DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
        return symTable.semanticError;
    }

    if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) &&
            !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) {
        // An isolated function accessed via a non-isolated object loses its ISOLATED flag;
        // clear it on a duplicate so the original symbol is not mutated.
        fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol);

        fieldSymbol.flags &= ~Flags.ISOLATED;
        fieldSymbol.type.flags &= ~Flags.ISOLATED;
    }

    bLangFieldBasedAccess.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Returns the member type of a tuple at a constant index: a fixed member when in
 * range, the rest type for indexes past the fixed members, or semanticError for
 * negative/out-of-range indexes on a tuple without a rest type.
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType tuple = (BTupleType) tupleType;
    int fixedMemberCount = tuple.tupleTypes.size();

    // An index past the fixed members falls through to the rest type, when present.
    if (indexValue >= fixedMemberCount && tuple.restType != null) {
        return tuple.restType;
    }
    if (indexValue < 0 || indexValue >= fixedMemberCount) {
        return symTable.semanticError;
    }
    return tuple.tupleTypes.get(indexValue);
}
/**
 * Type-checks the start and end tag names of an XML element literal and reports a
 * mismatch when exactly one side is a QName, or both are QNames but not equal.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);

    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // No end tag to match against (e.g. self-closing form).
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);

    boolean startIsQName = startTagName.getKind() == NodeKind.XML_QNAME;
    boolean endIsQName = endTagName.getKind() == NodeKind.XML_QNAME;

    // Equal QNames match; two non-QName (expression) tags are not statically comparable.
    if (startIsQName && endIsQName && startTagName.equals(endTagName)) {
        return;
    }
    if (!startIsQName && !endIsQName) {
        return;
    }

    dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH);
}
/**
 * Checks each interpolated expression of a string template: it must be a non-nil
 * simple basic type or string; otherwise an incompatible-types error is logged.
 */
private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) {
    for (BLangExpression expr : exprs) {
        checkExpr(expr, env);

        BType exprType = expr.getBType();
        // Skip expressions that already failed checking or that are acceptable.
        if (exprType == symTable.semanticError || types.isNonNilSimpleBasicTypeOrString(exprType)) {
            continue;
        }

        dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                   BUnionType.create(null, symTable.intType, symTable.floatType,
                                     symTable.decimalType, symTable.stringType,
                                     symTable.booleanType), exprType);
    }
}
/**
 * Concatenate the consecutive text type nodes, and get the reduced set of children.
 * Runs of non-XML (text-producing) expressions are buffered and folded into a single
 * XML text literal; XML-typed children flush the buffer and are kept as-is.
 *
 * @param exprs Child nodes
 * @param xmlElementEnv environment the child expressions are checked in
 * @return Reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
    List<BLangExpression> newChildren = new ArrayList<>();
    // Buffer of consecutive text-producing expressions awaiting concatenation.
    List<BLangExpression> tempConcatExpressions = new ArrayList<>();

    for (BLangExpression expr : exprs) {
        BType exprType;
        if (expr.getKind() == NodeKind.QUERY_EXPR) {
            // Query expressions are checked against the expected type of the element literal.
            exprType = checkExpr(expr, xmlElementEnv, expType);
        } else {
            exprType = checkExpr(expr, xmlElementEnv);
        }
        if (TypeTags.isXMLTypeTag(exprType.tag)) {
            // An XML child terminates the current text run: flush the buffer first.
            if (!tempConcatExpressions.isEmpty()) {
                newChildren.add(getXMLTextLiteral(tempConcatExpressions));
                tempConcatExpressions = new ArrayList<>();
            }
            newChildren.add(expr);
            continue;
        }

        BType type = expr.getBType();
        // Reject types that cannot appear as XML text content (non-int/string types at/after JSON).
        if (type.tag >= TypeTags.JSON &&
                !TypeTags.isIntegerTypeTag(type.tag) && !TypeTags.isStringTypeTag(type.tag)) {
            if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) {
                dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                        BUnionType.create(null, symTable.intType, symTable.floatType,
                                                symTable.decimalType, symTable.stringType,
                                                symTable.booleanType, symTable.xmlType), type);
            }
            continue;
        }

        tempConcatExpressions.add(expr);
    }

    // Flush any trailing text run.
    if (!tempConcatExpressions.isEmpty()) {
        newChildren.add(getXMLTextLiteral(tempConcatExpressions));
    }

    return newChildren;
}
/**
 * Wraps the given fragments into a single XML text literal node, positioned at the
 * first fragment and typed as xml.
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.textFragments = exprs;
    textLiteral.pos = exprs.get(0).pos;
    textLiteral.setBType(symTable.xmlType);
    return textLiteral;
}
/**
 * Computes the final type of an access expression by widening the actual type with
 * nil (when the access may propagate nil) and error (for error-safe navigation on a
 * possibly-erroneous parent). Also records the un-widened type as the original type.
 *
 * @param accessExpr the access expression being typed
 * @param actualType the member/field type before widening
 * @return the widened type, collapsed to a single type when only one member remains
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    // Preserve the type before nil/error widening for later phases.
    accessExpr.originalType = actualType;

    BUnionType unionType = BUnionType.create(null, actualType);

    if (returnsNull(accessExpr)) {
        unionType.add(symTable.nilType);
    }

    BType parentType = accessExpr.expr.getBType();
    // Error-safe navigation on an erroneous or error-containing parent adds error.
    if (accessExpr.errorSafeNavigation
            && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION
            && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) {
        unionType.add(symTable.errorType);
    }

    // Collapse a single-member union back to the plain type.
    if (unionType.getMemberTypes().size() == 1) {
        return unionType.getMemberTypes().toArray(new BType[0])[0];
    }

    return unionType;
}
/**
 * Decides whether an access expression may yield nil: true for a nillable non-JSON
 * parent, and for index access on a map whose constraint is neither any nor json.
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.getBType();

    // A nillable (non-JSON) parent makes the access result nillable.
    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }

    // Otherwise only map index access can introduce nil.
    if (parentType.tag == TypeTags.MAP
            && accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BType constraint = ((BMapType) parentType).constraint;
        return constraint != null && constraint.tag != TypeTags.ANY && constraint.tag != TypeTags.JSON;
    }

    return false;
}
/**
 * Checks a field access on an object type or a union of object types. For a union,
 * the field must be accessible on every member; the result is the union of the
 * per-member field types (collapsed when they all agree).
 */
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    // Single object type: check directly.
    if (varRefType.tag == TypeTags.OBJECT) {
        return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
    }

    // Union of objects: fail fast on the first member that rejects the access.
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memberType);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        memberFieldTypes.add(fieldType);
    }

    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Checks a (rvalue) field access on a record type or a union of records.
 * For a plain record, an optional non-nillable field read is widened with nil;
 * otherwise the field must be a required field. For a union, the access is valid
 * only when no member leaves the field both nillable and optional, and the result
 * is the union of the per-member field types.
 *
 * @return the access type, or semanticError when the access is invalid
 */
private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType type, Name fieldName) {
    BType varRefType = Types.getReferredType(type);
    if (varRefType.tag == TypeTags.RECORD) {
        BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, this.env,
                fieldName, varRefType.tsymbol);

        // Reading an optional, non-nillable field (not as an lvalue) yields fieldType|().
        if (Symbols.isOptional(fieldSymbol) && !fieldSymbol.type.isNullable() && !fieldAccessExpr.isLValue) {
            fieldAccessExpr.symbol = fieldSymbol;
            return addNilForNillableAccessType(fieldSymbol.type);
        }

        // Otherwise the field must be required.
        return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }

    // Union of records: reject when some member has the field nillable while another
    // declares it optional — field access cannot be used in that case.
    Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();

    for (BType memType : memberTypes) {
        BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, this.env,
                fieldName, memType.tsymbol);
        if (fieldSymbol.type.isNullable() &&
                isFieldOptionalInRecords(((BUnionType) varRefType), fieldName, fieldAccessExpr)) {
            return symTable.semanticError;
        }
    }

    // Recurse per member; every member must admit the access.
    LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

    for (BType memType : memberTypes) {
        BType individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);

        if (individualFieldType == symTable.semanticError) {
            return individualFieldType;
        }

        fieldTypeMembers.add(individualFieldType);
    }

    if (fieldTypeMembers.size() == 1) {
        return fieldTypeMembers.iterator().next();
    }

    return BUnionType.create(null, fieldTypeMembers);
}
/**
 * Returns true when at least one member record of the union declares the given
 * field as optional.
 */
private boolean isFieldOptionalInRecords(BUnionType unionType, Name fieldName,
                                         BLangFieldBasedAccess fieldAccessExpr) {
    for (BType memberType : unionType.getMemberTypes()) {
        BSymbol resolved = symResolver.resolveStructField(fieldAccessExpr.pos, this.env,
                                                          fieldName, memberType.tsymbol);
        if (Symbols.isOptional(resolved)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks a field access used as an assignment target (lvalue) on a record or a
 * union of records. A required field is preferred; an optional field is accepted
 * as a fallback. For a union, every member must admit the assignment.
 */
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                            Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BType requiredFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName,
                                                                 (BRecordType) varRefType);
        if (requiredFieldType != symTable.semanticError) {
            return requiredFieldType;
        }
        // Not a required field: it may still be assignable as an optional field.
        return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }

    // Union: collect per-member types, failing if any member rejects the assignment.
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldType == symTable.semanticError) {
            return symTable.semanticError;
        }
        memberFieldTypes.add(fieldType);
    }

    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Checks an optional field access (x?.f) on a record or union of records.
 * For a record, a required field keeps its type; an optional field is widened with
 * nil. For a union, members that lack the field are skipped, and the presence of
 * such members widens the resulting type with nil.
 *
 * @return the access type, or semanticError when no member admits the access
 */
private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                                 Name fieldName) {
    BType refType = Types.getReferredType(varRefType);
    if (refType.tag == TypeTags.RECORD) {
        // Required field: type as-is.
        BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) refType);
        if (fieldType != symTable.semanticError) {
            return fieldType;
        }

        // Optional field: widen with nil since the field may be absent.
        fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) refType);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        return addNilForNillableAccessType(fieldType);
    }

    Set<BType> memberTypes = ((BUnionType) refType).getMemberTypes();

    BType fieldType;

    // Tracks whether some union member does not have the field at all.
    boolean nonMatchedRecordExists = false;

    LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

    for (BType memType : memberTypes) {
        BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);

        if (individualFieldType == symTable.semanticError) {
            nonMatchedRecordExists = true;
            continue;
        }

        fieldTypeMembers.add(individualFieldType);
    }

    if (fieldTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }

    if (fieldTypeMembers.size() == 1) {
        fieldType = fieldTypeMembers.iterator().next();
    } else {
        fieldType = BUnionType.create(null, fieldTypeMembers);
    }

    // Members without the field make the overall result nillable.
    return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
/**
 * Classifies each record member of a union for diagnostics: records where the field
 * is undeclared, and records where the field is declared but nillable.
 */
private RecordUnionDiagnostics checkRecordUnion(BLangFieldBasedAccess fieldAccessExpr, Set<BType> memberTypes,
                                                Name fieldName) {
    RecordUnionDiagnostics diagnostics = new RecordUnionDiagnostics();

    for (BType memberType : memberTypes) {
        BRecordType recordMember = (BRecordType) Types.getReferredType(memberType);

        if (!recordMember.getFields().containsKey(fieldName.getValue())) {
            diagnostics.undeclaredInRecords.add(recordMember);
        } else if (isNilableType(fieldAccessExpr, memberType, fieldName)) {
            diagnostics.nilableInRecords.add(recordMember);
        }
    }

    return diagnostics;
}
/**
 * Resolves the named field on the member type and reports whether its type admits nil.
 */
private boolean isNilableType(BLangFieldBasedAccess fieldAccessExpr, BType memberType,
                              Name fieldName) {
    BSymbol resolved = symResolver.resolveStructField(fieldAccessExpr.pos, this.env,
                                                      fieldName, memberType.tsymbol);
    return resolved.type.isNullable();
}
/**
 * Logs the most specific diagnostic for a failed rvalue field access on a record or
 * union of records: optional-field-needs-?. access, undeclared field on a sealed
 * record, invalid access on an open record, or the union-specific variants.
 */
private void logRhsFieldAccExprErrors(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {

        BRecordType recordVarRefType = (BRecordType) varRefType;
        boolean isFieldDeclared = recordVarRefType.getFields().containsKey(fieldName.getValue());

        if (isFieldDeclared) {
            // The field exists but plain field access failed: it must be optional,
            // which requires optional field access (?.) instead.
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.FIELD_ACCESS_CANNOT_BE_USED_TO_ACCESS_OPTIONAL_FIELDS);
        } else if (recordVarRefType.sealed) {
            // Accessing an undeclared field of a closed record.
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_RECORD, fieldName, varRefType);
        } else {
            // Accessing an undeclared field of an open record.
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_IN_RECORD_TYPE, fieldName,
                    varRefType);
        }
    } else {
        // Union of records: report which members are missing the field and which
        // leave it nillable.
        LinkedHashSet<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();

        RecordUnionDiagnostics recUnionInfo = checkRecordUnion(fieldAccessExpr, memberTypes, fieldName);

        if (recUnionInfo.hasNilableAndUndeclared()) {

            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.UNDECLARED_AND_NILABLE_FIELDS_IN_UNION_OF_RECORDS, fieldName,
                    recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords),
                    recUnionInfo.recordsToString(recUnionInfo.nilableInRecords));
        } else if (recUnionInfo.hasUndeclared()) {

            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_UNION_OF_RECORDS, fieldName,
                    recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords));
        } else if (recUnionInfo.hasNilable()) {

            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.NILABLE_FIELD_IN_UNION_OF_RECORDS, fieldName,
                    recUnionInfo.recordsToString(recUnionInfo.nilableInRecords));
        }
    }
}
/**
 * Types a field access expression, dispatching on the (referred) type of the
 * accessed expression: objects, records, lax types (json-like), accesses chained
 * after a lax access, xml, and everything else (error).
 *
 * @return the resulting access type, or semanticError on failure (error logged)
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    BType actualType = symTable.semanticError;
    varRefType = Types.getReferredType(varRefType);

    if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
        actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
        fieldAccessExpr.originalType = actualType;
    } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
        actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);

        if (actualType != symTable.semanticError) {
            fieldAccessExpr.originalType = actualType;
            return actualType;
        }

        if (!fieldAccessExpr.isLValue) {
            // Rvalue access failed: log the most specific diagnostic and bail out.
            logRhsFieldAccExprErrors(fieldAccessExpr, varRefType, fieldName);
            return actualType;
        }

        // For an lvalue we retry with assignment semantics (optional fields allowed).
        actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
        fieldAccessExpr.originalType = actualType;
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
                    fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType);
        }
    } else if (types.isLax(varRefType)) {
        if (fieldAccessExpr.isLValue) {
            // Lax types cannot be assigned to via field access.
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT,
                    varRefType);
            return symTable.semanticError;
        }
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        // Lax access may fail at runtime, so the result includes error.
        BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained access after a lax access: treat via the previous access's original type.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        if (fieldAccessExpr.isLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
        }
        actualType = symTable.xmlType;
        fieldAccessExpr.originalType = actualType;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
                varRefType);
    }

    return actualType;
}
/**
 * Resolves the namespace prefix of an ns-prefixed field access to its XMLNS symbol
 * and stores it on the access expression. When the prefix names an imported module,
 * the namespace is looked up among that module's constants. Logs an error when the
 * prefix cannot be found.
 *
 * @param fieldAccessExpr the ns-prefixed field access whose {@code nsSymbol} is set
 */
private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) {
    // Fix: dropped the redundant self-alias local (`nsPrefixedFieldAccess = fieldAccessExpr`)
    // that obscured the fact both names referenced the same node.
    String nsPrefix = fieldAccessExpr.nsPrefix.value;
    BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix));

    if (nsSymbol == symTable.notFoundSymbol) {
        dlog.error(fieldAccessExpr.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE,
                fieldAccessExpr.nsPrefix);
    } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) {
        // Prefix refers to an imported module: resolve the namespace from its constants.
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst(
                fieldAccessExpr.field.value, fieldAccessExpr.nsPrefix.value,
                (BPackageSymbol) nsSymbol, fieldAccessExpr.pos);
    } else {
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) nsSymbol;
    }
}
/**
 * True when the access expression has a recorded original type and that type is lax.
 */
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
    BType originalType = fieldBasedAccess.originalType;
    return originalType != null && types.isLax(originalType);
}
/**
 * Returns the type a lax field access yields for the given expression type:
 * json for json, string for xml/xml:Element, the constraint for maps, and for
 * unions the (collapsed) union of the per-member lax access types.
 *
 * @return the lax access type, or semanticError for unsupported types
 */
private BType getLaxFieldAccessType(BType exprType) {
    switch (exprType.tag) {
        case TypeTags.JSON:
            return symTable.jsonType;
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
            return symTable.stringType;
        case TypeTags.MAP:
            return ((BMapType) exprType).constraint;
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) exprType;
            // A union equivalent to json behaves as json.
            if (types.isSameType(symTable.jsonType, unionType)) {
                return symTable.jsonType;
            }
            LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
            unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType)));
            return memberTypes.size() == 1 ? memberTypes.iterator().next() : BUnionType.create(null, memberTypes);
    }
    return symTable.semanticError;
}
/**
 * Types an optional field access (x?.f). Nil is first stripped from a union-typed
 * accessed expression; the access is then checked against the effective type
 * (records, lax types, or chained-after-lax access), and nil is re-added to the
 * result when the accessed expression or the access itself may produce nil.
 *
 * @return the resulting access type, or semanticError on failure (error logged)
 */
private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                           Name fieldName) {
    BType actualType = symTable.semanticError;

    // Records whether nil must be re-added to the final result.
    boolean nillableExprType = false;
    BType effectiveType = varRefType;

    // Strip nil from a union-typed accessed expression to get the effective type.
    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

        if (memTypes.contains(symTable.nilType)) {
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }

            effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                    BUnionType.create(null, nilRemovedSet);
        }
    }

    if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
        actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
                    varRefType, fieldName);
        }
        fieldAccessExpr.nilSafeNavigation = nillableExprType;
        // A non-leaf nillable access records the nil-free type as the original type.
        fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (types.isLax(effectiveType)) {
        BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
        // Lax access adds error only when the access could actually fail at runtime.
        actualType = accessCouldResultInError(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained optional access after a lax access: use the previous access's original type.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        actualType = accessCouldResultInError(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
        }
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos,
                DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType);
    }

    // Re-add nil when the accessed expression was nillable or the access introduced nil.
    if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }

    return actualType;
}
/**
 * Decides whether a lax access on the given type could produce an error value at
 * runtime: true for json and xml, false for maps, and for unions true when any
 * member could fail.
 */
private boolean accessCouldResultInError(BType type) {
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
            return true;
        case TypeTags.MAP:
            return false;
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (accessCouldResultInError(memberType)) {
                    return true;
                }
            }
            return false;
        default:
            return false;
    }
}
/**
 * Types an index-based access expression (x[i]). Nil is first stripped from a
 * union-typed accessed expression (valid only on mappings, and not for lvalues);
 * the access is then dispatched on the accessed type: mappings, lists, strings,
 * xml, tables, or error. Nil is re-added to the result when the accessed
 * expression was nillable.
 *
 * @return the member access type, or semanticError on failure (error logged)
 */
private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);

    // Strip nil from a union-typed accessed expression; track it to re-add later.
    boolean nillableExprType = false;

    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

        if (memTypes.contains(symTable.nilType)) {
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }

            if (nillableExprType) {
                varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                        BUnionType.create(null, nilRemovedSet);

                // A nillable accessed expression is only allowed for mapping member access,
                // and never as an assignment target.
                if (!types.isSubTypeOfMapping(varRefType)) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                            indexBasedAccessExpr.expr.getBType());
                    return symTable.semanticError;
                }

                if (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                            indexBasedAccessExpr.expr.getBType());
                    return symTable.semanticError;
                }
            }
        }
    }

    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    BType actualType = symTable.semanticError;

    if (types.isSubTypeOfMapping(varRefType)) {
        // Mapping access: index must be a string.
        checkExpr(indexExpr, this.env, symTable.stringType);

        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }

        actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);

        if (actualType == symTable.semanticError) {
            if (Types.getReferredType(indexExpr.getBType()).tag == TypeTags.STRING
                    && isConstExpr(indexExpr)) {
                // Constant string key: report the specific undefined field.
                String fieldName = getConstFieldName(indexExpr);
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD,
                        fieldName, indexBasedAccessExpr.expr.getBType());
                return actualType;
            }

            dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType());
            return actualType;
        }

        indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
        indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (types.isSubTypeOfList(varRefType)) {
        // List access: index must be an int.
        checkExpr(indexExpr, this.env, symTable.intType);

        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }

        actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
        indexBasedAccessExpr.originalType = actualType;

        if (actualType == symTable.semanticError) {
            if (indexExpr.getBType().tag == TypeTags.INT && isConstExpr(indexExpr)) {
                // Constant index: report the out-of-range value.
                dlog.error(indexBasedAccessExpr.indexExpr.pos,
                        DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr));
                return actualType;
            }
            dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType());
            return actualType;
        }
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String access: read-only, int index, yields a single character string.
        if (indexBasedAccessExpr.isLValue) {
            dlog.error(indexBasedAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                    indexBasedAccessExpr.expr.getBType());
            return symTable.semanticError;
        }

        checkExpr(indexExpr, this.env, symTable.intType);

        if (indexExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }

        indexBasedAccessExpr.originalType = symTable.charStringType;
        actualType = symTable.charStringType;
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        // XML access: read-only, int index, keeps the xml type.
        if (indexBasedAccessExpr.isLValue) {
            indexExpr.setBType(symTable.semanticError);
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
            return actualType;
        }

        BType type = checkExpr(indexExpr, this.env, symTable.intType);
        if (type == symTable.semanticError) {
            return type;
        }
        // Note: out of range member access returns empty xml value
        indexBasedAccessExpr.originalType = varRefType;
        actualType = varRefType;
    } else if (varRefType.tag == TypeTags.TABLE) {
        // Table access: read-only; index must match the table's key type constraint.
        if (indexBasedAccessExpr.isLValue) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS,
                    varRefType);
            return symTable.semanticError;
        }
        BTableType tableType = (BTableType) Types.getReferredType(indexBasedAccessExpr.expr.getBType());
        BType keyTypeConstraint = tableType.keyTypeConstraint;
        if (tableType.keyTypeConstraint == null) {
            // Derive the key constraint from the key field names; keyless tables
            // cannot be accessed by member access.
            keyTypeConstraint = createTableKeyConstraint(tableType.fieldNameList, tableType.constraint);

            if (keyTypeConstraint == symTable.semanticError) {
                dlog.error(indexBasedAccessExpr.pos,
                        DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE,
                        indexBasedAccessExpr.expr);
                return symTable.semanticError;
            }
        }

        if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) {
            checkExpr(indexExpr, this.env, keyTypeConstraint);
            if (indexExpr.getBType() == symTable.semanticError) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                        keyTypeConstraint);
                return symTable.semanticError;
            }
        } else {
            // Multi-key access: each key expression is checked against the matching
            // member of the tuple-typed key constraint.
            List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr)
                    indexBasedAccessExpr.indexExpr).multiKeyIndexExprs;
            List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes;
            if (keyConstraintTypes.size() != multiKeyExpressionList.size()) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                        keyTypeConstraint);
                return symTable.semanticError;
            }

            for (int i = 0; i < multiKeyExpressionList.size(); i++) {
                BLangExpression keyExpr = multiKeyExpressionList.get(i);
                checkExpr(keyExpr, this.env, keyConstraintTypes.get(i));
                if (keyExpr.getBType() == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }
            }
        }

        if (expType.tag != TypeTags.NONE) {
            BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType);
            if (resultType == symTable.semanticError) {
                return symTable.semanticError;
            }
        }

        // A table lookup may miss, so the constraint is widened with nil.
        BType constraint = tableType.constraint;
        actualType = addNilForNillableAccessType(constraint);
        indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                types.getTypeWithoutNil(actualType);
    } else if (varRefType == symTable.semanticError) {
        indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
        return symTable.semanticError;
    } else {
        indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                indexBasedAccessExpr.expr.getBType());
        return symTable.semanticError;
    }

    // Re-add nil stripped from the accessed expression's type.
    if (nillableExprType && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }

    return actualType;
}
/**
 * Extracts the constant long value of an index expression: unwraps grouping, reads
 * a numeric literal directly, or reads the value of a constant reference.
 */
private Long getConstIndex(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.GROUP_EXPR) {
        // Parenthesized index: recurse into the inner expression.
        return getConstIndex(((BLangGroupExpr) indexExpr).expression);
    }
    if (indexExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
        return (Long) ((BLangLiteral) indexExpr).value;
    }
    // Otherwise the index is a reference to a constant.
    return (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Extracts the constant string value of a key expression: unwraps grouping, reads
 * a string literal directly, or reads the value of a constant reference.
 */
private String getConstFieldName(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.GROUP_EXPR) {
        // Parenthesized key: recurse into the inner expression.
        return getConstFieldName(((BLangGroupExpr) indexExpr).expression);
    }
    if (indexExpr.getKind() == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) indexExpr).value;
    }
    // Otherwise the key is a reference to a constant.
    return (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Types an index access on an array, dispatching on the index expression's type:
 * int (with constant-range checking for fixed-size arrays), finite types (at least
 * one value must be a valid index), unions of finite types (merged value spaces),
 * and type references (unwrapped).
 *
 * @return the array's element type, or semanticError when no valid index exists
 */
private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                         BArrayType arrayType) {
    BType actualType = symTable.semanticError;
    switch (indexExprType.tag) {
        case TypeTags.INT:
            BLangExpression indexExpr = indexBasedAccess.indexExpr;
            // Non-constant index or open array: no static bounds check possible.
            if (!isConstExpr(indexExpr) || arrayType.state == BArrayState.OPEN) {
                actualType = arrayType.eType;
                break;
            }
            // Constant index on a fixed-size array: check the bounds statically.
            Long indexVal = getConstIndex(indexExpr);
            actualType = indexVal >= arrayType.size || indexVal < 0 ? symTable.semanticError : arrayType.eType;
            break;
        case TypeTags.FINITE:
            // A finite index type is valid when at least one of its values is in range.
            BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
            boolean validIndexExists = false;
            for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                if (indexValue >= 0 &&
                        (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) {
                    validIndexExists = true;
                    break;
                }
            }
            if (!validIndexExists) {
                return symTable.semanticError;
            }
            actualType = arrayType.eType;
            break;
        case TypeTags.UNION:
            // Merge the value spaces of all finite members into one finite type and recheck.
            List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                    .filter(memType -> Types.getReferredType(memType).tag == TypeTags.FINITE)
                    .map(matchedType -> (BFiniteType) Types.getReferredType(matchedType))
                    .collect(Collectors.toList());

            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                finiteType = new BFiniteType(null, valueSpace);
            }

            BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
            if (elementType == symTable.semanticError) {
                return symTable.semanticError;
            }
            actualType = arrayType.eType;
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap the type reference and recheck.
            return checkArrayIndexBasedAccess(indexBasedAccess, Types.getReferredType(indexExprType),
                    arrayType);
    }
    return actualType;
}
/**
 * Types an index access on a list type: arrays and tuples are checked directly;
 * for a union of lists, members where the access is invalid are skipped and the
 * result is the union of the remaining per-member access types.
 *
 * @return the member access type, or semanticError when no member admits the access
 */
private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
    if (type.tag == TypeTags.ARRAY) {
        return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type);
    }

    if (type.tag == TypeTags.TUPLE) {
        return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType());
    }

    // Union: collect per-member access types, skipping members that reject the access.
    LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

    for (BType memType : ((BUnionType) type).getMemberTypes()) {
        BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType);

        if (individualFieldType == symTable.semanticError) {
            continue;
        }

        fieldTypeMembers.add(individualFieldType);
    }

    // Fix: idiomatic emptiness check (was `fieldTypeMembers.size() == 0`).
    if (fieldTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }

    if (fieldTypeMembers.size() == 1) {
        return fieldTypeMembers.iterator().next();
    }

    return BUnionType.create(null, fieldTypeMembers);
}
/**
 * Determines the member type for a tuple member access {@code tuple[index]} based on the
 * static type of the index expression.
 *
 * @param accessExpr  the index-based access expression
 * @param tuple       the tuple type being accessed
 * @param currentType the type of the index expression (int, finite, union, or a type reference)
 * @return the resolved member type, or {@code symTable.semanticError} when the index cannot
 *         match any member
 */
private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
    BType actualType = symTable.semanticError;
    BLangExpression indexExpr = accessExpr.indexExpr;
    switch (currentType.tag) {
        case TypeTags.INT:
            if (isConstExpr(indexExpr)) {
                // Constant index: resolve the exact member type at that position.
                actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
            } else {
                // Non-constant int index: the result may be any member, so build a union of
                // all field types of the tuple.
                BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType();
                LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
                actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null,
                        tupleTypes);
            }
            break;
        case TypeTags.FINITE:
            // Finite index type: union of the member types at every index in the value space;
            // out-of-range indexes are skipped.
            BFiniteType finiteIndexExpr = (BFiniteType) currentType;
            LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
            for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                BType fieldType = checkTupleFieldType(tuple, indexValue);
                if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                    possibleTypes.add(fieldType);
                }
            }
            if (possibleTypes.size() == 0) {
                return symTable.semanticError;
            }
            actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                    BUnionType.create(null, possibleTypes);
            break;
        case TypeTags.UNION:
            // Union index type: recurse for non-finite members; merge all finite members into a
            // single finite type and resolve that once, so overlapping indexes dedupe naturally.
            LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
            List<BFiniteType> finiteTypes = new ArrayList<>();
            ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                memType = Types.getReferredType(memType);
                if (memType.tag == TypeTags.FINITE) {
                    finiteTypes.add((BFiniteType) memType);
                } else {
                    BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                    if (possibleType.tag == TypeTags.UNION) {
                        possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                    } else {
                        possibleTypesByMember.add(possibleType);
                    }
                }
            });
            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                // NOTE(review): when the union contains no finite members this still builds a
                // finite type with an empty value space and resolves it — confirm intended.
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                finiteType = new BFiniteType(null, valueSpace);
            }
            BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
            if (possibleType.tag == TypeTags.UNION) {
                possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
            } else {
                possibleTypesByMember.add(possibleType);
            }
            // Any erroring member invalidates the whole access.
            if (possibleTypesByMember.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                    BUnionType.create(null, possibleTypesByMember);
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap the type reference and retry.
            return checkTupleIndexBasedAccess(accessExpr, tuple, Types.getReferredType(currentType));
    }
    return actualType;
}
/**
 * Accumulates the field types of the given tuple into {@code memberTypes},
 * flattening any union-typed member into its constituent types.
 *
 * @param tupleType   tuple whose member types are collected
 * @param memberTypes accumulator set (mutated and returned)
 * @return the same accumulator, for call chaining
 */
private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : tupleType.tupleTypes) {
        if (memberType.tag == TypeTags.UNION) {
            collectMemberTypes((BUnionType) memberType, memberTypes);
        } else {
            memberTypes.add(memberType);
        }
    }
    return memberTypes;
}
/**
 * Resolves the member type for an index-based access on a mapping-typed expression
 * (map, record, or a union of such types).
 *
 * @param accessExpr the index-based access expression being checked
 * @param bType      the type of the accessed expression (may be a type reference)
 * @return the resolved field type, or {@code symTable.semanticError} when the access is invalid
 */
private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType bType) {
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.MAP) {
        BType constraint = Types.getReferredType(((BMapType) type).constraint);
        // A non-lvalue map access may yield nil when the key is absent.
        return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint);
    }
    if (type.tag == TypeTags.RECORD) {
        return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType());
    }

    // Union of mapping types: resolve per member; remember whether any member failed, since a
    // failed member means the access may also produce nil.
    boolean erroredMemberExists = false;
    LinkedHashSet<BType> memberResults = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        BType memberResult = checkMappingIndexBasedAccess(accessExpr, memberType);
        if (memberResult == symTable.semanticError) {
            erroredMemberExists = true;
        } else {
            memberResults.add(memberResult);
        }
    }

    if (memberResults.isEmpty()) {
        return symTable.semanticError;
    }
    BType fieldType = memberResults.size() == 1 ? memberResults.iterator().next()
            : BUnionType.create(null, memberResults);
    return erroredMemberExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
/**
 * Determines the field type for a record member access {@code record[key]} based on the static
 * type of the key expression.
 *
 * @param accessExpr  the index-based access expression
 * @param record      the record type being accessed
 * @param currentType the type of the key expression (string, finite, union, or a type reference)
 * @return the resolved field type, or {@code symTable.semanticError} when no field matches
 */
private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) {
    BType actualType = symTable.semanticError;
    BLangExpression indexExpr = accessExpr.indexExpr;
    switch (currentType.tag) {
        case TypeTags.STRING:
            if (isConstExpr(indexExpr)) {
                // Constant key: try required field, then optional field, then rest field.
                String fieldName = Utils.escapeSpecialCharacters(getConstFieldName(indexExpr));
                actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (actualType != symTable.semanticError) {
                    return actualType;
                }
                actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (actualType == symTable.semanticError) {
                    actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType == symTable.semanticError) {
                        return actualType;
                    }
                    if (actualType == symTable.neverType) {
                        return actualType;
                    }
                    // Rest-field access may find no entry at runtime, so nil is added.
                    return addNilForNillableAccessType(actualType);
                }
                if (accessExpr.isLValue) {
                    return actualType;
                }
                // Optional-field read may yield nil.
                return addNilForNillableAccessType(actualType);
            }

            // Non-constant string key: the result may be any field's type (plus the rest type),
            // and nil unless some field type is already nilable.
            LinkedHashSet<BType> fieldTypes = record.fields.values().stream()
                    .map(field -> field.type)
                    .collect(Collectors.toCollection(LinkedHashSet::new));
            if (record.restFieldType.tag != TypeTags.NONE) {
                fieldTypes.add(record.restFieldType);
            }
            if (fieldTypes.stream().noneMatch(BType::isNullable)) {
                fieldTypes.add(symTable.nilType);
            }
            actualType = BUnionType.create(null, fieldTypes);
            break;
        case TypeTags.FINITE:
            // Finite key type: union of the field types for every key in the value space;
            // keys resolving through optional/rest fields contribute a nilable type.
            BFiniteType finiteIndexExpr = (BFiniteType) currentType;
            LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
            for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                String fieldName = (String) ((BLangLiteral) finiteMember).value;
                BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (fieldType == symTable.semanticError) {
                    fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (fieldType == symTable.semanticError) {
                        fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                    }
                    if (fieldType != symTable.semanticError) {
                        fieldType = addNilForNillableAccessType(fieldType);
                    }
                }
                if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
                    continue;
                }
                possibleTypes.add(fieldType);
            }
            if (possibleTypes.isEmpty()) {
                return symTable.semanticError;
            }
            if (possibleTypes.stream().noneMatch(BType::isNullable)) {
                possibleTypes.add(symTable.nilType);
            }
            actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                    BUnionType.create(null, possibleTypes);
            break;
        case TypeTags.UNION:
            // Union key type: recurse for non-finite members; merge all finite members into a
            // single finite type and resolve it once.
            LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
            List<BFiniteType> finiteTypes = new ArrayList<>();
            types.getAllTypes(currentType, true).forEach(memType -> {
                if (memType.tag == TypeTags.FINITE) {
                    finiteTypes.add((BFiniteType) memType);
                } else {
                    BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType);
                    if (possibleType.tag == TypeTags.UNION) {
                        possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                    } else {
                        possibleTypesByMember.add(possibleType);
                    }
                }
            });
            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                // NOTE(review): with no finite members this builds an empty-value-space finite
                // type and resolves it — confirm intended (same pattern as the tuple checker).
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                finiteType = new BFiniteType(null, valueSpace);
            }
            BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType);
            if (possibleType.tag == TypeTags.UNION) {
                possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
            } else {
                possibleTypesByMember.add(possibleType);
            }
            // Any erroring member invalidates the whole access.
            if (possibleTypesByMember.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                    BUnionType.create(null, possibleTypesByMember);
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap the type reference and retry.
            return checkRecordIndexBasedAccess(accessExpr, record,
                    Types.getReferredType(currentType));
    }
    return actualType;
}
/**
 * Flattens a type into a list: a union contributes its member types,
 * any other type contributes itself as a single-element list.
 */
private List<BType> getTypesList(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    return new ArrayList<>(((BUnionType) type).getMemberTypes());
}
/**
 * Computes the set of possible static types of a {@code match} expression: the pattern-clause
 * expression types, plus any member type of the matched expression that no pattern variable can
 * accept (such a value is added as-is to the result set).
 *
 * @param bLangMatchExpression the match expression being checked
 * @return the possible result types; a singleton {@code {semanticError}} set when either the
 *         matched expression or any pattern expression is already erroneous
 */
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType());
    LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternExprType = pattern.expr.getBType();

            // Type of the pattern expression is a possible result type.
            matchExprTypes.addAll(getTypesList(patternExprType));

            if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
                // Fix: build the error result explicitly instead of via double-brace
                // initialization, which created an anonymous LinkedHashSet subclass that
                // captured the enclosing TypeChecker instance.
                LinkedHashSet<BType> errorTypes = new LinkedHashSet<>();
                errorTypes.add(symTable.semanticError);
                return errorTypes;
            }

            assignable = this.types.isAssignable(type, pattern.variable.getBType());
            if (assignable) {
                break;
            }
        }

        if (!assignable) {
            // No pattern accepts this member type, so it flows through as a result type.
            matchExprTypes.add(type);
        }
    }

    return matchExprTypes;
}
/**
 * Returns whether a value of the given type could (transitively) contain table values, by
 * recursing through union members, map constraints, record fields (and open-record rest types),
 * array element types, and tuple members.
 *
 * NOTE(review): there is no {@code TypeTags.TABLE} case here, so a bare table type itself falls
 * through to {@code false} — presumably callers test for tables directly before calling this;
 * confirm at the call sites.
 *
 * @param type             the type to inspect
 * @param encounteredTypes types already visited; used to cut off recursive type cycles
 */
private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
    // A type already on the current path cannot add new information; stop the recursion.
    if (encounteredTypes.contains(type)) {
        return false;
    }
    encounteredTypes.add(type);

    switch (type.tag) {
        case TypeTags.UNION:
            for (BType bType1 : ((BUnionType) type).getMemberTypes()) {
                if (couldHoldTableValues(bType1, encounteredTypes)) {
                    return true;
                }
            }
            return false;
        case TypeTags.MAP:
            return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) type;
            for (BField field : recordType.fields.values()) {
                if (couldHoldTableValues(field.type, encounteredTypes)) {
                    return true;
                }
            }
            // Open records may also hold tables through their rest field.
            return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
        case TypeTags.ARRAY:
            return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
        case TypeTags.TUPLE:
            for (BType bType : ((BTupleType) type).getTupleTypes()) {
                if (couldHoldTableValues(bType, encounteredTypes)) {
                    return true;
                }
            }
            return false;
    }
    return false;
}
/**
 * Returns whether the given expression is a compile-time constant: a literal, a parenthesized
 * constant, or a reference to a declared constant symbol.
 */
private boolean isConstExpr(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.LITERAL || kind == NodeKind.NUMERIC_LITERAL) {
        return true;
    }
    if (kind == NodeKind.GROUP_EXPR) {
        // Parentheses are transparent: check the wrapped expression.
        return isConstExpr(((BLangGroupExpr) expression).expression);
    }
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
    }
    return false;
}
/**
 * Returns the compilation-unit name for the given node, derived from its source file path.
 */
private Name getCurrentCompUnit(BLangNode node) {
    String compUnitName = node.pos.lineRange().filePath();
    return names.fromString(compUnitName);
}
/**
 * Reduces a list of inferred types to a representative type: drops any type that is assignable
 * to another in the list (keeping the broader one), and returns the single survivor or a union
 * of the survivors. Mutates {@code inferredTypeList} in place.
 *
 * @param inferredTypeList candidate types; modified by this method
 * @return the representative type, or {@code semanticError} if any candidate is erroneous
 */
private BType getRepresentativeBroadType(List<BType> inferredTypeList) {
    for (int i = 0; i < inferredTypeList.size(); i++) {
        BType type = inferredTypeList.get(i);
        if (type.tag == TypeTags.SEMANTIC_ERROR) {
            return type;
        }

        for (int j = i + 1; j < inferredTypeList.size(); j++) {
            BType otherType = inferredTypeList.get(j);

            if (otherType.tag == TypeTags.SEMANTIC_ERROR) {
                return otherType;
            }

            if (types.isAssignable(otherType, type)) {
                // otherType is narrower: drop it and re-examine index j.
                inferredTypeList.remove(j);
                j -= 1;
                continue;
            }

            if (types.isAssignable(type, otherType)) {
                // type is narrower: drop it and restart the outer scan at the same index.
                inferredTypeList.remove(i);
                i -= 1;
                break;
            }
        }
    }

    if (inferredTypeList.size() == 1) {
        return inferredTypeList.get(0);
    }

    return BUnionType.create(null, inferredTypeList.toArray(new BType[0]));
}
/**
 * Infers and defines an anonymous record type for a record literal checked against {@code expType}
 * (e.g. against {@code readonly} or an open expected type): collects field types from key-value
 * fields, spread operators, and variable-name fields, then materializes a {@link BRecordType}
 * with a synthesized symbol and type definition.
 *
 * @param recordLiteral the record literal to infer a type for
 * @param expType       the expected type each field value is checked against
 * @return the inferred record type, or {@code semanticError} if any field type is erroneous
 */
private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) {
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);

    // Field name -> accumulated type/required/readonly info; insertion order preserved.
    Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>();
    // Candidate constraint types for the rest field (computed keys, map spreads, open records).
    List<BType> restFieldTypes = new ArrayList<>();

    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.isKeyValueField()) {
            BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field;
            BLangRecordKey key = keyValue.key;
            BLangExpression expression = keyValue.valueExpr;
            BLangExpression keyExpr = key.expr;
            if (key.computedKey) {
                // Computed key: the field name is unknown, so its value type goes to the rest set.
                checkExpr(keyExpr, env, symTable.stringType);
                BType exprType = checkExpr(expression, env, expType);
                if (isUniqueType(restFieldTypes, exprType)) {
                    restFieldTypes.add(exprType);
                }
            } else {
                // Readonly fields are checked against `readonly` to get their immutable type.
                addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr),
                                       keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) :
                                               checkExpr(expression, env, expType),
                                       true, keyValue.readonly);
            }
        } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            BType spreadOpType = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr,
                                           env, expType);
            BType type = Types.getReferredType(spreadOpType);

            if (type.tag == TypeTags.MAP) {
                // Spreading a map contributes its constraint to the rest field.
                BType constraintType = ((BMapType) type).constraint;

                if (isUniqueType(restFieldTypes, constraintType)) {
                    restFieldTypes.add(constraintType);
                }
            }

            if (type.tag != TypeTags.RECORD) {
                continue;
            }

            // Spreading a record contributes each of its fields; optional fields stay optional.
            BRecordType recordType = (BRecordType) type;
            for (BField recField : recordType.fields.values()) {
                addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type,
                                       !Symbols.isOptional(recField.symbol), false);
            }

            if (!recordType.sealed) {
                BType restFieldType = recordType.restFieldType;
                if (isUniqueType(restFieldTypes, restFieldType)) {
                    restFieldTypes.add(restFieldType);
                }
            }
        } else {
            // Shorthand `{x}` field: name and value come from the same variable reference.
            BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
            addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField),
                                   varNameField.readonly ? checkExpr(varNameField, env, symTable.readonlyType) :
                                           checkExpr(varNameField, env, expType),
                                   true, varNameField.readonly);
        }
    }

    // Materialize the collected field info as record fields and symbols.
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    boolean allReadOnlyNonRestFields = true;

    for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) {
        FieldInfo fieldInfo = entry.getValue();
        List<BType> types = fieldInfo.types;

        if (types.contains(symTable.semanticError)) {
            return symTable.semanticError;
        }

        String key = entry.getKey();
        Name fieldName = names.fromString(key);
        BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0]));

        Set<Flag> flags = new HashSet<>();

        if (fieldInfo.required) {
            flags.add(Flag.REQUIRED);
        } else {
            flags.add(Flag.OPTIONAL);
        }

        if (fieldInfo.readonly) {
            flags.add(Flag.READONLY);
        } else if (allReadOnlyNonRestFields) {
            allReadOnlyNonRestFields = false;
        }

        BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol,
                                                symTable.builtinPos, VIRTUAL);
        fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol));
        recordSymbol.scope.define(fieldName, fieldSymbol);
    }

    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = fields;

    if (restFieldTypes.contains(symTable.semanticError)) {
        return symTable.semanticError;
    }

    // No rest candidates => closed record; otherwise the rest type is the (union of) candidates.
    if (restFieldTypes.isEmpty()) {
        recordType.sealed = true;
        recordType.restFieldType = symTable.noType;
    } else if (restFieldTypes.size() == 1) {
        recordType.restFieldType = restFieldTypes.get(0);
    } else {
        recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0]));
    }

    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;

    // A record checked against `readonly`, or a closed record whose fields are all readonly,
    // is itself readonly.
    if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) {
        recordType.flags |= Flags.READONLY;
        recordSymbol.flags |= Flags.READONLY;
    }

    // Register an AST node + init function + type definition for the synthesized type.
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                                                                                   recordLiteral.pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                                                                                      names, symTable);
    TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, env);

    return recordType;
}
/**
 * Creates an anonymous record type symbol with a synthesized nil-returning init function and a
 * fresh scope, and registers the init function in that scope under
 * {@code <recordName>.<initFuncName>}.
 *
 * @param pkgID    package the anonymous type belongs to
 * @param location source location to attach to the symbol
 * @param origin   symbol origin (VIRTUAL for compiler-synthesized types)
 */
private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location,
                                                 SymbolOrigin origin) {
    BRecordTypeSymbol recordSymbol =
            Symbols.createRecordSymbol(Flags.ANONYMOUS,
                                       names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)),
                                       pkgID, null, env.scope.owner, location, origin);

    // Synthesized `init` takes no parameters and returns nil.
    BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
            false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                                                         bInvokableType, location);

    recordSymbol.scope = new Scope(recordSymbol);
    recordSymbol.scope.define(
            names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
            recordSymbol.initializerFunc.symbol);
    return recordSymbol;
}
/**
 * Extracts the field name from a record-literal key: the variable name for an identifier key,
 * otherwise the string value of the literal key.
 */
private String getKeyName(BLangExpression key) {
    if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) key).variableName.value;
    }
    return (String) ((BLangLiteral) key).value;
}
/**
 * Records an observed type for a named (non-rest) field during record-type inference. A new
 * entry is created on first sight; subsequent sightings add the type if it is not already
 * present and upgrade the field to required when any sighting requires it.
 *
 * NOTE(review): the readonly flag of an existing entry is never updated — this preserves the
 * original behavior; confirm it is intentional that only the first sighting decides readonly.
 *
 * @param nonRestFieldTypes accumulator keyed by field name
 * @param keyString         the field name
 * @param exprType          type observed for the field's value
 * @param required          whether this occurrence makes the field required
 * @param readonly          whether this occurrence declares the field readonly
 */
private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString,
                                    BType exprType, boolean required, boolean readonly) {
    FieldInfo fieldInfo = nonRestFieldTypes.get(keyString);
    if (fieldInfo == null) {
        // Fix: build the initial type list explicitly instead of double-brace initialization,
        // which created an anonymous ArrayList subclass capturing the enclosing instance.
        List<BType> typeList = new ArrayList<>();
        typeList.add(exprType);
        nonRestFieldTypes.put(keyString, new FieldInfo(typeList, required, readonly));
        return;
    }

    if (isUniqueType(fieldInfo.types, exprType)) {
        fieldInfo.types.add(exprType);
    }

    // Once any occurrence is required, the field stays required.
    fieldInfo.required |= required;
}
/**
 * Returns whether {@code type} is not already present in {@code typeList}. Record types are
 * compared by identity; all other types by semantic sameness.
 */
private boolean isUniqueType(Iterable<BType> typeList, BType type) {
    boolean compareByIdentity = type.tag == TypeTags.RECORD;
    for (BType existing : typeList) {
        boolean duplicate = compareByIdentity ? type == existing : types.isSameType(type, existing);
        if (duplicate) {
            return false;
        }
    }
    return true;
}
/**
 * Determines which XML subtype an XML literal should take against the expected type: the mutable
 * subtype is preferred, falling back to its immutable (readonly) counterpart. For a union
 * expected type, exactly one compatible member must exist; zero is an incompatibility error and
 * more than one is an ambiguity error.
 *
 * @param location          position for diagnostics
 * @param mutableXmlSubType the mutable XML subtype of the literal
 * @param expType           the expected type
 * @return the chosen subtype, or {@code semanticError} after logging a diagnostic
 */
private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType,
                                                  BType expType) {
    if (expType == symTable.semanticError) {
        return expType;
    }

    boolean unionExpType = expType.tag == TypeTags.UNION;

    if (expType == mutableXmlSubType) {
        return expType;
    }

    if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) {
        return mutableXmlSubType;
    }

    // Mutable subtype did not fit; try the readonly counterpart.
    BXMLSubType immutableXmlSubType = (BXMLSubType)
            ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable,
                                                          anonymousModelHelper, names);

    if (expType == immutableXmlSubType) {
        return expType;
    }

    if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) {
        return immutableXmlSubType;
    }

    if (!unionExpType) {
        dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
        return symTable.semanticError;
    }

    // Union expected type: collect the members compatible with either variant.
    List<BType> compatibleTypes = new ArrayList<>();
    for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
        if (compatibleTypes.contains(memberType)) {
            continue;
        }

        if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) {
            compatibleTypes.add(memberType);
            continue;
        }

        if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) {
            compatibleTypes.add(mutableXmlSubType);
            continue;
        }

        if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) {
            compatibleTypes.add(immutableXmlSubType);
        }
    }

    if (compatibleTypes.isEmpty()) {
        dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
        return symTable.semanticError;
    }

    if (compatibleTypes.size() == 1) {
        return compatibleTypes.get(0);
    }

    dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType);
    return symTable.semanticError;
}
/**
 * Rewrites the types of an XML element literal's modified children to their effective immutable
 * counterparts, recursing into nested element literals. Children that are already readonly or
 * are not selectively immutable are left untouched.
 */
private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) {
    for (BLangExpression child : bLangXMLElementLiteral.modifiedChildren) {
        BType childType = child.getBType();
        boolean alreadyImmutable = Symbols.isFlagOn(childType.flags, Flags.READONLY);
        if (alreadyImmutable || !types.isSelectivelyImmutableType(childType)) {
            continue;
        }

        child.setBType(ImmutableTypeCloner.getEffectiveImmutableType(child.pos, types, childType,
                env, symTable, anonymousModelHelper, names));

        if (child.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
            markChildrenAsImmutable((BLangXMLElementLiteral) child);
        }
    }
}
/**
 * Logs an undefined-symbol error unless the name is a missing-node placeholder, for which no
 * diagnostic is reported.
 */
private void logUndefinedSymbolError(Location pos, String name) {
    if (missingNodesHelper.isMissingNode(name)) {
        return;
    }
    dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name);
}
/**
 * Sets the ISOLATED flag on both the given type and its type symbol.
 */
private void markTypeAsIsolated(BType actualType) {
    actualType.flags |= Flags.ISOLATED;
    actualType.tsymbol.flags |= Flags.ISOLATED;
}
/**
 * Validates an object-constructor expression that is referenced in a readonly context. If any
 * field's type can never be immutable the object cannot be readonly (errors are logged when
 * {@code logErrors} is set); otherwise the class and its type are marked readonly and all fields
 * are made immutable.
 *
 * @param objectCtorExpr   the object-constructor expression
 * @param actualObjectType the object type built for the constructor
 * @param env              the enclosing symbol environment
 * @param logErrors        whether to log a diagnostic per offending field (false => bail on first)
 */
private void handleObjectConstrExprForReadOnly(
        BLangObjectConstructorExpression objectCtorExpr, BObjectType actualObjectType, SymbolEnv env,
        boolean logErrors) {
    BLangClassDefinition classDefForConstructor = objectCtorExpr.classNode;
    boolean hasNeverReadOnlyField = false;

    for (BField field : actualObjectType.fields.values()) {
        BType fieldType = field.type;
        if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) {
            // Field can never be readonly: analyze the class as-is (no readonly marking).
            analyzeObjectConstructor(classDefForConstructor, env);
            hasNeverReadOnlyField = true;

            if (!logErrors) {
                return;
            }

            dlog.error(field.pos,
                       DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE,
                       fieldType);
        }
    }

    if (hasNeverReadOnlyField) {
        return;
    }

    // All fields are (or can be made) immutable: mark the whole object readonly.
    classDefForConstructor.flagSet.add(Flag.READONLY);
    actualObjectType.flags |= Flags.READONLY;
    actualObjectType.tsymbol.flags |= Flags.READONLY;

    ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types,
                                              anonymousModelHelper, symTable, names, objectCtorExpr.pos);

    analyzeObjectConstructor(classDefForConstructor, env);
}
/**
 * Marks a constructed object type as isolated when it qualifies: a readonly object is isolated
 * outright; otherwise every field must be final and a subtype of readonly or an isolated object.
 *
 * NOTE(review): the readonly branch marks the type isolated but does not set
 * {@code markedIsolatedness} before returning — confirm whether that is intentional.
 *
 * @param actualObjectType the object type produced by an object-constructor expression
 */
private void markConstructedObjectIsolatedness(BObjectType actualObjectType) {
    // Already processed once; avoid redoing the work.
    if (actualObjectType.markedIsolatedness) {
        return;
    }
    if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) {
        markTypeAsIsolated(actualObjectType);
        return;
    }

    for (BField field : actualObjectType.fields.values()) {
        // Any non-final or non-readonly/isolated-object field disqualifies the type.
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) ||
                !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) {
            return;
        }
    }

    markTypeAsIsolated(actualObjectType);
    actualObjectType.markedIsolatedness = true;
}
/**
 * Flags an access expression as a leaf node when it is not itself the subject of an enclosing
 * field- or index-based access. Wrapping group expressions (parentheses) are skipped when
 * looking for the effective parent.
 */
private void markLeafNode(BLangAccessExpression accessExpression) {
    // Walk past any group-expression wrappers to find the effective parent.
    BLangNode parent = accessExpression.parent;
    while (parent != null && parent.getKind() == NodeKind.GROUP_EXPR) {
        parent = parent.parent;
    }

    if (parent == null) {
        accessExpression.leafNode = true;
        return;
    }

    NodeKind parentKind = parent.getKind();
    if (parentKind != NodeKind.FIELD_BASED_ACCESS_EXPR && parentKind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        accessExpression.leafNode = true;
    }
}
/**
 * Mutable aggregate describing one named (non-rest) field while inferring a record type from a
 * record literal: the candidate value types observed so far, whether the field is required, and
 * whether it was declared readonly.
 */
private static class FieldInfo {
    // Candidate types observed for this field (grown via addToNonRestFieldTypes).
    List<BType> types;
    // True when at least one occurrence makes the field required.
    boolean required;
    // True when the field was specified as readonly.
    boolean readonly;

    private FieldInfo(List<BType> types, boolean required, boolean readonly) {
        this.types = types;
        this.required = required;
        this.readonly = readonly;
    }
}
/**
 * Value holder pairing a resolved field symbol with the type determined for it.
 */
private static class TypeSymbolPair {
    private BVarSymbol fieldSymbol;
    private BType determinedType;

    public TypeSymbolPair(BVarSymbol fieldSymbol, BType determinedType) {
        this.fieldSymbol = fieldSymbol;
        this.determinedType = determinedType;
    }
}
/**
 * Collects per-record diagnostics while checking field access on a union of record types:
 * records in which the field is undeclared, and records in which the field is nilable
 * (as implied by the field names — populated by the enclosing class).
 */
private static class RecordUnionDiagnostics {
    Set<BRecordType> undeclaredInRecords = new LinkedHashSet<>();
    Set<BRecordType> nilableInRecords = new LinkedHashSet<>();

    boolean hasUndeclared() {
        return !undeclaredInRecords.isEmpty();
    }

    boolean hasNilable() {
        return !nilableInRecords.isEmpty();
    }

    boolean hasNilableAndUndeclared() {
        return hasNilable() && hasUndeclared();
    }

    /**
     * Joins the record type names for a diagnostic message, separating names with
     * "', '" and the final pair with "', and '" (outer quotes come from the message format).
     */
    String recordsToString(Set<BRecordType> recordTypeSet) {
        StringBuilder recordNames = new StringBuilder();
        int total = recordTypeSet.size();
        int position = 0;

        for (BRecordType recordType : recordTypeSet) {
            position++;
            recordNames.append(recordType.tsymbol.getName().getValue());

            if (total > 1) {
                if (position == total - 1) {
                    recordNames.append("', and '");
                } else if (position < total) {
                    recordNames.append("', '");
                }
            }
        }

        return recordNames.toString();
    }
}
} | class TypeChecker extends BLangNodeVisitor {
// Context key under which the singleton TypeChecker is registered.
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>();

// lang.array function names that change the length of a list (populated in the static block).
private static Set<String> listLengthModifierFunctions = new HashSet<>();
// Per-langlib sets of function names that mutate their receiver (populated in the static block).
private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>();

// Lang-library module names used as keys in modifierFunctions.
private static final String LIST_LANG_LIB = "lang.array";
private static final String MAP_LANG_LIB = "lang.map";
private static final String TABLE_LANG_LIB = "lang.table";
private static final String VALUE_LANG_LIB = "lang.value";
private static final String XML_LANG_LIB = "lang.xml";

// Function names the type checker special-cases.
private static final String FUNCTION_NAME_PUSH = "push";
private static final String FUNCTION_NAME_POP = "pop";
private static final String FUNCTION_NAME_SHIFT = "shift";
private static final String FUNCTION_NAME_UNSHIFT = "unshift";
private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType";

// Collaborating compiler components, resolved from the CompilerContext in the constructor.
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private NodeCloner nodeCloner;
private Types types;
private BLangDiagnosticLog dlog;
private SymbolEnv env;
private boolean isTypeChecked;
private TypeNarrower typeNarrower;
private TypeParamAnalyzer typeParamAnalyzer;
private BLangAnonymousModelHelper anonymousModelHelper;
private SemanticAnalyzer semanticAnalyzer;
private Unifier unifier;

// Mutable state for the current type-checking pass.
private boolean nonErrorLoggingCheck = false;
private int letCount = 0;
private Stack<SymbolEnv> queryEnvs, prevEnvs;
private Stack<BLangNode> queryFinalClauses;
private boolean checkWithinQueryExpr = false;
private BLangMissingNodesHelper missingNodesHelper;
private boolean breakToParallelQueryEnv = false;

/**
 * Expected types or inherited types.
 */
private BType expType;
private BType resultType;

private DiagnosticCode diagCode;
static {
listLengthModifierFunctions.add(FUNCTION_NAME_PUSH);
listLengthModifierFunctions.add(FUNCTION_NAME_POP);
listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT);
listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT);
modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeAll");
add("setLength");
add("reverse");
add("sort");
add("pop");
add("push");
add("shift");
add("unshift");
}});
modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{
add("put");
add("add");
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{
add("mergeJson");
}});
modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{
add("setName");
add("setChildren");
add("strip");
}});
}
/**
 * Returns the TypeChecker registered in the given compiler context, creating and registering
 * one on first use (the constructor puts the new instance into the context).
 */
public static TypeChecker getInstance(CompilerContext context) {
    TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY);
    return typeChecker != null ? typeChecker : new TypeChecker(context);
}
/**
 * Creates a TypeChecker and wires its collaborating components from the compiler context.
 * The instance registers itself in the context first, so getInstance lookups made while the
 * remaining components are being resolved already see this instance (presumably to break
 * initialization cycles — standard CompilerContext pattern).
 */
public TypeChecker(CompilerContext context) {
    context.put(TYPE_CHECKER_KEY, this);

    this.names = Names.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.nodeCloner = NodeCloner.getInstance(context);
    this.types = Types.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.typeNarrower = TypeNarrower.getInstance(context);
    this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
    this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
    this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
    this.queryFinalClauses = new Stack<>();
    this.queryEnvs = new Stack<>();
    this.prevEnvs = new Stack<>();
    this.unifier = new Unifier();
}
/**
 * Type checks the given expression with no particular expected type.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
    return checkExpr(expr, env, symTable.noType);
}
/**
 * Type checks the given expression against {@code expType}, reporting mismatches with the
 * default incompatible-types diagnostic.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
    return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
/**
 * Type checks the given expression against {@code expType} using the supplied diagnostic code.
 * Saves and restores the checker's env/expType/diagCode state around the visitor dispatch, so
 * nested checkExpr calls do not interfere. Intersection types (both direct and behind type
 * references) are replaced by their effective type before and after checking.
 *
 * @return the resolved type of the expression (also stored on the expression node)
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
    // Each expression is checked at most once; reuse the cached result.
    if (expr.typeChecked) {
        return expr.getBType();
    }

    if (expType.tag == TypeTags.INTERSECTION) {
        expType = ((BIntersectionType) expType).effectiveType;
    }

    // Save checker state; it is mutated for the duration of this check.
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    this.expType = expType;
    this.isTypeChecked = true;

    // An intersection hiding behind a type reference is also replaced by its effective type.
    BType referredExpType = Types.getReferredType(expType);
    if (referredExpType.tag == TypeTags.INTERSECTION) {
        expType = ((BIntersectionType) referredExpType).effectiveType;
    }
    expr.expectedType = expType;

    // Dispatch to the matching visit(...) overload, which sets resultType.
    expr.accept(this);

    BType resultRefType = Types.getReferredType(resultType);
    if (resultRefType.tag == TypeTags.INTERSECTION) {
        resultType = ((BIntersectionType) resultRefType).effectiveType;
    }

    expr.setTypeCheckedType(resultType);
    expr.typeChecked = isTypeChecked;

    // Restore checker state.
    this.env = prevEnv;
    this.expType = preExpType;
    this.diagCode = preDiagCode;

    validateAndSetExprExpectedType(expr);

    return resultType;
}
/**
 * Runs semantic analysis on the given node unless this is a speculative (non-error-logging)
 * type-checking pass.
 */
private void analyzeObjectConstructor(BLangNode node, SymbolEnv env) {
    if (nonErrorLoggingCheck) {
        return;
    }
    semanticAnalyzer.analyzeNode(node, env);
}
/**
 * After checking, records the resolved type as the expression's expected type — except for a
 * record literal checked against a map type, which keeps its map expected type, and for
 * erroneous results, which leave the expected type untouched.
 */
private void validateAndSetExprExpectedType(BLangExpression expr) {
    if (resultType.tag == TypeTags.SEMANTIC_ERROR) {
        return;
    }

    boolean recordAgainstMap = expr.getKind() == NodeKind.RECORD_LITERAL_EXPR
            && expr.expectedType != null
            && Types.getReferredType(expr.expectedType).tag == TypeTags.MAP
            && Types.getReferredType(expr.getBType()).tag == TypeTags.RECORD;
    if (recordAgainstMap) {
        return;
    }

    expr.expectedType = resultType;
}
/**
 * Type checks a literal: determines its value/type against the expected type, then verifies
 * compatibility. Erroneous literals and literals already consumed in a finite-type context
 * leave resultType untouched.
 */
public void visit(BLangLiteral literalExpr) {
    BType literalType = setLiteralValueAndGetType(literalExpr, expType);
    if (literalType == symTable.semanticError || literalExpr.isFiniteContext) {
        return;
    }
    resultType = types.checkType(literalExpr, literalType, expType);
}
/**
 * Type checks an XML element access (`x.<elem>`): validates the namespace prefixes of the
 * filters, checks the subject against xml, and produces an xml:Element sequence type.
 */
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // check for undeclared namespaces.
    checkXMLNamespacePrefixes(xmlElementAccess.filters);
    checkExpr(xmlElementAccess.expr, env, symTable.xmlType);
    resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType);
}
/**
 * Type checks an XML navigation access (`x/...`, `x/<elem>`): validates filter namespace
 * prefixes, checks the optional child index against int and the subject against xml, and
 * resolves the result type — xml for children navigation, xml:Element sequence otherwise.
 */
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    checkXMLNamespacePrefixes(xmlNavigation.filters);
    if (xmlNavigation.childIndex != null) {
        checkExpr(xmlNavigation.childIndex, env, symTable.intType);
    }
    BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType);
    if (Types.getReferredType(exprType).tag == TypeTags.UNION) {
        dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS,
                   xmlNavigation.expr.getBType());
    }

    BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN
            ? symTable.xmlType : symTable.xmlElementSeqType;

    types.checkType(xmlNavigation, actualType, expType);
    // Fix: the original re-evaluated the same navAccessType conditional to set resultType;
    // actualType already holds that value.
    resultType = actualType;
}
/**
 * Resolves the namespace prefix of each XML element filter against the prefix symbol space,
 * attaching the resolved symbol and logging an error for any undeclared namespace.
 */
private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) {
    for (BLangXMLElementFilter filter : filters) {
        if (filter.namespace.isEmpty()) {
            continue;
        }
        Name nsName = names.fromString(filter.namespace);
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName);
        filter.namespaceSymbol = nsSymbol;
        if (nsSymbol == symTable.notFoundSymbol) {
            dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName);
        }
    }
}
/**
 * Returns the type tag of the first member in the finite type's value space whose tag lies in
 * the numeric range {@code [TypeTags.INT, TypeTags.DECIMAL]}, or {@code TypeTags.NONE} when no
 * such member exists.
 */
private int getPreferredMemberTypeTag(BFiniteType finiteType) {
    for (BLangExpression valueExpr : finiteType.getValueSpace()) {
        int typeTag = Types.getReferredType(valueExpr.getBType()).tag;
        // Fix: the original looped over each tag in [INT, DECIMAL] testing equality; a direct
        // range check is equivalent and clearer.
        if (typeTag >= TypeTags.INT && typeTag <= TypeTags.DECIMAL) {
            return typeTag;
        }
    }
    return TypeTags.NONE;
}
/**
 * Attempts to match an integer literal against the integer-shaped members of a
 * finite type, preferring {@code int}, then {@code byte}, then the fixed-width
 * integer subtypes in tag order.
 *
 * @param literalExpr the integer literal being checked
 * @param finiteType  the expected finite type
 * @return the matched type, or {@code symTable.noType} when nothing matches
 */
private BType getFiniteTypeMatchWithIntType(BLangLiteral literalExpr, BFiniteType finiteType) {
    if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
        setLiteralValueForFiniteType(literalExpr, symTable.intType);
        return symTable.intType;
    }
    if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
        setLiteralValueForFiniteType(literalExpr, symTable.byteType);
        return symTable.byteType;
    }
    // SIGNED32_INT..UNSIGNED8_INT form a contiguous tag range of int subtypes.
    for (int tag = TypeTags.SIGNED32_INT; tag <= TypeTags.UNSIGNED8_INT; tag++) {
        if (literalAssignableToFiniteType(literalExpr, finiteType, tag)) {
            BType matchedType = symTable.getTypeFromTag(tag);
            setLiteralValueForFiniteType(literalExpr, matchedType);
            return matchedType;
        }
    }
    return symTable.noType;
}
/**
 * Resolves the type of an integer literal against an expected finite type:
 * first via the integer-shaped members, then via the first basic numeric
 * member type found in the value space, finally defaulting to {@code int}.
 *
 * @param literalExpr  the integer literal being checked
 * @param finiteType   the expected finite type
 * @param literalValue the literal's raw value
 * @return the resolved type (defaults to {@code symTable.intType})
 */
private BType getFiniteTypeMatchWithIntLiteral(BLangLiteral literalExpr, BFiniteType finiteType,
                                               Object literalValue) {
    // Prefer the integer-shaped members (int, byte, fixed-width int subtypes).
    BType intLiteralType = getFiniteTypeMatchWithIntType(literalExpr, finiteType);
    if (intLiteralType != symTable.noType) {
        return intLiteralType;
    }
    // Otherwise try the first basic numeric member type in the value space.
    int typeTag = getPreferredMemberTypeTag(finiteType);
    if (typeTag == TypeTags.NONE) {
        // No numeric member at all; fall back to int.
        return symTable.intType;
    }
    if (literalAssignableToFiniteType(literalExpr, finiteType, typeTag)) {
        BType type = symTable.getTypeFromTag(typeTag);
        setLiteralValueForFiniteType(literalExpr, type);
        // Re-encode the value as a string for the non-int numeric representation.
        literalExpr.value = String.valueOf(literalValue);
        return type;
    }
    return symTable.intType;
}
/**
 * Resolves the type of an integer literal against the expected type, handling
 * implicit widening to float/decimal, finite types, and unions. For unions the
 * precedence is: integer-typed members, json/anydata/any (as int), finite
 * members holding int values, finite members holding byte values, and finally
 * float/decimal members.
 *
 * @param literalExpr  the integer literal
 * @param literalValue the literal's raw (Long) value
 * @param expType      the contextually-expected type
 * @return the resolved type (defaults to {@code symTable.intType})
 */
private BType getIntegerLiteralType(BLangLiteral literalExpr, Object literalValue, BType expType) {
    BType expectedType = Types.getReferredType(expType);
    if (expectedType.tag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(expectedType.tag)) {
        return getIntLiteralType(expType, literalValue);
    } else if (expectedType.tag == TypeTags.FLOAT) {
        // Implicit int -> float conversion rewrites the literal's value in place.
        literalExpr.value = ((Long) literalValue).doubleValue();
        return symTable.floatType;
    } else if (expectedType.tag == TypeTags.DECIMAL) {
        // Decimal values are carried as their string representation.
        literalExpr.value = String.valueOf(literalValue);
        return symTable.decimalType;
    } else if (expectedType.tag == TypeTags.FINITE) {
        BFiniteType finiteType = (BFiniteType) expectedType;
        return getFiniteTypeMatchWithIntLiteral(literalExpr, finiteType, literalValue);
    } else if (expectedType.tag == TypeTags.UNION) {
        // First pass: direct integer-typed members, or json/anydata/any (kept as int).
        for (BType memType : types.getAllTypes(expectedType, true)) {
            BType memberRefType = Types.getReferredType(memType);
            if (TypeTags.isIntegerTypeTag(memberRefType.tag) || memberRefType.tag == TypeTags.BYTE) {
                BType intLiteralType = getIntLiteralType(memType, literalValue);
                if (intLiteralType == memberRefType) {
                    return intLiteralType;
                }
            } else if (memberRefType.tag == TypeTags.JSON || memberRefType.tag == TypeTags.ANYDATA ||
                    memberRefType.tag == TypeTags.ANY) {
                return symTable.intType;
            }
        }
        // Second pass: finite members whose value space contains int values.
        BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expectedType, symTable.intType);
        if (finiteType != symTable.semanticError) {
            BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
            if (literalExpr.isFiniteContext) {
                return setType;
            }
        }
        // Third pass: finite members whose value space contains byte values.
        BType finiteTypeMatchingByte = getFiniteTypeWithValuesOfSingleType((BUnionType) expectedType,
                symTable.byteType);
        if (finiteTypeMatchingByte != symTable.semanticError) {
            finiteType = finiteTypeMatchingByte;
            BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
            if (literalExpr.isFiniteContext) {
                return setType;
            }
        }
        // Last resort: float/decimal members (finiteType may be semanticError here).
        Set<BType> memberTypes = ((BUnionType) expectedType).getMemberTypes();
        return getTypeMatchingFloatOrDecimal(finiteType, memberTypes, literalExpr, (BUnionType) expectedType);
    }
    return symTable.intType;
}
/**
 * Types a numeric literal carrying an explicit float discriminator. The
 * discriminator is stripped, the literal validated and parsed as a double,
 * then matched against a finite/union expected type before defaulting to float.
 *
 * @param literalExpr  the discriminated literal
 * @param literalValue the literal's raw value
 * @param expType      the contextually-expected type
 * @return the resolved type, or {@code symTable.semanticError} on an invalid literal
 */
private BType getTypeOfLiteralWithFloatDiscriminator(BLangLiteral literalExpr, Object literalValue, BType expType) {
    String numericLiteral = NumericLiteralSupport.stripDiscriminator(String.valueOf(literalValue));
    if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
        resultType = symTable.semanticError;
        return resultType;
    }
    literalExpr.value = Double.parseDouble(numericLiteral);
    BType referredType = Types.getReferredType(expType);
    switch (referredType.tag) {
        case TypeTags.FINITE:
            if (literalAssignableToFiniteType(literalExpr, (BFiniteType) referredType, TypeTags.FLOAT)) {
                setLiteralValueForFiniteType(literalExpr, symTable.floatType);
                return symTable.floatType;
            }
            break;
        case TypeTags.UNION:
            BType matchedMember =
                    getAndSetAssignableUnionMember(literalExpr, (BUnionType) referredType, symTable.floatType);
            if (matchedMember != symTable.noType) {
                return matchedMember;
            }
            break;
        default:
            break;
    }
    return symTable.floatType;
}
/**
 * Types a numeric literal carrying an explicit decimal discriminator. The
 * discriminator is stripped (decimal values are kept as strings), then the
 * literal is matched against a finite/union expected type before defaulting
 * to decimal.
 *
 * <p>Bug fix: the union branch previously cast {@code expType} — which may be
 * a type-reference wrapper — to {@code BUnionType}, causing a
 * {@code ClassCastException} when the expected type referred to a union. It
 * now casts the already-resolved {@code referredType}, matching the sibling
 * float/hex handlers.
 *
 * @param literalExpr  the discriminated literal
 * @param literalValue the literal's raw value
 * @param expType      the contextually-expected type
 * @return the resolved type (defaults to {@code symTable.decimalType})
 */
private BType getTypeOfLiteralWithDecimalDiscriminator(BLangLiteral literalExpr, Object literalValue,
                                                       BType expType) {
    literalExpr.value = NumericLiteralSupport.stripDiscriminator(String.valueOf(literalValue));
    BType referredType = Types.getReferredType(expType);
    if (referredType.tag == TypeTags.FINITE) {
        BFiniteType finiteType = (BFiniteType) referredType;
        if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
            setLiteralValueForFiniteType(literalExpr, symTable.decimalType);
            return symTable.decimalType;
        }
    } else if (referredType.tag == TypeTags.UNION) {
        // Cast the referred type, not expType, which may be a BTypeReferenceType.
        BUnionType unionType = (BUnionType) referredType;
        BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType);
        if (unionMember != symTable.noType) {
            return unionMember;
        }
    }
    return symTable.decimalType;
}
/**
 * Types an undiscriminated decimal floating-point literal. Float-range
 * validation is deliberately deferred: it only happens on the paths where the
 * literal actually ends up as a float, so a decimal match never triggers a
 * float-overflow diagnostic.
 *
 * @param literalExpr  the literal being checked
 * @param literalValue the literal's raw value
 * @param expType      the contextually-expected type
 * @return the resolved type, or {@code symTable.semanticError} on an invalid float
 */
private BType getTypeOfDecimalFloatingPointLiteral(BLangLiteral literalExpr, Object literalValue, BType expType) {
    BType expectedType = Types.getReferredType(expType);
    String numericLiteral = String.valueOf(literalValue);
    if (expectedType.tag == TypeTags.DECIMAL) {
        // Decimal context: no float validation needed.
        return symTable.decimalType;
    } else if (expectedType.tag == TypeTags.FLOAT) {
        if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
            resultType = symTable.semanticError;
            return resultType;
        }
        return symTable.floatType;
    } else if (expectedType.tag == TypeTags.FINITE) {
        // Try float members first, then decimal members (contiguous tag range).
        BFiniteType finiteType = (BFiniteType) expectedType;
        for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
            if (literalAssignableToFiniteType(literalExpr, finiteType, tag)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.getTypeFromTag(tag));
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        }
    } else if (expectedType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) expectedType;
        for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
            BType unionMember =
                    getAndSetAssignableUnionMember(literalExpr, unionType, symTable.getTypeFromTag(tag));
            // A float match must still satisfy float-range validation.
            if (unionMember == symTable.floatType && !types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
                resultType = symTable.semanticError;
                return resultType;
            } else if (unionMember != symTable.noType) {
                return unionMember;
            }
        }
    }
    // Default: the literal is treated as a float, so validate it as one.
    if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
        resultType = symTable.semanticError;
        return resultType;
    }
    return symTable.floatType;
}
/**
 * Types a hexadecimal floating-point literal. Hex float literals are always
 * float-valued, so the literal is validated and parsed up front, then matched
 * against a finite/union expected type before defaulting to float.
 *
 * @param literalExpr  the hex float literal
 * @param literalValue the literal's raw value
 * @param expType      the contextually-expected type
 * @return the resolved type, or {@code symTable.semanticError} on an invalid literal
 */
private BType getTypeOfHexFloatingPointLiteral(BLangLiteral literalExpr, Object literalValue, BType expType) {
    String numericLiteral = String.valueOf(literalValue);
    if (!types.validateFloatLiteral(literalExpr.pos, numericLiteral)) {
        resultType = symTable.semanticError;
        return resultType;
    }
    literalExpr.value = Double.parseDouble(numericLiteral);
    BType refType = Types.getReferredType(expType);
    if (refType.tag == TypeTags.UNION) {
        BType matchedMember =
                getAndSetAssignableUnionMember(literalExpr, (BUnionType) refType, symTable.floatType);
        if (matchedMember != symTable.noType) {
            return matchedMember;
        }
    } else if (refType.tag == TypeTags.FINITE
            && literalAssignableToFiniteType(literalExpr, (BFiniteType) refType, TypeTags.FLOAT)) {
        setLiteralValueForFiniteType(literalExpr, symTable.floatType);
        return symTable.floatType;
    }
    return symTable.floatType;
}
/**
 * Resolves the type of a literal against the expected type, dispatching
 * numeric literals by their lexical kind (integer / decimal-fp with optional
 * discriminator / hex-fp) and handling char-string and finite/union matching
 * for the remaining literal kinds. May rewrite {@code literalExpr.value} and
 * set {@code resultType}/{@code isFiniteContext} as side effects.
 *
 * @param literalExpr the literal being checked
 * @param expType     the contextually-expected type
 * @return the resolved literal type
 */
public BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) {
    Object literalValue = literalExpr.value;
    BType expectedType = Types.getReferredType(expType);
    if (literalExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
        // Numeric literals are dispatched by their lexical kind.
        NodeKind kind = ((BLangNumericLiteral) literalExpr).kind;
        if (kind == NodeKind.INTEGER_LITERAL) {
            return getIntegerLiteralType(literalExpr, literalValue, expectedType);
        } else if (kind == NodeKind.DECIMAL_FLOATING_POINT_LITERAL) {
            // The 'f'/'d' suffix (discriminator) forces float/decimal typing.
            if (NumericLiteralSupport.isFloatDiscriminated(literalExpr.originalValue)) {
                return getTypeOfLiteralWithFloatDiscriminator(literalExpr, literalValue, expectedType);
            } else if (NumericLiteralSupport.isDecimalDiscriminated(literalExpr.originalValue)) {
                return getTypeOfLiteralWithDecimalDiscriminator(literalExpr, literalValue, expectedType);
            } else {
                return getTypeOfDecimalFloatingPointLiteral(literalExpr, literalValue, expectedType);
            }
        } else {
            return getTypeOfHexFloatingPointLiteral(literalExpr, literalValue, expectedType);
        }
    }
    // Non-numeric literal: start from its intrinsic type.
    BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag);
    if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) {
        // Single-character string literals may narrow to string:Char.
        if (expectedType.tag == TypeTags.CHAR_STRING) {
            return symTable.charStringType;
        }
        if (expectedType.tag == TypeTags.UNION) {
            Set<BType> memberTypes = new HashSet<>(types.getAllTypes(expectedType, true));
            for (BType memType : memberTypes) {
                memType = Types.getReferredType(memType);
                if (TypeTags.isStringTypeTag(memType.tag)) {
                    return setLiteralValueAndGetType(literalExpr, memType);
                } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA ||
                        memType.tag == TypeTags.ANY) {
                    return setLiteralValueAndGetType(literalExpr, symTable.charStringType);
                } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType,
                        literalExpr)) {
                    setLiteralValueForFiniteType(literalExpr, symTable.charStringType);
                    return literalType;
                }
            }
        }
        // Direct finite-type membership check for the char value.
        boolean foundMember = types.isAssignableToFiniteType(expectedType, literalExpr);
        if (foundMember) {
            setLiteralValueForFiniteType(literalExpr, literalType);
            return literalType;
        }
    } else {
        // Other literals: check finite-type membership directly or via union members.
        if (expectedType.tag == TypeTags.FINITE) {
            boolean foundMember = types.isAssignableToFiniteType(expectedType, literalExpr);
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        } else if (expectedType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) expectedType;
            boolean foundMember = types.getAllTypes(unionType, true)
                    .stream()
                    .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        }
    }
    // Byte-array literals (base16/base64) are typed as byte[].
    if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) {
        literalType = new BArrayType(symTable.byteType);
    }
    return literalType;
}
/**
 * Final fallback for an integer literal against a union: tries float then
 * decimal, first as direct union members and then as finite members holding
 * values of that type. Note that {@code finiteType} is reassigned inside the
 * loop: once a finite match is found (but not in a finite context) the float
 * branch "uses up" the semanticError sentinel, so the decimal iteration only
 * runs when the float iteration found nothing.
 *
 * @param finiteType  {@code symTable.semanticError}, or a previously-matched finite type
 * @param memberTypes the union's direct member types
 * @param literalExpr the integer literal
 * @param expType     the expected union type
 * @return the matched float/decimal type, or {@code symTable.intType}
 */
private BType getTypeMatchingFloatOrDecimal(BType finiteType, Set<BType> memberTypes,
                                            BLangLiteral literalExpr, BUnionType expType) {
    for (int tag = TypeTags.FLOAT; tag <= TypeTags.DECIMAL; tag++) {
        if (finiteType == symTable.semanticError) {
            BType type = symTable.getTypeFromTag(tag);
            // Direct member of the union with this tag wins.
            for (BType memType : memberTypes) {
                if (memType.tag == tag) {
                    return setLiteralValueAndGetType(literalExpr, type);
                }
            }
            // Otherwise look for finite members holding values of this type.
            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, type);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
        }
    }
    return symTable.intType;
}
/**
 * Checks whether a union accepts a literal of {@code desiredType}: directly,
 * via a json/anydata/any member, or via finite members holding values of that
 * type.
 *
 * <p>Fix: the member set was previously built with
 * {@code members.forEach(member -> memberTypes.addAll(members))}, which ignored
 * the lambda parameter and re-added the entire member list once per member —
 * O(n^2) work producing the same set. A single set copy is equivalent.
 *
 * @param literalExpr the literal being checked
 * @param expType     the expected union type
 * @param desiredType the type the literal would take
 * @return {@code desiredType}, a matched finite-member type, or {@code symTable.noType}
 */
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
    Set<BType> memberTypes = new HashSet<>(types.getAllTypes(expType, true));
    if (memberTypes.stream()
            .anyMatch(memType -> memType.tag == desiredType.tag
                    || memType.tag == TypeTags.JSON
                    || memType.tag == TypeTags.ANYDATA
                    || memType.tag == TypeTags.ANY)) {
        return desiredType;
    }
    // No direct member: try finite members whose value space holds desiredType values.
    BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, desiredType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            return setType;
        }
    }
    return symTable.noType;
}
/**
 * Reports whether the finite type has a member of the given type tag to which
 * the literal is assignable.
 *
 * @param literalExpr         the literal being checked
 * @param finiteType          the finite type whose value space is scanned
 * @param targetMemberTypeTag the required member type tag
 * @return {@code true} if some member of that tag accepts the literal
 */
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
                                              int targetMemberTypeTag) {
    for (BLangExpression memberExpr : finiteType.getValueSpace()) {
        boolean tagMatches = memberExpr.getBType().tag == targetMemberTypeTag;
        if (tagMatches && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) memberExpr, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Records a successful finite-type match for a literal: installs an implicit
 * cast to the overall expected type, publishes the matched type as the result,
 * and flags the literal as resolved in a finite-type context.
 *
 * @param literalExpr the matched literal
 * @param type        the matched member type
 */
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
    types.setImplicitCastExpr(literalExpr, type, this.expType);
    this.resultType = type;
    literalExpr.isFiniteContext = true;
}
/**
 * Builds a synthetic finite type collecting, from all finite members of the
 * union, the value-space members whose type matches {@code matchType}.
 *
 * <p>Fix: matches were previously staged in a per-member {@code HashSet}
 * (unordered) before being copied into the {@code LinkedHashSet}, which made
 * the resulting value-space ordering nondeterministic across runs and defeated
 * the purpose of using a {@code LinkedHashSet}. Matches are now added directly
 * in value-space order.
 *
 * @param unionType the union to scan
 * @param matchType the member value type to collect
 * @return the synthetic finite type, or {@code symTable.semanticError} when
 *         there are no finite members or no matching values
 */
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
    List<BFiniteType> finiteTypeMembers = types.getAllTypes(unionType, true).stream()
            .filter(memType -> memType.tag == TypeTags.FINITE)
            .map(memFiniteType -> (BFiniteType) memFiniteType)
            .collect(Collectors.toList());
    if (finiteTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }
    int tag = matchType.tag;
    Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
    for (BFiniteType finiteType : finiteTypeMembers) {
        for (BLangExpression expression : finiteType.getValueSpace()) {
            if (expression.getBType().tag == tag) {
                matchedValueSpace.add(expression);
            }
        }
    }
    if (matchedValueSpace.isEmpty()) {
        return symTable.semanticError;
    }
    return new BFiniteType(null, matchedValueSpace);
}
/**
 * Resolves an integer literal against an integer-shaped expected type,
 * checking the value fits the target's range. On any range failure (or an
 * unhandled tag) it falls through to plain {@code int}.
 *
 * @param expType      the expected integer(-subtype) type
 * @param literalValue the literal's value (a {@code Long})
 * @return the matched integer subtype, or {@code symTable.intType}
 */
private BType getIntLiteralType(BType expType, Object literalValue) {
    switch (expType.tag) {
        case TypeTags.INT:
            return symTable.intType;
        case TypeTags.BYTE:
            if (types.isByteLiteralValue((Long) literalValue)) {
                return symTable.byteType;
            }
            break;
        case TypeTags.SIGNED32_INT:
            if (types.isSigned32LiteralValue((Long) literalValue)) {
                return symTable.signed32IntType;
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (types.isSigned16LiteralValue((Long) literalValue)) {
                return symTable.signed16IntType;
            }
            break;
        case TypeTags.SIGNED8_INT:
            if (types.isSigned8LiteralValue((Long) literalValue)) {
                return symTable.signed8IntType;
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (types.isUnsigned32LiteralValue((Long) literalValue)) {
                return symTable.unsigned32IntType;
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (types.isUnsigned16LiteralValue((Long) literalValue)) {
                return symTable.unsigned16IntType;
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (types.isUnsigned8LiteralValue((Long) literalValue)) {
                return symTable.unsigned8IntType;
            }
            break;
        case TypeTags.TYPEREFDESC:
            // Unwrap a type reference and retry against the referred type.
            return getIntLiteralType(Types.getReferredType(expType), literalValue);
        default:
    }
    // Out-of-range for the requested subtype, or an unrelated tag: treat as int.
    return symTable.intType;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    // With a concrete expected type, delegate to list-constructor compatibility checking.
    if (expType.tag != TypeTags.NONE && expType.tag != TypeTags.READONLY) {
        resultType = checkListConstructorCompatibility(expType, listConstructor);
        return;
    }
    // No usable context: infer a tuple type from the members.
    BType inferredType = getInferredTupleType(listConstructor, expType);
    resultType = inferredType == symTable.semanticError
            ? symTable.semanticError
            : types.checkType(listConstructor, inferredType, expType);
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    // Case 1: no usable expected table type — infer the row type from the members.
    if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) {
        List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env);
        for (BType memType : memTypes) {
            if (memType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
        }
        // An empty constructor gives nothing to infer the member type from.
        if (tableConstructorExpr.recordLiteralList.size() == 0) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE);
            resultType = symTable.semanticError;
            return;
        }
        BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr);
        BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null);
        // Retype every row literal with the inferred inherent member type.
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            recordLiteral.setBType(inherentMemberType);
        }
        if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) {
            resultType = symTable.semanticError;
            return;
        }
        // checkKeySpecifier returns true when it already set an error result.
        if (checkKeySpecifier(tableConstructorExpr, tableType)) {
            return;
        }
        resultType = tableType;
        return;
    }
    // Case 2/3: resolve the expected type (unwrapping intersections) and dispatch.
    BType applicableExpType = Types.getReferredType(expType);
    applicableExpType = applicableExpType.tag == TypeTags.INTERSECTION ?
            ((BIntersectionType) applicableExpType).effectiveType : applicableExpType;
    if (applicableExpType.tag == TypeTags.TABLE) {
        // Case 2: expected type is a table — check each row against its constraint.
        List<BType> memTypes = new ArrayList<>();
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            // Clone rows during exploratory (non-error-logging) checking so the
            // original AST is not mutated by a failed attempt.
            BLangRecordLiteral clonedExpr = recordLiteral;
            if (this.nonErrorLoggingCheck) {
                clonedExpr.cloneAttempt++;
                clonedExpr = nodeCloner.cloneNode(recordLiteral);
            }
            BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint);
            if (recordType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
            memTypes.add(recordType);
        }
        BTableType expectedTableType = (BTableType) applicableExpType;
        // Inline-defined map-constrained tables get dedicated validation.
        if (expectedTableType.constraint.tag == TypeTags.MAP && expectedTableType.isTypeInlineDefined) {
            validateMapConstraintTable(applicableExpType);
            return;
        }
        if (!(validateKeySpecifierInTableConstructor((BTableType) applicableExpType,
                tableConstructorExpr.recordLiteralList) &&
                validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) {
            resultType = symTable.semanticError;
            return;
        }
        BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType),
                null);
        // Propagate readonly-ness from the expected type onto the constructed type.
        if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) {
            tableType.flags |= Flags.READONLY;
        }
        if (checkKeySpecifier(tableConstructorExpr, tableType)) {
            return;
        }
        // Inherit the expected key field list when the constructor did not specify one.
        if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) {
            tableType.fieldNameList = expectedTableType.fieldNameList;
        }
        resultType = tableType;
    } else if (applicableExpType.tag == TypeTags.UNION) {
        // Case 3: expected type is a union — try each member silently and require
        // exactly one compatible match.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();
        List<BType> matchingTypes = new ArrayList<>();
        BUnionType expectedType = (BUnionType) applicableExpType;
        for (BType memType : expectedType.getMemberTypes()) {
            dlog.resetErrorCount();
            BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr;
            if (this.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr);
            }
            BType resultType = checkExpr(clonedTableExpr, env, memType);
            if (resultType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(matchingTypes, resultType)) {
                matchingTypes.add(resultType);
            }
        }
        // Restore the diagnostic state captured before the exploratory checks.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (matchingTypes.isEmpty()) {
            BLangTableConstructorExpr exprToLog = tableConstructorExpr;
            if (this.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(tableConstructorExpr);
            }
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getInferredTableType(exprToLog));
        } else if (matchingTypes.size() != 1) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
        } else {
            // Exactly one match: re-check for real (with error logging) against it.
            resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0));
            return;
        }
        resultType = symTable.semanticError;
    } else {
        resultType = symTable.semanticError;
    }
}
/**
 * Infers a table type from a constructor's row literals, used when reporting
 * incompatible-type diagnostics.
 *
 * @param exprToLog the (possibly cloned) table constructor
 * @return the inferred table type, or {@code symTable.semanticError} if any row fails
 */
private BType getInferredTableType(BLangTableConstructorExpr exprToLog) {
    List<BType> rowTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env);
    for (BType rowType : rowTypes) {
        if (rowType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }
    return new BTableType(TypeTags.TABLE, inferTableMemberType(rowTypes, exprToLog), null);
}
/**
 * Validates the constructor's key specifier (if any) and records the key field
 * names on the table type.
 *
 * <p>Improvement: the key-name list was previously derived twice via
 * {@code getTableKeyNameList}; it is now computed once and reused.
 *
 * @param tableConstructorExpr the table constructor
 * @param tableType            the table type being built (mutated on success)
 * @return {@code true} when validation failed and {@code resultType} was set
 *         to {@code semanticError}; {@code false} otherwise
 */
private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) {
    if (tableConstructorExpr.tableKeySpecifier == null) {
        return false;
    }
    List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
    if (!validateTableKeyValue(fieldNameList, tableConstructorExpr.recordLiteralList)) {
        resultType = symTable.semanticError;
        return true;
    }
    tableType.fieldNameList = fieldNameList;
    return false;
}
/**
 * Folds the row types into a single member type: rows already assignable to
 * the accumulated union are absorbed; the rest widen it. A single effective
 * member collapses back to the first row type.
 *
 * @param memTypes the row types (may be empty)
 * @param expType  the expected table type, used only when there are no rows
 * @return the combined member type
 */
private BType inferTableMemberType(List<BType> memTypes, BType expType) {
    if (memTypes.isEmpty()) {
        return ((BTableType) expType).constraint;
    }
    LinkedHashSet<BType> distinctTypes = new LinkedHashSet<>();
    distinctTypes.add(memTypes.get(0));
    BUnionType memberUnion = BUnionType.create(null, distinctTypes);
    for (int i = 1; i < memTypes.size(); i++) {
        BType candidate = memTypes.get(i);
        if (!types.isAssignable(candidate, memberUnion)) {
            distinctTypes.add(candidate);
            memberUnion = BUnionType.create(null, distinctTypes);
        }
    }
    return memberUnion.getMemberTypes().size() == 1 ? memTypes.get(0) : memberUnion;
}
/**
 * Infers the inherent record row type for a table constructor with no expected
 * type: merges the fields of all row record types, combining per-field types
 * (including rest-field contributions from open rows that lack the field) and
 * deriving optional/required/readonly flags, then wraps the result in a fresh
 * record type.
 *
 * @param memTypes             the (record) types of the row literals
 * @param tableConstructorExpr the constructor, for its key specifier and position
 * @return the inferred inherent member record type
 */
private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) {
    BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
    List<String> keySpecifierFieldNames = new ArrayList<>();
    List<BType> restFieldTypes = new ArrayList<>();
    if (keySpecifier != null) {
        for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) {
            keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value);
        }
    }
    // Group every occurrence of each field name across the row types, and
    // collect the rest-field types of open (non-sealed) rows.
    LinkedHashMap<String, List<BField>> fieldNameToFields = new LinkedHashMap<>();
    for (BType memType : memTypes) {
        BRecordType member = (BRecordType) memType;
        for (Map.Entry<String, BField> entry : member.fields.entrySet()) {
            String key = entry.getKey();
            BField field = entry.getValue();
            if (fieldNameToFields.containsKey(key)) {
                fieldNameToFields.get(key).add(field);
            } else {
                fieldNameToFields.put(key, new ArrayList<>() {{
                    add(field);
                }});
            }
        }
        if (!member.sealed) {
            restFieldTypes.add(member.restFieldType);
        }
    }
    LinkedHashSet<BField> inferredFields = new LinkedHashSet<>();
    int memTypesSize = memTypes.size();
    for (Map.Entry<String, List<BField>> entry : fieldNameToFields.entrySet()) {
        String fieldName = entry.getKey();
        List<BField> fields = entry.getValue();
        List<BType> types = new ArrayList<>();
        for (BField field : fields) {
            types.add(field.getType());
        }
        // An open row that lacks this field may still carry it via its rest type.
        for (BType memType : memTypes) {
            BRecordType bMemType = (BRecordType) memType;
            if (bMemType.sealed || bMemType.fields.containsKey(fieldName)) {
                continue;
            }
            BType restFieldType = bMemType.restFieldType;
            types.add(restFieldType);
        }
        BField resultantField = createFieldWithType(fields.get(0), types);
        // Optional when any row marks it optional, or some row omits it entirely.
        boolean isOptional = hasOptionalFields(fields) || fields.size() != memTypesSize;
        if (isOptional) {
            resultantField.symbol.flags = Flags.OPTIONAL;
        } else if (keySpecifierFieldNames.contains(fieldName)) {
            // Key fields must be required and readonly.
            resultantField.symbol.flags = Flags.REQUIRED | Flags.READONLY;
        } else {
            resultantField.symbol.flags = Flags.REQUIRED;
        }
        inferredFields.add(resultantField);
    }
    return createTableConstraintRecordType(inferredFields, restFieldTypes, tableConstructorExpr.pos);
}
/**
 * Derives a copy of an existing {@code BField} whose type is the combined type
 * of the given candidates, backed by a fresh symbol that otherwise mirrors the
 * original's flags, name, package, owner, and position.
 *
 * @param field  the field to copy
 * @param bTypes the candidate types; their combination becomes the new field type
 * @return the new {@code BField}
 */
private BField createFieldWithType(BField field, List<BType> bTypes) {
    BType combinedType = getResultantType(bTypes);
    BVarSymbol sourceSymbol = field.symbol;
    BVarSymbol newSymbol = new BVarSymbol(sourceSymbol.flags, sourceSymbol.name, sourceSymbol.pkgID,
            combinedType, sourceSymbol.owner, sourceSymbol.pos, VIRTUAL);
    return new BField(field.name, field.pos, newSymbol);
}
/**
 * Combines a non-empty list of types into one representative type: duplicates
 * are dropped, nested unions (including ones behind type references) are
 * flattened, and the broadest representative type of the result is returned.
 *
 * @param bTypes the candidate types (size > 0)
 * @return a single representative type, possibly a union
 */
private BType getResultantType(List<BType> bTypes) {
    LinkedHashSet<BType> distinctTypes = new LinkedHashSet<>(bTypes);
    List<BType> flattened = new ArrayList<>(bTypes.size());
    addFlattenMemberTypes(flattened, distinctTypes);
    return getRepresentativeBroadType(flattened);
}
/**
 * Recursively flattens union members (including unions reached through type
 * references) into the accumulator list; type references to non-unions are
 * replaced by their referred type.
 *
 * @param flattenBTypes the accumulator receiving flattened member types
 * @param bTypes        the types to flatten
 */
private void addFlattenMemberTypes(List<BType> flattenBTypes, LinkedHashSet<BType> bTypes) {
    for (BType memberType : bTypes) {
        if (memberType.tag == TypeTags.UNION) {
            addFlattenMemberTypes(flattenBTypes, ((BUnionType) memberType).getMemberTypes());
            continue;
        }
        if (memberType.tag == TypeTags.TYPEREFDESC) {
            BType referred = Types.getReferredType(memberType);
            if (referred.tag == TypeTags.UNION) {
                addFlattenMemberTypes(flattenBTypes, ((BUnionType) referred).getMemberTypes());
            } else {
                flattenBTypes.add(referred);
            }
            continue;
        }
        flattenBTypes.add(memberType);
    }
}
/**
 * Reports whether any of the given fields is flagged optional.
 *
 * @param fields the fields to inspect
 * @return {@code true} if at least one field carries {@code Flag.OPTIONAL}
 */
private boolean hasOptionalFields(List<BField> fields) {
    return fields.stream().anyMatch(field -> field.symbol.getFlags().contains(Flag.OPTIONAL));
}
/**
 * Materializes the inferred table-row record type: creates the record symbol,
 * defines each inferred field in its scope, wires symbol and type together,
 * builds the matching type-definition AST node (with an init function), and
 * seals the record unless open rows contributed rest-field types.
 *
 * @param inferredFields the merged fields for the row type
 * @param restFieldTypes rest-field types from open rows; empty means sealed
 * @param pos            the constructor's position, used for the synthetic nodes
 * @return the completed record type
 */
private BRecordType createTableConstraintRecordType(Set<BField> inferredFields, List<BType> restFieldTypes,
                                                    Location pos) {
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);
    // Each field's symbol must be defined in the record symbol's scope.
    for (BField field : inferredFields) {
        recordSymbol.scope.define(field.name, field.symbol);
    }
    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = inferredFields.stream().collect(getFieldCollector());
    // Link symbol <-> type both ways before creating the type definition.
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
            names, symTable);
    TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, env);
    if (restFieldTypes.isEmpty()) {
        // No open rows contributed: the inferred record is closed.
        recordType.sealed = true;
        recordType.restFieldType = symTable.noType;
    } else {
        recordType.restFieldType = getResultantType(restFieldTypes);
    }
    return recordType;
}
/**
 * Builds a collector that maps fields by name into an insertion-ordered map,
 * failing fast on duplicate field names (which would indicate a merge bug).
 *
 * @return a collector from {@code BField} to {@code LinkedHashMap<String, BField>}
 */
private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() {
    return Collectors.toMap(
            field -> field.name.value,
            Function.identity(),
            (existing, duplicate) -> {
                throw new IllegalStateException(String.format("Duplicate key %s", existing));
            },
            LinkedHashMap::new);
}
/**
 * Validates that an inline-defined table type's constraint is a map-like
 * (anydata-mappable) type, logging an error and setting {@code resultType}
 * to {@code semanticError} when it is not.
 *
 * @param tableType the table type to validate
 * @return {@code true} if the constraint is acceptable
 */
private boolean validateTableType(BTableType tableType) {
    BType constraint = Types.getReferredType(tableType.constraint);
    if (!tableType.isTypeInlineDefined || types.isAssignable(constraint, symTable.mapAllType)) {
        return true;
    }
    dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
    resultType = symTable.semanticError;
    return false;
}
/**
 * When the expected table type declares key field names, checks that every row
 * literal provides constant values for them; tables without a declared key
 * field list are trivially valid.
 *
 * @param tableType      the expected table type
 * @param recordLiterals the constructor's row literals
 * @return {@code true} if the key fields (if any) are valid
 */
private boolean validateKeySpecifierInTableConstructor(BTableType tableType,
                                                       List<BLangRecordLiteral> recordLiterals) {
    List<String> fieldNameList = tableType.fieldNameList;
    return fieldNameList == null || validateTableKeyValue(fieldNameList, recordLiterals);
}
/**
 * Checks that every row literal supplies a constant-expression value for each
 * key field; the first violation is reported and aborts validation.
 *
 * @param keySpecifierFieldNames the key field names
 * @param recordLiterals         the constructor's row literals
 * @return {@code true} when all rows provide constant key values
 */
private boolean validateTableKeyValue(List<String> keySpecifierFieldNames,
                                      List<BLangRecordLiteral> recordLiterals) {
    for (String fieldName : keySpecifierFieldNames) {
        for (BLangRecordLiteral recordLiteral : recordLiterals) {
            BLangExpression keyValueField = getRecordKeyValueField(recordLiteral, fieldName);
            boolean isConstantKeyValue = keyValueField != null && isConstExpression(keyValueField);
            if (!isConstantKeyValue) {
                dlog.error(recordLiteral.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName);
                resultType = symTable.semanticError;
                return false;
            }
        }
    }
    return true;
}
/**
 * Reports whether an expression is a constant expression for the purposes of
 * table key values: literals and compound constructs built from them, constant
 * variable references, and parenthesized constant expressions.
 *
 * @param expression the expression to classify
 * @return {@code true} if the expression is considered constant
 */
private boolean isConstExpression(BLangExpression expression) {
    switch(expression.getKind()) {
        // Literal and constructor forms are constant by construction here;
        // their sub-expressions were already checked during type checking.
        case LITERAL:
        case NUMERIC_LITERAL:
        case STRING_TEMPLATE_LITERAL:
        case XML_ELEMENT_LITERAL:
        case XML_TEXT_LITERAL:
        case LIST_CONSTRUCTOR_EXPR:
        case TABLE_CONSTRUCTOR_EXPR:
        case RECORD_LITERAL_EXPR:
        case TYPE_CONVERSION_EXPR:
        case UNARY_EXPR:
        case BINARY_EXPR:
        case TYPE_TEST_EXPR:
        case TERNARY_EXPR:
            return true;
        case SIMPLE_VARIABLE_REF:
            // A variable reference is constant only if it resolves to a constant symbol.
            return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
        case GROUP_EXPR:
            // Parentheses are transparent: classify the wrapped expression.
            return isConstExpression(((BLangGroupExpr) expression).expression);
        default:
            return false;
    }
}
/**
 * Finds the expression supplying the value of {@code fieldName} in a row
 * record literal, looking through key-value fields, var-name shorthand fields,
 * and spread operators over record-typed expressions.
 *
 * @param recordLiteral the row literal to search
 * @param fieldName     the key field name
 * @return the value expression, the var-name field, the whole literal (for a
 *         spread match), or {@code null} when the field is not present
 */
private BLangExpression getRecordKeyValueField(BLangRecordLiteral recordLiteral,
                                               String fieldName) {
    for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) {
        if (recordField.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) recordField;
            if (fieldName.equals(recordKeyValueField.key.toString())) {
                return recordKeyValueField.valueExpr;
            }
        } else if (recordField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand `{x}` field: the field itself is the value reference.
            if (fieldName.equals(((BLangRecordVarNameField) recordField).variableName.value)) {
                return (BLangRecordLiteral.BLangRecordVarNameField) recordField;
            }
        } else if (recordField.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) recordField;
            BType spreadOpExprType = Types.getReferredType(spreadOperatorField.expr.getBType());
            if (spreadOpExprType.tag != TypeTags.RECORD) {
                continue;
            }
            // A spread of a record that declares the field counts as supplying it;
            // the enclosing literal is returned as the (constant) carrier.
            BRecordType recordType = (BRecordType) spreadOpExprType;
            for (BField recField : recordType.fields.values()) {
                if (fieldName.equals(recField.name.value)) {
                    return recordLiteral;
                }
            }
        }
    }
    return null;
}
/**
 * Validates the key specifier's field names against the table's row constraint:
 * each name must exist in the constraint and be readonly, required, and of an
 * anydata-assignable type. The first violation is reported (setting
 * {@code resultType} to {@code semanticError}) and stops validation.
 *
 * @param fieldNameList the key field names
 * @param constraint    the table's row constraint type
 * @param pos           the position to attach diagnostics to
 * @return {@code true} when every key field satisfies all constraints
 */
public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint,
                                    Location pos) {
    for (String fieldName : fieldNameList) {
        BField field = types.getTableConstraintField(constraint, fieldName);
        if (field == null) {
            dlog.error(pos,
                    DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint);
            resultType = symTable.semanticError;
            return false;
        }
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName);
            resultType = symTable.semanticError;
            return false;
        }
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName);
            resultType = symTable.semanticError;
            return false;
        }
        if (!types.isAssignable(field.type, symTable.anydataType)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint);
            resultType = symTable.semanticError;
            return false;
        }
    }
    return true;
}
/**
 * Validates a table constructor against its (possibly key-constrained) table
 * type: checks the explicit key specifier against the row constraint and the
 * type's declared key field list, then checks consistency between the key
 * specifier and the table's key type constraint (tuple/record/single-type).
 *
 * @param tableConstructorExpr the constructor under validation
 * @param tableType            the table type it must conform to
 * @return {@code true} on success; on failure diagnostics are logged and
 *         {@code resultType} is set to {@code semanticError}
 */
private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr,
                                             BTableType tableType) {
    BType constraintType = Types.getReferredType(tableType.constraint);
    List<String> fieldNameList = new ArrayList<>();
    boolean isKeySpecifierEmpty = tableConstructorExpr.tableKeySpecifier == null;
    if (!isKeySpecifierEmpty) {
        fieldNameList.addAll(getTableKeyNameList(tableConstructorExpr.tableKeySpecifier));
        // With no declared key field list on the type, the specifier's fields must
        // themselves be valid keys of the row constraint.
        if (tableType.fieldNameList == null &&
                !validateKeySpecifier(fieldNameList,
                        constraintType.tag != TypeTags.INTERSECTION ? constraintType :
                                ((BIntersectionType) constraintType).effectiveType,
                        tableConstructorExpr.tableKeySpecifier.pos)) {
            return false;
        }
        // A declared key field list must match the specifier exactly.
        if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) {
            dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH,
                    tableType.fieldNameList.toString(), fieldNameList.toString());
            resultType = symTable.semanticError;
            return false;
        }
    }
    BType keyTypeConstraint = tableType.keyTypeConstraint;
    if (keyTypeConstraint != null) {
        keyTypeConstraint = Types.getReferredType(keyTypeConstraint);
        // Expand the key constraint into its member key types.
        List<BType> memberTypes = new ArrayList<>();
        switch (keyTypeConstraint.tag) {
            case TypeTags.TUPLE:
                for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) {
                    memberTypes.add((BType) type);
                }
                break;
            case TypeTags.RECORD:
                // Only the record fields named by the key specifier participate.
                Map<String, BField> fieldList = ((BRecordType) keyTypeConstraint).getFields();
                memberTypes = fieldList.entrySet().stream()
                        .filter(e -> fieldNameList.contains(e.getKey())).map(entry -> entry.getValue().type)
                        .collect(Collectors.toList());
                if (memberTypes.isEmpty()) {
                    memberTypes.add(keyTypeConstraint);
                }
                break;
            default:
                memberTypes.add(keyTypeConstraint);
        }
        // key<never> with no specifier is explicitly allowed.
        if (isKeySpecifierEmpty && keyTypeConstraint.tag == TypeTags.NEVER) {
            return true;
        }
        if (isKeySpecifierEmpty ||
                tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) {
            if (isKeySpecifierEmpty) {
                dlog.error(tableConstructorExpr.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_EMPTY_FOR_PROVIDED_KEY_CONSTRAINT, memberTypes);
            } else {
                dlog.error(tableConstructorExpr.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT,
                        memberTypes, tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList);
            }
            resultType = symTable.semanticError;
            return false;
        }
        // Positionally match each specifier field's type against the key constraint.
        List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier.
                fieldNameIdentifierList;
        int index = 0;
        for (IdentifierNode identifier : fieldNameIdentifierList) {
            BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
            if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) {
                dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
                        fieldNameIdentifierList.toString(), memberTypes.toString());
                resultType = symTable.semanticError;
                return false;
            }
            index++;
        }
    }
    return true;
}
/**
 * Validates a table type whose row type is a map constraint: such a table may not carry a
 * key specifier or key type constraint (except inside lang library modules).
 * Sets {@code resultType} to the expected type, or to {@code semanticError} on violation.
 */
public void validateMapConstraintTable(BType expType) {
    if (expType != null) {
        BTableType tableType = (BTableType) expType;
        boolean hasKeyConstraint = tableType.fieldNameList != null || tableType.keyTypeConstraint != null;
        if (hasKeyConstraint && !expType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) {
            dlog.error(tableType.keyPos,
                    DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = expType;
}
/**
 * Extracts the plain string names of the key fields declared in a table key specifier.
 */
private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
    List<String> keyNames = new ArrayList<>(tableKeySpecifier.fieldNameIdentifierList.size());
    tableKeySpecifier.fieldNameIdentifierList.forEach(
            identifier -> keyNames.add(((BLangIdentifier) identifier).value));
    return keyNames;
}
/**
 * Derives the key type constraint of a table from its key field names and row (constraint)
 * type: the single field's type, or a tuple of the field types when there are several.
 * Returns {@code semanticError} when there is no key specifier or a named field is missing.
 */
private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
    if (fieldNames == null) {
        // No key specifier: no constraint can be derived.
        return symTable.semanticError;
    }

    List<BType> keyMemberTypes = new ArrayList<>(fieldNames.size());
    for (String fieldName : fieldNames) {
        BField constraintField = types.getTableConstraintField(constraintType, fieldName);
        if (constraintField == null) {
            // The named key field does not exist on the row type.
            return symTable.semanticError;
        }
        keyMemberTypes.add(constraintField.type);
    }

    return keyMemberTypes.size() == 1 ? keyMemberTypes.get(0) : new BTupleType(keyMemberTypes);
}
/**
 * Checks whether a list constructor expression is compatible with the given expected type
 * and returns the concrete type to type the constructor with, or {@code semanticError}.
 *
 * For unions, each member is tried silently and the constructor must match exactly one;
 * type references and intersections are unwrapped; otherwise the expected type is mapped
 * to a list-compatible form and dispatched on.
 */
private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Try each member type with diagnostics muted, collecting the compatible ones.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        int errorCount = this.dlog.errorCount();
        this.nonErrorLoggingCheck = true;
        this.dlog.mute();

        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                // An erroneous member suppresses the final incompatibility diagnostic.
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }

            BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }

            dlog.resetErrorCount();
            BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor);
            // Keep only members that checked cleanly and are not duplicates.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }

        // Restore the previous diagnostic-logging state.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }

        if (compatibleTypes.isEmpty()) {
            // No member matched: type-check against the inferred tuple type to produce a
            // useful diagnostic. Use a clone when in silent (exploratory) mode.
            BLangListConstructorExpr exprToLog = listConstructor;
            if (this.nonErrorLoggingCheck) {
                listConstructor.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(listConstructor);
            }

            BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType);
            if (!erroredExpType && inferredTupleType != symTable.semanticError) {
                dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType);
            }
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            // More than one member matched: the constructor is ambiguous.
            dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
            return symTable.semanticError;
        }

        // Exactly one member matched: re-check against it with diagnostics enabled.
        return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor);
    }

    if (tag == TypeTags.TYPEREFDESC) {
        return checkListConstructorCompatibility(Types.getReferredType(bType), listConstructor);
    }

    if (tag == TypeTags.INTERSECTION) {
        return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor);
    }

    BType possibleType = getListConstructorCompatibleNonUnionType(bType);
    switch (possibleType.tag) {
        case TypeTags.ARRAY:
            return checkArrayType(listConstructor, (BArrayType) possibleType);
        case TypeTags.TUPLE:
            return checkTupleType(listConstructor, (BTupleType) possibleType);
        case TypeTags.READONLY:
            return checkReadOnlyListType(listConstructor);
        case TypeTags.TYPEDESC:
            // In a typedesc context the constructor describes a type: collect the member
            // types and build the described (single or tuple) type.
            listConstructor.isTypedescExpr = true;
            List<BType> actualTypes = new ArrayList<>();
            for (BLangExpression expr : listConstructor.exprs) {
                if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
                    BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
                    BType spreadOpExprType = checkExpr(spreadOpExpr, env, symTable.noType);
                    actualTypes.addAll(getListConstSpreadOpMemberTypes(expr.pos, spreadOpExprType));
                    continue;
                }
                BType resultType = checkExpr(expr, env, symTable.noType);
                if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
                    actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
                } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
                } else {
                    actualTypes.add(resultType);
                }
            }
            if (actualTypes.size() == 1) {
                listConstructor.typedescType = actualTypes.get(0);
            } else {
                listConstructor.typedescType = new BTupleType(actualTypes);
            }
            return new BTypedescType(listConstructor.typedescType, null);
    }

    // The expected type is not list-compatible: log against the inferred tuple type.
    BLangListConstructorExpr exprToLog = listConstructor;
    if (this.nonErrorLoggingCheck) {
        listConstructor.cloneAttempt++;
        exprToLog = nodeCloner.cloneNode(listConstructor);
    }

    if (bType == symTable.semanticError) {
        // Don't log an incompatibility against an already-erroneous expected type;
        // still check the members so their diagnostics surface.
        getInferredTupleType(exprToLog, symTable.semanticError);
    } else {
        dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType,
                getInferredTupleType(exprToLog, symTable.noType));
    }

    return symTable.semanticError;
}
/**
 * Expands a spread operand into the member types it contributes to a list constructor.
 * Only fixed-length tuples and closed arrays have a statically known member list; anything
 * else is an error and contributes a single {@code semanticError} member.
 */
private List<BType> getListConstSpreadOpMemberTypes(Location spreadMemberPos, BType spreadOpExprType) {
    BType referredType = Types.getReferredType(spreadOpExprType);
    List<BType> memberTypes = new ArrayList<>();
    if (referredType.tag == TypeTags.TUPLE && isFixedLengthTuple((BTupleType) referredType)) {
        memberTypes.addAll(((BTupleType) referredType).tupleTypes);
    } else if (referredType.tag == TypeTags.ARRAY && ((BArrayType) referredType).state == BArrayState.CLOSED) {
        // A closed array of size n contributes n copies of its element type.
        BArrayType arrayType = (BArrayType) referredType;
        for (int i = 0; i < arrayType.size; i++) {
            memberTypes.add(arrayType.eType);
        }
    } else {
        dlog.error(spreadMemberPos, DiagnosticErrorCode.CANNOT_INFER_TYPE_FROM_SPREAD_OP);
        memberTypes.add(symTable.semanticError);
    }
    return memberTypes;
}
/**
 * Maps a non-union expected type to the concrete list-like type a list constructor can be
 * checked against; returns {@code semanticError} when the type is not list-compatible.
 */
private BType getListConstructorCompatibleNonUnionType(BType type) {
    int tag = type.tag;
    if (tag == TypeTags.ARRAY || tag == TypeTags.TUPLE || tag == TypeTags.READONLY
            || tag == TypeTags.TYPEDESC) {
        return type;
    }
    if (tag == TypeTags.INTERSECTION) {
        return ((BIntersectionType) type).effectiveType;
    }
    if (tag == TypeTags.TYPEREFDESC) {
        return Types.getReferredType(type);
    }
    // For the broad types, a mutable expected type maps to the predefined mutable array
    // type, while a readonly expected type maps to its immutable counterpart.
    boolean mutableTarget = !Symbols.isFlagOn(type.flags, Flags.READONLY);
    switch (tag) {
        case TypeTags.JSON:
            return mutableTarget ? symTable.arrayJsonType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANYDATA:
            return mutableTarget ? symTable.arrayAnydataType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANY:
            return mutableTarget ? symTable.arrayType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env,
                            symTable, anonymousModelHelper, names);
    }
    return symTable.semanticError;
}
/**
 * Type-checks a list constructor against an expected tuple type.
 *
 * First, for fixed-length tuples, verifies the constructor's statically-known length
 * (spread operands must themselves be fixed-length) fits the tuple, allowing trailing
 * members with filler values to be omitted. Then checks each member/spread operand
 * against the corresponding tuple member type, falling back to the rest type once the
 * fixed members are consumed. Returns the tuple type, or {@code semanticError}.
 */
private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) {
    List<BLangExpression> exprs = listConstructor.exprs;

    List<BType> memberTypes = tupleType.tupleTypes;
    int memberTypeSize = memberTypes.size();

    BType restType = tupleType.restType;

    if (isFixedLengthTuple(tupleType)) {
        // Compute the statically-known member count contributed by the constructor.
        int listExprSize = 0;
        for (BLangExpression expr : exprs) {
            if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
                listExprSize++;
                continue;
            }

            BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
            BType spreadOpType = checkExpr(spreadOpExpr, this.env);
            spreadOpType = Types.getReferredType(spreadOpType);

            switch (spreadOpType.tag) {
                case TypeTags.ARRAY:
                    int arraySize = ((BArrayType) spreadOpType).size;
                    if (arraySize >= 0) {
                        listExprSize += arraySize;
                        continue;
                    }

                    // Open array spread into a fixed-length tuple is not allowed.
                    dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
                    return symTable.semanticError;
                case TypeTags.TUPLE:
                    BTupleType tType = (BTupleType) spreadOpType;
                    if (isFixedLengthTuple(tType)) {
                        listExprSize += tType.tupleTypes.size();
                        continue;
                    }

                    dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_LENGTH_LIST_EXPECTED);
                    return symTable.semanticError;
            }
        }

        if (listExprSize < memberTypeSize) {
            // Fewer members than the tuple declares: the missing trailing members must
            // have filler values.
            for (int i = listExprSize; i < memberTypeSize; i++) {
                if (!types.hasFillerValue(memberTypes.get(i))) {
                    dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE,
                            memberTypes.get(i));
                    return symTable.semanticError;
                }
            }
        } else if (listExprSize > memberTypeSize) {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.TUPLE_AND_EXPRESSION_SIZE_DOES_NOT_MATCH);
            return symTable.semanticError;
        }
    }

    boolean errored = false;
    // Index of the next fixed (non-rest) member type to match against.
    int nonRestTypeIndex = 0;

    for (BLangExpression expr : exprs) {
        int remainNonRestCount = memberTypeSize - nonRestTypeIndex;
        if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            if (remainNonRestCount > 0) {
                errored |= exprIncompatible(memberTypes.get(nonRestTypeIndex), expr);
                nonRestTypeIndex++;
            } else {
                // Fixed members exhausted: remaining members go against the rest type.
                errored |= exprIncompatible(restType, expr);
            }
            continue;
        }

        BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
        BType spreadOpType = checkExpr(spreadOpExpr, this.env);
        BType spreadOpReferredType = Types.getReferredType(spreadOpType);

        switch (spreadOpReferredType.tag) {
            case TypeTags.ARRAY:
                BArrayType spreadOpArray = (BArrayType) spreadOpReferredType;
                if (spreadOpArray.state == BArrayState.CLOSED) {
                    // Closed array: its element type must match each fixed member it
                    // covers, and the rest type for any overflow.
                    for (int i = 0; i < spreadOpArray.size && nonRestTypeIndex < memberTypeSize;
                         i++, nonRestTypeIndex++) {
                        if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType,
                                memberTypes.get(nonRestTypeIndex))) {
                            return symTable.semanticError;
                        }
                    }

                    if (remainNonRestCount < spreadOpArray.size) {
                        if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType, restType)) {
                            return symTable.semanticError;
                        }
                    }
                    continue;
                }

                // Open array: cannot cover remaining fixed members of unknown count.
                if (remainNonRestCount > 0) {
                    dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_MEMBER_EXPECTED,
                            memberTypes.get(nonRestTypeIndex));
                    return symTable.semanticError;
                }

                if (types.typeIncompatible(spreadOpExpr.pos, spreadOpArray.eType, restType)) {
                    return symTable.semanticError;
                }
                break;
            case TypeTags.TUPLE:
                BTupleType spreadOpTuple = (BTupleType) spreadOpReferredType;
                int spreadOpMemberTypeSize = spreadOpTuple.tupleTypes.size();

                if (isFixedLengthTuple(spreadOpTuple)) {
                    // Fixed tuple: match members pairwise against fixed members, then the
                    // remainder against the rest type.
                    for (int i = 0; i < spreadOpMemberTypeSize && nonRestTypeIndex < memberTypeSize;
                         i++, nonRestTypeIndex++) {
                        if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i),
                                memberTypes.get(nonRestTypeIndex))) {
                            return symTable.semanticError;
                        }
                    }

                    for (int i = remainNonRestCount; i < spreadOpMemberTypeSize; i++) {
                        if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i), restType)) {
                            return symTable.semanticError;
                        }
                    }
                    continue;
                }

                // Open tuple: its fixed members must at least cover the remaining fixed
                // member types of the expected tuple.
                if (spreadOpMemberTypeSize < remainNonRestCount) {
                    dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INVALID_SPREAD_OP_FIXED_MEMBER_EXPECTED,
                            memberTypes.get(nonRestTypeIndex + spreadOpMemberTypeSize));
                    return symTable.semanticError;
                }

                for (int i = 0; nonRestTypeIndex < memberTypeSize; i++, nonRestTypeIndex++) {
                    if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i),
                            memberTypes.get(nonRestTypeIndex))) {
                        return symTable.semanticError;
                    }
                }

                for (int i = nonRestTypeIndex; i < spreadOpMemberTypeSize; i++) {
                    if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.tupleTypes.get(i), restType)) {
                        return symTable.semanticError;
                    }
                }

                // The spread tuple's own rest type must also fit the expected rest type.
                if (types.typeIncompatible(spreadOpExpr.pos, spreadOpTuple.restType, restType)) {
                    return symTable.semanticError;
                }
                break;
            default:
                dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_LIST_SPREAD_OP, spreadOpType);
                return symTable.semanticError;
        }
    }

    // Any unfilled fixed members must have filler values.
    while (nonRestTypeIndex < memberTypeSize) {
        if (!types.hasFillerValue(memberTypes.get(nonRestTypeIndex))) {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE,
                    memberTypes.get(nonRestTypeIndex));
            return symTable.semanticError;
        }
        nonRestTypeIndex++;
    }

    return errored ? symTable.semanticError : tupleType;
}
/**
 * A tuple is fixed-length when it has no rest type, or when its rest type can never hold a
 * value (never, or a structure with a required never member).
 */
private boolean isFixedLengthTuple(BTupleType bTupleType) {
    BType restType = bTupleType.restType;
    if (restType == null) {
        return true;
    }
    return types.isNeverTypeOrStructureTypeWithARequiredNeverMember(restType);
}
/**
 * Checks a list constructor against the {@code readonly} expected type. In error-logging
 * mode this infers a tuple type and runs the regular check so proper diagnostics surface;
 * in silent mode it only verifies each member is readonly-compatible.
 */
private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) {
    if (!this.nonErrorLoggingCheck) {
        BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType);
        return inferredType == symTable.semanticError ? symTable.semanticError :
                types.checkType(listConstructor, inferredType, symTable.readonlyType);
    }

    for (BLangExpression memberExpr : listConstructor.exprs) {
        BLangExpression exprToCheck = memberExpr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP
                ? ((BLangListConstructorSpreadOpExpr) memberExpr).expr : memberExpr;
        if (exprIncompatible(symTable.readonlyType, exprToCheck)) {
            return symTable.semanticError;
        }
    }
    return symTable.readonlyType;
}
/**
 * Returns true when {@code expr} does not type-check against {@code eType}. An expression
 * that was already checked is incompatible only if it previously errored; in silent mode a
 * clone is checked so the original AST node is left untouched.
 */
private boolean exprIncompatible(BType eType, BLangExpression expr) {
    if (expr.typeChecked) {
        return expr.getBType() == symTable.semanticError;
    }

    BLangExpression target = expr;
    if (this.nonErrorLoggingCheck) {
        expr.cloneAttempt++;
        target = nodeCloner.cloneNode(expr);
    }
    return checkExpr(target, this.env, eType) == symTable.semanticError;
}
/**
 * Convenience overload: checks the expressions with no particular expected type.
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) {
    return this.checkExprList(exprs, env, symTable.noType);
}
/**
 * Type-checks each expression against {@code expType}, temporarily switching this
 * checker's env/expType state and restoring both afterwards. Spread operands contribute
 * one type per member they expand to; other expressions contribute {@code resultType}.
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
    List<BType> memberTypes = new ArrayList<>();
    SymbolEnv prevEnv = this.env;
    BType prevExpType = this.expType;
    this.env = env;
    this.expType = expType;
    for (BLangExpression expr : exprs) {
        if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            BLangExpression spreadOpExpr = ((BLangListConstructorSpreadOpExpr) expr).expr;
            BType spreadOpExprType = checkExpr(spreadOpExpr, env, expType);
            memberTypes.addAll(getListConstSpreadOpMemberTypes(expr.pos, spreadOpExprType));
        } else {
            checkExpr(expr, this.env, expType);
            memberTypes.add(resultType);
        }
    }
    this.env = prevEnv;
    this.expType = prevExpType;
    return memberTypes;
}
/**
 * Infers a tuple type from the constructor's member expressions; any erroneous member
 * poisons the whole result. Only a {@code readonly} expected type forces the inferred
 * tuple to carry the readonly flag.
 */
private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) {
    List<BType> memberTypes = checkExprList(listConstructor.exprs, env, expType);
    for (BType memberType : memberTypes) {
        if (memberType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }

    BTupleType inferredTupleType = new BTupleType(memberTypes);
    if (expType.tag == TypeTags.READONLY) {
        inferredTupleType.flags |= Flags.READONLY;
    }
    return inferredTupleType;
}
/**
 * Type-checks a mapping constructor. With no contextually expected type (or a plain
 * `readonly` one) an inferred record type is defined first; an object expected type is
 * outright invalid. The final result may be an effective type with readonly fields applied.
 */
public void visit(BLangRecordLiteral recordLiteral) {
    int expTypeTag = Types.getReferredType(expType).tag;

    switch (expTypeTag) {
        case TypeTags.NONE:
        case TypeTags.READONLY:
            expType = defineInferredRecordType(recordLiteral, expType);
            break;
        case TypeTags.OBJECT:
            dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType);
            resultType = symTable.semanticError;
            return;
    }

    BType compatibleType = checkMappingConstructorCompatibility(expType, recordLiteral);
    resultType = getEffectiveMappingType(recordLiteral, compatibleType);
}
/**
 * If the mapping constructor marks fields {@code readonly} that the applicable mapping type
 * does not already declare readonly, builds and returns a fresh record type in which those
 * fields are readonly (registering a matching type definition); otherwise returns the
 * applicable mapping type unchanged.
 */
private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) {
    BType refType = Types.getReferredType(applicableMappingType);
    // Nothing to do for errors or records that are already fully readonly.
    if (applicableMappingType == symTable.semanticError ||
            (refType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags,
                    Flags.READONLY))) {
        return applicableMappingType;
    }

    // Collect constructor fields marked `readonly` that the applicable type does not
    // already declare readonly.
    Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>();
    LinkedHashMap<String, BField> applicableTypeFields =
            refType.tag == TypeTags.RECORD ? ((BRecordType) refType).fields :
                    new LinkedHashMap<>();

    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            continue;
        }

        String name;
        if (field.isKeyValueField()) {
            BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field;

            if (!keyValueField.readonly) {
                continue;
            }

            BLangExpression keyExpr = keyValueField.key.expr;
            if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                name = ((BLangSimpleVarRef) keyExpr).variableName.value;
            } else {
                // String-literal key.
                name = (String) ((BLangLiteral) keyExpr).value;
            }
        } else {
            BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;

            if (!varNameField.readonly) {
                continue;
            }

            name = varNameField.variableName.value;
        }

        // Skip fields the applicable type already declares readonly.
        if (applicableTypeFields.containsKey(name) &&
                Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) {
            continue;
        }

        readOnlyFields.put(name, field);
    }

    if (readOnlyFields.isEmpty()) {
        return applicableMappingType;
    }

    // Build a fresh record type carrying readonly symbols for the collected fields.
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    Location pos = recordLiteral.pos;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);

    LinkedHashMap<String, BField> newFields = new LinkedHashMap<>();

    for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) {
        RecordLiteralNode.RecordField field = readOnlyEntry.getValue();

        String key = readOnlyEntry.getKey();
        Name fieldName = names.fromString(key);

        BType readOnlyFieldType;
        if (field.isKeyValueField()) {
            readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType();
        } else {
            // Has to be a varname field at this point (spread fields were skipped above).
            readOnlyFieldType = ((BLangRecordVarNameField) field).getBType();
        }

        BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
            add(Flag.REQUIRED);
            add(Flag.READONLY);
        }}), fieldName, pkgID, readOnlyFieldType, recordSymbol,
                ((BLangNode) field).pos, VIRTUAL);
        newFields.put(key, new BField(fieldName, null, fieldSymbol));
        recordSymbol.scope.define(fieldName, fieldSymbol);
    }

    BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags);
    if (refType.tag == TypeTags.MAP) {
        // A map's constraint becomes the new record's rest field type.
        recordType.sealed = false;
        recordType.restFieldType = ((BMapType) refType).constraint;
    } else {
        // Copy the remaining fields over from the applicable record type.
        BRecordType applicableRecordType = (BRecordType) refType;
        boolean allReadOnlyFields = true;

        for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) {
            String fieldName = origEntry.getKey();
            BField field = origEntry.getValue();

            if (readOnlyFields.containsKey(fieldName)) {
                // Already defined above with a readonly symbol.
                continue;
            }

            BVarSymbol origFieldSymbol = field.symbol;
            long origFieldFlags = origFieldSymbol.flags;

            if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) {
                allReadOnlyFields = false;
            }

            BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID,
                    origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL);
            newFields.put(fieldName, new BField(field.name, null, fieldSymbol));
            recordSymbol.scope.define(field.name, fieldSymbol);
        }

        recordType.sealed = applicableRecordType.sealed;
        recordType.restFieldType = applicableRecordType.restFieldType;

        // A sealed record whose fields are all readonly is itself readonly.
        if (recordType.sealed && allReadOnlyFields) {
            recordType.flags |= Flags.READONLY;
            recordType.tsymbol.flags |= Flags.READONLY;
        }

    }

    recordType.fields = newFields;
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;

    // Register a type definition node for the synthesized record type.
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
            names, symTable);
    TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, env);

    if (refType.tag == TypeTags.RECORD) {
        // Keep a type reference back to the original record type on the new type node.
        BRecordType applicableRecordType = (BRecordType) refType;
        BTypeSymbol applicableRecordTypeSymbol = applicableRecordType.tsymbol;
        BLangUserDefinedType origTypeRef = new BLangUserDefinedType(
                ASTBuilderUtil.createIdentifier(
                        pos,
                        TypeDefBuilderHelper.getPackageAlias(env, pos.lineRange().filePath(),
                                applicableRecordTypeSymbol.pkgID)),
                ASTBuilderUtil.createIdentifier(pos, applicableRecordTypeSymbol.name.value));
        origTypeRef.pos = pos;
        origTypeRef.setBType(applicableRecordType);
        recordTypeNode.typeRefs.add(origTypeRef);
    } else if (refType.tag == TypeTags.MAP) {
        recordLiteral.expectedType = applicableMappingType;
    }

    return recordType;
}
/**
 * Checks whether a mapping constructor is compatible with the given expected type and
 * returns the concrete type to type it with, or {@code semanticError}.
 *
 * Unions are tried member-by-member with diagnostics muted and must match exactly one;
 * type references and intersections are unwrapped; otherwise the expected type is mapped
 * to a mapping-compatible form and dispatched on.
 */
private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Try each member type silently, collecting the compatible ones.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();

        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                // An erroneous member suppresses the final incompatibility diagnostic.
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }

            BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }

            dlog.resetErrorCount();
            BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType,
                    mappingConstructor);
            // Keep only members that checked cleanly and are not duplicates.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }

        // Restore the previous diagnostic-logging state.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }

        if (compatibleTypes.isEmpty()) {
            // No member matched; still validate the fields so their diagnostics surface.
            if (!erroredExpType) {
                reportIncompatibleMappingConstructorError(mappingConstructor, bType);
            }
            validateSpecifiedFields(mappingConstructor, symTable.semanticError);
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            // More than one member matched: ambiguous constructor.
            dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType);
            validateSpecifiedFields(mappingConstructor, symTable.semanticError);
            return symTable.semanticError;
        }

        // Exactly one member matched: re-check against it with diagnostics enabled.
        return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor);
    }

    if (tag == TypeTags.TYPEREFDESC) {
        // Preserve the reference type in the result unless it refers to a union or
        // intersection (which need their resolved compatible type).
        BType refType = Types.getReferredType(bType);
        BType compatibleType = checkMappingConstructorCompatibility(refType, mappingConstructor);
        return (refType.tag != TypeTags.UNION && refType.tag != TypeTags.INTERSECTION) ? bType : compatibleType;
    }

    if (tag == TypeTags.INTERSECTION) {
        return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor);
    }

    BType possibleType = getMappingConstructorCompatibleNonUnionType(bType);

    switch (possibleType.tag) {
        case TypeTags.MAP:
            return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType :
                    symTable.semanticError;
        case TypeTags.RECORD:
            // Both checks run unconditionally so all diagnostics are reported.
            boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType);

            boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType,
                    mappingConstructor.fields,
                    mappingConstructor.pos);

            return isSpecifiedFieldsValid && hasAllRequiredFields ? possibleType : symTable.semanticError;
        case TypeTags.READONLY:
            return checkReadOnlyMappingType(mappingConstructor);
    }
    reportIncompatibleMappingConstructorError(mappingConstructor, bType);
    validateSpecifiedFields(mappingConstructor, symTable.semanticError);
    return symTable.semanticError;
}
/**
 * Checks a mapping constructor against the {@code readonly} expected type. In
 * error-logging mode an inferred record type is defined and compatibility re-checked
 * against it; in silent mode each field value is only verified to be readonly-compatible.
 */
private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) {
    if (!this.nonErrorLoggingCheck) {
        BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType);
        return inferredType == symTable.semanticError ? symTable.semanticError :
                checkMappingConstructorCompatibility(inferredType, mappingConstructor);
    }

    for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
        // Pick the expression a field contributes: the value of a key-value field, the
        // spread operand, or the varname field itself.
        BLangExpression exprToCheck;
        if (field.isKeyValueField()) {
            exprToCheck = ((BLangRecordKeyValueField) field).valueExpr;
        } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
        } else {
            exprToCheck = (BLangRecordVarNameField) field;
        }

        if (exprIncompatible(symTable.readonlyType, exprToCheck)) {
            return symTable.semanticError;
        }
    }
    return symTable.readonlyType;
}
/**
 * Maps a non-union expected type to the concrete mapping-like type a mapping constructor
 * can be checked against; returns {@code semanticError} when not mapping-compatible.
 * For the broad types (json/anydata/any), a readonly expected type maps to the immutable
 * counterpart of the corresponding predefined map type.
 */
private BType getMappingConstructorCompatibleNonUnionType(BType type) {
    switch (type.tag) {
        case TypeTags.MAP:
        case TypeTags.RECORD:
        case TypeTags.READONLY:
            return type;
        case TypeTags.JSON:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.ANYDATA:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANY:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
        case TypeTags.TYPEREFDESC:
            return Types.getReferredType(type);
    }
    return symTable.semanticError;
}
/**
 * Returns true when the given type (after resolving type references) is a record or map,
 * i.e. a type a mapping constructor can produce.
 */
private boolean isMappingConstructorCompatibleType(BType type) {
    // Resolve the referred type once; the previous version resolved it twice, repeating
    // the reference-unwrapping work for no benefit.
    int tag = Types.getReferredType(type).tag;
    return tag == TypeTags.RECORD || tag == TypeTags.MAP;
}
/**
 * Logs the most helpful diagnostic for a mapping constructor that matched no expected
 * type. For a two-member union of a record and nil, the record's own field diagnostics are
 * reported instead of a generic incompatibility error.
 */
private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) {
    if (expType == symTable.semanticError) {
        return;
    }

    if (expType.tag != TypeTags.UNION) {
        dlog.error(mappingConstructorExpr.pos,
                DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
        return;
    }

    BUnionType unionType = (BUnionType) expType;
    BType[] memberTypes = types.getAllTypes(unionType, true).toArray(new BType[0]);

    // Special case: `T?` where T is a record — report the record's specific field errors.
    if (memberTypes.length == 2) {
        BRecordType recType = null;

        if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[0];
        } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[1];
        }

        if (recType != null) {
            validateSpecifiedFields(mappingConstructorExpr, recType);
            validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos);
            return;
        }
    }

    // If at least one member is mapping-compatible the constructor itself is at fault;
    // otherwise no member could ever accept a mapping constructor.
    for (BType bType : memberTypes) {
        if (isMappingConstructorCompatibleType(bType)) {
            dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR,
                    unionType);
            return;
        }
    }

    dlog.error(mappingConstructorExpr.pos,
            DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType);
}
/**
 * Checks every specified field of the mapping constructor against the possible type.
 * All fields are checked (even after a failure) so every diagnostic is reported.
 */
private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) {
    boolean allFieldsValid = true;
    BType referredPossibleType = Types.getReferredType(possibleType);
    for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
        if (checkMappingField(field, referredPossibleType) == symTable.semanticError) {
            allFieldsValid = false;
        }
    }
    return allFieldsValid;
}
/**
 * Verifies that every required field of the record type (whose type can actually hold a
 * value) is specified in the constructor, logging one diagnostic per missing field.
 */
private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields,
                                       Location pos) {
    HashSet<String> specifiedFieldNames = getFieldNames(specifiedFields);
    boolean hasAllRequiredFields = true;

    for (BField field : type.fields.values()) {
        if (specifiedFieldNames.contains(field.name.value)) {
            continue;
        }
        boolean required = Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED);
        if (required && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
            hasAllRequiredFields = false;
        }
    }
    return hasAllRequiredFields;
}
/**
 * Collects the statically-known field names contributed by the specified fields, including
 * the required field names a spread operand brings in. Computed keys contribute nothing.
 */
private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) {
    HashSet<String> fieldNames = new HashSet<>();
    for (RecordLiteralNode.RecordField specifiedField : specifiedFields) {
        if (specifiedField.isKeyValueField()) {
            String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField);
            // A computed key has no statically-known name.
            if (name != null) {
                fieldNames.add(name);
            }
        } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField));
        } else {
            fieldNames.addAll(getSpreadOpFieldRequiredFieldNames(
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField));
        }
    }
    return fieldNames;
}
/**
 * Returns the statically-known name of a key-value field's key, or null for a computed key
 * or a key expression that is neither a simple variable reference nor a literal.
 */
private String getKeyValueFieldName(BLangRecordKeyValueField field) {
    if (field.key.computedKey) {
        return null;
    }

    BLangExpression keyExpr = field.key.expr;
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) keyExpr).variableName.value;
    }
    if (keyKind == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) keyExpr).value;
    }
    return null;
}
// Returns the field name implied by a variable-name field (the `{name}` shorthand).
private String getVarNameFieldName(BLangRecordVarNameField field) {
    return field.variableName.value;
}
/**
 * Returns the non-optional field names a spread operand contributes. Only a record-typed
 * operand has statically-known field names; anything else contributes none.
 */
private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) {
    BType spreadType = Types.getReferredType(checkExpr(field.expr, env));
    if (spreadType.tag != TypeTags.RECORD) {
        return Collections.emptyList();
    }

    List<String> requiredFieldNames = new ArrayList<>();
    for (BField bField : ((BRecordType) spreadType).getFields().values()) {
        if (Symbols.isOptional(bField.symbol)) {
            continue;
        }
        requiredFieldNames.add(bField.name.value);
    }
    return requiredFieldNames;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // An optional worker name restricts the flush to that worker; resolve its symbol
    // when such a worker exists, otherwise report it as undefined.
    if (workerFlushExpr.workerIdentifier != null) {
        String workerName = workerFlushExpr.workerIdentifier.getValue();
        if (this.workerExists(this.env, workerName)) {
            BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName));
            if (symbol != symTable.notFoundSymbol) {
                workerFlushExpr.workerSymbol = symbol;
            }
        } else {
            this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
        }
    }
    // A flush expression evaluates to error or nil.
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(workerFlushExpr, actualType, expType);
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Resolve the receiving worker; an unresolved name leaves the worker type erroneous
    // (the missing-worker diagnostic is logged further below).
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier));
    if (symTable.notFoundSymbol.equals(symbol)) {
        syncSendExpr.workerType = symTable.semanticError;
    } else {
        syncSendExpr.workerSymbol = symbol;
        syncSendExpr.workerType = symbol.type;
    }

    syncSendExpr.env = this.env;
    checkExpr(syncSendExpr.expr, this.env);

    // Only cloneable values may be sent between workers.
    BType sentType = syncSendExpr.expr.getBType();
    if (!types.isAssignable(sentType, symTable.cloneableType)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND, sentType);
    }

    String workerName = syncSendExpr.workerIdentifier.getValue();
    if (!this.workerExists(this.env, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
    }

    syncSendExpr.expectedType = expType;
    // A sync send evaluates to nil unless the context expects something specific.
    resultType = expType == symTable.noType ? symTable.nilType : expType;
}
@Override
public void visit(BLangWorkerReceive workerReceiveExpr) {
    // Resolve the sending worker's symbol; failure marks the worker type erroneous.
    Name workerName = names.fromIdNode(workerReceiveExpr.workerIdentifier);
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, workerName);
    workerReceiveExpr.env = this.env;

    if (symTable.notFoundSymbol.equals(symbol)) {
        workerReceiveExpr.workerType = symTable.semanticError;
    } else {
        workerReceiveExpr.workerSymbol = symbol;
        workerReceiveExpr.workerType = symbol.type;
    }
    // A receive needs a contextually expected type to know what it produces.
    if (symTable.noType == this.expType) {
        this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
    }
    workerReceiveExpr.setBType(this.expType);
    resultType = this.expType;
}
/**
 * Returns true when a worker with the given name is visible in the environment: either the
 * default worker, or a symbol of a worker-derived future type.
 */
private boolean workerExists(SymbolEnv env, String workerName) {
    if (workerName.equals(DEFAULT_WORKER_NAME)) {
        return true;
    }
    BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName));
    if (symbol == this.symTable.notFoundSymbol || symbol.type.tag != TypeTags.FUTURE) {
        return false;
    }
    return ((BFutureType) symbol.type).workerDerivative;
}
/**
 * Type-checks a constant reference: resolves the constant's symbol in its package
 * and attaches an implicit cast to the expected type when one is required.
 */
@Override
public void visit(BLangConstRef constRef) {
    Name pkgAlias = names.fromIdNode(constRef.pkgAlias);
    Name constName = names.fromIdNode(constRef.variableName);
    constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env, pkgAlias, constName);
    types.setImplicitCastExpr(constRef, constRef.getBType(), expType);
    resultType = constRef.getBType();
}
// Type-checks a simple variable reference: `_` (ignore), an XMLNS prefix, a variable,
// a type name used as a value (typedesc), or a constant reference.
public void visit(BLangSimpleVarRef varRefExpr) {
BType actualType = symTable.semanticError;
Name varName = names.fromIdNode(varRefExpr.variableName);
if (varName == Names.IGNORE) {
// `_` accepts any value; synthesize a fresh virtual symbol for it.
varRefExpr.setBType(this.symTable.anyType);
varRefExpr.symbol = new BVarSymbol(0, true, varName,
names.originalNameFromIdNode(varRefExpr.variableName),
env.enclPkg.symbol.pkgID, varRefExpr.getBType(), env.scope.owner,
varRefExpr.pos, VIRTUAL);
resultType = varRefExpr.getBType();
return;
}
Name compUnitName = getCurrentCompUnit(varRefExpr);
varRefExpr.pkgSymbol =
symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName);
if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) {
varRefExpr.symbol = symTable.notFoundSymbol;
dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias);
}
if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
// An XML namespace prefix used as a value is a string.
actualType = symTable.stringType;
} else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env,
names.fromIdNode(varRefExpr.pkgAlias), varName);
// Inside an object body, fall back to an attached function of the enclosing type.
if (symbol == symTable.notFoundSymbol && env.enclType != null) {
Name objFuncName = names.fromString(Symbols
.getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value));
symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
env.enclType.getBType().tsymbol);
}
if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) {
BVarSymbol varSym = (BVarSymbol) symbol;
checkSelfReferences(varRefExpr.pos, env, varSym);
varRefExpr.symbol = varSym;
actualType = varSym.type;
// Record closure usage so the variable can be captured where needed.
markAndRegisterClosureVariable(symbol, varRefExpr.pos, env);
} else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) {
// A type name used as a value gets typedesc type.
actualType = symbol.type.tag == TypeTags.TYPEDESC ? symbol.type : new BTypedescType(symbol.type, null);
varRefExpr.symbol = symbol;
} else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
BConstantSymbol constSymbol = (BConstantSymbol) symbol;
varRefExpr.symbol = constSymbol;
BType symbolType = symbol.type;
BType expectedType = Types.getReferredType(expType);
// Use the constant's declared type when a finite type (or a union containing a
// matching finite type) is expected; otherwise fall back to its literal type.
// NOTE(review): `&&` binds tighter than `||` here, so `symbolType != noType` only
// guards the FINITE case — confirm the union case is intentionally unguarded.
if (symbolType != symTable.noType && expectedType.tag == TypeTags.FINITE ||
(expectedType.tag == TypeTags.UNION && types.getAllTypes(expectedType, true).stream()
.anyMatch(memType -> memType.tag == TypeTags.FINITE &&
types.isAssignable(symbolType, memType)))) {
actualType = symbolType;
} else {
actualType = constSymbol.literalType;
}
// Constants can never be assigned to.
if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) {
actualType = symTable.semanticError;
dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE);
}
} else {
varRefExpr.symbol = symbol;
logUndefinedSymbolError(varRefExpr.pos, varName.value);
}
}
// A closed array whose size is inferred (`[*]`) cannot take its size from a var ref.
if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
return;
}
resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Type-checks a record variable reference (record binding pattern used as an lvalue),
 * e.g. {@code {a, b, ...rest} = expr}. Builds an anonymous record type whose fields
 * mirror the referenced variables and sets it as the result type.
 */
@Override
public void visit(BLangRecordVarRef varRefExpr) {
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    // Create an anonymous record type symbol describing the shape of the reference.
    String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID);
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName),
            env.enclPkg.symbol.pkgID, null, env.scope.owner,
            varRefExpr.pos, SOURCE);
    symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env);
    boolean unresolvedReference = false;
    for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
        BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference;
        bLangVarReference.isLValue = true;
        checkExpr(recordRefField.variableReference, env);
        if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol ||
                !isValidVariableReference(recordRefField.variableReference)) {
            unresolvedReference = true;
            continue;
        }
        BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol;
        // Each resolved reference becomes a field of the anonymous record type.
        BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
                new BVarSymbol(0, names.fromIdNode(recordRefField.variableName),
                        names.originalNameFromIdNode(recordRefField.variableName),
                        env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol,
                        varRefExpr.pos, SOURCE));
        fields.put(field.name.value, field);
    }
    BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
    if (restParam != null) {
        checkExpr(restParam, env);
        // Fix: accumulate instead of overwrite — previously a valid rest param reset an
        // unresolved result from the field loop above (sibling BLangErrorVarRef uses ||).
        unresolvedReference = unresolvedReference || !isValidVariableReference(restParam);
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    BRecordType bRecordType = new BRecordType(recordSymbol);
    bRecordType.fields = fields;
    recordSymbol.type = bRecordType;
    varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, recordSymbol.getOriginalName(),
            env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos,
            SOURCE);
    // Derive the rest-field type of the anonymous record from the rest binding, if any.
    if (restParam == null) {
        bRecordType.sealed = true;
        bRecordType.restFieldType = symTable.noType;
    } else if (restParam.getBType() == symTable.semanticError) {
        bRecordType.restFieldType = symTable.mapType;
    } else {
        BType restFieldType;
        if (restParam.getBType().tag == TypeTags.RECORD) {
            restFieldType = ((BRecordType) restParam.getBType()).restFieldType;
        } else if (restParam.getBType().tag == TypeTags.MAP) {
            restFieldType = ((BMapType) restParam.getBType()).constraint;
        } else {
            restFieldType = restParam.getBType();
        }
        bRecordType.restFieldType = restFieldType;
    }
    resultType = bRecordType;
}
// Type-checks an error variable reference (error binding pattern as an lvalue):
// message, cause, detail entries, and the optional rest binding, then derives the
// resulting error type.
@Override
public void visit(BLangErrorVarRef varRefExpr) {
if (varRefExpr.typeNode != null) {
// An explicit error type reference switches to the indirect form.
BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env);
varRefExpr.setBType(bType);
checkIndirectErrorVarRef(varRefExpr);
resultType = bType;
return;
}
if (varRefExpr.message != null) {
varRefExpr.message.isLValue = true;
checkExpr(varRefExpr.message, env);
// The message binding must be able to hold a string.
if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) {
dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType,
varRefExpr.message.getBType());
}
}
if (varRefExpr.cause != null) {
varRefExpr.cause.isLValue = true;
checkExpr(varRefExpr.cause, env);
// The cause binding must be able to hold `error?`.
if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) {
dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType,
varRefExpr.cause.getBType());
}
}
boolean unresolvedReference = false;
for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
refItem.isLValue = true;
checkExpr(refItem, env);
if (!isValidVariableReference(refItem)) {
unresolvedReference = true;
continue;
}
// Detail bindings must be simple variable references, not member/field accesses.
if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
|| refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN,
refItem);
unresolvedReference = true;
continue;
}
if (refItem.symbol == null) {
unresolvedReference = true;
}
}
if (varRefExpr.restVar != null) {
varRefExpr.restVar.isLValue = true;
if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
checkExpr(varRefExpr.restVar, env);
unresolvedReference = unresolvedReference
|| varRefExpr.restVar.symbol == null
|| !isValidVariableReference(varRefExpr.restVar);
}
}
if (unresolvedReference) {
resultType = symTable.semanticError;
return;
}
// Determine the detail rest type implied by the rest binding (if any).
BType errorRefRestFieldType;
if (varRefExpr.restVar == null) {
errorRefRestFieldType = symTable.anydataOrReadonly;
} else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
&& ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
errorRefRestFieldType = symTable.anydataOrReadonly;
} else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
|| varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
errorRefRestFieldType = varRefExpr.restVar.getBType();
} else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) {
errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint;
} else {
dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
varRefExpr.restVar.getBType(), symTable.detailType);
resultType = symTable.semanticError;
return;
}
// Default rest type means the standard error detail; otherwise a map of the rest type.
BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly
? symTable.errorType.detailType
: new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC);
resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType);
}
// Type-checks the component references of an error binding that carries an explicit
// error type reference: detail entries, rest binding, message, and cause.
private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) {
for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
checkExpr(detailItem.expr, env);
// Check the named arg against the type of its own inner expression.
checkExpr(detailItem, env, detailItem.expr.getBType());
}
if (varRefExpr.restVar != null) {
checkExpr(varRefExpr.restVar, env);
}
if (varRefExpr.message != null) {
varRefExpr.message.isLValue = true;
checkExpr(varRefExpr.message, env);
}
if (varRefExpr.cause != null) {
varRefExpr.cause.isLValue = true;
checkExpr(varRefExpr.cause, env);
}
}
/**
 * Type-checks a tuple variable reference (list binding pattern as an lvalue),
 * e.g. {@code [a, b, ...rest] = expr}. Builds a tuple type from the member
 * references and applies the rest binding's element type, if present.
 */
@Override
public void visit(BLangTupleVarRef varRefExpr) {
    List<BType> memberTypes = new ArrayList<>(varRefExpr.expressions.size());
    for (int idx = 0; idx < varRefExpr.expressions.size(); idx++) {
        BLangExpression memberRef = varRefExpr.expressions.get(idx);
        ((BLangVariableReference) memberRef).isLValue = true;
        memberTypes.add(checkExpr(memberRef, env, symTable.noType));
    }
    BTupleType tupleType = new BTupleType(memberTypes);
    if (varRefExpr.restParam != null) {
        BLangExpression restExpr = (BLangExpression) varRefExpr.restParam;
        ((BLangVariableReference) restExpr).isLValue = true;
        BType restRefType = checkExpr(restExpr, env, symTable.noType);
        // The rest binding must itself be list-typed (array or tuple).
        if (restRefType.tag != TypeTags.ARRAY && restRefType.tag != TypeTags.TUPLE) {
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, restRefType);
            resultType = symTable.semanticError;
            return;
        }
        tupleType.restType = restRefType.tag == TypeTags.ARRAY
                ? ((BArrayType) restRefType).eType
                : restRefType;
    }
    resultType = types.checkType(varRefExpr, tupleType, expType);
}
/**
 * Recursively determines whether a (possibly multidimensional) array type has at
 * least one dimension whose length is still to be inferred (open sealed).
 *
 * @param arrayType the array type to inspect
 * @return true if at least one dimension is open sealed
 */
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    if (arrayType.state == BArrayState.INFERRED) {
        return true;
    }
    BType elementType = arrayType.eType;
    return elementType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) elementType);
}
/**
 * Walks outward from the given environment to the environment of the enclosing
 * invokable node, which is where closure variables of a lambda must be looked up
 * (the search must start outside the lambda's own scope).
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    SymbolEnv outer = env.enclEnv;
    if (outer.node == null) {
        return env;
    }
    switch (outer.node.getKind()) {
        case ARROW_EXPR:
        case ON_FAIL:
            return outer;
        case CLASS_DEFN:
            return outer.enclEnv;
        default:
            break;
    }
    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        return findEnclosingInvokableEnv(outer, encInvokable);
    }
    return env;
}
/**
 * Record-type variant of the enclosing-environment walk: climbs outward past the
 * given record type node's scope to the environment enclosing it.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) {
    if (env.enclEnv.node != null) {
        NodeKind enclosingKind = env.enclEnv.node.getKind();
        boolean boundaryNode = enclosingKind == NodeKind.ARROW_EXPR
                || enclosingKind == NodeKind.ON_FAIL
                || enclosingKind == NodeKind.CLASS_DEFN;
        if (boundaryNode) {
            return env.enclEnv;
        }
    }
    if (env.enclType != null && env.enclType == recordTypeNode) {
        return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode);
    }
    return env;
}
/**
 * Returns true when the given symbol matches one of the listed function parameters
 * by name and type tag.
 */
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    for (BLangSimpleVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
// Type-checks a namespace-prefixed field access (e.g. `x.ns:name`), valid only on XML.
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
checkFieldBasedAccess(nsPrefixedFieldBasedAccess, true);
}
/**
 * Type-checks a plain (non-namespace-prefixed) field access expression ({@code a.b}).
 */
@Override // added for consistency: the sibling NS-prefixed visit overload carries it
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    checkFieldBasedAccess(fieldAccessExpr, false);
}
// Shared implementation for field access (`a.b`) and optional field access (`a?.b`):
// checks the container, dispatches to the appropriate field-access check, and
// enforces readonly-update rules when the access is an lvalue.
private void checkFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr, boolean isNsPrefixed) {
markLeafNode(fieldAccessExpr);
BLangExpression containerExpression = fieldAccessExpr.expr;
// Propagate lvalue-ness down to the container so nested accesses are checked as lvalues.
if (containerExpression instanceof BLangValueExpression) {
((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue;
((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
fieldAccessExpr.isCompoundAssignmentLValue;
}
BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env));
// Namespace-prefixed access is only meaningful for XML values.
if (isNsPrefixed && !isXmlAccess(fieldAccessExpr)) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION);
resultType = symTable.semanticError;
return;
}
BType actualType;
if (fieldAccessExpr.optionalFieldAccess) {
// `a?.b` cannot appear on the left-hand side of an assignment.
if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
resultType = symTable.semanticError;
return;
}
actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
names.fromIdNode(fieldAccessExpr.field));
} else {
actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));
if (actualType != symTable.semanticError &&
(fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) {
if (isAllReadonlyTypes(varRefType)) {
// Readonly object fields may still be assigned inside the object's init method.
if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
varRefType);
resultType = symTable.semanticError;
return;
}
} else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) &&
isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
fieldAccessExpr.field.value, varRefType);
resultType = symTable.semanticError;
return;
}
}
}
resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
/**
 * Returns true if the given type — including every member of a union — is readonly.
 */
private boolean isAllReadonlyTypes(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isAllReadonlyTypes(memberType)) {
                return false;
            }
        }
        return true;
    }
    return Symbols.isFlagOn(type.flags, Flags.READONLY);
}
/**
 * Returns true when the current enclosing invokable is the given object type's
 * {@code init} method, where initializing readonly fields is still permitted.
 */
private boolean isInitializationInInit(BType type) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) ((BObjectType) type).tsymbol;
    BAttachedFunction initFunc = objectSymbol.initializerFunc;
    if (initFunc == null || env.enclInvokable == null) {
        return false;
    }
    return env.enclInvokable.symbol == initFunc.symbol;
}
// Returns true when assigning to `fieldName` on `type` would update a readonly
// record field (or a readonly record), considering every member of a union.
private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) {
if (Types.getReferredType(type).tag == TypeTags.RECORD) {
// NOTE(review): flags are read from the outer type, not the referred type —
// presumably intentional since readonly-ness is carried on the reference; confirm.
if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
return true;
}
BRecordType recordType = (BRecordType) Types.getReferredType(type);
for (BField field : recordType.fields.values()) {
if (!field.name.value.equals(fieldName)) {
continue;
}
return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
}
// Unknown field: a sealed record can never gain it, so the update is invalid.
return recordType.sealed;
}
// Non-record case: the cast below assumes `type` is a union here — TODO confirm
// no other tags can reach this point from the record-subtype callers.
boolean allInvalidUpdates = true;
for (BType memberType : ((BUnionType) Types.getReferredType(type)).getMemberTypes()) {
if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) {
allInvalidUpdates = false;
}
}
return allInvalidUpdates;
}
/**
 * Returns true when the field access operates on an XML value — either directly,
 * or through a lax field access whose union type may contain XML.
 */
private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression accessedExpr = fieldAccessExpr.expr;
    BType accessedType = Types.getReferredType(accessedExpr.getBType());
    int typeTag = accessedType.tag;
    if (typeTag == TypeTags.XML || typeTag == TypeTags.XML_ELEMENT) {
        return true;
    }
    boolean laxFieldAccess = accessedExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
            && hasLaxOriginalType((BLangFieldBasedAccess) accessedExpr);
    if (laxFieldAccess && typeTag == TypeTags.UNION) {
        Set<BType> memberTypes = ((BUnionType) accessedType).getMemberTypes();
        return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType);
    }
    return false;
}
// Type-checks a member access expression (`a[i]`): validates container and index,
// and enforces readonly-update rules when the access is an lvalue.
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
markLeafNode(indexBasedAccessExpr);
BLangExpression containerExpression = indexBasedAccessExpr.expr;
// Member access is not supported on a typedesc literal.
if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
((BLangTypedescExpr) containerExpression).typeNode);
resultType = symTable.semanticError;
return;
}
// Propagate lvalue-ness down to the container.
if (containerExpression instanceof BLangValueExpression) {
((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue;
((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
indexBasedAccessExpr.isCompoundAssignmentLValue;
}
// Skip re-checking a container that is already typed as string.
boolean isStringValue = containerExpression.getBType() != null
&& Types.getReferredType(containerExpression.getBType()).tag == TypeTags.STRING;
if (!isStringValue) {
checkExpr(containerExpression, this.env, symTable.noType);
}
// Multi-key access (`t[k1, k2]`) is only valid on tables.
if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY &&
Types.getReferredType(containerExpression.getBType()).tag != TypeTags.TABLE) {
dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED,
containerExpression.getBType());
resultType = symTable.semanticError;
return;
}
BType actualType = checkIndexAccessExpr(indexBasedAccessExpr);
BType exprType = containerExpression.getBType();
BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
// For lvalues, reject updates to readonly containers and readonly record fields.
if (actualType != symTable.semanticError &&
(indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) {
if (isAllReadonlyTypes(exprType)) {
dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
exprType);
resultType = symTable.semanticError;
return;
} else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && isConstExpr(indexExpr) &&
isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) {
dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
getConstFieldName(indexExpr), exprType);
resultType = symTable.semanticError;
return;
}
}
// Lvalues keep the actual type as-is; rvalues are checked against the expected type.
if (indexBasedAccessExpr.isLValue) {
indexBasedAccessExpr.originalType = actualType;
indexBasedAccessExpr.setBType(actualType);
resultType = actualType;
return;
}
this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
}
/**
 * Type-checks a function or method invocation. Plain function calls are delegated to
 * {@code checkFunctionInvocationExpr}; attached-expression invocations type-check the
 * receiver first and then dispatch on its type.
 */
@Override // added for consistency: sibling visit methods in this visitor carry it
public void visit(BLangInvocation iExpr) {
    if (iExpr.expr == null) {
        // Plain function call (no attached expression).
        checkFunctionInvocationExpr(iExpr);
        return;
    }
    if (invalidModuleAliasUsage(iExpr)) {
        return;
    }
    // Method call / field function-pointer call: type-check the receiver first.
    checkExpr(iExpr.expr, this.env, symTable.noType);
    BType varRefType = iExpr.expr.getBType();
    visitInvocation(iExpr, varRefType);
}
/**
 * Dispatches an attached invocation by the receiver's type: object methods, record
 * field function pointers, unwrapped type references/intersections, or lang-lib
 * methods for all other (non-error) types.
 */
private void visitInvocation(BLangInvocation iExpr, BType varRefType) {
    int typeTag = varRefType.tag;
    if (typeTag == TypeTags.OBJECT) {
        checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType);
    } else if (typeTag == TypeTags.RECORD) {
        checkFieldFunctionPointer(iExpr, this.env);
    } else if (typeTag == TypeTags.NONE) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name);
    } else if (typeTag == TypeTags.TYPEREFDESC) {
        // Unwrap the type reference and retry.
        visitInvocation(iExpr, Types.getReferredType(varRefType));
    } else if (typeTag == TypeTags.INTERSECTION) {
        visitInvocation(iExpr, ((BIntersectionType) varRefType).effectiveType);
    } else if (typeTag != TypeTags.SEMANTIC_ERROR) {
        // Anything else may still resolve to a lang-lib method.
        checkInLangLib(iExpr, varRefType);
    }
}
// Type-checks an error constructor (`error(msg, cause, k = v, ...)` or
// `error:Type(...)`): gathers candidate error types, infers the detail type from the
// named args, validates the detail mapping, and checks against the expected type.
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef;
if (userProvidedTypeRef != null) {
symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR);
}
validateErrorConstructorPositionalArgs(errorConstructorExpr);
// Collect each candidate's detail type (referred) for inference.
List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr);
List<BType> errorDetailTypes = new ArrayList<>(expandedCandidates.size());
for (BType expandedCandidate : expandedCandidates) {
BType detailType = ((BErrorType) Types.getReferredType(expandedCandidate)).detailType;
errorDetailTypes.add(Types.getReferredType(detailType));
}
BType detailCandidate;
if (errorDetailTypes.size() == 1) {
detailCandidate = errorDetailTypes.get(0);
} else {
detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes));
}
// Silently type-check the named args as a record literal to pick the matching candidate.
BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr);
BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env);
int index = errorDetailTypes.indexOf(inferredDetailType);
BType selectedCandidate = index < 0 ? symTable.semanticError : expandedCandidates.get(index);
// Fast path: inference picked a candidate consistent with any user-provided type ref.
if (selectedCandidate != symTable.semanticError
&& (userProvidedTypeRef == null
|| Types.getReferredType(userProvidedTypeRef.getBType()) == Types.getReferredType(selectedCandidate))) {
checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType);
resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType,
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
return;
}
// Ambiguous expected type and no explicit type ref: cannot infer the error type.
if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) {
dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType);
}
// Fall back to the user-provided type, a single candidate, or plain `error`.
boolean validTypeRefFound = false;
BErrorType errorType;
if (userProvidedTypeRef != null
&& Types.getReferredType(userProvidedTypeRef.getBType()).tag == TypeTags.ERROR) {
errorType = (BErrorType) Types.getReferredType(userProvidedTypeRef.getBType());
validTypeRefFound = true;
} else if (expandedCandidates.size() == 1) {
errorType = (BErrorType) Types.getReferredType(expandedCandidates.get(0));
} else {
errorType = symTable.errorType;
}
List<BLangNamedArgsExpression> namedArgs =
checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType);
BType detailType = errorType.detailType;
if (Types.getReferredType(detailType).tag == TypeTags.MAP) {
// Map detail: every named arg must fit the map's constraint type.
BType errorDetailTypeConstraint = ((BMapType) Types.getReferredType(detailType)).constraint;
for (BLangNamedArgsExpression namedArgExpr: namedArgs) {
if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) {
dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType());
}
}
} else if (Types.getReferredType(detailType).tag == TypeTags.RECORD) {
// Record detail: match named args against fields; track missing required fields.
BRecordType targetErrorDetailRec = (BRecordType) Types.getReferredType(errorType.detailType);
LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream()
.filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED)
.map(f -> f.name.value)
.collect(Collectors.toCollection(LinkedList::new));
LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields;
for (BLangNamedArgsExpression namedArg : namedArgs) {
BField field = targetFields.get(namedArg.name.value);
Location pos = namedArg.pos;
if (field == null) {
if (targetErrorDetailRec.sealed) {
dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC,
namedArg.name, targetErrorDetailRec);
} else if (targetFields.isEmpty()
&& !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE,
namedArg.name, targetErrorDetailRec);
}
} else {
missingRequiredFields.remove(namedArg.name.value);
if (!types.isAssignable(namedArg.expr.getBType(), field.type)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
namedArg.name, field.type, namedArg.expr.getBType());
}
}
}
for (String requiredField : missingRequiredFields) {
dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField);
}
}
if (userProvidedTypeRef != null) {
errorConstructorExpr.setBType(Types.getReferredType(userProvidedTypeRef.getBType()));
} else {
errorConstructorExpr.setBType(errorType);
}
// Final assignability check against the expected type.
BType resolvedType = errorConstructorExpr.getBType();
if (resolvedType != symTable.semanticError && expType != symTable.noType &&
!types.isAssignable(resolvedType, expType)) {
if (validTypeRefFound) {
dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
expType, userProvidedTypeRef);
} else {
dlog.error(errorConstructorExpr.pos,
DiagnosticErrorCode.ERROR_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
}
resultType = symTable.semanticError;
return;
}
resultType = resolvedType;
}
/**
 * Type-checks the positional args of an error constructor: the first is the message
 * (checked against string), the optional second is the cause (checked against error?).
 */
private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) {
    if (errorConstructorExpr.positionalArgs.isEmpty()) {
        return;
    }
    checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType);
    if (errorConstructorExpr.positionalArgs.size() > 1) {
        checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType);
    }
}
// Type-checks an expression with diagnostics suppressed: mutes the log and restores
// the error count afterwards, so speculative checks leave no trace. Only unmutes if
// this call was the outermost silent check.
private BType checkExprSilent(BLangExpression expr, BType expType, SymbolEnv env) {
boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
this.nonErrorLoggingCheck = true;
int errorCount = this.dlog.errorCount();
this.dlog.mute();
BType type = checkExpr(expr, env, expType);
this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
dlog.setErrorCount(errorCount);
// Nested silent checks must not unmute the log for their enclosing check.
if (!prevNonErrorLoggingCheck) {
this.dlog.unmute();
}
return type;
}
/**
 * Builds a synthetic record literal from the error constructor's named args so the
 * detail mapping can be type-checked as an ordinary record value.
 */
private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangRecordLiteral detailLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) {
        // The arg name becomes a string-literal key; the arg value is reused directly.
        BLangLiteral keyLiteral = new BLangLiteral();
        keyLiteral.value = namedArg.getName().value;
        keyLiteral.setBType(symTable.stringType);
        BLangRecordKeyValueField keyValueField =
                (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
        keyValueField.key = new BLangRecordKey(keyLiteral);
        keyValueField.valueExpr = (BLangExpression) namedArg.getExpression();
        detailLiteral.fields.add(keyValueField);
    }
    return detailLiteral;
}
// Collects the candidate error types for an error constructor: the user-provided type
// reference when present and valid, otherwise candidates derived from the expected type,
// falling back to the plain `error` type.
private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef;
if (errorTypeRef == null) {
// No explicit `error:Type(...)` reference: infer candidates from the expected type.
if (Types.getReferredType(expType).tag == TypeTags.ERROR) {
return List.of(expType);
} else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) {
return expandExpectedErrorTypes(expType);
}
} else {
BType errorType = Types.getReferredType(errorTypeRef.getBType());
if (errorType.tag != TypeTags.ERROR) {
// A prior semantic error already carries a diagnostic; don't report twice.
if (errorType.tag != TypeTags.SEMANTIC_ERROR) {
dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef);
errorConstructorExpr.errorTypeRef.setBType(symTable.semanticError);
}
} else {
return List.of(errorTypeRef.getBType());
}
}
return List.of(symTable.errorType);
}
/**
 * Expands an expected type into the list of error-type candidates it may match:
 * every error-assignable member of a union (unwrapping intersections to their
 * effective type), or the type itself when it is error-assignable.
 */
private List<BType> expandExpectedErrorTypes(BType candidateType) {
    BType referredType = Types.getReferredType(candidateType);
    List<BType> expandedCandidates = new ArrayList<>();
    if (referredType.tag != TypeTags.UNION) {
        if (types.isAssignable(candidateType, symTable.errorType)) {
            expandedCandidates.add(referredType.tag == TypeTags.INTERSECTION
                    ? ((BIntersectionType) referredType).effectiveType
                    : candidateType);
        }
        return expandedCandidates;
    }
    for (BType memberType : ((BUnionType) referredType).getMemberTypes()) {
        BType referredMember = Types.getReferredType(memberType);
        if (!types.isAssignable(referredMember, symTable.errorType)) {
            continue;
        }
        expandedCandidates.add(referredMember.tag == TypeTags.INTERSECTION
                ? ((BIntersectionType) referredMember).effectiveType
                : referredMember);
    }
    return expandedCandidates;
}
/**
 * Type-checks an action invocation (e.g. a remote/client action call). Plain calls
 * are delegated to {@code checkFunctionInvocationExpr}; otherwise the receiver is
 * type-checked first and the invocation is dispatched on its type.
 */
@Override // added for consistency: sibling visit methods in this visitor carry it
public void visit(BLangInvocation.BLangActionInvocation aInv) {
    if (aInv.expr == null) {
        checkFunctionInvocationExpr(aInv);
        return;
    }
    if (invalidModuleAliasUsage(aInv)) {
        return;
    }
    checkExpr(aInv.expr, this.env, symTable.noType);
    BLangExpression varRef = aInv.expr;
    checkActionInvocation(aInv, varRef.getBType());
}
// Dispatches an action invocation by the receiver's type: object actions, record
// field function pointers, or unwrapped type references; anything else is invalid.
private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BType type) {
switch (type.tag) {
case TypeTags.OBJECT:
checkActionInvocation(aInv, (BObjectType) type);
break;
case TypeTags.RECORD:
// A record field holding a function pointer may be invoked like a method.
checkFieldFunctionPointer(aInv, this.env);
break;
case TypeTags.NONE:
dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name);
resultType = symTable.semanticError;
break;
case TypeTags.TYPEREFDESC:
// Unwrap the type reference and retry.
checkActionInvocation(aInv, Types.getReferredType(type));
break;
case TypeTags.SEMANTIC_ERROR:
// NOTE(review): SEMANTIC_ERROR falls through to the default error case, producing
// an extra diagnostic on an already-errored type — confirm this is intended
// (the sibling visitInvocation breaks silently for SEMANTIC_ERROR).
default:
dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, type);
resultType = symTable.semanticError;
break;
}
}
/**
 * Reports an error when the invocation carries a module alias in a position where
 * none is allowed.
 *
 * @return true when an invalid alias was present (an error has been logged)
 */
private boolean invalidModuleAliasUsage(BLangInvocation invocation) {
    if (names.fromIdNode(invocation.pkgAlias) == Names.EMPTY) {
        return false;
    }
    dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE);
    return true;
}
// Type-checks a let expression: creates a dedicated scope symbol and environment,
// analyzes each let-variable declaration in it, then checks the body expression
// against the expected type.
public void visit(BLangLetExpression letExpression) {
BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())),
new Name(String.format("$let_symbol_%d$", letCount++)),
env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner,
letExpression.pos);
letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol);
// Let-variable declarations are analyzed in the let expression's own scope.
for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env);
}
BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType);
types.checkType(letExpression, exprType, this.expType);
}
/**
 * Handles an invocation on a non-object value by resolving it as a lang-lib method,
 * then validates immutable-value update and list-size-change rules.
 */
private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, Types.getReferredType(varRefType));
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value,
                iExpr.expr.getBType());
        resultType = symTable.semanticError;
        return;
    }
    // Only check size-change rules when the readonly-update check did not already fail.
    if (!checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) {
        checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType);
    }
}
/**
 * Reports an error when a mutating lang-lib function is invoked on a readonly value.
 *
 * @return true when the call is invalid and an error has been logged
 */
private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType,
                                                 BSymbol langLibMethodSymbol) {
    if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) {
        return false;
    }
    String packageId = langLibMethodSymbol.pkgID.name.value;
    if (!modifierFunctions.containsKey(packageId)) {
        return false;
    }
    String funcName = langLibMethodSymbol.name.value;
    if (!modifierFunctions.get(packageId).contains(funcName)) {
        return false;
    }
    // Exempt cases: mergeJson on a non-map value, strip on an XML value.
    BType referredVarRefType = Types.getReferredType(varRefType);
    if (funcName.equals("mergeJson") && referredVarRefType.tag != TypeTags.MAP) {
        return false;
    }
    if (funcName.equals("strip") && TypeTags.isXMLTypeTag(referredVarRefType.tag)) {
        return false;
    }
    dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
    resultType = symTable.semanticError;
    return true;
}
/**
 * Returns true when the type denotes a list of fixed length: a closed array, a tuple
 * without a rest type, or a union whose members are all fixed-length lists.
 */
private boolean isFixedLengthList(BType type) {
    if (type.tag == TypeTags.ARRAY) {
        return ((BArrayType) type).state != BArrayState.OPEN;
    }
    if (type.tag == TypeTags.TUPLE) {
        return ((BTupleType) type).restType == null;
    }
    if (type.tag != TypeTags.UNION) {
        return false;
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isFixedLengthList(memberType)) {
            return false;
        }
    }
    return true;
}
private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) {
String invocationName = iExpr.name.getValue();
if (!listLengthModifierFunctions.contains(invocationName)) {
return;
}
if (isFixedLengthList(varRefType)) {
dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName,
varRefType);
resultType = symTable.semanticError;
return;
}
if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName,
varRefType);
resultType = symTable.semanticError;
return;
}
}
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
if ((varRefType.tag == TypeTags.TUPLE) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0) &&
hasDifferentTypeThanRest((BTupleType) varRefType)) {
return true;
}
if ((varRefType.tag == TypeTags.UNION) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)) {
BUnionType unionVarRef = (BUnionType) varRefType;
boolean allMemberAreFixedShapeTuples = true;
for (BType member : unionVarRef.getMemberTypes()) {
if (member.tag != TypeTags.TUPLE) {
allMemberAreFixedShapeTuples = false;
break;
}
if (!hasDifferentTypeThanRest((BTupleType) member)) {
allMemberAreFixedShapeTuples = false;
break;
}
}
return allMemberAreFixedShapeTuples;
}
return false;
}
private boolean hasDifferentTypeThanRest(BTupleType tupleType) {
if (tupleType.restType == null) {
return false;
}
for (BType member : tupleType.getTupleTypes()) {
if (!types.isSameType(tupleType.restType, member)) {
return true;
}
}
return false;
}
private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) {
BType type = checkExpr(iExpr.expr, env);
BLangIdentifier invocationIdentifier = iExpr.name;
if (type == symTable.semanticError) {
return false;
}
BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier),
Types.getReferredType(type).tsymbol);
if (fieldSymbol == symTable.notFoundSymbol) {
checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD,
invocationIdentifier, type);
return false;
}
if (fieldSymbol.kind != SymbolKind.FUNCTION) {
checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD,
fieldSymbol.type);
return false;
}
iExpr.symbol = fieldSymbol;
iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType);
checkInvocationParamAndReturnType(iExpr);
iExpr.functionPointerInvocation = true;
return true;
}
private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos,
DiagnosticErrorCode errCode, Object... diagMsgArgs) {
BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
if (langLibMethodSymbol == symTable.notFoundSymbol) {
dlog.error(pos, errCode, diagMsgArgs);
resultType = symTable.semanticError;
} else {
checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol);
}
}
    /**
     * Type-checks an object-constructor expression (an anonymous `object { ... }` value).
     * Defines the (possibly cloned) class, propagates type-id sets from the expected type,
     * analyzes the class body in a dedicated env, and finally checks the implicit `new` call.
     */
    @Override
    public void visit(BLangObjectConstructorExpression objectCtorExpression) {
        BLangClassDefinition classNode = objectCtorExpression.classNode;
        // Remember the enclosing env so closures captured by the OCE can be resolved later.
        classNode.oceEnvData.capturedClosureEnv = env;
        BLangClassDefinition originalClass = classNode.oceEnvData.originalClass;
        if (originalClass.cloneRef != null && !objectCtorExpression.defined) {
            // Re-check of a cloned AST: define the cloned class once, then mark it defined.
            classNode = (BLangClassDefinition) originalClass.cloneRef;
            symbolEnter.defineClassDefinition(classNode, env);
            objectCtorExpression.defined = true;
        }
        BObjectType objectType;
        if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
            objectType = (BObjectType) objectCtorExpression.classNode.getBType();
            if (Types.getReferredType(objectCtorExpression.expectedType).tag == TypeTags.OBJECT) {
                // Inherit the distinct type-id set directly from the expected object type.
                BObjectType expObjType = (BObjectType) types
                        .getReferredType(objectCtorExpression.expectedType);
                objectType.typeIdSet = expObjType.typeIdSet;
            } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) {
                // Expected type is a union/other: it must determine a single type-id set.
                if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
                    dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
                            objectCtorExpression.expectedType);
                    resultType = symTable.semanticError;
                    return;
                }
            }
        }
        BLangTypeInit cIExpr = objectCtorExpression.typeInit;
        BType actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
        if (actualType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        BObjectType actualObjectType = (BObjectType) actualType;
        List<BLangType> typeRefs = classNode.typeRefs;
        SymbolEnv typeDefEnv = SymbolEnv.createObjectConstructorObjectEnv(classNode, env);
        classNode.oceEnvData.typeInit = objectCtorExpression.typeInit;

        // NOTE(review): unmute() is called both before and after the analysis below; the first
        // call looks like it may have been intended as mute() — confirm against DiagnosticLog usage.
        dlog.unmute();
        if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) {
            // Expected type is readonly: apply the readonly-object construction path.
            handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, false);
        } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
                Flags.READONLY)) {
            // The first included type reference is readonly: same path, but flagged as inherited.
            handleObjectConstrExprForReadOnly(objectCtorExpression, actualObjectType, typeDefEnv, true);
        } else {
            semanticAnalyzer.analyzeNode(classNode, typeDefEnv);
        }
        dlog.unmute();

        markConstructedObjectIsolatedness(actualObjectType);

        // Bind and check the implicit init invocation, if the class declares one.
        if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
            cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
            checkInvocationParam(cIExpr.initInvocation);
            cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
        } else {
            // No init method: any supplied arguments are an error (reported inside).
            if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) {
                return;
            }
        }
        if (cIExpr.initInvocation.getBType() == null) {
            cIExpr.initInvocation.setBType(symTable.nilType);
        }
        BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType());
        resultType = types.checkType(cIExpr, actualTypeInitType, expType);
    }
private boolean isDefiniteObjectType(BType bType, Set<BTypeIdSet> typeIdSets) {
BType type = Types.getReferredType(bType);
if (type.tag != TypeTags.OBJECT && type.tag != TypeTags.UNION) {
return false;
}
Set<BType> visitedTypes = new HashSet<>();
if (!collectObjectTypeIds(type, typeIdSets, visitedTypes)) {
return false;
}
return typeIdSets.size() <= 1;
}
private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) {
if (type.tag == TypeTags.OBJECT) {
var objectType = (BObjectType) type;
typeIdSets.add(objectType.typeIdSet);
return true;
}
if (type.tag == TypeTags.UNION) {
if (!visitedTypes.add(type)) {
return true;
}
for (BType member : ((BUnionType) type).getMemberTypes()) {
if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) {
return false;
}
}
return true;
}
return false;
}
private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) {
Set<BTypeIdSet> typeIdSets = new HashSet<>();
if (!isDefiniteObjectType(type, typeIdSets)) {
return false;
}
if (typeIdSets.isEmpty()) {
objectType.typeIdSet = BTypeIdSet.emptySet();
return true;
}
var typeIdIterator = typeIdSets.iterator();
if (typeIdIterator.hasNext()) {
BTypeIdSet typeIdSet = typeIdIterator.next();
objectType.typeIdSet = typeIdSet;
return true;
}
return true;
}
    /**
     * Type-checks a `new` expression. Dispatches on the actual constructed type: objects bind and
     * check the init method; streams validate the optional iterator argument and completion type;
     * unions pick the unique member whose init signature matches the arguments.
     */
    public void visit(BLangTypeInit cIExpr) {
        // `new` without a type name cannot be inferred from `any`, and records are never `new`-able.
        if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) {
            dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType);
            resultType = symTable.semanticError;
            return;
        }
        BType actualType;
        if (cIExpr.userDefinedType != null) {
            // `new T(...)`: resolve the named type.
            actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
        } else {
            // `new (...)`: infer the constructed type from the expected type.
            actualType = expType;
        }
        if (actualType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        actualType = Types.getReferredType(actualType);
        if (actualType.tag == TypeTags.INTERSECTION) {
            actualType = ((BIntersectionType) actualType).effectiveType;
        }
        switch (actualType.tag) {
            case TypeTags.OBJECT:
                BObjectType actualObjectType = (BObjectType) actualType;
                // Only classes can be instantiated; abstract object types cannot.
                if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                            actualType.tsymbol);
                    // Still check the arguments so nested errors get reported.
                    cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
                    resultType = symTable.semanticError;
                    return;
                }
                if (actualObjectType.classDef != null && actualObjectType.classDef.flagSet.contains(Flag.OBJECT_CTOR)) {
                    if (cIExpr.initInvocation != null && actualObjectType.classDef.oceEnvData.typeInit != null) {
                        actualObjectType.classDef.oceEnvData.typeInit = cIExpr;
                    }
                    markConstructedObjectIsolatedness(actualObjectType);
                }
                // Bind and check the init method, if declared.
                if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
                    cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
                    checkInvocationParam(cIExpr.initInvocation);
                    cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                } else {
                    // No init method: any supplied arguments are an error (reported inside).
                    if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) {
                        return;
                    }
                }
                break;
            case TypeTags.STREAM:
                // A stream constructor takes at most one (iterator) argument.
                if (cIExpr.initInvocation.argExprs.size() > 1) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation);
                    resultType = symTable.semanticError;
                    return;
                }
                BStreamType actualStreamType = (BStreamType) actualType;
                if (actualStreamType.completionType != null) {
                    // The completion type must be nil or contain error.
                    BType completionType = actualStreamType.completionType;
                    if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) {
                        dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString());
                        resultType = symTable.semanticError;
                        return;
                    }
                }
                if (!cIExpr.initInvocation.argExprs.isEmpty()) {
                    BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0);
                    BType constructType = checkExpr(iteratorExpr, env, symTable.noType);
                    BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType);
                    if (constructType.tag != TypeTags.OBJECT) {
                        dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                    BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType,
                            BLangCompilerConstants.CLOSE_FUNC);
                    if (closeFunc != null) {
                        // Iterator declares close(): it must satisfy the closeable-iterator contract.
                        BType closeableIteratorType = symTable.langQueryModuleSymbol.scope
                                .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type;
                        if (!types.isAssignable(constructType, closeableIteratorType)) {
                            dlog.error(iteratorExpr.pos,
                                    DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR,
                                    expectedNextReturnType, constructType);
                            resultType = symTable.semanticError;
                            return;
                        }
                    } else {
                        // Plain iterator: must satisfy the basic stream-iterator contract.
                        BType iteratorType = symTable.langQueryModuleSymbol.scope
                                .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type;
                        if (!types.isAssignable(constructType, iteratorType)) {
                            dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                    expectedNextReturnType, constructType);
                            resultType = symTable.semanticError;
                            return;
                        }
                    }
                    // The iterator's next() return type must match the stream's value/completion types.
                    BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType);
                    if (nextReturnType != null) {
                        types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType,
                                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                    } else {
                        dlog.error(constructType.tsymbol.getPosition(),
                                DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType);
                    }
                }
                if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType,
                            actualType);
                    resultType = symTable.semanticError;
                    return;
                }
                resultType = actualType;
                return;
            case TypeTags.UNION:
                // Pick the union member whose init function matches the supplied arguments.
                List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType);
                BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType);
                cIExpr.initInvocation.setBType(symTable.nilType);
                if (matchedType.tag == TypeTags.OBJECT) {
                    if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
                        cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
                        checkInvocationParam(cIExpr.initInvocation);
                        cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                        actualType = matchedType;
                        // Fall through to the common return-type computation below.
                        break;
                    } else {
                        if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) {
                            return;
                        }
                    }
                }
                types.checkType(cIExpr, matchedType, expType);
                cIExpr.setBType(matchedType);
                resultType = matchedType;
                return;
            default:
                dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
                resultType = symTable.semanticError;
                return;
        }

        if (cIExpr.initInvocation.getBType() == null) {
            cIExpr.initInvocation.setBType(symTable.nilType);
        }
        BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType());
        resultType = types.checkType(cIExpr, actualTypeInitType, expType);
    }
private BUnionType createNextReturnType(Location pos, BStreamType streamType) {
BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS);
recordType.restFieldType = symTable.noType;
recordType.sealed = true;
Name fieldName = Names.VALUE;
BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC,
fieldName, env.enclPkg.packageID,
streamType.constraint, env.scope.owner, pos, VIRTUAL));
field.type = streamType.constraint;
recordType.fields.put(field.name.value, field);
recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID,
recordType, env.scope.owner, pos, VIRTUAL);
recordType.tsymbol.scope = new Scope(env.scope.owner);
recordType.tsymbol.scope.define(fieldName, field.symbol);
LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
retTypeMembers.add(recordType);
retTypeMembers.addAll(types.getAllTypes(streamType.completionType, false));
BUnionType unionType = BUnionType.create(null);
unionType.addAll(retTypeMembers);
unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL);
return unionType;
}
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) {
if (!cIExpr.initInvocation.argExprs.isEmpty()
&& ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) {
dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL,
cIExpr.initInvocation.name.value);
cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
resultType = symTable.semanticError;
return false;
}
return true;
}
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
if (initRetType.tag == TypeTags.UNION) {
LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
retTypeMembers.add(objType);
retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes());
retTypeMembers.remove(symTable.nilType);
BUnionType unionType = BUnionType.create(null, retTypeMembers);
unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0,
Names.EMPTY, env.enclPkg.symbol.pkgID, unionType,
env.scope.owner, symTable.builtinPos, VIRTUAL);
return unionType;
} else if (initRetType.tag == TypeTags.NIL) {
return objType;
}
return symTable.semanticError;
}
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
int objectCount = 0;
for (BType type : lhsUnionType.getMemberTypes()) {
BType memberType = Types.getReferredType(type);
int tag = memberType.tag;
if (tag == TypeTags.OBJECT) {
objectCount++;
continue;
}
if (tag != TypeTags.INTERSECTION) {
continue;
}
if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) {
objectCount++;
}
}
boolean containsSingleObject = objectCount == 1;
List<BType> matchingLhsMemberTypes = new ArrayList<>();
for (BType type : lhsUnionType.getMemberTypes()) {
BType memberType = Types.getReferredType(type);
if (memberType.tag != TypeTags.OBJECT) {
continue;
}
if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
lhsUnionType.tsymbol);
}
if (containsSingleObject) {
return Collections.singletonList(memberType);
}
BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc;
if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) {
matchingLhsMemberTypes.add(memberType);
}
}
return matchingLhsMemberTypes;
}
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
if (matchingLhsMembers.isEmpty()) {
dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, lhsUnion);
resultType = symTable.semanticError;
return symTable.semanticError;
} else if (matchingLhsMembers.size() == 1) {
return matchingLhsMembers.get(0).tsymbol.type;
} else {
dlog.error(cIExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, lhsUnion);
resultType = symTable.semanticError;
return symTable.semanticError;
}
}
    /**
     * Returns true when the given invocation arguments (positional and named) could bind to
     * {@code function}'s signature: types assignable, every non-defaultable parameter covered,
     * extra positional args absorbed by a rest parameter, and every named arg matching a parameter.
     * A null function matches only an empty argument list.
     */
    private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
        // Type-check all arguments up front so their types are available below.
        invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType));

        if (function == null) {
            return invocationArguments.isEmpty();
        }

        if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
            return true;
        }

        // Split the arguments into named and positional.
        List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
        List<BLangExpression> positionalArgs = new ArrayList<>();
        for (BLangExpression argument : invocationArguments) {
            if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
                namedArgs.add((BLangNamedArgsExpression) argument);
            } else {
                positionalArgs.add(argument);
            }
        }

        // Parameters without defaults must all be supplied.
        List<BVarSymbol> requiredParams = function.symbol.params.stream()
                .filter(param -> !param.isDefaultable)
                .collect(Collectors.toList());
        // Given named and positional arguments are less than required parameters.
        if (requiredParams.size() > invocationArguments.size()) {
            return false;
        }

        List<BVarSymbol> defaultableParams = function.symbol.params.stream()
                .filter(param -> param.isDefaultable)
                .collect(Collectors.toList());

        // Bind positional arguments left-to-right; overflow goes to the rest parameter.
        int givenRequiredParamCount = 0;
        for (int i = 0; i < positionalArgs.size(); i++) {
            if (function.symbol.params.size() > i) {
                givenRequiredParamCount++;
                BVarSymbol functionParam = function.symbol.params.get(i);
                if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) {
                    return false;
                }
                // Mark this parameter as consumed.
                requiredParams.remove(functionParam);
                defaultableParams.remove(functionParam);
                continue;
            }

            if (function.symbol.restParam != null) {
                // Extra positional argument: must be assignable to the rest element type.
                BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
                if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) {
                    return false;
                }
                continue;
            }

            // More positional arguments than parameters and no rest parameter.
            return false;
        }

        // Bind named arguments against the parameters not consumed positionally.
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            boolean foundNamedArg = false;
            List<BVarSymbol> params = function.symbol.params;
            for (int i = givenRequiredParamCount; i < params.size(); i++) {
                BVarSymbol functionParam = params.get(i);
                if (!namedArg.name.value.equals(functionParam.name.value)) {
                    continue;
                }
                foundNamedArg = true;
                BType namedArgExprType = checkExpr(namedArg.expr, env);
                // NOTE(review): argument order here is (paramType, argType), the reverse of the
                // positional check above — verify this direction is intentional.
                if (!types.isAssignable(functionParam.type, namedArgExprType)) {
                    // Name matched, type mismatched.
                    return false;
                }
                requiredParams.remove(functionParam);
                defaultableParams.remove(functionParam);
            }
            if (!foundNamedArg) {
                return false;
            }
        }

        // All required parameters must have been consumed.
        return requiredParams.size() <= 0;
    }
public void visit(BLangWaitForAllExpr waitForAllExpr) {
setResultTypeForWaitForAllExpr(waitForAllExpr, expType);
waitForAllExpr.setBType(resultType);
if (resultType != null && resultType != symTable.semanticError) {
types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType);
}
}
private void setResultTypeForWaitForAllExpr(BLangWaitForAllExpr waitForAllExpr, BType expType) {
switch (expType.tag) {
case TypeTags.RECORD:
checkTypesForRecords(waitForAllExpr);
break;
case TypeTags.MAP:
checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint);
LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
if (memberTypesForMap.size() == 1) {
resultType = new BMapType(TypeTags.MAP,
memberTypesForMap.iterator().next(), symTable.mapType.tsymbol);
break;
}
BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap);
resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol);
break;
case TypeTags.NONE:
case TypeTags.ANY:
checkTypesForMap(waitForAllExpr, expType);
LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
if (memberTypes.size() == 1) {
resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol);
break;
}
BUnionType constraintType = BUnionType.create(null, memberTypes);
resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol);
break;
case TypeTags.TYPEREFDESC:
setResultTypeForWaitForAllExpr(waitForAllExpr, Types.getReferredType(expType));
break;
default:
dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos));
resultType = symTable.semanticError;
break;
}
}
private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr,
Location pos) {
BRecordType retType = new BRecordType(null, Flags.ANONYMOUS);
List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs;
for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
BLangIdentifier fieldName;
if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
fieldName = keyVal.key;
} else {
fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName;
}
BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName));
BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type;
BField field = new BField(names.fromIdNode(keyVal.key), null,
new BVarSymbol(0, names.fromIdNode(keyVal.key),
names.originalNameFromIdNode(keyVal.key), env.enclPkg.packageID,
fieldType, null, keyVal.pos, VIRTUAL));
retType.fields.put(field.name.value, field);
}
retType.restFieldType = symTable.noType;
retType.sealed = true;
retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null,
pos, VIRTUAL);
return retType;
}
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
BType bType = keyVal.keyExpr != null ? keyVal.keyExpr.getBType() : keyVal.valueExpr.getBType();
if (bType.tag == TypeTags.FUTURE) {
memberTypes.add(((BFutureType) bType).constraint);
} else {
memberTypes.add(bType);
}
}
return memberTypes;
}
private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) {
List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs = waitForAllExpr.keyValuePairs;
keyValuePairs.forEach(keyVal -> checkWaitKeyValExpr(keyVal, expType));
}
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
Map<String, BField> lhsFields = ((BRecordType) Types.getReferredType(expType)).fields;
if (((BRecordType) Types.getReferredType(expType)).sealed &&
rhsFields.size() > lhsFields.size()) {
dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
getWaitForAllExprReturnType(waitExpr, waitExpr.pos));
resultType = symTable.semanticError;
return;
}
for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
String key = keyVal.key.value;
if (!lhsFields.containsKey(key)) {
if (((BRecordType) Types.getReferredType(expType)).sealed) {
dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
resultType = symTable.semanticError;
} else {
BType restFieldType = ((BRecordType) Types.getReferredType(expType)).restFieldType;
checkWaitKeyValExpr(keyVal, restFieldType);
}
} else {
checkWaitKeyValExpr(keyVal, lhsFields.get(key).type);
keyVal.keySymbol = lhsFields.get(key).symbol;
}
}
checkMissingReqFieldsForWait(((BRecordType) Types.getReferredType(expType)),
rhsFields, waitExpr.pos);
if (symTable.semanticError != resultType) {
resultType = expType;
}
}
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
Location pos) {
type.fields.values().forEach(field -> {
boolean hasField = keyValPairs.stream().anyMatch(keyVal -> field.name.value.equals(keyVal.key.value));
if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
}
});
}
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) {
BLangExpression expr;
if (keyVal.keyExpr != null) {
BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode
(((BLangSimpleVarRef) keyVal.keyExpr).variableName));
keyVal.keyExpr.setBType(symbol.type);
expr = keyVal.keyExpr;
} else {
expr = keyVal.valueExpr;
}
BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null);
checkExpr(expr, env, futureType);
setEventualTypeForExpression(expr, type);
}
private void setEventualTypeForExpression(BLangExpression expression,
BType currentExpectedType) {
if (expression == null) {
return;
}
if (isSimpleWorkerReference(expression)) {
return;
}
BFutureType futureType = (BFutureType) expression.expectedType;
BType currentType = futureType.constraint;
if (types.containsErrorType(currentType)) {
return;
}
BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType);
if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) &&
!types.isAssignable(eventualType, currentExpectedType)) {
dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR,
currentExpectedType, eventualType, expression);
}
futureType.constraint = eventualType;
}
private void setEventualTypeForWaitExpression(BLangExpression expression,
Location pos) {
if ((resultType == symTable.semanticError) ||
(types.containsErrorType(resultType))) {
return;
}
if (isSimpleWorkerReference(expression)) {
return;
}
BType currentExpectedType = ((BFutureType) expType).constraint;
BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
resultType = eventualType;
return;
}
if (!types.isAssignable(eventualType, currentExpectedType)) {
dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
eventualType, expression);
resultType = symTable.semanticError;
return;
}
if (resultType.tag == TypeTags.FUTURE) {
((BFutureType) resultType).constraint = eventualType;
} else {
resultType = eventualType;
}
}
private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) {
if ((resultType == symTable.semanticError) ||
(expression.getKind() != NodeKind.BINARY_EXPR) ||
(types.containsErrorType(resultType))) {
return;
}
if (types.containsErrorType(resultType)) {
return;
}
if (!isReferencingNonWorker((BLangBinaryExpr) expression)) {
return;
}
BType currentExpectedType = ((BFutureType) expType).constraint;
BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
resultType = eventualType;
return;
}
if (!types.isAssignable(eventualType, currentExpectedType)) {
dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
eventualType, expression);
resultType = symTable.semanticError;
return;
}
if (resultType.tag == TypeTags.FUTURE) {
((BFutureType) resultType).constraint = eventualType;
} else {
resultType = eventualType;
}
}
private boolean isSimpleWorkerReference(BLangExpression expression) {
if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
return false;
}
BLangSimpleVarRef simpleVarRef = ((BLangSimpleVarRef) expression);
BSymbol varRefSymbol = simpleVarRef.symbol;
if (varRefSymbol == null) {
return false;
}
if (workerExists(env, simpleVarRef.variableName.value)) {
return true;
}
return false;
}
private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) {
BLangExpression lhsExpr = binaryExpr.lhsExpr;
BLangExpression rhsExpr = binaryExpr.rhsExpr;
if (isReferencingNonWorker(lhsExpr)) {
return true;
}
return isReferencingNonWorker(rhsExpr);
}
private boolean isReferencingNonWorker(BLangExpression expression) {
if (expression.getKind() == NodeKind.BINARY_EXPR) {
return isReferencingNonWorker((BLangBinaryExpr) expression);
} else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression;
BSymbol varRefSymbol = simpleVarRef.symbol;
String varRefSymbolName = varRefSymbol.getName().value;
if (workerExists(env, varRefSymbolName)) {
return false;
}
}
return true;
}
public void visit(BLangTernaryExpr ternaryExpr) {
BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);
SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env);
BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType);
SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env, false);
BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType);
if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
elseType == symTable.semanticError) {
resultType = symTable.semanticError;
} else if (expType == symTable.noType) {
if (types.isAssignable(elseType, thenType)) {
resultType = thenType;
} else if (types.isAssignable(thenType, elseType)) {
resultType = elseType;
} else {
resultType = BUnionType.create(null, thenType, elseType);
}
} else {
resultType = expType;
}
}
public void visit(BLangWaitExpr waitExpr) {
expType = new BFutureType(TypeTags.FUTURE, expType, null);
checkExpr(waitExpr.getExpression(), env, expType);
if (resultType.tag == TypeTags.UNION) {
LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
if (memberTypes.size() == 1) {
resultType = memberTypes.toArray(new BType[0])[0];
} else {
resultType = BUnionType.create(null, memberTypes);
}
} else if (resultType != symTable.semanticError) {
resultType = ((BFutureType) resultType).constraint;
}
BLangExpression waitFutureExpression = waitExpr.getExpression();
if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) {
setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos);
} else {
setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos);
}
waitExpr.setBType(resultType);
if (resultType != null && resultType != symTable.semanticError) {
types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint);
}
}
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
for (BType memberType : unionType.getMemberTypes()) {
if (memberType.tag == TypeTags.FUTURE) {
memberTypes.add(((BFutureType) memberType).constraint);
} else {
memberTypes.add(memberType);
}
}
return memberTypes;
}
    /**
     * Type-checks a `trap` expression: the result type is the trapped expression's type widened
     * with error. Worker-receive operands use a two-pass protocol — the first visit defers typing.
     */
    @Override
    public void visit(BLangTrapExpr trapExpr) {
        // A null inner type marks the first visit of a worker-receive operand.
        boolean firstVisit = trapExpr.expr.getBType() == null;
        BType actualType;
        BType exprType = checkExpr(trapExpr.expr, env, expType);
        boolean definedWithVar = expType == symTable.noType;

        if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
            if (firstVisit) {
                // Defer: typing completes on the second visit once the receive is resolved.
                isTypeChecked = false;
                resultType = expType;
                return;
            } else {
                // Second visit: reuse the types established by the first pass.
                expType = trapExpr.getBType();
                exprType = trapExpr.expr.getBType();
            }
        }

        if (expType == symTable.semanticError || exprType == symTable.semanticError) {
            actualType = symTable.semanticError;
        } else {
            // Result is the expression's type(s) plus error.
            LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
            if (exprType.tag == TypeTags.UNION) {
                resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
            } else {
                resultTypes.add(exprType);
            }
            resultTypes.add(symTable.errorType);
            actualType = BUnionType.create(null, resultTypes);
        }

        resultType = types.checkType(trapExpr, actualType, expType);
        if (definedWithVar && resultType != null && resultType != symTable.semanticError) {
            types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType);
        }
    }
public void visit(BLangBinaryExpr binaryExpr) {
    // Special case: `a | b` with an expected future type is a wait-for-any
    // alternative, not a bitwise OR.
    if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
        BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType);
        BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType);
        if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        // Result is the union of both alternatives' types.
        resultType = BUnionType.create(null, lhsResultType, rhsResultType);
        return;
    }
    checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr);
    SymbolEnv rhsExprEnv;
    BType lhsType;
    BType referredExpType = Types.getReferredType(binaryExpr.expectedType);
    // In float/decimal contexts, try the contextual type first so numeric
    // literals are inferred with the expected type.
    if (referredExpType.tag == TypeTags.FLOAT || referredExpType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(referredExpType)) {
        lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr);
    } else {
        lhsType = checkExpr(binaryExpr.lhsExpr, env);
    }
    // `&&` narrows RHS types assuming the LHS is true; `||` assuming false.
    if (binaryExpr.opKind == OperatorKind.AND) {
        rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
    } else if (binaryExpr.opKind == OperatorKind.OR) {
        rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
    } else {
        rhsExprEnv = env;
    }
    BType rhsType;
    if (referredExpType.tag == TypeTags.FLOAT || referredExpType.tag == TypeTags.DECIMAL ||
            isOptionalFloatOrDecimal(referredExpType)) {
        rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr);
    } else {
        rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv);
    }
    BType actualType = symTable.semanticError;
    switch (binaryExpr.opKind) {
        case ADD:
            BType leftConstituent = getXMLConstituents(lhsType);
            BType rightConstituent = getXMLConstituents(rhsType);
            // xml + xml concatenates; the result constraint is the union of
            // both operands' constituents.
            if (leftConstituent != null && rightConstituent != null) {
                actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null);
                break;
            }
            // Intentional fall-through: non-XML `+` is resolved like any
            // other binary operator below.
        default:
            if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
                // Try each operator family in turn until one resolves.
                BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryBitwiseOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                            binaryExpr, env);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getRangeOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }
                if (opSymbol == symTable.notFoundSymbol) {
                    dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind,
                            lhsType, rhsType);
                } else {
                    binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
                    actualType = opSymbol.type.getReturnType();
                }
            }
    }
    resultType = types.checkType(binaryExpr, actualType, expType);
}
/**
 * Returns {@code true} if the expected type is a nullable union that has at
 * least one {@code float} or {@code decimal} member.
 *
 * @param expectedType the (already referred) expected type to inspect
 * @return whether a float/decimal member exists in a nullable union
 */
private boolean isOptionalFloatOrDecimal(BType expectedType) {
    // The original condition also tested `expectedType.tag != TypeTags.ANY`,
    // which is always true once the tag is known to be UNION — dropped.
    if (expectedType.tag == TypeTags.UNION && expectedType.isNullable()) {
        for (BType memberType : ((BUnionType) expectedType).getMemberTypes()) {
            if (memberType.tag == TypeTags.FLOAT || memberType.tag == TypeTags.DECIMAL) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Exploratorily type checks a clone of {@code expr} against the binary
 * expression's expected type with diagnostics muted. If that succeeds, the
 * real expression is checked with the expected type; otherwise it is
 * checked with no expected type.
 */
private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) {
    // Save logging state, then mute so the exploratory pass is silent.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    // Check a clone so the real node is not polluted by a failed attempt.
    expr.cloneAttempt++;
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType);
    // Restore logging state before deciding how to check the real node.
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    if (errorCount == 0 && exprCompatibleType != symTable.semanticError) {
        return checkExpr(expr, env, binaryExpr.expectedType);
    } else {
        return checkExpr(expr, env);
    }
}
/**
 * Walks the enclosing-environment chain until the environment whose node is
 * the given input node, then returns a clone of the environment just outside
 * it — or a fresh detached environment if there is none.
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
    SymbolEnv current = env;
    while (current != null && current.node != node) {
        current = current.enclEnv;
    }
    if (current == null || current.enclEnv == null) {
        return new SymbolEnv(node, null);
    }
    return current.enclEnv.createClone();
}
/**
 * Clones the current environment chain and re-parents the clone found at the
 * given join node so that it encloses only the scope before the last
 * from/join input node. Falls back to a fresh detached environment when the
 * join node is not on the chain.
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
    SymbolEnv current = env.createClone();
    while (current != null && current.node != node) {
        current = current.enclEnv;
    }
    if (current == null) {
        return new SymbolEnv(node, null);
    }
    current.enclEnv = getEnvBeforeInputNode(current.enclEnv, getLastInputNodeFromEnv(current.enclEnv));
    return current;
}
/**
 * Returns the nearest enclosing FROM or JOIN clause node on the environment
 * chain, or {@code null} when no such input clause encloses this scope.
 */
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        NodeKind kind = current.node.getKind();
        if (kind == NodeKind.FROM || kind == NodeKind.JOIN) {
            return current.node;
        }
    }
    return null;
}
public void visit(BLangTransactionalExpr transactionalExpr) {
    // A `transactional` expression is always a boolean test.
    BType checkedType = types.checkType(transactionalExpr, symTable.booleanType, expType);
    resultType = checkedType;
}
public void visit(BLangCommitExpr commitExpr) {
    // `commit` evaluates to error? (i.e. error|()).
    BType commitResultType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(commitExpr, commitResultType, expType);
}
/**
 * Returns the XML constituent type of the given type: the constraint of an
 * xml sequence type, the type itself for XML non-sequence subtypes, or
 * {@code null} for non-XML types.
 */
private BType getXMLConstituents(BType bType) {
    BType referred = Types.getReferredType(bType);
    if (referred.tag == TypeTags.XML) {
        return ((BXMLType) referred).constraint;
    }
    return TypeTags.isXMLNonSequenceType(referred.tag) ? referred : null;
}
/**
 * When the contextual type is decimal and the operator is one of the four
 * arithmetic operators, pre-checks both operands against the decimal type so
 * numeric literals are inferred as decimal.
 */
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
    if (expType.tag != TypeTags.DECIMAL) {
        return;
    }
    OperatorKind op = binaryExpr.opKind;
    boolean isArithmeticOp = op == OperatorKind.ADD || op == OperatorKind.SUB
            || op == OperatorKind.MUL || op == OperatorKind.DIV;
    if (isArithmeticOp) {
        checkExpr(binaryExpr.lhsExpr, env, expType);
        checkExpr(binaryExpr.rhsExpr, env, expType);
    }
}
public void visit(BLangElvisExpr elvisExpr) {
    BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
    BType actualType = symTable.semanticError;
    if (lhsType != symTable.semanticError) {
        if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
            // The LHS of `?:` contributes its type with nil removed.
            BUnionType unionType = (BUnionType) lhsType;
            LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
                    .filter(type -> type.tag != TypeTags.NIL)
                    .collect(Collectors.toCollection(LinkedHashSet::new));
            if (memberTypes.size() == 1) {
                actualType = memberTypes.toArray(new BType[0])[0];
            } else {
                actualType = BUnionType.create(null, memberTypes);
            }
        } else {
            // Elvis only applies to nilable (optional) LHS expressions.
            dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS,
                    lhsType);
        }
    }
    BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
    BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // No contextual type: prefer whichever side subsumes the other,
        // otherwise take the union of both sides.
        if (types.isAssignable(rhsReturnType, lhsReturnType)) {
            resultType = lhsReturnType;
        } else if (types.isAssignable(lhsReturnType, rhsReturnType)) {
            resultType = rhsReturnType;
        } else {
            resultType = BUnionType.create(null, lhsReturnType, rhsReturnType);
        }
    } else {
        resultType = expType;
    }
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized expression takes the type of its inner expression.
    BType innerType = checkExpr(groupExpr.expression, env, expType);
    resultType = innerType;
}
public void visit(BLangTypedescExpr accessExpr) {
    // Resolve the referenced type lazily on first visit.
    if (accessExpr.resolvedType == null) {
        accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    }
    int resolvedTag = accessExpr.resolvedType.tag;
    // Already a typedesc (or unresolved) type is used as-is; anything else is
    // wrapped in typedesc<T>.
    BType actualType = (resolvedTag == TypeTags.TYPEDESC || resolvedTag == TypeTags.NONE)
            ? accessExpr.resolvedType
            : new BTypedescType(accessExpr.resolvedType, null);
    resultType = types.checkType(accessExpr, actualType, expType);
}
public void visit(BLangUnaryExpr unaryExpr) {
    BType exprType;
    BType actualType = symTable.semanticError;
    if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
        // `untaint` keeps the operand's type unchanged.
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = exprType;
        }
    } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) {
        // `typeof E` has type typedesc<T> where T is E's type.
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = new BTypedescType(exprType, null);
        }
    } else {
        // For unary +/- with an expected decimal, propagate the contextual
        // type so numeric literals are inferred as decimal.
        boolean decimalAddNegate = expType.tag == TypeTags.DECIMAL &&
                (OperatorKind.ADD.equals(unaryExpr.operator) || OperatorKind.SUB.equals(unaryExpr.operator));
        exprType = decimalAddNegate ? checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            // Resolve the operator, falling back to the type-set variants.
            BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.operator, exprType);
            if (symbol == symTable.notFoundSymbol) {
                symbol = symResolver.getUnaryOpsForTypeSets(unaryExpr.operator, exprType);
            }
            if (symbol == symTable.notFoundSymbol) {
                dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES,
                        unaryExpr.operator, exprType);
            } else {
                unaryExpr.opSymbol = (BOperatorSymbol) symbol;
                actualType = symbol.type.getReturnType();
            }
        }
    }
    resultType = types.checkType(unaryExpr, actualType, expType);
}
public void visit(BLangTypeConversionExpr conversionExpr) {
    BType actualType = symTable.semanticError;
    // Analyze any annotations attached to the cast.
    for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) {
        annAttachment.attachPoints.add(AttachPoint.Point.TYPE);
        semanticAnalyzer.analyzeNode(annAttachment, this.env);
    }
    BLangExpression expr = conversionExpr.expr;
    if (conversionExpr.typeNode == null) {
        // Annotation-only cast: just type check the inner expression.
        if (!conversionExpr.annAttachments.isEmpty()) {
            resultType = checkExpr(expr, env, this.expType);
        }
        return;
    }
    BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos,
            symResolver.resolveTypeNode(conversionExpr.typeNode, env));
    conversionExpr.targetType = targetType;
    // Exploratory pass: check a clone against the target with muted logs.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType);
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    // If the clone checked cleanly (or the expression requires inference),
    // check the real node against the target; otherwise check it standalone.
    if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) {
        checkExpr(expr, env, targetType);
    } else {
        checkExpr(expr, env, symTable.noType);
    }
    BType exprType = expr.getBType();
    if (types.isTypeCastable(expr, exprType, targetType, this.env)) {
        // Valid cast: the conversion expression takes the target type.
        actualType = targetType;
    } else if (exprType != symTable.semanticError && exprType != symTable.noType) {
        dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType);
    }
    resultType = types.checkType(conversionExpr, actualType, this.expType);
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // The lambda's type is the type of its underlying function.
    bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType());
    // Capture the current scope so closures can be desugared later.
    bLangLambdaFunction.capturedClosureEnv = env.createClone();
    symResolver.checkRedeclaredSymbols(bLangLambdaFunction);
    // Register the lambda for code generation only on real (non-exploratory)
    // type-checking passes. (Removed an unused local `BLangFunction function`.)
    if (!this.nonErrorLoggingCheck) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType);
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BType expectedType = Types.getReferredType(expType);
    if (expectedType.tag == TypeTags.UNION) {
        // If the expected union has exactly one function-typed member, infer
        // against that member; otherwise keep the union (and fail below).
        BUnionType unionType = (BUnionType) expectedType;
        BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE)
                .collect(Collectors.collectingAndThen(Collectors.toList(), list -> {
                            if (list.size() != 1) {
                                return null;
                            }
                            return list.get(0);
                        }
                ));
        if (invokableType != null) {
            expectedType = invokableType;
        }
    }
    // Arrow functions can only be typed from a specific function-typed LHS.
    if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
        resultType = symTable.semanticError;
        return;
    }
    BInvokableType expectedInvocation = (BInvokableType) expectedType;
    populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
    bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType));
    // If the expected return type was unspecified, adopt the body's type.
    if (expectedInvocation.retType.tag == TypeTags.NONE) {
        expectedInvocation.retType = bLangArrowFunction.body.expr.getBType();
    }
    // Arrow params share the enclosing scope; reject shadowing collisions.
    for (BLangSimpleVariable simpleVariable : bLangArrowFunction.params) {
        if (simpleVariable.symbol != null) {
            symResolver.checkForUniqueSymbol(simpleVariable.pos, env, simpleVariable.symbol);
        }
    }
    resultType = bLangArrowFunction.funcType = expectedInvocation;
}
public void visit(BLangXMLQName bLangXMLQName) {
    String prefix = bLangXMLQName.prefix.value;
    // A qualified name is always a string-typed value.
    resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
    // `xmlns="..."` (default namespace declaration) inside an attribute.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
            && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // `xmlns:p="..."` (prefixed namespace declaration) inside an attribute.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // `xmlns` used as a prefix outside a declaration is invalid.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // Unprefixed names need no namespace resolution.
    if (bLangXMLQName.prefix.value.isEmpty()) {
        return;
    }
    BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix));
    if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        return;
    }
    if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        logUndefinedSymbolError(bLangXMLQName.pos, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // A prefix may resolve to a package; then the local name must be a
    // string constant in that package holding the namespace.
    if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) {
        xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value,
                (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos);
    }
    if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) {
        resultType = symTable.semanticError;
        return;
    }
    bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
    bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI;
}
/**
 * Resolves an XML namespace from a string constant in another package; the
 * constant's value is expected to be in `{uri}local` form. Returns null
 * (after logging a diagnostic) when the constant is missing, not a string,
 * or not in the expected form.
 */
private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix,
                                                 BPackageSymbol pkgSymbol, Location pos) {
    BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env,
            names.fromString(localname), SymTag.CONSTANT);
    if (constSymbol == symTable.notFoundSymbol) {
        if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) {
            dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname);
        }
        return null;
    }
    BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol;
    if (constantSymbol.literalType.tag != TypeTags.STRING) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType);
        return null;
    }
    String constVal = (String) constantSymbol.value.value;
    int s = constVal.indexOf('{');
    int e = constVal.lastIndexOf('}');
    // Requires a non-empty `{uri}` segment before the local part.
    if (e > s + 1) {
        pkgSymbol.isUsed = true;
        String nsURI = constVal.substring(s + 1, e);
        // NOTE(review): substring(e) keeps the closing '}' in the local name;
        // presumably substring(e + 1) was intended — confirm against tests.
        String local = constVal.substring(e);
        return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos,
                SOURCE);
    }
    dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname);
    return null;
}
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv attrEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
    // Both the attribute name and its value are checked as strings.
    BLangXMLQName attrName = (BLangXMLQName) bLangXMLAttribute.name;
    checkExpr(attrName, attrEnv, symTable.stringType);
    if (attrName.prefix.value.isEmpty()) {
        // Unprefixed attribute names carry no namespace.
        attrName.namespaceURI = null;
    }
    checkExpr(bLangXMLAttribute.value, attrEnv, symTable.stringType);
    symbolEnter.defineNode(bLangXMLAttribute, env);
}
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
    // Track which namespace prefixes the element actually references.
    Set<String> usedPrefixes = new HashSet<>();
    BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix;
    if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) {
        usedPrefixes.add(elemNamePrefix.value);
    }
    // Check namespace-declaration attributes first so later attributes can
    // resolve against the namespaces they introduce.
    for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) {
        if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) {
            BLangXMLQuotedString value = attribute.value;
            // Namespace URIs must be static; interpolation is not allowed.
            if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) {
                dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION);
            }
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
        BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix;
        if (prefix != null && !prefix.value.isEmpty()) {
            usedPrefixes.add(prefix.value);
        }
    }
    // Now check the remaining (non-namespace) attributes.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
    Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
    if (namespaces.containsKey(defaultNs)) {
        bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
    }
    // Only keep namespaces whose prefixes the element actually uses.
    for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) {
        if (usedPrefixes.contains(nsEntry.getKey().value)) {
            bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue());
        }
    }
    validateTags(bLangXMLElementLiteral, xmlElementEnv);
    // Merge adjacent children of the same XML kind (e.g. text runs).
    bLangXMLElementLiteral.modifiedChildren =
            concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType,
            this.expType);
    // A readonly contextual type makes the whole literal (and children) immutable.
    if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) {
        markChildrenAsImmutable(bLangXMLElementLiteral);
    }
}
/**
 * Returns {@code true} if the attribute declares an XML namespace — either
 * the default declaration {@code xmlns="..."} or a prefixed declaration
 * {@code xmlns:p="..."}.
 */
private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) {
    BLangXMLQName qName = (BLangXMLQName) attribute.name;
    String attrPrefix = qName.prefix.value;
    if (attrPrefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        return true;
    }
    return attrPrefix.isEmpty() && qName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
}
/**
 * Maps an XML child expression's node kind to the corresponding XML subtype:
 * element, text, or processing instruction; anything else is a comment.
 */
public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) {
    switch (childXMLExpressions.getKind()) {
        case XML_ELEMENT_LITERAL:
            return symTable.xmlElementType;
        case XML_TEXT_LITERAL:
            return symTable.xmlTextType;
        case XML_PI_LITERAL:
            return symTable.xmlPIType;
        default:
            return symTable.xmlCommentType;
    }
}
/**
 * Silences diagnostics for an exploratory type-checking pass.
 */
public void muteErrorLog() {
    this.dlog.mute();
    this.nonErrorLoggingCheck = true;
}
/**
 * Restores the diagnostic-logging state saved before {@link #muteErrorLog()}.
 * The log stays muted if an outer exploratory check is still in progress.
 */
public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) {
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(errorCount);
    if (prevNonErrorLoggingCheck) {
        return;
    }
    this.dlog.unmute();
}
/**
 * Wraps a single XML subtype into its sequence type: element/comment/PI
 * subtypes become {@code xml<T>}; text stays the plain text type.
 */
public BType getXMLSequenceType(BType xmlSubType) {
    if (xmlSubType.tag == TypeTags.XML_ELEMENT) {
        return new BXMLType(symTable.xmlElementType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_COMMENT) {
        return new BXMLType(symTable.xmlCommentType, null);
    }
    if (xmlSubType.tag == TypeTags.XML_PI) {
        return new BXMLType(symTable.xmlPIType, null);
    }
    // A sequence of text is itself xml:Text.
    return symTable.xmlTextType;
}
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
    BType expType = Types.getReferredType(this.expType);
    // Sequences are only compatible with xml, xml:Text, unions thereof, or
    // an unconstrained context.
    if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT
            && expType != symTable.noType) {
        dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType,
                "XML Sequence");
        resultType = symTable.semanticError;
        return;
    }
    // Collect the distinct types of the sequence items.
    List<BType> xmlTypesInSequence = new ArrayList<>();
    for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) {
        resultType = checkExpr(expressionItem, env, this.expType);
        if (!xmlTypesInSequence.contains(resultType)) {
            xmlTypesInSequence.add(resultType);
        }
    }
    if (expType.tag == TypeTags.XML || expType == symTable.noType) {
        // Homogeneous sequences keep the specific subtype; mixed ones widen
        // to xml.
        if (xmlTypesInSequence.size() == 1) {
            resultType = getXMLSequenceType(xmlTypesInSequence.get(0));
            return;
        }
        resultType = symTable.xmlType;
        return;
    }
    if (expType.tag == TypeTags.XML_TEXT) {
        resultType = symTable.xmlTextType;
        return;
    }
    // For a union context, every member must be xml or xml:Text.
    for (BType item : ((BUnionType) expType).getMemberTypes()) {
        item = Types.getReferredType(item);
        if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) {
            dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    expType, symTable.xmlType);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = symTable.xmlType;
}
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
    List<BLangExpression> fragments = bLangXMLTextLiteral.textFragments;
    checkStringTemplateExprs(fragments);
    // A literal that is exactly one empty string is the empty XML sequence
    // (xml:never); everything else is xml:Text.
    BLangExpression firstFragment = fragments.get(0);
    boolean isEmptyLiteral = fragments.size() == 1
            && firstFragment.getKind() == NodeKind.LITERAL
            && ((String) ((BLangLiteral) firstFragment).value).isEmpty();
    BType textType = isEmptyLiteral ? symTable.xmlNeverType : symTable.xmlTextType;
    resultType = types.checkType(bLangXMLTextLiteral, textType, expType);
}
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
    checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments);
    // With a contextual type, reconcile the comment subtype against it;
    // otherwise the literal is plainly xml:Comment.
    if (expType != symTable.noType) {
        resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType,
                this.expType);
        return;
    }
    resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType);
}
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    // The PI target is a string; the data fragments form a string template.
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments);
    // With a contextual type, reconcile the PI subtype against it; otherwise
    // the literal is plainly xml:ProcessingInstruction.
    if (expType != symTable.noType) {
        resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType,
                this.expType);
        return;
    }
    resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType);
}
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    // Quoted attribute values are string templates, hence always strings.
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments);
    BType quotedStringType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
    resultType = quotedStringType;
}
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // `string `...`` templates always produce a string.
    checkStringTemplateExprs(stringTemplateLiteral.exprs);
    BType templateType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
    resultType = templateType;
}
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    // Determine the raw-template object type from the contextual type.
    BType literalType = determineRawTemplateLiteralType(rawTemplateLiteral, expType);
    if (literalType == symTable.semanticError) {
        resultType = literalType;
        return;
    }
    BObjectType templateObjType = (BObjectType) Types.getReferredType(literalType);
    // Validate both fields even if the first one errors, so all diagnostics
    // are reported.
    BType stringsFieldType = templateObjType.fields.get("strings").type;
    boolean stringsErrored = evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsFieldType,
            INVALID_NUM_STRINGS, rawTemplateLiteral.pos);
    BType insertionsFieldType = templateObjType.fields.get("insertions").type;
    boolean insertionsErrored = evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsFieldType,
            INVALID_NUM_INSERTIONS, rawTemplateLiteral.pos);
    resultType = (stringsErrored || insertionsErrored) ? symTable.semanticError : literalType;
}
/**
 * Determines the object type a raw template literal should take: the default
 * RawTemplate type when the context is unconstrained, otherwise a single
 * compatible abstract object subtype with exactly `strings`/`insertions`
 * fields and no methods.
 */
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
    if (expType == symTable.noType || containsAnyType(expType)) {
        return symTable.rawTemplateType;
    }
    BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
    BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
            DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE);
    if (type == symTable.semanticError) {
        return type;
    }
    // Concrete classes cannot be the target; only abstract object types.
    if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
        return symTable.semanticError;
    }
    BObjectType litObjType = (BObjectType) Types.getReferredType(type);
    BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;
    // The target may declare only the two template fields and no methods.
    if (litObjType.fields.size() > 2) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType);
        type = symTable.semanticError;
    }
    if (!objTSymbol.attachedFuncs.isEmpty()) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType);
        type = symTable.semanticError;
    }
    return type;
}
/**
 * Checks a raw template's strings or insertions against the corresponding
 * list-typed field (array or tuple), reporting a count mismatch with the
 * given diagnostic code. Returns {@code true} if any member failed checking.
 */
private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType,
                                         DiagnosticCode code, Location pos) {
    BType listType = Types.getReferredType(fieldType);
    // Intersections (e.g. readonly lists) are unwrapped to their effective type.
    listType = listType.tag != TypeTags.INTERSECTION ? listType :
            ((BIntersectionType) listType).effectiveType;
    boolean errored = false;
    if (listType.tag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) listType;
        // A closed array fixes the expected number of fragments.
        if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) {
            dlog.error(pos, code, arrayType.size, exprs.size());
            return false;
        }
        for (BLangExpression expr : exprs) {
            errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored;
        }
    } else if (listType.tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) listType;
        final int size = exprs.size();
        final int requiredItems = tupleType.tupleTypes.size();
        // Too few items, or too many without a rest type, is a count error.
        if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
            dlog.error(pos, code, requiredItems, size);
            return false;
        }
        int i;
        List<BType> memberTypes = tupleType.tupleTypes;
        for (i = 0; i < requiredItems; i++) {
            errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored;
        }
        // Surplus items are checked against the rest type.
        if (size > requiredItems) {
            for (; i < size; i++) {
                errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored;
            }
        }
    } else {
        throw new IllegalStateException("Expected a list type, but found: " + listType);
    }
    return errored;
}
/**
 * Returns {@code true} when the referred type is the `any` type itself or a
 * union that contains `any` as a member.
 */
private boolean containsAnyType(BType bType) {
    BType referred = Types.getReferredType(bType);
    if (referred.tag == TypeTags.UNION) {
        return ((BUnionType) referred).getMemberTypes().contains(symTable.anyType);
    }
    return referred == symTable.anyType;
}
/**
 * From a union contextual type, picks the single member assignable to the
 * RawTemplate type. Non-union types are returned as-is; zero matches return
 * the referred union; more than one match is an ambiguity error.
 */
private BType getCompatibleRawTemplateType(BType bType, Location pos) {
    BType referred = Types.getReferredType(bType);
    if (referred.tag != TypeTags.UNION) {
        return bType;
    }
    List<BType> rawTemplateMembers = new ArrayList<>();
    for (BType member : ((BUnionType) referred).getMemberTypes()) {
        if (types.isAssignable(member, symTable.rawTemplateType)) {
            rawTemplateMembers.add(member);
        }
    }
    if (rawTemplateMembers.isEmpty()) {
        return referred;
    }
    if (rawTemplateMembers.size() == 1) {
        return rawTemplateMembers.get(0);
    }
    dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
            referred);
    return symTable.semanticError;
}
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
    // A rest arg (`...expr`) takes the type of its wrapped expression.
    BType spreadType = checkExpr(bLangRestArgExpression.expr, env, expType);
    resultType = spreadType;
}
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
    // An inferred typedesc default is only valid in a typedesc context.
    BType referredType = Types.getReferredType(expType);
    if (referredType.tag == TypeTags.TYPEDESC) {
        resultType = referredType;
        return;
    }
    dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc);
    resultType = symTable.semanticError;
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named arg has the type of its value expression.
    BType argType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
    bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType());
    resultType = argType;
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Pattern variables live in a synthetic block scope.
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv);
    bLangMatchExpression.patternClauses.forEach(pattern -> {
        // `_`-suffixed names are ignore patterns and are not defined.
        if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(pattern.variable, matchExprEnv);
        }
        checkExpr(pattern.expr, matchExprEnv, expType);
        pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv));
    });
    // The match expression's type is the (union of the) clause result types.
    LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
    BType actualType;
    if (matchExprTypes.contains(symTable.semanticError)) {
        actualType = symTable.semanticError;
    } else if (matchExprTypes.size() == 1) {
        actualType = matchExprTypes.toArray(new BType[0])[0];
    } else {
        actualType = BUnionType.create(null, matchExprTypes);
    }
    resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // Record whether this `check` occurs inside a query before the shared
    // check/checkpanic handling runs, since that affects error propagation.
    checkWithinQueryExpr = isWithinQuery();
    visitCheckAndCheckPanicExpr(checkedExpr);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // `checkpanic` shares its typing rules with `check`.
    visitCheckAndCheckPanicExpr(checkedExpr);
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Seed the previous-env stack when this is the outermost query.
    boolean cleanPrevEnvs = false;
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
        cleanPrevEnvs = true;
    }
    if (breakToParallelQueryEnv) {
        queryEnvs.push(prevEnvs.peek());
    } else {
        queryEnvs.push(env);
    }
    queryFinalClauses.push(queryExpr.getSelectClause());
    List<BLangNode> clauses = queryExpr.getQueryClauses();
    BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection();
    // Visit each clause in order; clauses extend the query environment.
    clauses.forEach(clause -> clause.accept(this));
    BType actualType = resolveQueryType(queryEnvs.peek(), ((BLangSelectClause) queryFinalClauses.peek()).expression,
            collectionNode.getBType(), expType, queryExpr);
    actualType = (actualType == symTable.semanticError) ? actualType :
            types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    // Unwind the stacks pushed above.
    queryFinalClauses.pop();
    queryEnvs.pop();
    if (cleanPrevEnvs) {
        prevEnvs.pop();
    }
    if (actualType.tag == TypeTags.TABLE) {
        // Table results carry position info and must pass key validation.
        BTableType tableType = (BTableType) actualType;
        tableType.constraintPos = queryExpr.pos;
        tableType.isTypeInlineDefined = true;
        if (!validateTableType(tableType)) {
            resultType = symTable.semanticError;
            return;
        }
    }
    checkWithinQueryExpr = false;
    resultType = actualType;
}
/**
 * Returns {@code true} when a query expression is currently being checked,
 * i.e. both query-tracking stacks are non-empty.
 */
private boolean isWithinQuery() {
    return !(queryEnvs.isEmpty() || queryFinalClauses.isEmpty());
}
/**
 * Resolves the overall type of a query expression by trying the select
 * expression against each candidate result type derived from the contextual
 * type (array, table, stream, string, xml, or inferred).
 */
private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
                               BType targetType, BLangQueryExpr queryExpr) {
    // Candidate result types: the contextual type's members minus error/nil.
    List<BType> resultTypes = types.getAllTypes(targetType, true).stream()
            .filter(t -> !types.isAssignable(t, symTable.errorType))
            .filter(t -> !types.isAssignable(t, symTable.nilType))
            .collect(Collectors.toList());
    if (resultTypes.isEmpty()) {
        resultTypes.add(symTable.noType);
    }
    BType actualType = symTable.semanticError;
    List<BType> selectTypes = new ArrayList<>();
    List<BType> resolvedTypes = new ArrayList<>();
    BType selectType, resolvedType;
    for (BType type : resultTypes) {
        switch (type.tag) {
            case TypeTags.ARRAY:
                // select expr checked against the element type.
                selectType = checkExpr(selectExp, env, ((BArrayType) type).eType);
                resolvedType = new BArrayType(selectType);
                break;
            case TypeTags.TABLE:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint,
                        true, true));
                resolvedType = symTable.tableType;
                break;
            case TypeTags.STREAM:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint,
                        true, true));
                resolvedType = symTable.streamType;
                break;
            case TypeTags.STRING:
            case TypeTags.XML:
                selectType = checkExpr(selectExp, env, type);
                resolvedType = selectType;
                break;
            case TypeTags.NONE:
            default:
                // No contextual hint: infer from the collection's kind.
                selectType = checkExpr(selectExp, env, type);
                resolvedType = getNonContextualQueryType(selectType, collectionType);
                break;
        }
        if (selectType != symTable.semanticError) {
            if (resolvedType.tag == TypeTags.STREAM) {
                queryExpr.isStream = true;
            }
            if (resolvedType.tag == TypeTags.TABLE) {
                queryExpr.isTable = true;
            }
            selectTypes.add(selectType);
            resolvedTypes.add(resolvedType);
        }
    }
    if (selectTypes.size() == 1) {
        // Exactly one candidate worked: build the final stream/table/other
        // type, folding in the collection's error type when present.
        BType errorType = getErrorType(collectionType, queryExpr);
        selectType = selectTypes.get(0);
        if (queryExpr.isStream) {
            return new BStreamType(TypeTags.STREAM, selectType, errorType, null);
        } else if (queryExpr.isTable) {
            actualType = getQueryTableType(queryExpr, selectType);
        } else {
            actualType = resolvedTypes.get(0);
        }
        if (errorType != null && errorType.tag != TypeTags.NIL) {
            return BUnionType.create(null, actualType, errorType);
        } else {
            return actualType;
        }
    } else if (selectTypes.size() > 1) {
        dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes);
        return actualType;
    } else {
        return actualType;
    }
}
    /**
     * Derives the table type produced by a query expression constructing a table.
     * <p>
     * When the query has a key specifier (a non-empty {@code fieldNameIdentifierList}), the key
     * fields are validated against the row (constraint) type, the constraint is marked read-only
     * where applicable, and the resulting type is {@code table|error} — a keyed table constructor
     * can fail at runtime on duplicate keys. Without a key specifier the plain table type is
     * returned.
     *
     * @param queryExpr      the query expression being typed
     * @param constraintType the row type of the table (the select-clause type)
     * @return the table type, or a union of the table type and {@code error} when keyed
     */
    private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) {
        final BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
        if (!queryExpr.fieldNameIdentifierList.isEmpty()) {
            validateKeySpecifier(queryExpr.fieldNameIdentifierList, constraintType);
            markReadOnlyForConstraintType(constraintType);
            tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream()
                    .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList());
            // Keyed table construction may raise a key-conflict error at runtime.
            return BUnionType.create(null, tableType, symTable.errorType);
        }
        return tableType;
    }
    /**
     * Validates the field names of a table key specifier against the row (constraint) type.
     * Each named key field must exist in the constraint type; fields that exist but are not
     * yet read-only get the {@code READONLY} flag added, since table key fields must be
     * immutable.
     *
     * @param fieldList      key-specifier field names
     * @param constraintType the table's row type to resolve the fields against
     */
    private void validateKeySpecifier(List<IdentifierNode> fieldList, BType constraintType) {
        for (IdentifierNode identifier : fieldList) {
            BField field = types.getTableConstraintField(constraintType, identifier.getValue());
            if (field == null) {
                dlog.error(identifier.getPosition(), DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER,
                        identifier.getValue(), constraintType);
            } else if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
                // Key fields must be immutable; force the flag on.
                field.symbol.flags |= Flags.READONLY;
            }
        }
    }
    /**
     * Marks a record constraint type as read-only when every one of its fields is already
     * read-only and the record is sealed (closed). Non-record constraints are ignored.
     * <p>
     * Note the early return inside the loop: a single mutable field disqualifies the whole
     * record, so nothing is changed in that case.
     *
     * @param constraintType the table row type to potentially mark read-only
     */
    private void markReadOnlyForConstraintType(BType constraintType) {
        if (constraintType.tag != TypeTags.RECORD) {
            return;
        }
        BRecordType recordType = (BRecordType) constraintType;
        for (BField field : recordType.fields.values()) {
            if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
                // At least one mutable field: the record as a whole cannot be read-only.
                return;
            }
        }
        if (recordType.sealed) {
            recordType.flags |= Flags.READONLY;
            recordType.tsymbol.flags |= Flags.READONLY;
        }
    }
    /**
     * Computes the error (completion) type contributed by the iterated collection of a query
     * expression.
     * <ul>
     *   <li>streams: the stream's declared completion type;</li>
     *   <li>objects: derived from the iterable object's {@code next()} return type;</li>
     *   <li>anything else: derived from the lang-lib {@code iterator()} method's return type,
     *       or {@code null} when no such method resolves.</li>
     * </ul>
     * Error members are collected from the relevant return type; when this query is itself
     * nested inside another query and produces a stream, {@code nil} and {@code error} are
     * added so the completion type covers both normal and failing completion.
     *
     * @param collectionType type of the iterated collection
     * @param queryExpr      the enclosing query expression
     * @return the combined error type, or {@code null} when none applies
     */
    private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) {
        if (collectionType.tag == TypeTags.SEMANTIC_ERROR) {
            return null;
        }
        BType returnType = null, errorType = null;
        switch (collectionType.tag) {
            case TypeTags.STREAM:
                errorType = ((BStreamType) collectionType).completionType;
                break;
            case TypeTags.OBJECT:
                returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType);
                break;
            default:
                BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType,
                        names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC), env);
                if (itrSymbol == this.symTable.notFoundSymbol) {
                    return null;
                }
                BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol;
                returnType = types.getResultTypeOfNextInvocation(
                        (BObjectType) Types.getReferredType(invokableSymbol.retType));
        }
        List<BType> errorTypes = new ArrayList<>();
        if (returnType != null) {
            // Pick out the error members of the iterator's result type.
            types.getAllTypes(returnType, true).stream()
                    .filter(t -> types.isAssignable(t, symTable.errorType))
                    .forEach(errorTypes::add);
        }
        if (checkWithinQueryExpr && queryExpr.isStream) {
            // Nested stream-producing query: completion may be nil (normal) or error.
            if (errorTypes.isEmpty()) {
                errorTypes.add(symTable.nilType);
            }
            errorTypes.add(symTable.errorType);
        }
        if (!errorTypes.isEmpty()) {
            if (errorTypes.size() == 1) {
                errorType = errorTypes.get(0);
            } else {
                errorType = BUnionType.create(null, errorTypes.toArray(new BType[0]));
            }
        }
        return errorType;
    }
private BType getNonContextualQueryType(BType staticType, BType basicType) {
BType resultType;
switch (basicType.tag) {
case TypeTags.TABLE:
resultType = symTable.tableType;
break;
case TypeTags.STREAM:
resultType = symTable.streamType;
break;
case TypeTags.XML:
resultType = new BXMLType(staticType, null);
break;
case TypeTags.STRING:
resultType = symTable.stringType;
break;
default:
resultType = new BArrayType(staticType);
break;
}
return resultType;
}
    /**
     * Type-checks a query action ({@code from ... do { ... }}).
     * <p>
     * Sets up the environment stacks ({@code prevEnvs}, {@code queryEnvs},
     * {@code queryFinalClauses}), visits each clause in order, analyzes the do-clause body in a
     * block env, and types the whole action as {@code error?} checked against the expected
     * type. The push/pop pairs must stay balanced — the clause visitors pop and re-push the
     * top of {@code queryEnvs} as they narrow it.
     */
    @Override
    public void visit(BLangQueryAction queryAction) {
        if (prevEnvs.empty()) {
            prevEnvs.push(env);
        } else {
            // Nested query: keep chaining from the enclosing query's env.
            prevEnvs.push(prevEnvs.peek());
        }
        queryEnvs.push(prevEnvs.peek());
        BLangDoClause doClause = queryAction.getDoClause();
        queryFinalClauses.push(doClause);
        List<BLangNode> clauses = queryAction.getQueryClauses();
        clauses.forEach(clause -> clause.accept(this));
        semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek()));
        // A query action completes with either an error or nil.
        BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
        resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
        queryFinalClauses.pop();
        queryEnvs.pop();
        prevEnvs.pop();
    }
    /**
     * Type-checks a {@code from} clause: narrows the current query env, checks the collection
     * expression, derives the typed-binding-pattern type from the collection's member type, and
     * defines the bound variables in the env. {@code breakToParallelQueryEnv} is temporarily
     * set while checking the collection — restored afterwards.
     */
    @Override
    public void visit(BLangFromClause fromClause) {
        boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv;
        this.breakToParallelQueryEnv = true;
        // Replace the top query env with one narrowed for this clause.
        SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop());
        fromClause.env = fromEnv;
        queryEnvs.push(fromEnv);
        checkExpr(fromClause.collection, queryEnvs.peek());
        types.setInputClauseTypedBindingPatternType(fromClause);
        handleInputClauseVariables(fromClause, queryEnvs.peek());
        this.breakToParallelQueryEnv = prevBreakToParallelEnv;
    }
    /**
     * Type-checks a {@code join} clause. Mirrors the {@code from}-clause handling (narrowed
     * env, collection check, binding-pattern variables) and additionally visits the attached
     * {@code on} clause when present.
     */
    @Override
    public void visit(BLangJoinClause joinClause) {
        boolean prevBreakEnv = this.breakToParallelQueryEnv;
        this.breakToParallelQueryEnv = true;
        SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop());
        joinClause.env = joinEnv;
        queryEnvs.push(joinEnv);
        checkExpr(joinClause.collection, queryEnvs.peek());
        types.setInputClauseTypedBindingPatternType(joinClause);
        handleInputClauseVariables(joinClause, queryEnvs.peek());
        if (joinClause.onClause != null) {
            ((BLangOnClause) joinClause.onClause).accept(this);
        }
        this.breakToParallelQueryEnv = prevBreakEnv;
    }
    /**
     * Type-checks a {@code let} clause: narrows the current query env and defines each let
     * variable in it so subsequent clauses can reference them.
     */
    @Override
    public void visit(BLangLetClause letClause) {
        SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop());
        letClause.env = letEnv;
        queryEnvs.push(letEnv);
        for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
            semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv);
        }
    }
    /**
     * Type-checks a {@code where} clause: the filter expression must be boolean, and the
     * query env is replaced with one narrowed under the assumption the filter holds.
     */
    @Override
    public void visit(BLangWhereClause whereClause) {
        whereClause.env = handleFilterClauses(whereClause.expression);
    }
    /**
     * Sets up the environment for a {@code select} clause by narrowing the current query env.
     * The select expression itself is checked later by the query-expression handler.
     */
    @Override
    public void visit(BLangSelectClause selectClause) {
        SymbolEnv selectEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop());
        selectClause.env = selectEnv;
        queryEnvs.push(selectEnv);
    }
@Override
public void visit(BLangDoClause doClause) {
SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop());
doClause.env = letEnv;
queryEnvs.push(letEnv);
}
    /**
     * Type-checks an {@code on conflict} clause: the conflict expression must be assignable
     * to {@code error}; otherwise an ERROR_TYPE_EXPECTED diagnostic is logged.
     */
    @Override
    public void visit(BLangOnConflictClause onConflictClause) {
        BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType);
        if (!types.isAssignable(exprType, symTable.errorType)) {
            dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED,
                    symTable.errorType, exprType);
        }
    }
    /**
     * Type-checks a {@code limit} clause: the limit expression must be assignable to
     * {@code int}; otherwise an INCOMPATIBLE_TYPES diagnostic is logged.
     */
    @Override
    public void visit(BLangLimitClause limitClause) {
        BType exprType = checkExpr(limitClause.expression, queryEnvs.peek());
        if (!types.isAssignable(exprType, symTable.intType)) {
            dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    symTable.intType, exprType);
        }
    }
    /**
     * Type-checks the {@code on} condition of a join. The LHS expression is checked in the
     * env as it was before the join's input node (so it only sees earlier bindings), the RHS
     * in the env after the join node, and the two sides must have assignable types.
     */
    @Override
    public void visit(BLangOnClause onClause) {
        BType lhsType, rhsType;
        BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek());
        // LHS may only reference bindings introduced before the join input node.
        onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode);
        lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv);
        onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode);
        rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek());
        if (!types.isAssignable(lhsType, rhsType)) {
            dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
        }
    }
    /**
     * Type-checks an {@code order by} clause: each order-key expression must have an ordered
     * type; otherwise an ORDER_BY_NOT_SUPPORTED diagnostic is logged.
     */
    @Override
    public void visit(BLangOrderByClause orderByClause) {
        orderByClause.env = queryEnvs.peek();
        for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
            BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), orderByClause.env);
            if (!types.isOrderedType(exprType, false)) {
                dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED);
            }
        }
    }
    /**
     * Visits a {@code do} statement: only its {@code on fail} clause (if any) needs handling
     * here; the body is analyzed elsewhere.
     */
    @Override
    public void visit(BLangDo doNode) {
        if (doNode.onFailClause != null) {
            doNode.onFailClause.accept(this);
        }
    }
    /** Visits each statement of an {@code on fail} clause body with this checker. */
    public void visit(BLangOnFailClause onFailClause) {
        onFailClause.body.stmts.forEach(stmt -> stmt.accept(this));
    }
private SymbolEnv handleFilterClauses (BLangExpression filterExpression) {
checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType);
BType actualType = filterExpression.getBType();
if (TypeTags.TUPLE == actualType.tag) {
dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
symTable.booleanType, actualType);
}
SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, queryFinalClauses.peek(), queryEnvs.pop());
queryEnvs.push(filterEnv);
return filterEnv;
}
    /**
     * Defines the binding-pattern variables of an input clause ({@code from}/{@code join}) in
     * the given block env.
     * <ul>
     *   <li>{@code var} declarations take the inferred member type directly;</li>
     *   <li>explicitly typed declarations are validated: the member type must be assignable to
     *       the declared type, otherwise an INCOMPATIBLE_TYPES error is logged (suppressed when
     *       the declared type itself failed to resolve) and the declared type is used so
     *       checking can continue.</li>
     * </ul>
     */
    private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) {
        if (bLangInputClause.variableDefinitionNode == null) {
            // No binding pattern to define (e.g. already erroneous clause).
            return;
        }
        BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable();
        if (bLangInputClause.isDeclaredWithVar) {
            semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
            return;
        }
        BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv);
        if (types.isAssignable(bLangInputClause.varType, typeNodeType)) {
            semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
            return;
        }
        if (typeNodeType != symTable.semanticError) {
            dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    bLangInputClause.varType, typeNodeType);
        }
        // Fall back to the declared type so downstream checking can proceed.
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv);
    }
    /**
     * Type-checks a {@code check} / {@code checkpanic} expression.
     * <p>
     * The inner expression is checked against a candidate type that includes an error
     * component; the error members of the result are split out (they are what the operator
     * removes) and recorded in {@code equivalentErrorTypeList}, while the non-error members
     * form the expression's actual type. Warnings are emitted when the RHS can never be an
     * error. Worker-receive expressions get special two-pass handling, and lax-typed
     * {@code check} RHSes may first be rewritten to an {@code ensureType} call.
     */
    private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) {
        String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
        BLangExpression exprWithCheckingKeyword = checkedExpr.expr;
        boolean firstVisit = exprWithCheckingKeyword.getBType() == null;
        BType checkExprCandidateType;
        if (expType == symTable.noType) {
            checkExprCandidateType = symTable.noType;
        } else {
            // Probe the RHS type on a clone; fall back to expType|error if that probe fails.
            BType exprType = getCandidateType(checkedExpr, expType);
            if (exprType == symTable.semanticError) {
                checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType);
            } else {
                checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(expType);
            }
        }
        if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) {
            // May rewrite lax-typed access into an ensureType invocation.
            rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType);
        }
        BType exprType = checkExpr(checkedExpr.expr, env, checkExprCandidateType);
        if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
            if (firstVisit) {
                // Worker receives are resolved on a later pass; defer.
                isTypeChecked = false;
                resultType = expType;
                return;
            } else {
                expType = checkedExpr.getBType();
                exprType = checkedExpr.expr.getBType();
            }
        }
        boolean isErrorType = types.isAssignable(Types.getReferredType(exprType), symTable.errorType);
        if (Types.getReferredType(exprType).tag != TypeTags.UNION && !isErrorType) {
            if (exprType.tag == TypeTags.READONLY) {
                // readonly splits into error (removed by check) and any&readonly (the result).
                checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{
                    add(symTable.errorType);
                }};
                resultType = symTable.anyAndReadonly;
                return;
            } else if (exprType != symTable.semanticError) {
                dlog.warning(checkedExpr.expr.pos,
                        DiagnosticWarningCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS,
                        operatorType);
            }
            checkedExpr.setBType(symTable.semanticError);
            return;
        }
        // Partition the RHS member types into error and non-error components.
        List<BType> errorTypes = new ArrayList<>();
        List<BType> nonErrorTypes = new ArrayList<>();
        if (!isErrorType) {
            for (BType memberType : types.getAllTypes(exprType, true)) {
                if (memberType.tag == TypeTags.READONLY) {
                    errorTypes.add(symTable.errorType);
                    nonErrorTypes.add(symTable.anyAndReadonly);
                    continue;
                }
                if (types.isAssignable(memberType, symTable.errorType)) {
                    errorTypes.add(memberType);
                    continue;
                }
                nonErrorTypes.add(memberType);
            }
        } else {
            errorTypes.add(exprType);
        }
        checkedExpr.equivalentErrorTypeList = errorTypes;
        if (errorTypes.isEmpty()) {
            // check/checkpanic on a value that can never be an error is pointless.
            dlog.warning(checkedExpr.expr.pos,
                    DiagnosticWarningCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
            checkedExpr.setBType(symTable.semanticError);
            return;
        }
        BType actualType;
        if (nonErrorTypes.size() == 0) {
            actualType = symTable.neverType;
        } else if (nonErrorTypes.size() == 1) {
            actualType = nonErrorTypes.get(0);
        } else {
            actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes));
        }
        resultType = types.checkType(checkedExpr, actualType, expType);
    }
    /**
     * Rewrites the RHS of a {@code check} expression on a lax type (e.g. json field access)
     * into a lang-lib {@code ensureType(typedesc)} invocation, so the runtime performs the
     * type assertion. No-op when the candidate type is not lax.
     *
     * @param checkedExpr the check expression whose RHS may be rewritten
     * @param type        the candidate type used to probe the RHS and resolve ensureType
     */
    private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) {
        BType rhsType = getCandidateType(checkedExpr, type);
        if (rhsType == symTable.semanticError) {
            // Retry the probe without an expected type.
            rhsType = getCandidateType(checkedExpr, rhsType);
        }
        BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType);
        if (!types.isLax(candidateLaxType)) {
            return;
        }
        // Build the typedesc argument carrying the expected type.
        ArrayList<BLangExpression> argExprs = new ArrayList<>();
        BType typedescType = new BTypedescType(expType, null);
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = expType;
        typedescExpr.setBType(typedescType);
        argExprs.add(typedescExpr);
        BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE,
                argExprs, checkedExpr.expr, checkedExpr.pos);
        invocation.symbol = symResolver.lookupLangLibMethod(type, names.fromString(invocation.name.value), env);
        invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        checkedExpr.expr = invocation;
    }
private BType getCandidateLaxType(BLangNode expr, BType rhsType) {
if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
return types.getSafeType(rhsType, false, true);
}
return rhsType;
}
    /**
     * Probes the type of a checked expression's RHS on a cloned node without emitting
     * diagnostics: logging is muted and the error count restored afterwards, so a failed
     * probe (returning {@code semanticError}) leaves no trace. The original AST node is
     * untouched — only the clone is checked.
     *
     * @param checkedExpr            the check expression whose RHS is probed
     * @param checkExprCandidateType expected type for the probe, or semanticError for none
     * @return the probed type of the cloned RHS
     */
    private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) {
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int prevErrorCount = this.dlog.errorCount();
        this.dlog.resetErrorCount();
        this.dlog.mute();
        checkedExpr.expr.cloneAttempt++;
        BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr);
        BType rhsType;
        if (checkExprCandidateType == symTable.semanticError) {
            rhsType = checkExpr(clone, env);
        } else {
            rhsType = checkExpr(clone, env, checkExprCandidateType);
        }
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(prevErrorCount);
        if (!prevNonErrorLoggingCheck) {
            // Only unmute if we were the ones who muted (probes can nest).
            this.dlog.unmute();
        }
        return rhsType;
    }
private BType addDefaultErrorIfNoErrorComponentFound(BType type) {
for (BType t : types.getAllTypes(type, false)) {
if (types.isAssignable(t, symTable.errorType)) {
return type;
}
}
return BUnionType.create(null, type, symTable.errorType);
}
    /** A service constructor expression's type is the type of its service node's symbol. */
    @Override
    public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
        resultType = serviceConstructorExpr.serviceNode.symbol.type;
    }
    /**
     * Type-checks a type-test expression ({@code expr is T}): resolves the tested type node,
     * checks the tested expression, and types the whole expression as {@code boolean}.
     */
    @Override
    public void visit(BLangTypeTestExpr typeTestExpr) {
        typeTestExpr.typeNode.setBType(symResolver.resolveTypeNode(typeTestExpr.typeNode, env));
        checkExpr(typeTestExpr.expr, env);
        resultType = types.checkType(typeTestExpr, symTable.booleanType, expType);
    }
    /**
     * Type-checks an annotation access expression ({@code typedesc.@Annot}). The accessed
     * expression must be a typedesc; the annotation symbol is resolved by module alias and
     * name. The result type is {@code AttachedType?} (or {@code true?} for annotations with
     * no attached type) — nil covers the annotation being absent at runtime.
     */
    public void visit(BLangAnnotAccessExpr annotAccessExpr) {
        checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc);
        BType actualType = symTable.semanticError;
        BSymbol symbol =
                this.symResolver.resolveAnnotation(annotAccessExpr.pos, env,
                        names.fromString(annotAccessExpr.pkgAlias.getValue()),
                        names.fromString(annotAccessExpr.annotationName.getValue()));
        if (symbol == this.symTable.notFoundSymbol) {
            this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION,
                    annotAccessExpr.annotationName.getValue());
        } else {
            annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol;
            // Annotations without an attached type behave like `true` flags.
            BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? symTable.trueType :
                    ((BAnnotationSymbol) symbol).attachedType;
            actualType = BUnionType.create(null, annotType, symTable.nilType);
        }
        this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
    }
    /**
     * Returns whether the given expression is a kind of variable reference that may appear as
     * an assignment/binding target. For invalid kinds an INVALID_RECORD_BINDING_PATTERN error
     * is logged as a side effect.
     *
     * @param varRef the candidate reference expression
     * @return true for the accepted reference kinds, false (with an error logged) otherwise
     */
    private boolean isValidVariableReference(BLangExpression varRef) {
        switch (varRef.getKind()) {
            case SIMPLE_VARIABLE_REF:
            case RECORD_VARIABLE_REF:
            case TUPLE_VARIABLE_REF:
            case ERROR_VARIABLE_REF:
            case FIELD_BASED_ACCESS_EXPR:
            case INDEX_BASED_ACCESS_EXPR:
            case XML_ATTRIBUTE_ACCESS_EXPR:
                return true;
            default:
                dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType());
                return false;
        }
    }
    /**
     * Resolves the {@code readonly} target type to an effective type for the current expected
     * type. A bare {@code readonly} target becomes the immutable intersection of the expected
     * type when the expected type is selectively immutable. For a union target containing
     * {@code readonly}, that member is replaced by the immutable intersection, keeping the
     * other members. Targets without a {@code readonly} component are returned unchanged.
     *
     * @param pos  location used for any immutable-type cloning diagnostics
     * @param type the contextually expected target type
     * @return the effective type with {@code readonly} resolved
     */
    private BType getEffectiveReadOnlyType(Location pos, BType type) {
        BType origTargetType = Types.getReferredType(type);
        if (origTargetType == symTable.readonlyType) {
            // Inherently immutable (or not selectively immutable) types need no cloning.
            if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
                return origTargetType;
            }
            return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, expType, env, symTable,
                    anonymousModelHelper, names, new HashSet<>());
        }
        if (origTargetType.tag != TypeTags.UNION) {
            return origTargetType;
        }
        // Split the union into its readonly member(s) and the rest.
        boolean hasReadOnlyType = false;
        LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>();
        for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) {
            if (memberType == symTable.readonlyType) {
                hasReadOnlyType = true;
                continue;
            }
            nonReadOnlyTypes.add(memberType);
        }
        if (!hasReadOnlyType) {
            return origTargetType;
        }
        if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
            return origTargetType;
        }
        BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes);
        // Replace the readonly member with the immutable intersection of the expected type.
        nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types, expType, env, symTable,
                anonymousModelHelper, names, new HashSet<>()));
        return nonReadOnlyUnion;
    }
    /**
     * Checks an arrow function's body expression against the expected return type, after
     * defining its parameters in a dedicated arrow-function env.
     *
     * @return the checked type of the body expression
     */
    private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
        SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
        bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv));
        return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType);
    }
    /**
     * Assigns the contextually expected parameter types to an arrow function's parameters.
     * On an arity mismatch an error is logged and all parameters are set to semanticError so
     * checking can continue; otherwise each parameter gets a synthesized type node and type.
     *
     * @param bLangArrowFunction the arrow function being typed
     * @param paramTypes         expected parameter types from the contextual function type
     */
    private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
        if (paramTypes.size() != bLangArrowFunction.params.size()) {
            dlog.error(bLangArrowFunction.pos,
                    DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                    paramTypes.size(), bLangArrowFunction.params.size());
            resultType = symTable.semanticError;
            bLangArrowFunction.params.forEach(param -> param.setBType(symTable.semanticError));
            return;
        }
        for (int i = 0; i < bLangArrowFunction.params.size(); i++) {
            BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i);
            BType bType = paramTypes.get(i);
            // Synthesize a type node since arrow-function params have no explicit type syntax.
            BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
            valueTypeNode.setTypeKind(bType.getKind());
            valueTypeNode.pos = symTable.builtinPos;
            paramIdentifier.setTypeNode(valueTypeNode);
            paramIdentifier.setBType(bType);
        }
    }
    /**
     * Logs a SELF_REFERENCE_VAR error when a variable's initializer references the variable
     * being defined (the env's enclosing variable symbol).
     */
    public void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) {
        if (env.enclVarSym == varSymbol) {
            dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name);
        }
    }
public List<BType> getListWithErrorTypes(int count) {
List<BType> list = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
list.add(symTable.semanticError);
}
return list;
}
    /**
     * Type-checks a plain function invocation (including function-pointer calls).
     * <p>
     * Resolves the callee symbol through the module prefix: first in the main symbol space
     * (variables holding function values, or root-package symbols), then in the constructor
     * space as a fallback. Unresolvable or non-invocable symbols produce UNDEFINED_FUNCTION
     * (args are still checked for follow-on diagnostics). Remote/resource methods cannot be
     * called with plain call syntax. Lang-lib calls get a dedicated invocation env.
     */
    private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
        Name funcName = names.fromIdNode(iExpr.name);
        Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
        BSymbol funcSymbol = symTable.notFoundSymbol;
        BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr));
        if (pkgSymbol == symTable.notFoundSymbol) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias);
        } else {
            if (funcSymbol == symTable.notFoundSymbol) {
                BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
                if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
                    // A variable may hold a function pointer.
                    funcSymbol = symbol;
                }
                if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) &&
                        (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
                    funcSymbol = symbol;
                }
            }
            if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) {
                // Fall back to the constructor symbol space (e.g. error constructors).
                BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
                funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol;
            }
        }
        if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
            if (!missingNodesHelper.isMissingNode(funcName)) {
                dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName);
            }
            // Still check args so their own errors surface.
            iExpr.argExprs.forEach(arg -> checkExpr(arg, env));
            resultType = symTable.semanticError;
            return;
        }
        if (isFunctionPointer(funcSymbol)) {
            iExpr.functionPointerInvocation = true;
            markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env);
        }
        if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
        }
        if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
        }
        boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID);
        if (langLibPackageID) {
            this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
        }
        iExpr.symbol = funcSymbol;
        checkInvocationParamAndReturnType(iExpr);
        if (langLibPackageID && !iExpr.argExprs.isEmpty()) {
            // Lang-lib calls on immutable receivers must not mutate them.
            checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol);
        }
    }
    /**
     * Marks the given symbol as a closure variable where needed and registers it with the
     * enclosing lambda / arrow function / object-constructor class.
     * <p>
     * First checks the immediate enclosing constructs; then walks outward through the env
     * chain, stopping at ordinary (non-OBJECT_CTOR, non-attached) functions. Object
     * constructor classes get their captured symbols recorded in {@code oceEnvData} and
     * propagated to enclosing classes. The traversal order and early returns are significant
     * — each step decides whether the symbol is captured at that level.
     */
    protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) {
        BLangInvokableNode encInvokable = env.enclInvokable;
        BLangNode bLangNode = env.node;
        // Package-level symbols referenced from ordinary functions are not closures.
        if ((symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
                bLangNode.getKind() != NodeKind.ARROW_EXPR && bLangNode.getKind() != NodeKind.EXPR_FUNCTION_BODY &&
                encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA) &&
                !encInvokable.flagSet.contains(Flag.OBJECT_CTOR)) {
            return;
        }
        if (!symbol.closure) {
            if (searchClosureVariableInExpressions(symbol, pos, env, encInvokable, bLangNode)) {
                return;
            }
        }
        BLangNode node = bLangNode;
        if (isObjectCtorClass(node)) {
            BLangClassDefinition classDef = (BLangClassDefinition) node;
            OCEDynamicEnvironmentData oceData = classDef.oceEnvData;
            BLangFunction currentFunc = (BLangFunction) encInvokable;
            if ((currentFunc != null) && !currentFunc.attachedFunction &&
                    !(currentFunc.symbol.receiverSymbol == symbol)) {
                BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(oceData.capturedClosureEnv, symbol.name,
                        SymTag.VARIABLE);
                if (resolvedSymbol != symTable.notFoundSymbol && !resolvedSymbol.closure) {
                    if (resolvedSymbol.owner.getKind() != SymbolKind.PACKAGE) {
                        updateObjectCtorClosureSymbols(pos, currentFunc, resolvedSymbol, classDef);
                        return;
                    }
                }
            }
        }
        // Walk outward through enclosing envs looking for the capturing construct.
        SymbolEnv cEnv = env;
        while (node != null) {
            if (node.getKind() == NodeKind.FUNCTION) {
                BLangFunction function = (BLangFunction) node;
                if (!function.flagSet.contains(Flag.OBJECT_CTOR) && !function.flagSet.contains(Flag.ATTACHED)) {
                    // Reached an ordinary function boundary; stop.
                    break;
                }
            }
            if (!symbol.closure) {
                if (searchClosureVariableInExpressions(symbol, pos, env, encInvokable, node)) {
                    return;
                }
            }
            if (isObjectCtorClass(node)) {
                BLangFunction currentFunction = (BLangFunction) encInvokable;
                if ((currentFunction != null) && currentFunction.attachedFunction &&
                        (currentFunction.symbol.receiverSymbol == symbol)) {
                    // `self` of an attached function is not a closure capture.
                    return;
                }
                SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                        SymTag.VARIABLE);
                BLangClassDefinition classDef = (BLangClassDefinition) node;
                if (resolvedSymbol != symTable.notFoundSymbol) {
                    if (resolvedSymbol.owner.getKind() == SymbolKind.PACKAGE) {
                        break;
                    }
                    updateObjectCtorClosureSymbols(pos, currentFunction, resolvedSymbol, classDef);
                    return;
                }
                break;
            }
            SymbolEnv enclEnv = cEnv.enclEnv;
            if (enclEnv == null) {
                break;
            }
            cEnv = enclEnv;
            node = cEnv.node;
        }
    }
private boolean isObjectCtorClass(BLangNode node) {
return node.getKind() == NodeKind.CLASS_DEFN &&
((BLangClassDefinition) node).flagSet.contains(Flag.OBJECT_CTOR);
}
    /**
     * Tries to resolve and mark the symbol as a closure capture for three capturing contexts:
     * a lambda-flagged enclosing invokable, an arrow expression node, and a record type
     * (default values of record fields). In each case the symbol is looked up in the
     * enclosing invokable's env — a hit (that is not a function argument) marks it
     * {@code closure} and registers it on the capturing node.
     *
     * @return true when the symbol was registered as a closure variable here
     */
    private boolean searchClosureVariableInExpressions(BSymbol symbol, Location pos, SymbolEnv env,
                                                      BLangInvokableNode encInvokable, BLangNode bLangNode) {
        if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA)
                && !isFunctionArgument(symbol, encInvokable.requiredParams)) {
            SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
            BSymbol resolvedSymbol =
                    symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
            if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
                resolvedSymbol.closure = true;
                ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
                return true;
            }
        }
        if (bLangNode.getKind() == NodeKind.ARROW_EXPR
                && !isFunctionArgument(symbol, ((BLangArrowFunction) bLangNode).params)) {
            SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
            BSymbol resolvedSymbol =
                    symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
            if (resolvedSymbol != symTable.notFoundSymbol) {
                resolvedSymbol.closure = true;
                ((BLangArrowFunction) bLangNode).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
                return true;
            }
        }
        if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) {
            // Record field default values may capture variables of the enclosing scope.
            SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType);
            BSymbol resolvedSymbol =
                    symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
            if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null &&
                    !encInvokable.flagSet.contains(Flag.ATTACHED)) {
                resolvedSymbol.closure = true;
                ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
                return true;
            }
        }
        return false;
    }
    /**
     * Records a closure capture on an object-constructor class: marks the symbol and class,
     * registers the capture on the current function (if any), and files the symbol under
     * either the function-parameter or block-variable capture list of the class's OCE data.
     * Finally propagates the capture information to enclosing classes.
     */
    private void updateObjectCtorClosureSymbols(Location pos, BLangFunction currentFunction, BSymbol resolvedSymbol,
                                                BLangClassDefinition classDef) {
        classDef.hasClosureVars = true;
        resolvedSymbol.closure = true;
        if (currentFunction != null) {
            currentFunction.closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
        OCEDynamicEnvironmentData oceEnvData = classDef.oceEnvData;
        // Parameters (incl. rest param) are captured differently from block-level variables.
        if (currentFunction != null && (currentFunction.symbol.params.contains(resolvedSymbol)
                || (currentFunction.symbol.restParam == resolvedSymbol))) {
            oceEnvData.closureFuncSymbols.add(resolvedSymbol);
        } else {
            oceEnvData.closureBlockSymbols.add(resolvedSymbol);
        }
        updateProceedingClasses(env.enclEnv, oceEnvData, classDef);
    }
    /**
     * Propagates closure-capture information from an object-constructor class to every
     * enclosing class definition up to (but not including) the package node, so nested
     * constructors carry their captured symbols through all intermediate classes.
     *
     * @param envArg       env to start walking outward from
     * @param oceEnvData   capture data of the originating class
     * @param origClassDef the class the captures originate from (skipped in the walk)
     */
    private void updateProceedingClasses(SymbolEnv envArg, OCEDynamicEnvironmentData oceEnvData,
                                         BLangClassDefinition origClassDef) {
        SymbolEnv localEnv = envArg;
        while (localEnv != null) {
            BLangNode node = localEnv.node;
            if (node.getKind() == NodeKind.PACKAGE) {
                break;
            }
            if (node.getKind() == NodeKind.CLASS_DEFN) {
                BLangClassDefinition classDef = (BLangClassDefinition) node;
                if (classDef != origClassDef) {
                    classDef.hasClosureVars = true;
                    OCEDynamicEnvironmentData parentOceData = classDef.oceEnvData;
                    oceEnvData.parents.push(classDef);
                    parentOceData.closureFuncSymbols.addAll(oceEnvData.closureFuncSymbols);
                    parentOceData.closureBlockSymbols.addAll(oceEnvData.closureBlockSymbols);
                }
            }
            localEnv = localEnv.enclEnv;
        }
    }
private boolean isNotFunction(BSymbol funcSymbol) {
if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
|| (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
return false;
}
if (isFunctionPointer(funcSymbol)) {
return false;
}
return true;
}
private boolean isFunctionPointer(BSymbol funcSymbol) {
if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
return false;
}
return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE
&& funcSymbol.kind == SymbolKind.FUNCTION
&& !Symbols.isNative(funcSymbol);
}
    /**
     * Type-checks the named detail arguments of an error constructor against the expected
     * detail type. Each argument is first probed on a clone against its target field type;
     * if the probe fails the real node is re-checked without an expected type (to produce
     * accurate diagnostics), otherwise against the target type.
     *
     * @param errorConstructorExpr the error constructor whose named args are checked
     * @param expectedType         the expected detail record/map type
     * @return the checked named-argument expressions
     */
    private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr,
                                                                     BType expectedType) {
        List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(errorConstructorExpr.namedArgs.size());
        for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) {
            BType target = checkErrCtrTargetTypeAndSetSymbol(namedArgsExpression, expectedType);
            // Probe on a clone so a failed check leaves the real node clean.
            BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression);
            BType type = checkExpr(clone, env, target);
            if (type == symTable.semanticError) {
                checkExpr(namedArgsExpression, env);
            } else {
                checkExpr(namedArgsExpression, env, target);
            }
            namedArgs.add(namedArgsExpression);
        }
        return namedArgs;
    }
    /**
     * Determines the target type of a named error-detail argument within the expected detail
     * type, and binds the argument's symbol when the detail is a record field.
     * <ul>
     *   <li>map detail: the map constraint;</li>
     *   <li>record detail with a matching field: that field's type (symbol set);</li>
     *   <li>record detail without the field: the rest-field type for open records
     *       ({@code noType} for sealed ones), with an INVALID_REST_DETAIL_ARG error for
     *       non-sealed, non-empty records;</li>
     *   <li>anything else: semanticError.</li>
     * </ul>
     */
    private BType checkErrCtrTargetTypeAndSetSymbol(BLangNamedArgsExpression namedArgsExpression, BType expectedType) {
        BType type = Types.getReferredType(expectedType);
        if (type == symTable.semanticError) {
            return symTable.semanticError;
        }
        if (type.tag == TypeTags.MAP) {
            return ((BMapType) type).constraint;
        }
        if (type.tag != TypeTags.RECORD) {
            return symTable.semanticError;
        }
        BRecordType recordType = (BRecordType) type;
        BField targetField = recordType.fields.get(namedArgsExpression.name.value);
        if (targetField != null) {
            namedArgsExpression.varSymbol = targetField.symbol;
            return targetField.type;
        }
        if (!recordType.sealed && !recordType.fields.isEmpty()) {
            dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name,
                    recordType);
        }
        return recordType.sealed ? symTable.noType : recordType.restFieldType;
    }
    /**
     * Type-checks a method invocation on an object-typed expression.
     * <p>
     * Service methods may only be invoked on {@code self}. The method is resolved on the
     * object's type symbol; failing that, an invocable object field (function-typed field)
     * is tried, then a lang-lib method. {@code init} may only be called via {@code self},
     * and remote/resource methods reject plain method-call syntax.
     */
    private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) {
        if (objectType.getKind() == TypeKind.SERVICE &&
                !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                        (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION);
            return;
        }
        // Methods are stored under a qualified "<TypeName>.<methodName>" symbol name.
        Name funcName =
                names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
        BSymbol funcSymbol =
                symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
        if (funcSymbol == symTable.notFoundSymbol) {
            // Fall back to a function-typed object field invoked as a method.
            BSymbol invocableField = symResolver.resolveInvocableObjectField(
                    iExpr.pos, env, names.fromIdNode(iExpr.name), (BObjectTypeSymbol) objectType.tsymbol);
            if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
                funcSymbol = invocableField;
                iExpr.functionPointerInvocation = true;
            }
        }
        if (funcSymbol == symTable.notFoundSymbol || Types.getReferredType(funcSymbol.type).tag != TypeTags.INVOKABLE) {
            if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) {
                dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value,
                        objectType);
                resultType = symTable.semanticError;
                return;
            }
        } else {
            iExpr.symbol = funcSymbol;
        }
        // init() may only be invoked on self.
        if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) &&
                !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                        (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION);
        }
        if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
        }
        if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
        }
        checkInvocationParamAndReturnType(iExpr);
    }
    /**
     * Type-checks an action invocation ({@code ->} syntax) on a client/endpoint object.
     * <p>
     * Validates the receiver is an endpoint (or the call is async), resolves the remote
     * method (falling back to an invocable object field), requires the REMOTE flag for
     * non-async calls, and rejects client remote methods whose return type contains a
     * required {@code never} member.
     */
    private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) {
        if (checkInvalidActionInvocation(aInv)) {
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, aInv.expr.getBType());
            this.resultType = symTable.semanticError;
            aInv.symbol = symTable.notFoundSymbol;
            return;
        }
        Name remoteMethodQName = names
                .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value));
        Name actionName = names.fromIdNode(aInv.name);
        BSymbol remoteFuncSymbol = symResolver.resolveObjectMethod(aInv.pos, env,
                remoteMethodQName, (BObjectTypeSymbol) Types.getReferredType(expType).tsymbol);
        if (remoteFuncSymbol == symTable.notFoundSymbol) {
            // Fall back to a function-typed object field.
            BSymbol invocableField = symResolver.resolveInvocableObjectField(
                    aInv.pos, env, names.fromIdNode(aInv.name), (BObjectTypeSymbol) expType.tsymbol);
            if (invocableField != symTable.notFoundSymbol && invocableField.kind == SymbolKind.FUNCTION) {
                remoteFuncSymbol = invocableField;
                aInv.functionPointerInvocation = true;
            }
        }
        if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) {
            dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType);
            resultType = symTable.semanticError;
            return;
        }
        if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) {
            // Only remote methods (or async starts) may use action syntax.
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName);
            this.resultType = symTable.semanticError;
            return;
        }
        if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) &&
                Symbols.isFlagOn(expType.flags, Flags.CLIENT) &&
                types.isNeverTypeOrStructureTypeWithARequiredNeverMember
                        ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) {
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL);
        }
        aInv.symbol = remoteFuncSymbol;
        checkInvocationParamAndReturnType(aInv);
    }
/**
 * Returns {@code true} when a (non-async) action invocation is performed on a simple variable
 * reference that is not a client endpoint — i.e. an invalid use of the `->` syntax.
 */
private boolean checkInvalidActionInvocation(BLangInvocation.BLangActionInvocation aInv) {
    // Only simple variable references can name a client endpoint here.
    if (aInv.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    boolean isEndpoint =
            (((BLangSimpleVarRef) aInv.expr).symbol.tag & SymTag.ENDPOINT) == SymTag.ENDPOINT;
    // Async invocations (`start`) are permitted on non-endpoints.
    return !isEndpoint && !aInv.async;
}
/**
 * Returns {@code true} if {@code iExpr} resolves (and type-checks) as a lang-lib method call
 * on {@code bType}. Delegates to {@link #getLangLibMethod}, which performs the check as a
 * side effect when resolution succeeds.
 */
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) {
    BSymbol langLibMethod = getLangLibMethod(iExpr, bType);
    return langLibMethod != symTable.notFoundSymbol;
}
/**
 * Resolves {@code iExpr} as a lang-lib method on {@code bType} and, when found, type-checks
 * the full invocation.
 *
 * Side effects on success: marks the invocation as a lang-lib call, injects the receiver
 * expression as the first argument, and checks params/return type inside a dedicated
 * invocation environment (the enclosing env is restored afterwards).
 *
 * @return the resolved method symbol, or {@code symTable.notFoundSymbol} if none matches
 */
private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) {
    Name funcName = names.fromString(iExpr.name.value);
    BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName, env);

    if (funcSymbol == symTable.notFoundSymbol) {
        return symTable.notFoundSymbol;
    }

    iExpr.symbol = funcSymbol;
    iExpr.langLibInvocation = true;
    // Check the call in its own invocation env; restore the enclosing env afterwards.
    SymbolEnv enclEnv = this.env;
    this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    // Lang-lib methods take the receiver as their first parameter (x.foo() == foo(x)).
    iExpr.argExprs.add(0, iExpr.expr);
    checkInvocationParamAndReturnType(iExpr);
    this.env = enclEnv;

    return funcSymbol;
}
/**
 * Checks the invocation's arguments against its parameters and matches the computed actual
 * (return) type against the currently expected type, storing the outcome in {@code resultType}.
 */
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
    // Argument checking yields the invocation's actual type; reconcile it with the expectation.
    BType inferredType = checkInvocationParam(iExpr);
    this.resultType = types.checkType(iExpr, inferredType, this.expType);
}
/**
 * Picks the single open included-record parameter that may absorb additional named arguments.
 *
 * Eligible only when exactly one open included-record param exists and its record type declares
 * every required parameter name (so extra named args cannot be confused with missing required ones).
 *
 * @return the qualifying parameter symbol, or {@code null} when none qualifies
 */
private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams,
                                                       Set<String> requiredParamNames) {
    if (openIncRecordParams.size() != 1) {
        return null;
    }
    BVarSymbol candidate = openIncRecordParams.get(0);
    LinkedHashMap<String, BField> fields =
            ((BRecordType) Types.getReferredType(candidate.type)).fields;
    boolean coversAllRequired = requiredParamNames.stream().allMatch(fields::containsKey);
    return coversAllRequired ? candidate : null;
}
/**
 * Collects the individual fields of included-record parameters into {@code incRecordParams}
 * (out-parameter) and gathers the set of required parameter names, then determines whether a
 * single open included-record parameter may accept additional named arguments.
 *
 * @param invokableSymbol the invokable whose parameters are inspected
 * @param incRecordParams out: field symbols of every included-record parameter (non-never fields)
 * @return the open included-record parameter that may take extra named args, or {@code null}
 */
private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol,
                                                               List<BVarSymbol> incRecordParams) {
    Set<String> requiredParamNames = new HashSet<>();
    List<BVarSymbol> openIncRecordParams = new ArrayList<>();
    for (BVarSymbol paramSymbol : invokableSymbol.params) {
        BType paramType = Types.getReferredType(paramSymbol.type);
        if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) &&
                paramType.getKind() == TypeKind.RECORD) {
            // Fields typed `never` are deliberately disallowed and contribute no named params.
            boolean recordWithDisallowFieldsOnly = true;
            LinkedHashMap<String, BField> fields = ((BRecordType) paramType).fields;
            for (String fieldName : fields.keySet()) {
                BField field = fields.get(fieldName);
                if (field.symbol.type.tag != TypeTags.NEVER) {
                    recordWithDisallowFieldsOnly = false;
                    incRecordParams.add(field.symbol);
                    requiredParamNames.add(fieldName);
                }
            }
            // Only an all-never, open record can soak up arbitrary additional named args.
            if (recordWithDisallowFieldsOnly && ((BRecordType) paramType).restFieldType != symTable.noType) {
                openIncRecordParams.add(paramSymbol);
            }
        } else {
            requiredParamNames.add(paramSymbol.name.value);
        }
    }
    return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames);
}
/**
 * Classifies the invocation's argument expressions into required args, rest args and a single
 * vararg, validating ordering (named-before-positional/rest errors) and arity, then delegates
 * detailed checking to {@link #checkInvocationArgs}.
 *
 * @param iExpr the invocation being checked
 * @return the invocation's actual type, or {@code semanticError}/{@code noType} on failure
 */
private BType checkInvocationParam(BLangInvocation iExpr) {
    if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
        return symTable.semanticError;
    }
    BType invocableType = Types.getReferredType(iExpr.symbol.type);
    if (invocableType.tag != TypeTags.INVOKABLE) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
        return symTable.noType;
    }

    BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
    List<BType> paramTypes = ((BInvokableType) invocableType).getParameterTypes();
    List<BVarSymbol> incRecordParams = new ArrayList<>();
    BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
            incRecordParams);
    int parameterCountForPositionalArgs = paramTypes.size();
    int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
    iExpr.requiredArgs = new ArrayList<>();
    // An included-record param with at least one non-never field cannot itself be named —
    // its fields are named instead — so it does not count toward the named-arg budget.
    for (BVarSymbol symbol : invokableSymbol.params) {
        if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
                Types.getReferredType(symbol.type).tag != TypeTags.RECORD) {
            continue;
        }
        LinkedHashMap<String, BField> fields =
                ((BRecordType) Types.getReferredType(symbol.type)).fields;
        if (fields.isEmpty()) {
            continue;
        }
        for (String field : fields.keySet()) {
            if (Types.getReferredType(fields.get(field).type).tag != TypeTags.NEVER) {
                parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
                break;
            }
        }
    }

    // Split the written args into required/rest/vararg, enforcing argument ordering rules.
    int i = 0;
    BLangExpression vararg = null;
    boolean foundNamedArg = false;
    for (BLangExpression expr : iExpr.argExprs) {
        switch (expr.getKind()) {
            case NAMED_ARGS_EXPR:
                foundNamedArg = true;
                if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
                }
                i++;
                break;
            case REST_ARGS_EXPR:
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
                    continue;
                }
                vararg = expr;
                break;
            default: // positional argument
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
                }
                if (i < parameterCountForPositionalArgs) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    // Overflowing positional args are handled against the rest parameter.
                    iExpr.restArgs.add(expr);
                }
                i++;
                break;
        }
    }

    return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
            incRecordParamAllowAdditionalFields);
}
/**
 * Checks each classified argument (positional, named, rest, vararg) against the invokable's
 * parameter list and computes the invocation's actual (return) type.
 *
 * Covers: required-parameter coverage, duplicate/undefined named args, vararg expansion into a
 * synthetic tuple/record pair when it must supply remaining declared params, rest-arg checking
 * against array or tuple rest types, parameterized-return unification for interface/native
 * functions, the lang-lib `sort` special case, and future wrapping for async invocations.
 *
 * @return the invocation's actual type, or {@code semanticError} on failure
 */
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg,
                                  List<BVarSymbol> incRecordParams,
                                  BVarSymbol incRecordParamAllowAdditionalFields) {
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
    BInvokableType bInvokableType = (BInvokableType) Types.getReferredType(invokableSymbol.type);
    BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol;
    List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params);

    List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
    List<BVarSymbol> valueProvidedParams = new ArrayList<>();

    // Track which parameters still need a value; entries are removed as args are matched.
    int nonRestArgCount = nonRestArgs.size();
    List<BVarSymbol> requiredParams = new ArrayList<>(nonRestParams.size() + nonRestArgCount);
    List<BVarSymbol> requiredIncRecordParams = new ArrayList<>(incRecordParams.size() + nonRestArgCount);

    for (BVarSymbol nonRestParam : nonRestParams) {
        if (nonRestParam.isDefaultable) {
            continue;
        }
        requiredParams.add(nonRestParam);
    }

    for (BVarSymbol incRecordParam : incRecordParams) {
        if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) {
            requiredIncRecordParams.add(incRecordParam);
        }
    }

    int i = 0;
    for (; i < nonRestArgCount; i++) {
        BLangExpression arg = nonRestArgs.get(i);

        // Special case: a lang-lib receiver already type-checked — re-check silently for
        // char-string expectations so a narrower type can be adopted without duplicate errors.
        if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
            BType expectedType = paramTypes.get(i);
            BType actualType = arg.getBType();
            if (Types.getReferredType(expectedType) == symTable.charStringType) {
                arg.cloneAttempt++;
                BLangExpression clonedArg = nodeCloner.cloneNode(arg);
                BType argType = checkExprSilent(clonedArg, expectedType, env);
                if (argType != symTable.semanticError) {
                    actualType = argType;
                }
            }
            types.checkType(arg.pos, actualType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            types.setImplicitCastExpr(arg, arg.getBType(), expectedType);
        }

        // Positional args are matched to parameters by index.
        if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            if (i < nonRestParams.size()) {
                BVarSymbol param = nonRestParams.get(i);
                checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation);
                valueProvidedParams.add(param);
                requiredParams.remove(param);
                continue;
            }
            // Positional args beyond the declared parameters end positional matching.
            break;
        }

        // Named args are matched to parameters (or included-record fields) by name.
        if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            BLangIdentifier argName = ((NamedArgNode) arg).getName();
            BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr,
                    nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields);
            if (varSym == null) {
                dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName);
                break;
            }
            requiredParams.remove(varSym);
            requiredIncRecordParams.remove(varSym);
            if (valueProvidedParams.contains(varSym)) {
                dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
                continue;
            }
            checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation);
            ((BLangNamedArgsExpression) arg).varSymbol = varSym;
            valueProvidedParams.add(varSym);
        }
    }

    BVarSymbol restParam = invokableTypeSymbol.restParam;

    boolean errored = false;

    // Any leftover non-included required parameter is missing (unless a vararg may still supply it).
    if (!requiredParams.isEmpty() && vararg == null) {
        for (BVarSymbol requiredParam : requiredParams) {
            if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) {
                dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name,
                        iExpr.name.value);
                errored = true;
            }
        }
    }

    // Required included-record fields whose owning record param is itself still unprovided.
    if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) {
        for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) {
            for (BVarSymbol requiredParam : requiredParams) {
                if (Types.getReferredType(requiredParam.type) ==
                        Types.getReferredType(requiredIncRecordParam.owner.type)) {
                    dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER,
                            requiredIncRecordParam.name, iExpr.name.value);
                    errored = true;
                }
            }
        }
    }

    if (restParam == null &&
            (!iExpr.restArgs.isEmpty() ||
                    (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        errored = true;
    }

    if (errored) {
        return symTable.semanticError;
    }

    BType listTypeRestArg = restParam == null ? null : restParam.type;
    BRecordType mappingTypeRestArg = null;

    // A vararg that must also fill remaining declared params is checked against a synthetic
    // tuple (positional view) and record (named view) built from those parameters.
    if (vararg != null && nonRestArgs.size() < nonRestParams.size()) {
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        List<BType> tupleMemberTypes = new ArrayList<>();
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL);
        mappingTypeRestArg = new BRecordType(recordSymbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        BType tupleRestType = null;
        BVarSymbol fieldSymbol;

        for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) {
            BType paramType = paramTypes.get(j);
            BVarSymbol nonRestParam = nonRestParams.get(j);
            Name paramName = nonRestParam.name;
            tupleMemberTypes.add(paramType);
            boolean required = requiredParams.contains(nonRestParam);
            fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
                add(required ? Flag.REQUIRED : Flag.OPTIONAL); }}), paramName,
                    nonRestParam.getOriginalName(), pkgID, paramType, recordSymbol,
                    symTable.builtinPos, VIRTUAL);
            fields.put(paramName.value, new BField(paramName, null, fieldSymbol));
        }

        // Fold the declared rest parameter's element/tuple shape into the synthetic tuple.
        if (listTypeRestArg != null) {
            if (listTypeRestArg.tag == TypeTags.ARRAY) {
                tupleRestType = ((BArrayType) listTypeRestArg).eType;
            } else if (listTypeRestArg.tag == TypeTags.TUPLE) {
                BTupleType restTupleType = (BTupleType) listTypeRestArg;
                tupleMemberTypes.addAll(restTupleType.tupleTypes);
                if (restTupleType.restType != null) {
                    tupleRestType = restTupleType.restType;
                }
            }
        }

        BTupleType tupleType = new BTupleType(tupleMemberTypes);
        tupleType.restType = tupleRestType;
        listTypeRestArg = tupleType;
        mappingTypeRestArg.sealed = true;
        mappingTypeRestArg.restFieldType = symTable.noType;
        mappingTypeRestArg.fields = fields;
        recordSymbol.type = mappingTypeRestArg;
        mappingTypeRestArg.tsymbol = recordSymbol;
    }

    if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        return symTable.semanticError;
    }

    BType restType = null;
    if (vararg != null && !iExpr.restArgs.isEmpty()) {
        // Individual rest args precede the vararg; all must fit the rest array's element type.
        BType elementType = ((BArrayType) listTypeRestArg).eType;

        for (BLangExpression restArg : iExpr.restArgs) {
            checkTypeParamExpr(restArg, this.env, elementType, true);
        }

        checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
        iExpr.restArgs.add(vararg);
        restType = this.resultType;
    } else if (vararg != null) {
        iExpr.restArgs.add(vararg);
        if (mappingTypeRestArg != null) {
            // The vararg may be either a list (positional) or a mapping (named) value.
            LinkedHashSet<BType> restTypes = new LinkedHashSet<>();
            restTypes.add(listTypeRestArg);
            restTypes.add(mappingTypeRestArg);
            BType actualType = BUnionType.create(null, restTypes);
            checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation);
        } else {
            checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
        }
        restType = this.resultType;
    } else if (!iExpr.restArgs.isEmpty()) {
        if (listTypeRestArg.tag == TypeTags.ARRAY) {
            BType elementType = ((BArrayType) listTypeRestArg).eType;
            for (BLangExpression restArg : iExpr.restArgs) {
                checkTypeParamExpr(restArg, this.env, elementType, true);
                // Remember the first failure without stopping remaining checks.
                if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                    restType = this.resultType;
                }
            }
        } else {
            BTupleType tupleType = (BTupleType) listTypeRestArg;
            List<BType> tupleMemberTypes = tupleType.tupleTypes;
            BType tupleRestType = tupleType.restType;

            int tupleMemCount = tupleMemberTypes.size();

            for (int j = 0; j < iExpr.restArgs.size(); j++) {
                BLangExpression restArg = iExpr.restArgs.get(j);
                BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType;
                checkTypeParamExpr(restArg, this.env, memType, true);
                if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                    restType = this.resultType;
                }
            }
        }
    }

    BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType());
    long invokableSymbolFlags = invokableSymbol.flags;
    // Interface/native functions with parameterized returns are unified against the expected type.
    if (restType != symTable.semanticError && (Symbols.isFlagOn(invokableSymbolFlags, Flags.INTERFACE)
            || Symbols.isFlagOn(invokableSymbolFlags, Flags.NATIVE)) &&
            Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) {
        retType = unifier.build(retType, expType, iExpr, types, symTable, dlog);
    }

    // Special validation for lang-lib `sort`: members/key function must produce ordered types.
    boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID);
    String sortFuncName = "sort";
    if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) {
        checkArrayLibSortFuncArgs(iExpr);
    }

    // Async invocations (`start f(...)`) evaluate to a future of the return type.
    if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) {
        return this.generateFutureType(invokableSymbol, retType);
    } else {
        return retType;
    }
}
/**
 * Validates the arguments of a lang-lib {@code sort} call: without a key function the array's
 * member type must be ordered; with a key function (the 3rd argument) the key function's return
 * type must be ordered.
 */
private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) {
    // No key function supplied — the array members themselves must be orderable.
    if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
        dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
                iExpr.argExprs.get(0).getBType());
    }

    if (iExpr.argExprs.size() != 3) {
        return;
    }

    BLangExpression keyFunction = iExpr.argExprs.get(2);
    BType keyFunctionType = keyFunction.getBType();

    if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) {
        return;
    }

    // Explicit nil key function — equivalent to no key function at all.
    if (keyFunctionType.tag == TypeTags.NIL) {
        if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
            dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
                    iExpr.argExprs.get(0).getBType());
        }
        return;
    }

    Location pos;
    BType returnType;

    // Extract the key function's return type depending on how the function was written.
    if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        pos = keyFunction.pos;
        returnType = keyFunction.getBType().getReturnType();
    } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) {
        BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction);
        pos = arrowFunction.body.expr.pos;
        returnType = arrowFunction.body.expr.getBType();
        if (returnType.tag == TypeTags.SEMANTIC_ERROR) {
            return;
        }
    } else {
        BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction;
        pos = keyLambdaFunction.function.pos;
        returnType = keyLambdaFunction.function.getBType().getReturnType();
    }

    if (!types.isOrderedType(returnType, false)) {
        dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType);
    }
}
/**
 * Resolves a named argument's name to a parameter symbol.
 *
 * Lookup order: declared (non-rest) parameters, then included-record parameter fields, then —
 * if a single open included-record parameter accepts additional fields — a fresh virtual symbol
 * for the extra name (checking the value against that record's rest-field type).
 *
 * @return the matched or synthesized parameter symbol, or {@code null} when the name is invalid
 */
private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr,
                                                        List<BVarSymbol> nonRestParams,
                                                        List<BVarSymbol> incRecordParams,
                                                        BVarSymbol incRecordParamAllowAdditionalFields) {
    String name = argName.value;
    for (BVarSymbol param : nonRestParams) {
        if (name.equals(param.getName().value)) {
            return param;
        }
    }
    for (BVarSymbol includedField : incRecordParams) {
        if (name.equals(includedField.getName().value)) {
            return includedField;
        }
    }
    if (incRecordParamAllowAdditionalFields == null) {
        return null;
    }
    BRecordType incRecordType =
            (BRecordType) Types.getReferredType(incRecordParamAllowAdditionalFields.type);
    // Additional fields must satisfy the open record's rest-field type.
    checkExpr(expr, env, incRecordType.restFieldType);
    if (incRecordType.fields.containsKey(name)) {
        // Names colliding with declared (never-typed) fields are not additional fields.
        return null;
    }
    return new BVarSymbol(0, names.fromIdNode(argName), names.originalNameFromIdNode(argName),
            null, symTable.noType, null, argName.pos, VIRTUAL);
}
/**
 * Builds the {@code future<retType>} type produced by an async invocation, tagging futures
 * created for worker lambdas so later phases can treat them specially.
 */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
    String symbolName = invocableSymbol.name.value;
    boolean isWorkerStart = symbolName.startsWith(WORKER_LAMBDA_VAR_PREFIX);
    return new BFutureType(TypeTags.FUTURE, retType, null, isWorkerStart);
}
/**
 * Convenience overload that checks {@code arg} using its own position for diagnostics.
 */
private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType,
                                boolean inferTypeForNumericLiteral) {
    checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral);
}
/**
 * Checks an argument expression against a possibly type-parameterized expected type.
 *
 * When the current env carries type params and the expression's shape allows inference
 * (see {@link #requireTypeInference}), the arg is first checked against the bound type so the
 * type param can be resolved from the inferred type; otherwise it is checked directly.
 *
 * @param pos position used for type-param diagnostics (may differ from {@code arg.pos})
 */
private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType,
                                boolean inferTypeForNumericLiteral) {
    if (typeParamAnalyzer.notRequireTypeParams(env)) {
        checkExpr(arg, env, expectedType);
        return;
    }
    if (requireTypeInference(arg, inferTypeForNumericLiteral)) {
        // Check against the matching bound type, then bind the type param from the inferred type.
        BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env);
        BType inferredType = checkExpr(arg, env, expType);
        typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType);
        types.checkType(arg.pos, inferredType, expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
        return;
    }
    checkExpr(arg, env, expectedType);
    typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType);
}
/**
 * Decides whether an argument expression's type should be inferred (rather than checked
 * directly) when resolving type parameters. Grouped expressions delegate to their inner
 * expression; constructor-like expressions always infer; conditional and numeric-literal
 * expressions infer only when {@code inferTypeForNumericLiteral} is set.
 */
private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.GROUP_EXPR) {
        // Parentheses are transparent — decide based on the wrapped expression.
        return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral);
    }
    if (kind == NodeKind.ARROW_EXPR || kind == NodeKind.LIST_CONSTRUCTOR_EXPR
            || kind == NodeKind.RECORD_LITERAL_EXPR) {
        return true;
    }
    if (kind == NodeKind.ELVIS_EXPR || kind == NodeKind.TERNARY_EXPR
            || kind == NodeKind.NUMERIC_LITERAL) {
        return inferTypeForNumericLiteral;
    }
    return false;
}
/**
 * Type-checks a single field of a mapping constructor (key-value field, variable-name field,
 * or spread-operator field) against the target mapping type (record or map).
 *
 * Handles `readonly` constructor fields by producing an immutable intersection type where
 * possible, and validates spread expressions field-by-field for records or against the
 * constraint for maps.
 *
 * @return the checked field value type; {@code semanticError} on failure; {@code noType} for a
 *         record spread whose fields were all checked in place
 */
private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) {
    BType fieldType = symTable.semanticError;
    boolean keyValueField = field.isKeyValueField();
    boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;

    boolean readOnlyConstructorField = false;
    String fieldName = null;
    Location pos = null;

    BLangExpression valueExpr = null;

    if (keyValueField) {
        valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
    } else if (!spreadOpField) {
        // A variable-name field (`{x}`) is its own value expression.
        valueExpr = (BLangRecordVarNameField) field;
    }

    switch (mappingType.tag) {
        case TypeTags.RECORD:
            if (keyValueField) {
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(key.expr, key.computedKey,
                        (BRecordType) mappingType);
                fieldType = typeSymbolPair.determinedType;
                key.fieldSymbol = typeSymbolPair.fieldSymbol;
                readOnlyConstructorField = keyValField.readonly;
                pos = key.expr.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else if (spreadOpField) {
                BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                checkExpr(spreadExpr, this.env);

                BType spreadExprType = Types.getReferredType(spreadExpr.getBType());
                if (spreadExprType.tag == TypeTags.MAP) {
                    // A map spread must be assignable to the union of all target field types.
                    return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
                            getAllFieldType((BRecordType) mappingType),
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                }

                if (spreadExprType.tag != TypeTags.RECORD) {
                    dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                            spreadExprType);
                    return symTable.semanticError;
                }

                // Record spread: verify each source field against the corresponding target field.
                boolean errored = false;
                for (BField bField : ((BRecordType) spreadExprType).fields.values()) {
                    BType specFieldType = bField.type;
                    BSymbol fieldSymbol = symResolver.resolveStructField(spreadExpr.pos, this.env, bField.name,
                            mappingType.tsymbol);
                    BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, fieldSymbol, bField.name,
                            (BRecordType) mappingType);
                    if (expectedFieldType != symTable.semanticError &&
                            !types.isAssignable(specFieldType, expectedFieldType)) {
                        dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
                                expectedFieldType, bField.name, specFieldType);
                        if (!errored) {
                            errored = true;
                        }
                    }
                }
                return errored ? symTable.semanticError : symTable.noType;
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                TypeSymbolPair typeSymbolPair = checkRecordLiteralKeyExpr(varNameField, false,
                        (BRecordType) mappingType);
                fieldType = typeSymbolPair.determinedType;
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }
            break;
        case TypeTags.MAP:
            if (spreadOpField) {
                BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
                BType spreadOpType = checkExpr(spreadExp, this.env);
                BType spreadOpMemberType = checkSpreadFieldWithMapType(spreadOpType);
                if (spreadOpMemberType.tag == symTable.semanticError.tag) {
                    dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
                            spreadOpType);
                    return symTable.semanticError;
                }

                return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
                        DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            }

            // For maps, keys only need to be valid string keys; all values share the constraint.
            boolean validMapKey;
            if (keyValueField) {
                BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValField.key;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey);
                readOnlyConstructorField = keyValField.readonly;
                pos = key.pos;
                fieldName = getKeyValueFieldName(keyValField);
            } else {
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false);
                readOnlyConstructorField = varNameField.readonly;
                pos = varNameField.pos;
                fieldName = getVarNameFieldName(varNameField);
            }

            fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
            break;
    }

    // `readonly` fields must have an immutable (or immutable-capable) type.
    if (readOnlyConstructorField) {
        if (types.isSelectivelyImmutableType(fieldType)) {
            fieldType =
                    ImmutableTypeCloner.getImmutableIntersectionType(pos, types, fieldType, env, symTable,
                            anonymousModelHelper, names, new HashSet<>());
        } else if (!types.isInherentlyImmutableType(fieldType)) {
            dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
            fieldType = symTable.semanticError;
        }
    }

    if (spreadOpField) {
        // restFieldType represents the type of the value expression for a spread field.
        valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
    }

    BLangExpression exprToCheck = valueExpr;
    if (this.nonErrorLoggingCheck) {
        // Silent-check mode: work on a clone so the original node stays unannotated.
        exprToCheck = nodeCloner.cloneNode(valueExpr);
    } else {
        ((BLangNode) field).setBType(fieldType);
    }

    return checkExpr(exprToCheck, this.env, fieldType);
}
/**
 * Computes the member type contributed by a spread-operator expression inside a map
 * constructor: the map's constraint, or for a record a representative broad type over its
 * field types (plus the rest-field type when open). Type references are unwrapped.
 *
 * @return the contributed member type, or {@code semanticError} for unsupported spread types
 */
private BType checkSpreadFieldWithMapType(BType spreadOpType) {
    int tag = spreadOpType.tag;
    if (tag == TypeTags.MAP) {
        return ((BMapType) spreadOpType).constraint;
    }
    if (tag == TypeTags.TYPEREFDESC) {
        // Follow the reference to the underlying type and retry.
        return checkSpreadFieldWithMapType(Types.getReferredType(spreadOpType));
    }
    if (tag != TypeTags.RECORD) {
        return symTable.semanticError;
    }
    BRecordType recordType = (BRecordType) spreadOpType;
    List<BType> memberTypes = new ArrayList<>();
    for (BField recField : recordType.fields.values()) {
        memberTypes.add(recField.type);
    }
    if (!recordType.sealed) {
        memberTypes.add(recordType.restFieldType);
    }
    return getRepresentativeBroadType(memberTypes);
}
/**
 * Determines the value type (and, when resolvable, the field symbol) for a record-constructor
 * key. Computed keys (`[expr]`) must be strings and yield the union of all possible field
 * types; identifier and string-literal keys resolve to a concrete field or — for open
 * records — the rest-field type.
 */
private TypeSymbolPair checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey,
                                                 BRecordType recordType) {
    Name fieldName;

    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);

        if (keyExpr.getBType() == symTable.semanticError) {
            return new TypeSymbolPair(null, symTable.semanticError);
        }

        // A computed key may target any field, so the value type is the union of them all.
        LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream()
                .map(field -> field.type)
                .collect(Collectors.toCollection(LinkedHashSet::new));

        if (recordType.restFieldType.tag != TypeTags.NONE) {
            fieldTypes.add(recordType.restFieldType);
        }

        return new TypeSymbolPair(null, BUnionType.create(null, fieldTypes));
    } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
        fieldName = names.fromIdNode(varRef.variableName);
    } else if (keyExpr.getKind() == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) {
        fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
    } else {
        dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
        return new TypeSymbolPair(null, symTable.semanticError);
    }

    // Resolve the named key against the record's declared fields (or its rest type when open).
    BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env, fieldName, recordType.tsymbol);
    BType type = checkRecordLiteralKeyByName(keyExpr.pos, fieldSymbol, fieldName, recordType);

    return new TypeSymbolPair(fieldSymbol instanceof BVarSymbol ? (BVarSymbol) fieldSymbol : null, type);
}
/**
 * Returns the value type for a named record-constructor key: the declared field's type when
 * resolved; the rest-field type for an unknown key on an open record; otherwise logs an
 * undefined-field error and returns {@code semanticError}.
 */
private BType checkRecordLiteralKeyByName(Location location, BSymbol fieldSymbol, Name key,
                                          BRecordType recordType) {
    // A resolved field wins outright.
    if (fieldSymbol != symTable.notFoundSymbol) {
        return fieldSymbol.type;
    }
    // Unknown names are only valid on open records, where they take the rest-field type.
    if (!recordType.sealed) {
        return recordType.restFieldType;
    }
    dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key,
            recordType.tsymbol.type.getKind().typeName(), recordType);
    return symTable.semanticError;
}
/**
 * Builds the union of every possible value type of {@code recordType}: all declared field
 * types, plus the rest-field type when the record is open.
 */
private BType getAllFieldType(BRecordType recordType) {
    LinkedHashSet<BType> memberTypes = recordType.fields.values().stream()
            .map(field -> field.type)
            .collect(Collectors.toCollection(LinkedHashSet::new));
    BType restFieldType = recordType.restFieldType;
    if (restFieldType != null && restFieldType != symTable.noType) {
        memberTypes.add(restFieldType);
    }
    return BUnionType.create(null, memberTypes);
}
/**
 * Validates a map/JSON constructor key: a computed key must type-check as a string; otherwise
 * only identifiers and string literals are acceptable. Logs an error for anything else.
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) {
    if (computedKey) {
        // Computed keys (`[expr]`) must evaluate to a string.
        checkExpr(keyExpr, this.env, symTable.stringType);
        return keyExpr.getBType() != symTable.semanticError;
    }
    NodeKind kind = keyExpr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return true;
    }
    if (kind == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING) {
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}
/**
 * Widens {@code actualType} with {@code nil} for optional-access results; types that are
 * already nillable are returned unchanged.
 */
private BType addNilForNillableAccessType(BType actualType) {
    return actualType.isNullable()
            ? actualType
            : BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves access to a required (non-optional) record field. On success the resolved symbol is
 * attached to the access expression and the field's type returned; otherwise
 * {@code semanticError}.
 */
private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean notFound = fieldSymbol == symTable.notFoundSymbol;
    // Required-field access only succeeds on fields that exist and are not optional.
    if (notFound || Symbols.isOptional(fieldSymbol)) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves access to an optional record field. Succeeds only when the field exists and is
 * declared optional; then attaches the symbol and returns its type, else {@code semanticError}.
 */
private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol && Symbols.isOptional(fieldSymbol)) {
        varReferExpr.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    return symTable.semanticError;
}
/**
 * Resolves access to a record's rest field: valid only for a name with no declared field, on
 * an open record. Returns the rest-field type on success, else {@code semanticError}.
 */
private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                         BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    // A declared field is never a rest-field access; a sealed record has no rest field at all.
    if (fieldSymbol == symTable.notFoundSymbol && !recordType.sealed) {
        return recordType.restFieldType;
    }
    return symTable.semanticError;
}
/**
 * Resolves field-based access on an object type: first as a field, then as an attached method.
 *
 * When an isolated method is accessed through a non-isolated object, a duplicate symbol with
 * the ISOLATED flag stripped is used so the access does not falsely appear isolated.
 *
 * @return the resolved field/method type, or {@code semanticError} when undefined
 */
private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess,
                                     Name fieldName, BObjectType objectType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos,
            this.env, fieldName, objectType.tsymbol);

    if (fieldSymbol != symTable.notFoundSymbol) {
        bLangFieldBasedAccess.symbol = fieldSymbol;
        return fieldSymbol.type;
    }

    // Not a field — check for an attached method with the mangled "<objName>.<fieldName>" name.
    Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value,
            fieldName.value));
    fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol);

    if (fieldSymbol == symTable.notFoundSymbol) {
        dlog.error(bLangFieldBasedAccess.field.pos,
                DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
        return symTable.semanticError;
    }

    // Strip ISOLATED from a method accessed via a non-isolated object (on a duplicated symbol,
    // to avoid mutating the shared original).
    if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) &&
            !Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) {
        fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol);

        fieldSymbol.flags &= ~Flags.ISOLATED;
        fieldSymbol.type.flags &= ~Flags.ISOLATED;
    }

    bLangFieldBasedAccess.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Returns the member type of a tuple at a constant index: the fixed member when the index is
 * in range, the rest type when the index runs past the fixed members of a rest-typed tuple,
 * and {@code semanticError} otherwise (including negative indices).
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType bTupleType = (BTupleType) tupleType;
    int memberCount = bTupleType.tupleTypes.size();
    if (indexValue >= 0 && indexValue < memberCount) {
        return bTupleType.tupleTypes.get(indexValue);
    }
    // Indices past the fixed members fall through to the rest type, if one exists.
    if (indexValue >= memberCount && bTupleType.restType != null) {
        return bTupleType.restType;
    }
    return symTable.semanticError;
}
/**
 * Type-checks an XML element literal's start and end tag names (both must be strings) and
 * reports a mismatch when exactly one of the two is a static QName, or when both are static
 * QNames that differ. Two dynamic (non-QName) tags cannot be compared statically and pass.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element — nothing to compare.
        return;
    }

    checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    // Both tags are static QNames and equal: matched.
    if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME &&
            startTagName.equals(endTagName)) {
        return;
    }

    // Neither tag is a static QName: cannot compare at compile time, assume matched.
    if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
        return;
    }

    dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH);
}
/**
 * Type-checks every interpolation of a string template; each must be a non-nil simple basic
 * type or a string. Expressions that already failed checking are skipped to avoid cascading
 * diagnostics.
 */
private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) {
    for (BLangExpression expr : exprs) {
        checkExpr(expr, env);
        BType exprType = expr.getBType();
        if (exprType == symTable.semanticError || types.isNonNilSimpleBasicTypeOrString(exprType)) {
            continue;
        }
        dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                BUnionType.create(null, symTable.intType, symTable.floatType,
                        symTable.decimalType, symTable.stringType,
                        symTable.booleanType), exprType);
    }
}
/**
 * Concatenate the consecutive text type nodes, and get the reduced set of children.
 *
 * Runs of non-XML, string-convertible children are folded into single synthetic XML text
 * literals; XML-typed children are kept as-is; children of invalid interpolation types are
 * reported and dropped.
 *
 * @param exprs Child nodes
 * @param xmlElementEnv the symbol environment the children are checked in
 * @return Reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
    List<BLangExpression> newChildren = new ArrayList<>();
    List<BLangExpression> tempConcatExpressions = new ArrayList<>();

    for (BLangExpression expr : exprs) {
        BType exprType;
        if (expr.getKind() == NodeKind.QUERY_EXPR) {
            // Query expressions are checked against the surrounding expected type.
            exprType = checkExpr(expr, xmlElementEnv, expType);
        } else {
            exprType = checkExpr(expr, xmlElementEnv);
        }
        if (TypeTags.isXMLTypeTag(exprType.tag)) {
            // An XML child ends the current text run: flush accumulated text first.
            if (!tempConcatExpressions.isEmpty()) {
                newChildren.add(getXMLTextLiteral(tempConcatExpressions));
                tempConcatExpressions = new ArrayList<>();
            }
            newChildren.add(expr);
            continue;
        }

        BType type = expr.getBType();
        // Tags >= JSON (excluding int/string variants) are not string-convertible interpolations.
        if (type.tag >= TypeTags.JSON &&
                !TypeTags.isIntegerTypeTag(type.tag) && !TypeTags.isStringTypeTag(type.tag)) {
            if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) {
                dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                        BUnionType.create(null, symTable.intType, symTable.floatType,
                                symTable.decimalType, symTable.stringType,
                                symTable.booleanType, symTable.xmlType), type);
            }
            continue;
        }

        tempConcatExpressions.add(expr);
    }

    // Flush any trailing run of text fragments.
    if (!tempConcatExpressions.isEmpty()) {
        newChildren.add(getXMLTextLiteral(tempConcatExpressions));
    }

    return newChildren;
}
/**
 * Wraps a run of string-convertible expressions in a synthetic XML text literal, anchored at
 * the first fragment's source position.
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.setBType(symTable.xmlType);
    textLiteral.textFragments = exprs;
    textLiteral.pos = exprs.get(0).pos;
    return textLiteral;
}
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
accessExpr.originalType = actualType;
BUnionType unionType = BUnionType.create(null, actualType);
if (returnsNull(accessExpr)) {
unionType.add(symTable.nilType);
}
BType parentType = accessExpr.expr.getBType();
if (accessExpr.errorSafeNavigation
&& (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION
&& ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) {
unionType.add(symTable.errorType);
}
if (unionType.getMemberTypes().size() == 1) {
return unionType.getMemberTypes().toArray(new BType[0])[0];
}
return unionType;
}
private boolean returnsNull(BLangAccessExpression accessExpr) {
BType parentType = accessExpr.expr.getBType();
if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
return true;
}
if (parentType.tag != TypeTags.MAP) {
return false;
}
if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
&& accessExpr.expr.getBType().tag == TypeTags.MAP) {
BType constraintType = ((BMapType) accessExpr.expr.getBType()).constraint;
return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
}
return false;
}
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
if (varRefType.tag == TypeTags.OBJECT) {
return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
}
Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : memberTypes) {
BType individualFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memType);
if (individualFieldType == symTable.semanticError) {
return individualFieldType;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 1) {
return fieldTypeMembers.iterator().next();
}
return BUnionType.create(null, fieldTypeMembers);
}
    /**
     * Resolves the result type of a field access on a record type or a union of record types.
     *
     * @param fieldAccessExpr the field access expression being checked
     * @param type            type of the accessed expression (record, union of records, or a
     *                        type reference to one of those)
     * @param fieldName       the accessed field's name
     * @return the field's type (a union over members if they differ), or semanticError
     */
    private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType type, Name fieldName) {
        BType varRefType = Types.getReferredType(type);
        if (varRefType.tag == TypeTags.RECORD) {
            BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, this.env,
                    fieldName, varRefType.tsymbol);

            // An optional field with a non-nilable type may be read (not written) via field
            // access; nil is added to the result since the field may be absent.
            if (Symbols.isOptional(fieldSymbol) && !fieldSymbol.type.isNullable() && !fieldAccessExpr.isLValue) {
                fieldAccessExpr.symbol = fieldSymbol;
                return addNilForNillableAccessType(fieldSymbol.type);
            }

            // Otherwise the field must be a required field of the record.
            return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        }

        // Union of records: reject when a member's field type is nilable while the field is
        // optional in some member — absence and a stored nil would be indistinguishable.
        Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();

        for (BType memType : memberTypes) {
            BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, this.env,
                    fieldName, memType.tsymbol);
            if (fieldSymbol.type.isNullable() &&
                    isFieldOptionalInRecords(((BUnionType) varRefType), fieldName, fieldAccessExpr)) {
                return symTable.semanticError;
            }
        }

        // Every member must resolve the field; collect the per-member field types.
        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

        for (BType memType : memberTypes) {
            BType individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);

            if (individualFieldType == symTable.semanticError) {
                return individualFieldType;
            }

            fieldTypeMembers.add(individualFieldType);
        }

        if (fieldTypeMembers.size() == 1) {
            return fieldTypeMembers.iterator().next();
        }

        return BUnionType.create(null, fieldTypeMembers);
    }
private boolean isFieldOptionalInRecords(BUnionType unionType, Name fieldName,
BLangFieldBasedAccess fieldAccessExpr) {
Set<BType> memberTypes = unionType.getMemberTypes();
for (BType memType: memberTypes) {
BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, this.env,
fieldName, memType.tsymbol);
if (Symbols.isOptional(fieldSymbol)) {
return true;
}
}
return false;
}
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
Name fieldName) {
if (varRefType.tag == TypeTags.RECORD) {
BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
if (fieldType != symTable.semanticError) {
return fieldType;
}
return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
}
Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : memberTypes) {
BType individualFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memType, fieldName);
if (individualFieldType == symTable.semanticError) {
return symTable.semanticError;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 1) {
return fieldTypeMembers.iterator().next();
}
return BUnionType.create(null, fieldTypeMembers);
}
    /**
     * Resolves the result type of an optional field access (x?.f) on a record type or a
     * union of record types.
     *
     * @param fieldAccessExpr the optional field access expression being checked
     * @param varRefType      record type, union of records, or a type reference to one
     * @param fieldName       the accessed field's name
     * @return the field's (possibly nil-widened) type, or semanticError when no member
     *         resolves the field
     */
    private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                                     Name fieldName) {
        BType refType = Types.getReferredType(varRefType);
        if (refType.tag == TypeTags.RECORD) {
            // Required field: type is returned as-is (field is always present).
            BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) refType);
            if (fieldType != symTable.semanticError) {
                return fieldType;
            }

            // Optional field: nil is added since the field may be absent.
            fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) refType);
            if (fieldType == symTable.semanticError) {
                return fieldType;
            }
            return addNilForNillableAccessType(fieldType);
        }

        // Union of records: collect the field types of the members that resolve the field.
        Set<BType> memberTypes = ((BUnionType) refType).getMemberTypes();

        BType fieldType;

        boolean nonMatchedRecordExists = false;

        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

        for (BType memType : memberTypes) {
            BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);

            if (individualFieldType == symTable.semanticError) {
                // Member without the field: remember it so the result is nil-widened below.
                nonMatchedRecordExists = true;
                continue;
            }

            fieldTypeMembers.add(individualFieldType);
        }

        if (fieldTypeMembers.isEmpty()) {
            return symTable.semanticError;
        }

        if (fieldTypeMembers.size() == 1) {
            fieldType = fieldTypeMembers.iterator().next();
        } else {
            fieldType = BUnionType.create(null, fieldTypeMembers);
        }

        // If some member lacked the field, the access may yield nil at runtime.
        return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
    }
private RecordUnionDiagnostics checkRecordUnion(BLangFieldBasedAccess fieldAccessExpr, Set<BType> memberTypes,
Name fieldName) {
RecordUnionDiagnostics recordUnionDiagnostics = new RecordUnionDiagnostics();
for (BType memberType : memberTypes) {
BRecordType recordMember = (BRecordType) Types.getReferredType(memberType);
if (recordMember.getFields().containsKey(fieldName.getValue())) {
if (isNilableType(fieldAccessExpr, memberType, fieldName)) {
recordUnionDiagnostics.nilableInRecords.add(recordMember);
}
} else {
recordUnionDiagnostics.undeclaredInRecords.add(recordMember);
}
}
return recordUnionDiagnostics;
}
private boolean isNilableType(BLangFieldBasedAccess fieldAccessExpr, BType memberType,
Name fieldName) {
BSymbol fieldSymbol = symResolver.resolveStructField(fieldAccessExpr.pos, this.env,
fieldName, memberType.tsymbol);
return fieldSymbol.type.isNullable();
}
private void logRhsFieldAccExprErrors(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
if (varRefType.tag == TypeTags.RECORD) {
BRecordType recordVarRefType = (BRecordType) varRefType;
boolean isFieldDeclared = recordVarRefType.getFields().containsKey(fieldName.getValue());
if (isFieldDeclared) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.FIELD_ACCESS_CANNOT_BE_USED_TO_ACCESS_OPTIONAL_FIELDS);
} else if (recordVarRefType.sealed) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_RECORD, fieldName, varRefType);
} else {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_IN_RECORD_TYPE, fieldName,
varRefType);
}
} else {
LinkedHashSet<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
RecordUnionDiagnostics recUnionInfo = checkRecordUnion(fieldAccessExpr, memberTypes, fieldName);
if (recUnionInfo.hasNilableAndUndeclared()) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.UNDECLARED_AND_NILABLE_FIELDS_IN_UNION_OF_RECORDS, fieldName,
recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords),
recUnionInfo.recordsToString(recUnionInfo.nilableInRecords));
} else if (recUnionInfo.hasUndeclared()) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDECLARED_FIELD_IN_UNION_OF_RECORDS, fieldName,
recUnionInfo.recordsToString(recUnionInfo.undeclaredInRecords));
} else if (recUnionInfo.hasNilable()) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.NILABLE_FIELD_IN_UNION_OF_RECORDS, fieldName,
recUnionInfo.recordsToString(recUnionInfo.nilableInRecords));
}
}
}
    /**
     * Resolves the result type of a field access expression (x.f), dispatching on the kind of
     * the accessed value: objects, records, lax types (e.g. json), chained lax accesses, and
     * XML. Errors are logged and semanticError returned for unsupported accesses.
     *
     * @param fieldAccessExpr the field access expression being checked
     * @param varRefType      type of the accessed expression
     * @param fieldName       the accessed field's name
     * @return the resolved access type, or semanticError
     */
    private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
        BType actualType = symTable.semanticError;
        varRefType = Types.getReferredType(varRefType);

        if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
            actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
            fieldAccessExpr.originalType = actualType;
        } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
            actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);

            if (actualType != symTable.semanticError) {
                fieldAccessExpr.originalType = actualType;
                return actualType;
            }

            // Rvalue access failed: log the most specific diagnostic and bail out.
            if (!fieldAccessExpr.isLValue) {
                logRhsFieldAccExprErrors(fieldAccessExpr, varRefType, fieldName);
                return actualType;
            }

            // Lvalue access: optional fields are also assignable.
            actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
            fieldAccessExpr.originalType = actualType;
            if (actualType == symTable.semanticError) {
                dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
                           fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType);
            }
        } else if (types.isLax(varRefType)) {
            // Lax types (e.g. json) only support field access for reads; the result is
            // widened with error since the access can fail at runtime.
            if (fieldAccessExpr.isLValue) {
                dlog.error(fieldAccessExpr.pos,
                        DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT,
                        varRefType);
                return symTable.semanticError;
            }
            if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
                resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
            }
            BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
            actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
            fieldAccessExpr.originalType = laxFieldAccessType;
        } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
                hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
            // Chained access where the previous link was lax: propagate lax semantics.
            BType laxFieldAccessType =
                    getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
            if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
                resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
            }
            actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
            fieldAccessExpr.errorSafeNavigation = true;
            fieldAccessExpr.originalType = laxFieldAccessType;
        } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
            // XML sequences cannot be updated through field access.
            if (fieldAccessExpr.isLValue) {
                dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
            }
            actualType = symTable.xmlType;
            fieldAccessExpr.originalType = actualType;
        } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
                    varRefType);
        }

        return actualType;
    }
private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) {
BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldAccess = fieldAccessExpr;
String nsPrefix = nsPrefixedFieldAccess.nsPrefix.value;
BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix));
if (nsSymbol == symTable.notFoundSymbol) {
dlog.error(nsPrefixedFieldAccess.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE,
nsPrefixedFieldAccess.nsPrefix);
} else if (nsSymbol.getKind() == SymbolKind.PACKAGE) {
nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst(
nsPrefixedFieldAccess.field.value, nsPrefixedFieldAccess.nsPrefix.value,
(BPackageSymbol) nsSymbol, fieldAccessExpr.pos);
} else {
nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) nsSymbol;
}
}
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
return fieldBasedAccess.originalType != null && types.isLax(fieldBasedAccess.originalType);
}
private BType getLaxFieldAccessType(BType exprType) {
switch (exprType.tag) {
case TypeTags.JSON:
return symTable.jsonType;
case TypeTags.XML:
case TypeTags.XML_ELEMENT:
return symTable.stringType;
case TypeTags.MAP:
return ((BMapType) exprType).constraint;
case TypeTags.UNION:
BUnionType unionType = (BUnionType) exprType;
if (types.isSameType(symTable.jsonType, unionType)) {
return symTable.jsonType;
}
LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType)));
return memberTypes.size() == 1 ? memberTypes.iterator().next() : BUnionType.create(null, memberTypes);
}
return symTable.semanticError;
}
    /**
     * Resolves the result type of an optional field access expression (x?.f). Nil is first
     * stripped from a nilable accessed type, the access is checked against the effective
     * type (records, lax types, or chained lax accesses), and nil is added back to the
     * result when the accessed value may be nil.
     *
     * @param fieldAccessExpr the optional field access expression being checked
     * @param varRefType      type of the accessed expression
     * @param fieldName       the accessed field's name
     * @return the resolved (possibly nil-widened) access type, or semanticError
     */
    private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                               Name fieldName) {
        BType actualType = symTable.semanticError;

        boolean nillableExprType = false;
        BType effectiveType = varRefType;

        // Strip nil from a nilable union; remember that nil must be re-added to the result.
        if (varRefType.tag == TypeTags.UNION) {
            Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

            if (memTypes.contains(symTable.nilType)) {
                LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
                for (BType bType : memTypes) {
                    if (bType != symTable.nilType) {
                        nilRemovedSet.add(bType);
                    } else {
                        nillableExprType = true;
                    }
                }

                effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                        BUnionType.create(null, nilRemovedSet);
            }
        }

        if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
            actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
            if (actualType == symTable.semanticError) {
                dlog.error(fieldAccessExpr.pos,
                        DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
                        varRefType, fieldName);
            }
            fieldAccessExpr.nilSafeNavigation = nillableExprType;
            // Leaf nodes keep the full type; interior nodes record the nil-stripped type.
            fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType :
                    types.getTypeWithoutNil(actualType);
        } else if (types.isLax(effectiveType)) {
            // Lax access may fail at runtime, so error is added when applicable.
            BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
            actualType = accessCouldResultInError(effectiveType) ?
                    BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
            if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
                resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
            }
            fieldAccessExpr.originalType = laxFieldAccessType;
            fieldAccessExpr.nilSafeNavigation = true;
            nillableExprType = true;
        } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
                hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
            // Chained access where the previous link was lax: propagate lax semantics.
            BType laxFieldAccessType =
                    getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
            actualType = accessCouldResultInError(effectiveType) ?
                    BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
            if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
                resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
            }
            fieldAccessExpr.errorSafeNavigation = true;
            fieldAccessExpr.originalType = laxFieldAccessType;
            fieldAccessExpr.nilSafeNavigation = true;
            nillableExprType = true;
        } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType);
        }

        // Re-add nil when the accessed value itself may be nil.
        if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) {
            actualType = BUnionType.create(null, actualType, symTable.nilType);
        }

        return actualType;
    }
private boolean accessCouldResultInError(BType type) {
if (type.tag == TypeTags.JSON) {
return true;
}
if (type.tag == TypeTags.MAP) {
return false;
}
if (type.tag == TypeTags.XML) {
return true;
}
if (type.tag == TypeTags.UNION) {
return ((BUnionType) type).getMemberTypes().stream().anyMatch(this::accessCouldResultInError);
} else {
return false;
}
}
    /**
     * Resolves the result type of an index-based access expression (x[i]), dispatching on the
     * kind of the accessed value: mappings, lists, strings, xml, and tables. Strips nil from a
     * nilable mapping type up front and re-adds it to the result, and logs diagnostics for
     * unsupported accesses and invalid index types.
     *
     * @param indexBasedAccessExpr the member access expression being checked
     * @return the resolved access type, or semanticError
     */
    private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
        BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType());
        BType varRefType = Types.getReferredType(effectiveType);

        boolean nillableExprType = false;

        // Strip nil from a nilable union up front; only mappings support nil-lifted member
        // access, and only for reads.
        if (varRefType.tag == TypeTags.UNION) {
            Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

            if (memTypes.contains(symTable.nilType)) {
                LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
                for (BType bType : memTypes) {
                    if (bType != symTable.nilType) {
                        nilRemovedSet.add(bType);
                    } else {
                        nillableExprType = true;
                    }
                }

                if (nillableExprType) {
                    varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                            BUnionType.create(null, nilRemovedSet);

                    if (!types.isSubTypeOfMapping(varRefType)) {
                        // Member access is allowed on optional types only on mappings.
                        dlog.error(indexBasedAccessExpr.pos,
                                DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                                indexBasedAccessExpr.expr.getBType());
                        return symTable.semanticError;
                    }

                    if (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue) {
                        dlog.error(indexBasedAccessExpr.pos,
                                DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                                indexBasedAccessExpr.expr.getBType());
                        return symTable.semanticError;
                    }
                }
            }
        }

        BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
        BType actualType = symTable.semanticError;

        if (types.isSubTypeOfMapping(varRefType)) {
            // Mapping access: index must be a string.
            checkExpr(indexExpr, this.env, symTable.stringType);

            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }

            actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);

            if (actualType == symTable.semanticError) {
                // Prefer an undefined-field diagnostic when the key is a string constant.
                if (Types.getReferredType(indexExpr.getBType()).tag == TypeTags.STRING
                        && isConstExpr(indexExpr)) {
                    String fieldName = getConstFieldName(indexExpr);
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD,
                            fieldName, indexBasedAccessExpr.expr.getBType());
                    return actualType;
                }

                dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType());
                return actualType;
            }

            indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
            indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                    types.getTypeWithoutNil(actualType);
        } else if (types.isSubTypeOfList(varRefType)) {
            // List access: index must be an int.
            checkExpr(indexExpr, this.env, symTable.intType);

            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }

            actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
            indexBasedAccessExpr.originalType = actualType;

            if (actualType == symTable.semanticError) {
                // A constant int index that failed must be out of range.
                if (indexExpr.getBType().tag == TypeTags.INT && isConstExpr(indexExpr)) {
                    dlog.error(indexBasedAccessExpr.indexExpr.pos,
                            DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr));
                    return actualType;
                }
                dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType());
                return actualType;
            }
        } else if (types.isAssignable(varRefType, symTable.stringType)) {
            // String access: read-only, yields a single character string.
            if (indexBasedAccessExpr.isLValue) {
                dlog.error(indexBasedAccessExpr.pos,
                        DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                        indexBasedAccessExpr.expr.getBType());
                return symTable.semanticError;
            }

            checkExpr(indexExpr, this.env, symTable.intType);

            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }

            indexBasedAccessExpr.originalType = symTable.charStringType;
            actualType = symTable.charStringType;
        } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
            // XML sequences cannot be updated through member access.
            if (indexBasedAccessExpr.isLValue) {
                indexExpr.setBType(symTable.semanticError);
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
                return actualType;
            }

            BType type = checkExpr(indexExpr, this.env, symTable.intType);
            if (type == symTable.semanticError) {
                return type;
            }
            // Note: member access on xml returns an xml (possibly empty) sequence.
            indexBasedAccessExpr.originalType = varRefType;
            actualType = varRefType;
        } else if (varRefType.tag == TypeTags.TABLE) {
            if (indexBasedAccessExpr.isLValue) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS,
                        varRefType);
                return symTable.semanticError;
            }
            BTableType tableType = (BTableType) Types.getReferredType(indexBasedAccessExpr.expr.getBType());
            BType keyTypeConstraint = tableType.keyTypeConstraint;
            if (tableType.keyTypeConstraint == null) {
                // Derive the key constraint from the key field names; keyless tables do not
                // support member access.
                keyTypeConstraint = createTableKeyConstraint(tableType.fieldNameList, tableType.constraint);

                if (keyTypeConstraint == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE,
                            indexBasedAccessExpr.expr);
                    return symTable.semanticError;
                }
            }

            if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) {
                checkExpr(indexExpr, this.env, keyTypeConstraint);
                if (indexExpr.getBType() == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }
            } else {
                // Multi-key access: each key expression is checked against the corresponding
                // member of the (tuple) key constraint.
                List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr)
                        indexBasedAccessExpr.indexExpr).multiKeyIndexExprs;
                List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes;
                if (keyConstraintTypes.size() != multiKeyExpressionList.size()) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }

                for (int i = 0; i < multiKeyExpressionList.size(); i++) {
                    BLangExpression keyExpr = multiKeyExpressionList.get(i);
                    checkExpr(keyExpr, this.env, keyConstraintTypes.get(i));
                    if (keyExpr.getBType() == symTable.semanticError) {
                        dlog.error(indexBasedAccessExpr.pos,
                                   DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                                   keyTypeConstraint);
                        return symTable.semanticError;
                    }
                }
            }

            if (expType.tag != TypeTags.NONE) {
                BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType);
                if (resultType == symTable.semanticError) {
                    return symTable.semanticError;
                }
            }

            // The looked-up row may be absent, so nil is added to the constraint type.
            BType constraint = tableType.constraint;
            actualType = addNilForNillableAccessType(constraint);
            indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                    types.getTypeWithoutNil(actualType);
        } else if (varRefType == symTable.semanticError) {
            indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
            return symTable.semanticError;
        } else {
            indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                    indexBasedAccessExpr.expr.getBType());
            return symTable.semanticError;
        }

        // Re-add nil when the accessed value itself may be nil.
        if (nillableExprType && !actualType.isNullable()) {
            actualType = BUnionType.create(null, actualType, symTable.nilType);
        }

        return actualType;
    }
private Long getConstIndex(BLangExpression indexExpr) {
switch (indexExpr.getKind()) {
case GROUP_EXPR:
BLangGroupExpr groupExpr = (BLangGroupExpr) indexExpr;
return getConstIndex(groupExpr.expression);
case NUMERIC_LITERAL:
return (Long) ((BLangLiteral) indexExpr).value;
default:
return (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
}
private String getConstFieldName(BLangExpression indexExpr) {
switch (indexExpr.getKind()) {
case GROUP_EXPR:
BLangGroupExpr groupExpr = (BLangGroupExpr) indexExpr;
return getConstFieldName(groupExpr.expression);
case LITERAL:
return (String) ((BLangLiteral) indexExpr).value;
default:
return (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
}
    /**
     * Resolves the element type produced by indexing an array with an index expression of the
     * given type. Constant int indexes and finite index types are range-checked against
     * closed arrays; open arrays accept any index.
     *
     * @param indexBasedAccess the member access expression being checked
     * @param indexExprType    type of the index expression (int, finite, union, or reference)
     * @param arrayType        the accessed array type
     * @return the array's element type, or semanticError when the index is out of range
     */
    private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                             BArrayType arrayType) {
        BType actualType = symTable.semanticError;
        switch (indexExprType.tag) {
            case TypeTags.INT:
                BLangExpression indexExpr = indexBasedAccess.indexExpr;
                // Non-constant indexes and open arrays cannot be range-checked statically.
                if (!isConstExpr(indexExpr) || arrayType.state == BArrayState.OPEN) {
                    actualType = arrayType.eType;
                    break;
                }
                Long indexVal = getConstIndex(indexExpr);
                // Constant index on a closed array: must be within [0, size).
                actualType = indexVal >= arrayType.size || indexVal < 0 ? symTable.semanticError : arrayType.eType;
                break;
            case TypeTags.FINITE:
                // A finite index type is valid if at least one of its values is in range.
                BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
                boolean validIndexExists = false;
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    if (indexValue >= 0 &&
                            (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) {
                        validIndexExists = true;
                        break;
                    }
                }
                if (!validIndexExists) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
                break;
            case TypeTags.UNION:
                // Merge the finite members of the union into one finite type and re-check.
                List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                        .filter(memType -> Types.getReferredType(memType).tag == TypeTags.FINITE)
                        .map(matchedType -> (BFiniteType) Types.getReferredType(matchedType))
                        .collect(Collectors.toList());

                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }

                BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
                if (elementType == symTable.semanticError) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
                break;
            case TypeTags.TYPEREFDESC:
                // Follow type references to the underlying index type.
                return checkArrayIndexBasedAccess(indexBasedAccess, Types.getReferredType(indexExprType),
                        arrayType);
        }
        return actualType;
    }
private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
if (type.tag == TypeTags.ARRAY) {
return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type);
}
if (type.tag == TypeTags.TUPLE) {
return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType());
}
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : ((BUnionType) type).getMemberTypes()) {
BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType);
if (individualFieldType == symTable.semanticError) {
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 0) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
return fieldTypeMembers.iterator().next();
}
return BUnionType.create(null, fieldTypeMembers);
}
    /**
     * Resolves the member type produced by indexing a tuple with an index expression of the
     * given type. Constant indexes select the exact member type; non-constant int indexes
     * yield the union of all member types; finite and union index types yield the union of
     * the member types selected by their possible values.
     *
     * @param accessExpr  the member access expression being checked
     * @param tuple       the accessed tuple type
     * @param currentType type of the index expression (int, finite, union, or reference)
     * @return the access result type, or semanticError when no index value is valid
     */
    private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.INT:
                if (isConstExpr(indexExpr)) {
                    actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
                } else {
                    // Unknown index: the result may be any member type of the tuple.
                    BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType();
                    LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
                    actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null,
                                                                                                           tupleTypes);
                }
                break;
            case TypeTags.FINITE:
                // Collect the member types reachable through the finite index values.
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    BType fieldType = checkTupleFieldType(tuple, indexValue);
                    if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                        possibleTypes.add(fieldType);
                    }
                }
                if (possibleTypes.size() == 0) {
                    return symTable.semanticError;
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;

            case TypeTags.UNION:
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                // Non-finite members are checked recursively; finite members are merged into
                // one finite type and checked together below.
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    memType = Types.getReferredType(memType);
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });

                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }

                BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }

                // Any failing member invalidates the whole access.
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
                break;
            case TypeTags.TYPEREFDESC:
                // Follow type references to the underlying index type.
                return checkTupleIndexBasedAccess(accessExpr, tuple, Types.getReferredType(currentType));
        }
        return actualType;
    }
private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
tupleType.tupleTypes
.forEach(memberType -> {
if (memberType.tag == TypeTags.UNION) {
collectMemberTypes((BUnionType) memberType, memberTypes);
} else {
memberTypes.add(memberType);
}
});
return memberTypes;
}
    /**
     * Resolves the member type produced by indexing a mapping type: maps, records, or unions
     * of mappings. Map reads are nil-widened (keys may be absent); for unions, members whose
     * access fails are skipped, and the result is nil-widened when any member failed.
     *
     * @param accessExpr the member access expression being checked
     * @param bType      map, record, union of mappings, or a type reference to one
     * @return the access result type, or semanticError
     */
    private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType bType) {
        BType type = Types.getReferredType(bType);
        if (type.tag == TypeTags.MAP) {
            BType constraint = Types.getReferredType(((BMapType) type).constraint);
            // Reads may miss the key, so nil is added; writes use the constraint as-is.
            return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint);
        }

        if (type.tag == TypeTags.RECORD) {
            return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType());
        }

        BType fieldType;

        boolean nonMatchedRecordExists = false;

        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

        for (BType memType : ((BUnionType) type).getMemberTypes()) {
            BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType);

            if (individualFieldType == symTable.semanticError) {
                // Member without the key: remember it so the result is nil-widened below.
                nonMatchedRecordExists = true;
                continue;
            }

            fieldTypeMembers.add(individualFieldType);
        }

        if (fieldTypeMembers.size() == 0) {
            return symTable.semanticError;
        }

        if (fieldTypeMembers.size() == 1) {
            fieldType = fieldTypeMembers.iterator().next();
        } else {
            fieldType = BUnionType.create(null, fieldTypeMembers);
        }

        return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
    }
/**
 * Resolves the result type of an index-based access on a record, driven by the static type of
 * the index expression ({@code currentType}).
 *
 * Cases:
 * - STRING: a constant index resolves the named field directly (required, then optional,
 *   then rest field); a dynamic index yields the union of all possible field types.
 * - FINITE: each value in the value space is resolved as a field name; results are unioned.
 * - UNION: members are resolved individually; finite members are merged into a single finite
 *   type first so their value spaces are considered together.
 * - TYPEREFDESC: unwrapped and retried.
 *
 * @return the access result type, or {@code symTable.semanticError} if resolution fails
 */
private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) {
    BType actualType = symTable.semanticError;
    BLangExpression indexExpr = accessExpr.indexExpr;
    switch (currentType.tag) {
        case TypeTags.STRING:
            if (isConstExpr(indexExpr)) {
                // Constant string key: look the field up by name.
                String fieldName = Utils.escapeSpecialCharacters(getConstFieldName(indexExpr));
                actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (actualType != symTable.semanticError) {
                    return actualType;
                }

                actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (actualType == symTable.semanticError) {
                    // Fall back to the rest field; rest access is always potentially absent.
                    actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType == symTable.semanticError) {
                        return actualType;
                    }
                    if (actualType == symTable.neverType) {
                        return actualType;
                    }
                    return addNilForNillableAccessType(actualType);
                }

                // Optional field: lvalues keep the field type, reads become nilable.
                if (accessExpr.isLValue) {
                    return actualType;
                }
                return addNilForNillableAccessType(actualType);
            }

            // Dynamic string key: any declared field (and the rest field) may be hit,
            // and the key may be absent entirely, so nil is added unless already present.
            LinkedHashSet<BType> fieldTypes = record.fields.values().stream()
                    .map(field -> field.type)
                    .collect(Collectors.toCollection(LinkedHashSet::new));

            if (record.restFieldType.tag != TypeTags.NONE) {
                fieldTypes.add(record.restFieldType);
            }

            if (fieldTypes.stream().noneMatch(BType::isNullable)) {
                fieldTypes.add(symTable.nilType);
            }

            actualType = BUnionType.create(null, fieldTypes);
            break;
        case TypeTags.FINITE:
            // Finite string type: resolve each possible key value, union the results.
            BFiniteType finiteIndexExpr = (BFiniteType) currentType;
            LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
            for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                String fieldName = (String) ((BLangLiteral) finiteMember).value;
                BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (fieldType == symTable.semanticError) {
                    fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (fieldType == symTable.semanticError) {
                        fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                    }

                    if (fieldType != symTable.semanticError) {
                        fieldType = addNilForNillableAccessType(fieldType);
                    }
                }

                if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
                    continue;
                }
                possibleTypes.add(fieldType);
            }

            if (possibleTypes.isEmpty()) {
                return symTable.semanticError;
            }

            if (possibleTypes.stream().noneMatch(BType::isNullable)) {
                possibleTypes.add(symTable.nilType);
            }

            actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                    BUnionType.create(null, possibleTypes);
            break;
        case TypeTags.UNION:
            LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
            List<BFiniteType> finiteTypes = new ArrayList<>();
            // Non-finite members are resolved directly; finite members are collected so
            // their value spaces can be merged and resolved as one finite type below.
            types.getAllTypes(currentType, true).forEach(memType -> {
                if (memType.tag == TypeTags.FINITE) {
                    finiteTypes.add((BFiniteType) memType);
                } else {
                    BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType);
                    if (possibleType.tag == TypeTags.UNION) {
                        possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                    } else {
                        possibleTypesByMember.add(possibleType);
                    }
                }
            });

            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                // Merge all finite members into one finite type over the combined value space.
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                finiteType = new BFiniteType(null, valueSpace);
            }

            BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType);
            if (possibleType.tag == TypeTags.UNION) {
                possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
            } else {
                possibleTypesByMember.add(possibleType);
            }

            // Unlike the mapping case, any failing member fails the whole union access.
            if (possibleTypesByMember.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                    BUnionType.create(null, possibleTypesByMember);
            break;
        case TypeTags.TYPEREFDESC:
            return checkRecordIndexBasedAccess(accessExpr, record,
                    Types.getReferredType(currentType));
    }
    return actualType;
}
/**
 * Returns the member types of a union type, or a singleton list containing the type itself
 * for any non-union type.
 */
private List<BType> getTypesList(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    BUnionType unionType = (BUnionType) type;
    return new ArrayList<>(unionType.getMemberTypes());
}
/**
 * Computes the set of possible result types of a match expression: the types of all pattern
 * result expressions, plus any source member type that no pattern variable can capture
 * (such a value flows through unmatched).
 *
 * A semantic error in either the matched expression or any pattern short-circuits to a
 * singleton set containing {@code symTable.semanticError}.
 */
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType());
    LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternExprType = pattern.expr.getBType();

            // Every pattern's result type is part of the overall result type.
            matchExprTypes.addAll(getTypesList(patternExprType));

            if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
                return new LinkedHashSet<BType>() {
                    {
                        add(symTable.semanticError);
                    }
                };
            }

            assignable = this.types.isAssignable(type, pattern.variable.getBType());
            if (assignable) {
                break;
            }
        }

        // No pattern captures this member type, so the original value may be the result.
        if (!assignable) {
            matchExprTypes.add(type);
        }
    }

    return matchExprTypes;
}
/**
 * Returns true if a value of {@code type} could contain table values, either directly or
 * nested inside maps, records, arrays, tuples, or union members.
 *
 * {@code encounteredTypes} guards against infinite recursion on cyclic types: a type already
 * under examination is answered {@code false} at the repeated occurrence.
 */
private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
    if (encounteredTypes.contains(type)) {
        return false;
    }
    encounteredTypes.add(type);

    switch (type.tag) {
        case TypeTags.TABLE:
            // Base case: the type itself is a table. Without this case no input could ever
            // yield true — every container branch only recurses and every leaf returns false.
            return true;
        case TypeTags.UNION:
            for (BType bType1 : ((BUnionType) type).getMemberTypes()) {
                if (couldHoldTableValues(bType1, encounteredTypes)) {
                    return true;
                }
            }
            return false;
        case TypeTags.MAP:
            return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) type;
            for (BField field : recordType.fields.values()) {
                if (couldHoldTableValues(field.type, encounteredTypes)) {
                    return true;
                }
            }
            // Open records may also hold tables through the rest field.
            return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
        case TypeTags.ARRAY:
            return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
        case TypeTags.TUPLE:
            for (BType bType : ((BTupleType) type).getTupleTypes()) {
                if (couldHoldTableValues(bType, encounteredTypes)) {
                    return true;
                }
            }
            return false;
    }
    return false;
}
/**
 * Returns true if the expression is known to be constant at compile time: a (numeric)
 * literal, a parenthesized constant expression, or a reference to a constant symbol.
 */
private boolean isConstExpr(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.LITERAL || kind == NodeKind.NUMERIC_LITERAL) {
        return true;
    }
    if (kind == NodeKind.GROUP_EXPR) {
        // Parentheses are transparent: look at the wrapped expression.
        return isConstExpr(((BLangGroupExpr) expression).expression);
    }
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
    }
    return false;
}
/** Returns the compilation-unit (file) name the given node belongs to, as a {@link Name}. */
private Name getCurrentCompUnit(BLangNode node) {
    String filePath = node.pos.lineRange().filePath();
    return names.fromString(filePath);
}
/**
 * Reduces the list to its broadest representative types: whenever one type is assignable to
 * another, the narrower one is removed and the broader kept. Returns the single survivor when
 * one remains, otherwise the union of the survivors. A semantic-error type short-circuits.
 *
 * NOTE: the input list is mutated in place; the index adjustments below compensate for the
 * removals so no element is skipped.
 */
private BType getRepresentativeBroadType(List<BType> inferredTypeList) {
    for (int i = 0; i < inferredTypeList.size(); i++) {
        BType type = inferredTypeList.get(i);
        if (type.tag == TypeTags.SEMANTIC_ERROR) {
            return type;
        }

        for (int j = i + 1; j < inferredTypeList.size(); j++) {
            BType otherType = inferredTypeList.get(j);

            if (otherType.tag == TypeTags.SEMANTIC_ERROR) {
                return otherType;
            }

            // otherType is narrower (or equal): drop it and re-examine the same index j.
            if (types.isAssignable(otherType, type)) {
                inferredTypeList.remove(j);
                j -= 1;
                continue;
            }

            // type is narrower: drop it and resume the outer scan at the same position.
            if (types.isAssignable(type, otherType)) {
                inferredTypeList.remove(i);
                i -= 1;
                break;
            }
        }
    }

    if (inferredTypeList.size() == 1) {
        return inferredTypeList.get(0);
    }

    return BUnionType.create(null, inferredTypeList.toArray(new BType[0]));
}
/**
 * Infers and defines an anonymous record type from a record literal (mapping constructor)
 * when the expected type does not fix the shape.
 *
 * Field types are accumulated per key in {@code nonRestFieldTypes}; computed keys and
 * spread-operator map constraints contribute to the rest field types instead. The resulting
 * record is sealed iff no rest candidates were seen, and marked readonly when the expected
 * type is readonly or all (non-rest) fields are readonly in a sealed record.
 *
 * @return the inferred {@code BRecordType}, or {@code symTable.semanticError} on any
 *         erroneous field/rest type
 */
private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) {
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);

    Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>();
    List<BType> restFieldTypes = new ArrayList<>();

    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.isKeyValueField()) {
            BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field;
            BLangRecordKey key = keyValue.key;
            BLangExpression expression = keyValue.valueExpr;
            BLangExpression keyExpr = key.expr;
            if (key.computedKey) {
                // Computed keys cannot name a declared field; the value type goes to the
                // rest field candidates.
                checkExpr(keyExpr, env, symTable.stringType);
                BType exprType = checkExpr(expression, env, expType);
                if (isUniqueType(restFieldTypes, exprType)) {
                    restFieldTypes.add(exprType);
                }
            } else {
                addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr),
                                       keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) :
                                               checkExpr(expression, env, expType),
                                       true, keyValue.readonly);
            }
        } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            BType spreadOpType = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr,
                                           env, expType);
            BType type = Types.getReferredType(spreadOpType);

            if (type.tag == TypeTags.MAP) {
                // Spreading a map adds its constraint to the rest field candidates.
                BType constraintType = ((BMapType) type).constraint;

                if (isUniqueType(restFieldTypes, constraintType)) {
                    restFieldTypes.add(constraintType);
                }
            }

            if (type.tag != TypeTags.RECORD) {
                continue;
            }

            // Spreading a record contributes each of its fields; optional fields stay
            // optional, and an open record's rest type joins the rest candidates.
            BRecordType recordType = (BRecordType) type;
            for (BField recField : recordType.fields.values()) {
                addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type,
                                       !Symbols.isOptional(recField.symbol), false);
            }

            if (!recordType.sealed) {
                BType restFieldType = recordType.restFieldType;
                if (isUniqueType(restFieldTypes, restFieldType)) {
                    restFieldTypes.add(restFieldType);
                }
            }
        } else {
            // Shorthand `{x}` field: the variable name is the key.
            BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
            addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField),
                                   varNameField.readonly ? checkExpr(varNameField, env, symTable.readonlyType) :
                                           checkExpr(varNameField, env, expType),
                                   true, varNameField.readonly);
        }
    }

    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    boolean allReadOnlyNonRestFields = true;

    for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) {
        FieldInfo fieldInfo = entry.getValue();
        // NOTE: this local shadows the `types` member on purpose within this loop.
        List<BType> types = fieldInfo.types;

        if (types.contains(symTable.semanticError)) {
            return symTable.semanticError;
        }

        String key = entry.getKey();
        Name fieldName = names.fromString(key);
        BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0]));

        Set<Flag> flags = new HashSet<>();

        if (fieldInfo.required) {
            flags.add(Flag.REQUIRED);
        } else {
            flags.add(Flag.OPTIONAL);
        }

        if (fieldInfo.readonly) {
            flags.add(Flag.READONLY);
        } else if (allReadOnlyNonRestFields) {
            allReadOnlyNonRestFields = false;
        }

        BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol,
                                                symTable.builtinPos, VIRTUAL);
        fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol));
        recordSymbol.scope.define(fieldName, fieldSymbol);
    }

    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = fields;

    if (restFieldTypes.contains(symTable.semanticError)) {
        return symTable.semanticError;
    }

    if (restFieldTypes.isEmpty()) {
        recordType.sealed = true;
        recordType.restFieldType = symTable.noType;
    } else if (restFieldTypes.size() == 1) {
        recordType.restFieldType = restFieldTypes.get(0);
    } else {
        recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0]));
    }

    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;

    if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) {
        recordType.flags |= Flags.READONLY;
        recordSymbol.flags |= Flags.READONLY;
    }

    // Register a synthetic type definition (with init function) so the record behaves like
    // a declared type downstream.
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                                                                                   recordLiteral.pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                                                                                      names, symTable);
    TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordType, recordSymbol, recordTypeNode, env);

    return recordType;
}
/**
 * Creates an anonymous record type symbol with its own scope and a synthetic no-op
 * {@code init} function (returning nil), and registers the init function in the scope under
 * the conventional "<record>.<init>" name.
 */
private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location,
                                                 SymbolOrigin origin) {
    BRecordTypeSymbol recordSymbol =
            Symbols.createRecordSymbol(Flags.ANONYMOUS,
                                       names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)),
                                       pkgID, null, env.scope.owner, location, origin);

    // Synthetic init function: no parameters, returns nil.
    BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
            false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                                                         bInvokableType, location);

    recordSymbol.scope = new Scope(recordSymbol);
    recordSymbol.scope.define(
            names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
            recordSymbol.initializerFunc.symbol);
    return recordSymbol;
}
/**
 * Extracts the field key name from a record literal key: the variable name for a simple
 * variable reference, otherwise the string value of the literal.
 */
private String getKeyName(BLangExpression key) {
    if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) key).variableName.value;
    }
    return (String) ((BLangLiteral) key).value;
}
/**
 * Records an observed type for a non-rest field while inferring a record type from a record
 * literal. A new key creates a {@code FieldInfo} entry; a repeated key accumulates distinct
 * types and upgrades the field to required if any occurrence is required.
 *
 * NOTE(review): the readonly flag of an existing entry is never upgraded by later
 * occurrences — confirm this asymmetry with {@code required} is intentional.
 */
private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString,
                                    BType exprType, boolean required, boolean readonly) {
    FieldInfo fieldInfo = nonRestFieldTypes.get(keyString);
    if (fieldInfo == null) {
        // Plain list creation instead of double-brace initialization (which creates an
        // anonymous ArrayList subclass per call site).
        List<BType> typeList = new ArrayList<>();
        typeList.add(exprType);
        nonRestFieldTypes.put(keyString, new FieldInfo(typeList, required, readonly));
        return;
    }

    if (isUniqueType(fieldInfo.types, exprType)) {
        fieldInfo.types.add(exprType);
    }

    if (required && !fieldInfo.required) {
        fieldInfo.required = true;
    }
}
/**
 * Returns true if {@code type} is not already present in {@code typeList}. Record types are
 * compared by reference identity; all other types via {@code types.isSameType}.
 */
private boolean isUniqueType(Iterable<BType> typeList, BType type) {
    boolean isRecord = type.tag == TypeTags.RECORD;

    for (BType existingType : typeList) {
        boolean duplicate = isRecord ? type == existingType : types.isSameType(type, existingType);
        if (duplicate) {
            return false;
        }
    }
    return true;
}
/**
 * Determines the type of an XML literal of the given mutable XML subtype against the expected
 * type, considering both the mutable subtype and its effective immutable (readonly) variant.
 *
 * For non-union expected types the first assignable of {mutable, immutable} wins. For union
 * expected types each member is checked; exactly one compatible candidate must remain,
 * otherwise an incompatible-types or ambiguous-types error is logged.
 *
 * @return the resolved type, or {@code symTable.semanticError} with a logged diagnostic
 */
private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType,
                                                  BType expType) {
    if (expType == symTable.semanticError) {
        return expType;
    }

    boolean unionExpType = expType.tag == TypeTags.UNION;

    if (expType == mutableXmlSubType) {
        return expType;
    }

    if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) {
        return mutableXmlSubType;
    }

    // Try the readonly variant of the subtype next.
    BXMLSubType immutableXmlSubType = (BXMLSubType)
            ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable,
                                                          anonymousModelHelper, names);

    if (expType == immutableXmlSubType) {
        return expType;
    }

    if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) {
        return immutableXmlSubType;
    }

    if (!unionExpType) {
        dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
        return symTable.semanticError;
    }

    // Union expected type: collect every distinct compatible candidate.
    List<BType> compatibleTypes = new ArrayList<>();
    for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
        if (compatibleTypes.contains(memberType)) {
            continue;
        }

        if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) {
            compatibleTypes.add(memberType);
            continue;
        }

        if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) {
            compatibleTypes.add(mutableXmlSubType);
            continue;
        }

        if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) {
            compatibleTypes.add(immutableXmlSubType);
        }
    }

    if (compatibleTypes.isEmpty()) {
        dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
        return symTable.semanticError;
    }

    if (compatibleTypes.size() == 1) {
        return compatibleTypes.get(0);
    }

    // More than one candidate fits: the literal's type cannot be decided.
    dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType);
    return symTable.semanticError;
}
/**
 * Converts the types of an XML element literal's modified children to their effective
 * immutable (readonly) types, recursing into nested element literals.
 */
private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) {
    for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) {
        BType childType = modifiedChild.getBType();
        // Skip children that are already readonly or cannot be made selectively immutable.
        if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) {
            continue;
        }
        modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types, childType,
                                                                             env, symTable, anonymousModelHelper, names));

        if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
            markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild);
        }
    }
}
/**
 * Logs an undefined-symbol error unless the name is a compiler-generated placeholder for a
 * missing node (those already carry their own diagnostics).
 */
private void logUndefinedSymbolError(Location pos, String name) {
    if (missingNodesHelper.isMissingNode(name)) {
        return;
    }
    dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name);
}
/** Sets the {@code ISOLATED} flag on both the type and its type symbol. */
private void markTypeAsIsolated(BType actualType) {
    actualType.flags |= Flags.ISOLATED;
    actualType.tsymbol.flags |= Flags.ISOLATED;
}
/**
 * Handles an object-constructor expression used with a readonly reference. If any field type
 * can never be immutable, the constructor is analyzed as a plain mutable object (optionally
 * logging an error per offending field). Otherwise the constructed class/type is marked
 * readonly and all its fields are made immutable.
 *
 * @param logErrors whether to log an error for every field that cannot be readonly
 */
private void handleObjectConstrExprForReadOnly(
        BLangObjectConstructorExpression objectCtorExpr, BObjectType actualObjectType, SymbolEnv env,
        boolean logErrors) {
    BLangClassDefinition classDefForConstructor = objectCtorExpr.classNode;
    boolean hasNeverReadOnlyField = false;

    for (BField field : actualObjectType.fields.values()) {
        BType fieldType = field.type;
        if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) {
            // This field can never be readonly: fall back to a mutable analysis.
            analyzeObjectConstructor(classDefForConstructor, env);
            hasNeverReadOnlyField = true;

            if (!logErrors) {
                return;
            }

            dlog.error(field.pos,
                       DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE,
                       fieldType);
        }
    }

    if (hasNeverReadOnlyField) {
        return;
    }

    // All fields can be immutable: mark the class and type readonly and propagate.
    classDefForConstructor.flagSet.add(Flag.READONLY);
    actualObjectType.flags |= Flags.READONLY;
    actualObjectType.tsymbol.flags |= Flags.READONLY;

    ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types,
                                              anonymousModelHelper, symTable, names, objectCtorExpr.pos);

    analyzeObjectConstructor(classDefForConstructor, env);
}
/**
 * Marks an object type created by an object-constructor expression as isolated when it is
 * readonly, or when every field is final and of a readonly/isolated-object subtype.
 */
private void markConstructedObjectIsolatedness(BObjectType actualObjectType) {
    if (actualObjectType.markedIsolatedness) {
        return;
    }
    // Readonly objects are inherently isolated.
    // NOTE(review): this path does not set markedIsolatedness — re-marking is idempotent so
    // this looks harmless, but confirm it is intentional.
    if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) {
        markTypeAsIsolated(actualObjectType);
        return;
    }

    for (BField field : actualObjectType.fields.values()) {
        // Any non-final or non-readonly/isolated-typed field prevents isolation.
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) ||
                !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) {
            return;
        }
    }

    markTypeAsIsolated(actualObjectType);
    actualObjectType.markedIsolatedness = true;
}
/**
 * Marks an access expression as a leaf node, i.e. it is not itself the subject of an
 * enclosing field- or index-based access. Parenthesized (group) expressions around the
 * access are skipped when walking up the parent chain.
 */
private void markLeafNode(BLangAccessExpression accessExpression) {
    BLangNode parent = accessExpression.parent;
    if (parent == null) {
        accessExpression.leafNode = true;
        return;
    }

    NodeKind kind = parent.getKind();

    // Skip transparent group (parenthesis) wrappers.
    while (kind == NodeKind.GROUP_EXPR) {
        parent = parent.parent;

        if (parent == null) {
            accessExpression.leafNode = true;
            break;
        }

        kind = parent.getKind();
    }

    if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        accessExpression.leafNode = true;
    }
}
/**
 * Accumulates per-field information while inferring a record type from a record literal:
 * the candidate types observed for the field (unioned later when more than one), whether
 * the field is required, and whether it is readonly.
 */
private static class FieldInfo {
    // Distinct candidate types seen for this field.
    List<BType> types;
    boolean required;
    boolean readonly;

    private FieldInfo(List<BType> types, boolean required, boolean readonly) {
        this.types = types;
        this.required = required;
        this.readonly = readonly;
    }
}
/** Simple holder pairing a field symbol with the type determined for it during checking. */
private static class TypeSymbolPair {
    private BVarSymbol fieldSymbol;
    private BType determinedType;

    public TypeSymbolPair(BVarSymbol fieldSymbol, BType determinedType) {
        this.fieldSymbol = fieldSymbol;
        this.determinedType = determinedType;
    }
}
/**
 * Collects, for a field access on a union of records, which member records do not declare
 * the field and in which member records the field is nilable/optional, for use in building
 * precise diagnostic messages.
 */
private static class RecordUnionDiagnostics {
    // Records in the union that do not declare the accessed field.
    Set<BRecordType> undeclaredInRecords = new LinkedHashSet<>();

    // Records in the union where the accessed field is nilable.
    Set<BRecordType> nilableInRecords = new LinkedHashSet<>();

    boolean hasUndeclared() {
        return !undeclaredInRecords.isEmpty();
    }

    boolean hasNilable() {
        return !nilableInRecords.isEmpty();
    }

    boolean hasNilableAndUndeclared() {
        // Delegate to the single-condition checks rather than re-testing the sets.
        return hasNilable() && hasUndeclared();
    }

    /**
     * Renders the record names joined with the quoting expected by the caller's message
     * template, e.g. {@code A', 'B', and 'C} for three records.
     */
    String recordsToString(Set<BRecordType> recordTypeSet) {
        StringBuilder recordNames = new StringBuilder();
        int recordSetSize = recordTypeSet.size();
        int index = 0;

        for (BRecordType recordType : recordTypeSet) {
            index++;
            recordNames.append(recordType.tsymbol.getName().getValue());

            if (recordSetSize > 1) {

                if (index == recordSetSize - 1) {
                    recordNames.append("', and '");
                } else if (index < recordSetSize) {
                    recordNames.append("', '");
                }
            }
        }

        return recordNames.toString();
    }
}
} |
Does this mean it uses an expression-based partition? | public boolean hasPartitionTransformedEvolution() {
return getNativeTable().spec().fields().stream().anyMatch(field -> field.transform().isVoid());
} | return getNativeTable().spec().fields().stream().anyMatch(field -> field.transform().isVoid()); | public boolean hasPartitionTransformedEvolution() {
return getNativeTable().spec().fields().stream().anyMatch(field -> field.transform().isVoid());
} | class IcebergTable extends Table {
private static final Logger LOG = LogManager.getLogger(IcebergTable.class);
private Optional<Snapshot> snapshot = Optional.empty();
private static final String JSON_KEY_ICEBERG_DB = "database";
private static final String JSON_KEY_ICEBERG_TABLE = "table";
private static final String JSON_KEY_RESOURCE_NAME = "resource";
private static final String JSON_KEY_ICEBERG_PROPERTIES = "icebergProperties";
private static final String PARQUET_FORMAT = "parquet";
private String catalogName;
@SerializedName(value = "dn")
private String remoteDbName;
@SerializedName(value = "tn")
private String remoteTableName;
@SerializedName(value = "rn")
private String resourceName;
@SerializedName(value = "prop")
private Map<String, String> icebergProperties = Maps.newHashMap();
private org.apache.iceberg.Table nativeTable;
private List<Column> partitionColumns;
private long refreshSnapshotTime = -1L;
private final AtomicLong partitionIdGen = new AtomicLong(0L);
public IcebergTable() {
super(TableType.ICEBERG);
}
public IcebergTable(long id, String srTableName, String catalogName, String resourceName, String remoteDbName,
String remoteTableName, List<Column> schema, org.apache.iceberg.Table nativeTable,
Map<String, String> icebergProperties) {
super(id, srTableName, TableType.ICEBERG, schema);
this.catalogName = catalogName;
this.resourceName = resourceName;
this.remoteDbName = remoteDbName;
this.remoteTableName = remoteTableName;
this.nativeTable = nativeTable;
this.icebergProperties = icebergProperties;
}
@Override
public String getCatalogName() {
return catalogName == null ? getResourceMappingCatalogName(resourceName, "iceberg") : catalogName;
}
public String getResourceName() {
return resourceName;
}
public String getRemoteDbName() {
return remoteDbName;
}
public String getRemoteTableName() {
return remoteTableName;
}
/**
 * Returns the cached current snapshot, resolving and caching it from the native table on
 * first access. Empty when the table has no current snapshot.
 */
public Optional<Snapshot> getSnapshot() {
    if (!snapshot.isPresent()) {
        snapshot = Optional.ofNullable(getNativeTable().currentSnapshot());
    }
    return snapshot;
}
/**
 * Returns a unique identifier for this table: for external catalogs, the dotted
 * catalog.db.table path plus the Iceberg metadata UUID; for resource-mapping (internal)
 * tables, the StarRocks table id.
 */
@Override
public String getUUID() {
    if (CatalogMgr.isExternalCatalog(catalogName)) {
        return String.join(".", catalogName, remoteDbName, remoteTableName,
                ((BaseTable) getNativeTable()).operations().current().uuid());
    } else {
        return Long.toString(id);
    }
}
/**
 * Returns the partition columns that use identity transforms, lazily computed and cached.
 *
 * NOTE(review): getColumn(...) may yield null entries when a partition field's name has no
 * matching schema column — getPartitionColumnNames filters nulls, suggesting this is
 * anticipated; confirm callers of this method tolerate nulls as well.
 */
public List<Column> getPartitionColumns() {
    if (partitionColumns == null) {
        List<PartitionField> identityPartitionFields = this.getNativeTable().spec().fields().stream().
                filter(partitionField -> partitionField.transform().isIdentity()).collect(Collectors.toList());
        partitionColumns = identityPartitionFields.stream().map(partitionField -> getColumn(partitionField.name()))
                .collect(Collectors.toList());
    }
    return partitionColumns;
}
/**
 * Returns the source columns of all partition fields. When the partition spec has evolved
 * (detected via void transforms), non-identity partition fields are skipped.
 */
public List<Column> getPartitionColumnsIncludeTransformed() {
    List<Column> allPartitionColumns = new ArrayList<>();
    // Hoisted: this check scans the whole spec and is invariant across loop iterations.
    boolean hasEvolution = hasPartitionTransformedEvolution();
    // Use getNativeTable() rather than the raw field so lazily-resolved tables work here,
    // consistent with the other accessors in this class.
    org.apache.iceberg.Table table = getNativeTable();
    for (PartitionField field : table.spec().fields()) {
        if (!field.transform().isIdentity() && hasEvolution) {
            continue;
        }
        String baseColumnName = table.schema().findColumnName(field.sourceId());
        Column partitionCol = getColumn(baseColumnName);
        allPartitionColumns.add(partitionCol);
    }
    return allPartitionColumns;
}
public long nextPartitionId() {
return partitionIdGen.getAndIncrement();
}
public List<Integer> partitionColumnIndexes() {
List<Column> partitionCols = getPartitionColumns();
return partitionCols.stream().map(col -> fullSchema.indexOf(col)).collect(Collectors.toList());
}
/**
 * Returns the positions (within the top-level schema) of the columns that participate in the
 * table's sort order.
 */
public List<Integer> getSortKeyIndexes() {
    org.apache.iceberg.Table table = getNativeTable();
    List<Integer> sortFieldSourceIds = table.sortOrder().fields().stream()
            .map(SortField::sourceId)
            .collect(Collectors.toList());
    List<Types.NestedField> schemaFields = table.schema().asStruct().fields();
    List<Integer> indexes = new ArrayList<>();
    for (int i = 0; i < schemaFields.size(); i++) {
        if (sortFieldSourceIds.contains(schemaFields.get(i).fieldId())) {
            indexes.add(i);
        }
    }
    return indexes;
}
public void resetSnapshot() {
snapshot = Optional.empty();
}
public boolean isV2Format() {
return ((BaseTable) getNativeTable()).operations().current().formatVersion() > 1;
}
public boolean isUnPartitioned() {
return ((BaseTable) getNativeTable()).operations().current().spec().isUnpartitioned();
}
/**
 * Returns the names of the identity partition columns, skipping any null entries produced by
 * {@link #getPartitionColumns()}.
 */
public List<String> getPartitionColumnNames() {
    return getPartitionColumns().stream().filter(java.util.Objects::nonNull).map(Column::getName)
            .collect(Collectors.toList());
}
@Override
public String getTableIdentifier() {
return Joiner.on(":").join(name, ((BaseTable) getNativeTable()).operations().current().uuid());
}
public IcebergCatalogType getCatalogType() {
return IcebergCatalogType.valueOf(icebergProperties.get(ICEBERG_CATALOG_TYPE));
}
public String getTableLocation() {
return getNativeTable().location();
}
/**
 * Returns the underlying Iceberg table, lazily resolving it through the metadata manager
 * when this instance was created without one (e.g. after deserialization of a
 * resource-mapping table). The resolved reference is cached.
 *
 * @throws StarRocksConnectorException if the table cannot be found in the catalog
 */
public org.apache.iceberg.Table getNativeTable() {
    // For the resource mapping catalog, the table is resolved on first use.
    if (nativeTable == null) {
        IcebergTable resourceMappingTable = (IcebergTable) GlobalStateMgr.getCurrentState().getMetadataMgr()
                .getTable(getCatalogName(), remoteDbName, remoteTableName);

        if (resourceMappingTable == null) {
            throw new StarRocksConnectorException("Can't find table %s.%s.%s",
                    getCatalogName(), remoteDbName, remoteTableName);
        }

        nativeTable = resourceMappingTable.getNativeTable();
    }

    return nativeTable;
}
public long getRefreshSnapshotTime() {
return refreshSnapshotTime;
}
public void setRefreshSnapshotTime(long refreshSnapshotTime) {
this.refreshSnapshotTime = refreshSnapshotTime;
}
/**
 * Serializes this table (and the referenced partition keys) to its thrift descriptor.
 * Partition data is compressed when possible; on serialization/compression failure it falls
 * back to the uncompressed partition map.
 */
@Override
public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) {
    Preconditions.checkNotNull(partitions);
    // Use getNativeTable() (not the raw field) so lazily-resolved tables work here too,
    // consistent with the other accessors in this class.
    org.apache.iceberg.Table table = getNativeTable();
    TIcebergTable tIcebergTable = new TIcebergTable();
    tIcebergTable.setLocation(table.location());

    List<TColumn> tColumns = Lists.newArrayList();
    for (Column column : getBaseSchema()) {
        tColumns.add(column.toThrift());
    }
    tIcebergTable.setColumns(tColumns);

    tIcebergTable.setIceberg_schema(IcebergApiConverter.getTIcebergSchema(table.schema()));
    tIcebergTable.setPartition_column_names(getPartitionColumnNames());

    if (!partitions.isEmpty()) {
        TPartitionMap tPartitionMap = new TPartitionMap();
        for (int i = 0; i < partitions.size(); i++) {
            DescriptorTable.ReferencedPartitionInfo info = partitions.get(i);
            PartitionKey key = info.getKey();
            long partitionId = info.getId();
            THdfsPartition tPartition = new THdfsPartition();
            List<LiteralExpr> keys = key.getKeys();
            tPartition.setPartition_key_exprs(keys.stream().map(Expr::treeToThrift).collect(Collectors.toList()));
            tPartitionMap.putToPartitions(partitionId, tPartition);
        }

        // Prefer the compressed representation; fall back to the plain partition map if
        // thrift serialization or compression fails.
        try {
            TSerializer serializer = new TSerializer(TBinaryProtocol::new);
            byte[] bytes = serializer.serialize(tPartitionMap);
            byte[] compressedBytes = Util.compress(bytes);
            TCompressedPartitionMap tCompressedPartitionMap = new TCompressedPartitionMap();
            tCompressedPartitionMap.setOriginal_len(bytes.length);
            tCompressedPartitionMap.setCompressed_len(compressedBytes.length);
            tCompressedPartitionMap.setCompressed_serialized_partitions(
                    Base64.getEncoder().encodeToString(compressedBytes));
            tIcebergTable.setCompressed_partitions(tCompressedPartitionMap);
        } catch (TException | IOException ignore) {
            tIcebergTable.setPartitions(tPartitionMap.getPartitions());
        }
    }

    TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.ICEBERG_TABLE,
            fullSchema.size(), 0, remoteTableName, remoteDbName);
    tTableDescriptor.setIcebergTable(tIcebergTable);
    return tTableDescriptor;
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
JsonObject jsonObject = new JsonObject();
jsonObject.addProperty(JSON_KEY_ICEBERG_DB, remoteDbName);
jsonObject.addProperty(JSON_KEY_ICEBERG_TABLE, remoteTableName);
if (!Strings.isNullOrEmpty(resourceName)) {
jsonObject.addProperty(JSON_KEY_RESOURCE_NAME, resourceName);
}
if (!icebergProperties.isEmpty()) {
JsonObject jIcebergProperties = new JsonObject();
for (Map.Entry<String, String> entry : icebergProperties.entrySet()) {
jIcebergProperties.addProperty(entry.getKey(), entry.getValue());
}
jsonObject.add(JSON_KEY_ICEBERG_PROPERTIES, jIcebergProperties);
}
Text.writeString(out, jsonObject.toString());
}
/**
 * Deserializes the iceberg-specific fields previously written by {@code write(DataOutput)}:
 * remote db/table names, the optional resource name, and the iceberg properties map.
 */
@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);

    String json = Text.readString(in);
    JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();
    remoteDbName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_DB).getAsString();
    remoteTableName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_TABLE).getAsString();
    // write() only emits the resource key when the name is non-empty, so guard the read to
    // avoid a NullPointerException when the key is absent.
    if (jsonObject.has(JSON_KEY_RESOURCE_NAME)) {
        resourceName = jsonObject.getAsJsonPrimitive(JSON_KEY_RESOURCE_NAME).getAsString();
    }
    if (jsonObject.has(JSON_KEY_ICEBERG_PROPERTIES)) {
        JsonObject jIcebergProperties = jsonObject.getAsJsonObject(JSON_KEY_ICEBERG_PROPERTIES);
        for (Map.Entry<String, JsonElement> entry : jIcebergProperties.entrySet()) {
            icebergProperties.put(entry.getKey(), entry.getValue().getAsString());
        }
    }
}
@Override
public boolean isSupported() {
return true;
}
@Override
public boolean supportInsert() {
return getNativeTable().properties().getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT)
.equalsIgnoreCase(PARQUET_FORMAT);
}
@Override
public int hashCode() {
return com.google.common.base.Objects.hashCode(getCatalogName(), remoteDbName, getTableIdentifier());
}
/**
 * Two IcebergTable instances are equal when they agree on catalog name, remote database
 * name, and table identifier (name plus Iceberg metadata UUID).
 */
@Override
public boolean equals(Object other) {
    if (!(other instanceof IcebergTable)) {
        return false;
    }

    IcebergTable that = (IcebergTable) other;
    return Objects.equal(getCatalogName(), that.getCatalogName())
            && Objects.equal(remoteDbName, that.remoteDbName)
            && Objects.equal(getTableIdentifier(), that.getTableIdentifier());
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private long id;
private String srTableName;
private String catalogName;
private String resourceName;
private String remoteDbName;
private String remoteTableName;
private List<Column> fullSchema;
private Map<String, String> icebergProperties;
private org.apache.iceberg.Table nativeTable;
public Builder() {
}
public Builder setId(long id) {
this.id = id;
return this;
}
public Builder setSrTableName(String srTableName) {
this.srTableName = srTableName;
return this;
}
public Builder setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
}
public Builder setResourceName(String resourceName) {
this.resourceName = resourceName;
return this;
}
public Builder setRemoteDbName(String remoteDbName) {
this.remoteDbName = remoteDbName;
return this;
}
public Builder setRemoteTableName(String remoteTableName) {
this.remoteTableName = remoteTableName;
return this;
}
public Builder setFullSchema(List<Column> fullSchema) {
this.fullSchema = fullSchema;
return this;
}
public Builder setIcebergProperties(Map<String, String> icebergProperties) {
this.icebergProperties = icebergProperties;
return this;
}
public Builder setNativeTable(org.apache.iceberg.Table nativeTable) {
this.nativeTable = nativeTable;
return this;
}
public IcebergTable build() {
return new IcebergTable(id, srTableName, catalogName, resourceName, remoteDbName, remoteTableName,
fullSchema, nativeTable, icebergProperties);
}
}
} | class IcebergTable extends Table {
private static final Logger LOG = LogManager.getLogger(IcebergTable.class);
private Optional<Snapshot> snapshot = Optional.empty();
private static final String JSON_KEY_ICEBERG_DB = "database";
private static final String JSON_KEY_ICEBERG_TABLE = "table";
private static final String JSON_KEY_RESOURCE_NAME = "resource";
private static final String JSON_KEY_ICEBERG_PROPERTIES = "icebergProperties";
private static final String PARQUET_FORMAT = "parquet";
private String catalogName;
@SerializedName(value = "dn")
private String remoteDbName;
@SerializedName(value = "tn")
private String remoteTableName;
@SerializedName(value = "rn")
private String resourceName;
@SerializedName(value = "prop")
private Map<String, String> icebergProperties = Maps.newHashMap();
private org.apache.iceberg.Table nativeTable;
private List<Column> partitionColumns;
private long refreshSnapshotTime = -1L;
private final AtomicLong partitionIdGen = new AtomicLong(0L);
public IcebergTable() {
super(TableType.ICEBERG);
}
public IcebergTable(long id, String srTableName, String catalogName, String resourceName, String remoteDbName,
String remoteTableName, List<Column> schema, org.apache.iceberg.Table nativeTable,
Map<String, String> icebergProperties) {
super(id, srTableName, TableType.ICEBERG, schema);
this.catalogName = catalogName;
this.resourceName = resourceName;
this.remoteDbName = remoteDbName;
this.remoteTableName = remoteTableName;
this.nativeTable = nativeTable;
this.icebergProperties = icebergProperties;
}
@Override
public String getCatalogName() {
return catalogName == null ? getResourceMappingCatalogName(resourceName, "iceberg") : catalogName;
}
public String getResourceName() {
return resourceName;
}
public String getRemoteDbName() {
return remoteDbName;
}
public String getRemoteTableName() {
return remoteTableName;
}
/**
 * Returns the table's current Iceberg snapshot, caching it after the first lookup.
 *
 * @return the cached snapshot, or an empty {@code Optional} when the table has none
 */
public Optional<Snapshot> getSnapshot() {
    // Lazily populate the cache on first access; later calls reuse it until resetSnapshot().
    if (!snapshot.isPresent()) {
        snapshot = Optional.ofNullable(getNativeTable().currentSnapshot());
    }
    return snapshot;
}
@Override
public String getUUID() {
if (CatalogMgr.isExternalCatalog(catalogName)) {
return String.join(".", catalogName, remoteDbName, remoteTableName,
((BaseTable) getNativeTable()).operations().current().uuid());
} else {
return Long.toString(id);
}
}
public List<Column> getPartitionColumns() {
if (partitionColumns == null) {
List<PartitionField> identityPartitionFields = this.getNativeTable().spec().fields().stream().
filter(partitionField -> partitionField.transform().isIdentity()).collect(Collectors.toList());
partitionColumns = identityPartitionFields.stream().map(partitionField -> getColumn(partitionField.name()))
.collect(Collectors.toList());
}
return partitionColumns;
}
public List<Column> getPartitionColumnsIncludeTransformed() {
List<Column> allPartitionColumns = new ArrayList<>();
for (PartitionField field : getNativeTable().spec().fields()) {
if (!field.transform().isIdentity() && hasPartitionTransformedEvolution()) {
continue;
}
String baseColumnName = nativeTable.schema().findColumnName(field.sourceId());
Column partitionCol = getColumn(baseColumnName);
allPartitionColumns.add(partitionCol);
}
return allPartitionColumns;
}
public long nextPartitionId() {
return partitionIdGen.getAndIncrement();
}
public List<Integer> partitionColumnIndexes() {
List<Column> partitionCols = getPartitionColumns();
return partitionCols.stream().map(col -> fullSchema.indexOf(col)).collect(Collectors.toList());
}
public List<Integer> getSortKeyIndexes() {
List<Integer> indexes = new ArrayList<>();
org.apache.iceberg.Table nativeTable = getNativeTable();
List<Types.NestedField> fields = nativeTable.schema().asStruct().fields();
List<Integer> sortFieldSourceIds = nativeTable.sortOrder().fields().stream()
.map(SortField::sourceId)
.collect(Collectors.toList());
for (int i = 0; i < fields.size(); i++) {
Types.NestedField field = fields.get(i);
if (sortFieldSourceIds.contains(field.fieldId())) {
indexes.add(i);
}
}
return indexes;
}
public void resetSnapshot() {
snapshot = Optional.empty();
}
public boolean isV2Format() {
return ((BaseTable) getNativeTable()).operations().current().formatVersion() > 1;
}
public boolean isUnPartitioned() {
return ((BaseTable) getNativeTable()).operations().current().spec().isUnpartitioned();
}
public List<String> getPartitionColumnNames() {
return getPartitionColumns().stream().filter(java.util.Objects::nonNull).map(Column::getName)
.collect(Collectors.toList());
}
@Override
public String getTableIdentifier() {
return Joiner.on(":").join(name, ((BaseTable) getNativeTable()).operations().current().uuid());
}
public IcebergCatalogType getCatalogType() {
return IcebergCatalogType.valueOf(icebergProperties.get(ICEBERG_CATALOG_TYPE));
}
public String getTableLocation() {
return getNativeTable().location();
}
public org.apache.iceberg.Table getNativeTable() {
if (nativeTable == null) {
IcebergTable resourceMappingTable = (IcebergTable) GlobalStateMgr.getCurrentState().getMetadataMgr()
.getTable(getCatalogName(), remoteDbName, remoteTableName);
if (resourceMappingTable == null) {
throw new StarRocksConnectorException("Can't find table %s.%s.%s",
getCatalogName(), remoteDbName, remoteTableName);
}
nativeTable = resourceMappingTable.getNativeTable();
}
return nativeTable;
}
public long getRefreshSnapshotTime() {
return refreshSnapshotTime;
}
public void setRefreshSnapshotTime(long refreshSnapshotTime) {
this.refreshSnapshotTime = refreshSnapshotTime;
}
@Override
public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) {
Preconditions.checkNotNull(partitions);
TIcebergTable tIcebergTable = new TIcebergTable();
tIcebergTable.setLocation(nativeTable.location());
List<TColumn> tColumns = Lists.newArrayList();
for (Column column : getBaseSchema()) {
tColumns.add(column.toThrift());
}
tIcebergTable.setColumns(tColumns);
tIcebergTable.setIceberg_schema(IcebergApiConverter.getTIcebergSchema(nativeTable.schema()));
tIcebergTable.setPartition_column_names(getPartitionColumnNames());
if (!partitions.isEmpty()) {
TPartitionMap tPartitionMap = new TPartitionMap();
for (int i = 0; i < partitions.size(); i++) {
DescriptorTable.ReferencedPartitionInfo info = partitions.get(i);
PartitionKey key = info.getKey();
long partitionId = info.getId();
THdfsPartition tPartition = new THdfsPartition();
List<LiteralExpr> keys = key.getKeys();
tPartition.setPartition_key_exprs(keys.stream().map(Expr::treeToThrift).collect(Collectors.toList()));
tPartitionMap.putToPartitions(partitionId, tPartition);
}
try {
TSerializer serializer = new TSerializer(TBinaryProtocol::new);
byte[] bytes = serializer.serialize(tPartitionMap);
byte[] compressedBytes = Util.compress(bytes);
TCompressedPartitionMap tCompressedPartitionMap = new TCompressedPartitionMap();
tCompressedPartitionMap.setOriginal_len(bytes.length);
tCompressedPartitionMap.setCompressed_len(compressedBytes.length);
tCompressedPartitionMap.setCompressed_serialized_partitions(Base64.getEncoder().encodeToString(compressedBytes));
tIcebergTable.setCompressed_partitions(tCompressedPartitionMap);
} catch (TException | IOException ignore) {
tIcebergTable.setPartitions(tPartitionMap.getPartitions());
}
}
TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.ICEBERG_TABLE,
fullSchema.size(), 0, remoteTableName, remoteDbName);
tTableDescriptor.setIcebergTable(tIcebergTable);
return tTableDescriptor;
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
JsonObject jsonObject = new JsonObject();
jsonObject.addProperty(JSON_KEY_ICEBERG_DB, remoteDbName);
jsonObject.addProperty(JSON_KEY_ICEBERG_TABLE, remoteTableName);
if (!Strings.isNullOrEmpty(resourceName)) {
jsonObject.addProperty(JSON_KEY_RESOURCE_NAME, resourceName);
}
if (!icebergProperties.isEmpty()) {
JsonObject jIcebergProperties = new JsonObject();
for (Map.Entry<String, String> entry : icebergProperties.entrySet()) {
jIcebergProperties.addProperty(entry.getKey(), entry.getValue());
}
jsonObject.add(JSON_KEY_ICEBERG_PROPERTIES, jIcebergProperties);
}
Text.writeString(out, jsonObject.toString());
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
String json = Text.readString(in);
JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();
remoteDbName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_DB).getAsString();
remoteTableName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_TABLE).getAsString();
resourceName = jsonObject.getAsJsonPrimitive(JSON_KEY_RESOURCE_NAME).getAsString();
if (jsonObject.has(JSON_KEY_ICEBERG_PROPERTIES)) {
JsonObject jIcebergProperties = jsonObject.getAsJsonObject(JSON_KEY_ICEBERG_PROPERTIES);
for (Map.Entry<String, JsonElement> entry : jIcebergProperties.entrySet()) {
icebergProperties.put(entry.getKey(), entry.getValue().getAsString());
}
}
}
@Override
public boolean isSupported() {
return true;
}
@Override
public boolean supportInsert() {
return getNativeTable().properties().getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT)
.equalsIgnoreCase(PARQUET_FORMAT);
}
@Override
public int hashCode() {
return com.google.common.base.Objects.hashCode(getCatalogName(), remoteDbName, getTableIdentifier());
}
@Override
/**
 * Two Iceberg tables are equal when catalog name, remote database name and
 * table identifier (name plus metadata UUID) all match.
 */
public boolean equals(Object other) {
    if (!(other instanceof IcebergTable)) {
        return false;
    }
    IcebergTable that = (IcebergTable) other;
    // Resolve this table's derived keys up front, mirroring the comparison below.
    String thisCatalog = getCatalogName();
    String thisIdentifier = getTableIdentifier();
    return Objects.equal(thisCatalog, that.getCatalogName())
            && Objects.equal(remoteDbName, that.remoteDbName)
            && Objects.equal(thisIdentifier, that.getTableIdentifier());
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private long id;
private String srTableName;
private String catalogName;
private String resourceName;
private String remoteDbName;
private String remoteTableName;
private List<Column> fullSchema;
private Map<String, String> icebergProperties;
private org.apache.iceberg.Table nativeTable;
public Builder() {
}
public Builder setId(long id) {
this.id = id;
return this;
}
public Builder setSrTableName(String srTableName) {
this.srTableName = srTableName;
return this;
}
public Builder setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
}
public Builder setResourceName(String resourceName) {
this.resourceName = resourceName;
return this;
}
public Builder setRemoteDbName(String remoteDbName) {
this.remoteDbName = remoteDbName;
return this;
}
public Builder setRemoteTableName(String remoteTableName) {
this.remoteTableName = remoteTableName;
return this;
}
public Builder setFullSchema(List<Column> fullSchema) {
this.fullSchema = fullSchema;
return this;
}
public Builder setIcebergProperties(Map<String, String> icebergProperties) {
this.icebergProperties = icebergProperties;
return this;
}
public Builder setNativeTable(org.apache.iceberg.Table nativeTable) {
this.nativeTable = nativeTable;
return this;
}
public IcebergTable build() {
return new IcebergTable(id, srTableName, catalogName, resourceName, remoteDbName, remoteTableName,
fullSchema, nativeTable, icebergProperties);
}
}
} |
shouldn't this also throw an `IllegalStateException` if `error == null` like `getRestartDelayMS` and `getVerticesToRestart`? | public Throwable getError() {
return error;
} | return error; | public Throwable getError() {
if (canRestart()) {
throw new IllegalStateException("Cannot get error when the restarting is accepted.");
} else {
return error;
}
} | class FailureHandlingResult {
/** Task vertices to restart to recover from the failure. */
private final Set<ExecutionVertexID> verticesToRestart;
/** Delay before the restarting can be conducted. */
private final long restartDelayMS;
/** Reason why the failure is not recoverable. */
private final Throwable error;
/**
* Creates a result of a set of tasks to restart to recover from the failure.
*
* @param verticesToRestart containing task vertices to restart to recover from the failure
* @param restartDelayMS indicate a delay before conducting the restart
*/
private FailureHandlingResult(Set<ExecutionVertexID> verticesToRestart, long restartDelayMS) {
    // A negative delay is a caller bug; fail fast before any state is set.
    checkState(restartDelayMS >= 0);
    // Reject null and freeze the set so the published result stays immutable.
    this.verticesToRestart = Collections.unmodifiableSet(checkNotNull(verticesToRestart));
    this.restartDelayMS = restartDelayMS;
    // A null error is what marks this result as restartable (see canRestart()).
    this.error = null;
}
/**
* Creates a result that the failure is not recoverable and no restarting should be conducted.
*
* @param error reason why the failure is not recoverable
*/
private FailureHandlingResult(Throwable error) {
    // No restart plan exists for an unrecoverable failure.
    this.verticesToRestart = null;
    // Sentinel value; never exposed because getRestartDelayMS() guards on canRestart().
    this.restartDelayMS = -1;
    // The non-null error is what marks this result as unrecoverable.
    this.error = checkNotNull(error);
}
/**
* Returns the tasks to restart.
*
* @return the tasks to restart
*/
public Set<ExecutionVertexID> getVerticesToRestart() {
    // Restart targets only exist for a recoverable result.
    if (!canRestart()) {
        throw new IllegalStateException("Cannot get vertices to restart when the restarting is suppressed.");
    }
    return verticesToRestart;
}
/**
* Returns the delay before the restarting.
*
* @return the delay before the restarting
*/
public long getRestartDelayMS() {
    // The delay is only meaningful for a recoverable result.
    if (!canRestart()) {
        throw new IllegalStateException("Cannot get restart delay when the restarting is suppressed.");
    }
    return restartDelayMS;
}
/**
* Returns whether the restarting can be conducted.
*
* @return whether the restarting can be conducted
*/
public boolean canRestart() {
    // The restartable constructor leaves error null; the unrecoverable one requires it non-null.
    return error == null;
}

/**
 * Returns reason why the restarting cannot be conducted.
 *
 * @return reason why the restarting cannot be conducted
 * @throws IllegalStateException if the result is restartable and thus carries no error
 */
public Throwable getError() {
    // Guard like getVerticesToRestart()/getRestartDelayMS(): the error only
    // exists when the failure was classified as unrecoverable.
    if (canRestart()) {
        throw new IllegalStateException("Cannot get error when the restarting is accepted.");
    }
    return error;
}
/**
* Creates a result of a set of tasks to restart to recover from the failure.
*
* @param verticesToRestart containing task vertices to restart to recover from the failure
* @param restartDelayMS indicate a delay before conducting the restart
* @return result of a set of tasks to restart to recover from the failure
*/
public static FailureHandlingResult restartable(Set<ExecutionVertexID> verticesToRestart, long restartDelayMS) {
return new FailureHandlingResult(verticesToRestart, restartDelayMS);
}
/**
* Creates a result that the failure is not recoverable and no restarting should be conducted.
*
* @param error reason why the failure is not recoverable
* @return result indicating the failure is not recoverable
*/
public static FailureHandlingResult unrecoverable(Throwable error) {
return new FailureHandlingResult(error);
}
} | class FailureHandlingResult {
/** Task vertices to restart to recover from the failure. */
private final Set<ExecutionVertexID> verticesToRestart;
/** Delay before the restarting can be conducted. */
private final long restartDelayMS;
/** Reason why the failure is not recoverable. */
private final Throwable error;
/**
* Creates a result of a set of tasks to restart to recover from the failure.
*
* @param verticesToRestart containing task vertices to restart to recover from the failure
* @param restartDelayMS indicate a delay before conducting the restart
*/
private FailureHandlingResult(Set<ExecutionVertexID> verticesToRestart, long restartDelayMS) {
checkState(restartDelayMS >= 0);
this.verticesToRestart = Collections.unmodifiableSet(checkNotNull(verticesToRestart));
this.restartDelayMS = restartDelayMS;
this.error = null;
}
/**
* Creates a result that the failure is not recoverable and no restarting should be conducted.
*
* @param error reason why the failure is not recoverable
*/
private FailureHandlingResult(Throwable error) {
this.verticesToRestart = null;
this.restartDelayMS = -1;
this.error = checkNotNull(error);
}
/**
* Returns the tasks to restart.
*
* @return the tasks to restart
*/
public Set<ExecutionVertexID> getVerticesToRestart() {
    // Restart targets only exist for a recoverable result.
    if (!canRestart()) {
        throw new IllegalStateException("Cannot get vertices to restart when the restarting is suppressed.");
    }
    return verticesToRestart;
}
/**
* Returns the delay before the restarting.
*
* @return the delay before the restarting
*/
public long getRestartDelayMS() {
    if (canRestart()) {
        return restartDelayMS;
    } else {
        throw new IllegalStateException("Cannot get restart delay when the restarting is suppressed.");
    }
}

/**
 * Returns whether the restarting can be conducted.
 *
 * @return whether the restarting can be conducted
 */
public boolean canRestart() {
    // The restartable constructor leaves error null; the unrecoverable one requires it non-null.
    return error == null;
}

/**
 * Returns reason why the restarting cannot be conducted.
 *
 * @return reason why the restarting cannot be conducted
 * @throws IllegalStateException if the result is restartable and thus carries no error
 */
public Throwable getError() {
    // Guard like the other accessors: the error only exists for an unrecoverable failure.
    if (canRestart()) {
        throw new IllegalStateException("Cannot get error when the restarting is accepted.");
    }
    return error;
}
/**
* Creates a result of a set of tasks to restart to recover from the failure.
*
* @param verticesToRestart containing task vertices to restart to recover from the failure
* @param restartDelayMS indicate a delay before conducting the restart
* @return result of a set of tasks to restart to recover from the failure
*/
public static FailureHandlingResult restartable(Set<ExecutionVertexID> verticesToRestart, long restartDelayMS) {
return new FailureHandlingResult(verticesToRestart, restartDelayMS);
}
/**
* Creates a result that the failure is not recoverable and no restarting should be conducted.
*
* @param error reason why the failure is not recoverable
* @return result indicating the failure is not recoverable
*/
public static FailureHandlingResult unrecoverable(Throwable error) {
return new FailureHandlingResult(error);
}
} |
Replace if else return and assign statement with ternary operator in priority | private int getColumnsCount(final SQLStatement sqlStatement) {
if (sqlStatement instanceof SelectStatement) {
return ((SelectStatement) sqlStatement).getProjections().getProjections().size();
}
return 0;
} | return 0; | private int getColumnsCount(final SQLStatement sqlStatement) {
    // Only SELECT statements carry projections; every other statement type prepares with zero result columns.
    return sqlStatement instanceof SelectStatement ? ((SelectStatement) sqlStatement).getProjections().getProjections().size() : 0;
}
private static final MySQLBinaryStatementRegistry PREPARED_STATEMENT_REGISTRY = MySQLBinaryStatementRegistry.getInstance();
private final MySQLComStmtPreparePacket packet;
private final LogicSchema logicSchema;
public MySQLComStmtPrepareExecutor(final MySQLComStmtPreparePacket packet, final BackendConnection backendConnection) {
    this.packet = packet;
    // The logic schema carries the SQL parser engine used by execute().
    logicSchema = backendConnection.getLogicSchema();
}
@Override
public Collection<DatabasePacket> execute() {
    Collection<DatabasePacket> result = new LinkedList<>();
    int currentSequenceId = 0;
    SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(packet.getSql(), true);
    int parametersCount = sqlStatement.getParametersCount();
    int columnsCount = getColumnsCount(sqlStatement);
    result.add(new MySQLComStmtPrepareOKPacket(++currentSequenceId, PREPARED_STATEMENT_REGISTRY.register(packet.getSql(), parametersCount), columnsCount, parametersCount, 0));
    // Parameter placeholders are described as "?" columns, result columns with an empty name;
    // the two sections were previously duplicated loops and are now shared in one helper.
    currentSequenceId = addColumnDefinitionPackets(result, currentSequenceId, parametersCount, "?");
    addColumnDefinitionPackets(result, currentSequenceId, columnsCount, "");
    return result;
}

/**
 * Appends {@code count} generic varchar column definition packets followed by one EOF packet.
 * Nothing is written when {@code count} is zero.
 *
 * @param result packet list being assembled
 * @param sequenceId last sequence id already used
 * @param count number of column definitions to append
 * @param columnName column name to advertise ("?" for parameters, empty for result columns)
 * @return the last sequence id used after appending
 */
private int addColumnDefinitionPackets(final Collection<DatabasePacket> result, final int sequenceId, final int count, final String columnName) {
    int currentSequenceId = sequenceId;
    if (count > 0) {
        for (int i = 0; i < count; i++) {
            result.add(new MySQLColumnDefinition41Packet(++currentSequenceId, "", "", "", columnName, "", 0, MySQLColumnType.MYSQL_TYPE_VAR_STRING, 0));
        }
        result.add(new MySQLEofPacket(++currentSequenceId));
    }
    return currentSequenceId;
}

/**
 * Counts the result columns the prepared statement will produce.
 * (Added here because {@link #execute()} calls it but this class body did not define it.)
 *
 * @param sqlStatement parsed SQL statement
 * @return projection count for a SELECT statement, otherwise {@code 0}
 */
private int getColumnsCount(final SQLStatement sqlStatement) {
    return sqlStatement instanceof SelectStatement
            ? ((SelectStatement) sqlStatement).getProjections().getProjections().size()
            : 0;
}
} | class MySQLComStmtPrepareExecutor implements CommandExecutor {
private static final MySQLBinaryStatementRegistry PREPARED_STATEMENT_REGISTRY = MySQLBinaryStatementRegistry.getInstance();
private final MySQLComStmtPreparePacket packet;
private final LogicSchema logicSchema;
public MySQLComStmtPrepareExecutor(final MySQLComStmtPreparePacket packet, final BackendConnection backendConnection) {
this.packet = packet;
logicSchema = backendConnection.getLogicSchema();
}
@Override
public Collection<DatabasePacket> execute() {
    // Response layout: OK packet first, then parameter definitions, then result-column definitions.
    Collection<DatabasePacket> result = new LinkedList<>();
    int currentSequenceId = 0;
    SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(packet.getSql(), true);
    int parametersCount = sqlStatement.getParametersCount();
    int columnsCount = getColumnsCount(sqlStatement);
    result.add(new MySQLComStmtPrepareOKPacket(++currentSequenceId, PREPARED_STATEMENT_REGISTRY.register(packet.getSql(), parametersCount), columnsCount, parametersCount, 0));
    if (parametersCount > 0) {
        // Each parameter placeholder is advertised as a generic "?" varchar column, closed by EOF.
        for (int i = 0; i < parametersCount; i++) {
            result.add(new MySQLColumnDefinition41Packet(++currentSequenceId, "", "", "", "?", "", 0, MySQLColumnType.MYSQL_TYPE_VAR_STRING, 0));
        }
        result.add(new MySQLEofPacket(++currentSequenceId));
    }
    if (columnsCount > 0) {
        // Result columns are advertised with empty names, also closed by EOF.
        for (int i = 0; i < columnsCount; i++) {
            result.add(new MySQLColumnDefinition41Packet(++currentSequenceId, "", "", "", "", "", 0, MySQLColumnType.MYSQL_TYPE_VAR_STRING, 0));
        }
        result.add(new MySQLEofPacket(++currentSequenceId));
    }
    return result;
}
} |
This becomes `null` for the cases like this ```ballerina public function main() { var a = flush; } ``` | public void visit(BLangWorkerFlushExpr workerFlushExpr) {
if (workerFlushExpr.workerSymbol == null) {
return;
}
addIfSameSymbol(workerFlushExpr.workerSymbol, workerFlushExpr.workerIdentifier.pos);
} | if (workerFlushExpr.workerSymbol == null) { | public void visit(BLangWorkerFlushExpr workerFlushExpr) {
if (workerFlushExpr.workerIdentifier == null) {
return;
}
addIfSameSymbol(workerFlushExpr.workerSymbol, workerFlushExpr.workerIdentifier.pos);
} | class ReferenceFinder extends BaseVisitor {
private final boolean withDefinition;
private List<Location> referenceLocations;
private BSymbol targetSymbol;
public ReferenceFinder(boolean withDefinition) {
this.withDefinition = withDefinition;
}
/**
 * Collects the locations of all references to {@code symbol} under {@code node}.
 *
 * @param node root of the subtree to search
 * @param symbol symbol whose references are wanted
 * @return locations of every matching reference found
 */
public List<Location> findReferences(BLangNode node, BSymbol symbol) {
    // Reset lookup state, then walk the tree collecting matches.
    this.targetSymbol = symbol;
    this.referenceLocations = new ArrayList<>();
    find(node);
    return this.referenceLocations;
}
// Dispatches the visitor to a single child node; absent children are silently skipped.
void find(BLangNode node) {
    if (node != null) {
        node.accept(this);
    }
}
// Dispatches the visitor to each node of a list, in order.
void find(List<? extends BLangNode> nodes) {
    nodes.forEach(this::find);
}
@Override
public void visit(BLangPackage pkgNode) {
find(pkgNode.imports);
find(pkgNode.xmlnsList);
find(pkgNode.constants);
find(pkgNode.globalVars);
find(pkgNode.services);
find(pkgNode.annotations);
find(pkgNode.typeDefinitions);
find(pkgNode.classDefinitions.stream()
.filter(c -> !isGeneratedClassDefForService(c))
.collect(Collectors.toList()));
find(pkgNode.functions.stream()
.filter(f -> !f.flagSet.contains(Flag.LAMBDA))
.collect(Collectors.toList()));
if (!(pkgNode instanceof BLangTestablePackage)) {
find(pkgNode.getTestablePkg());
}
}
@Override
public void visit(BLangImportPackage importPkgNode) {
if (importPkgNode.symbol != null
&& this.targetSymbol.name.equals(importPkgNode.symbol.name)
&& this.targetSymbol.pkgID.equals(importPkgNode.symbol.pkgID)
&& this.targetSymbol.pos.equals(importPkgNode.symbol.pos)
&& this.withDefinition) {
this.referenceLocations.add(importPkgNode.alias.pos);
}
}
@Override
public void visit(BLangCompilationUnit unit) {
unit.getTopLevelNodes().forEach(topLevelNode -> find((BLangNode) topLevelNode));
}
@Override
public void visit(BLangXMLNS xmlnsNode) {
find(xmlnsNode.namespaceURI);
addIfSameSymbol(xmlnsNode.symbol, xmlnsNode.prefix.pos);
}
@Override
public void visit(BLangFunction funcNode) {
find(funcNode.annAttachments);
find(funcNode.requiredParams);
find(funcNode.restParam);
find(funcNode.returnTypeAnnAttachments);
find(funcNode.returnTypeNode);
find(funcNode.body);
if (funcNode.symbol.origin != VIRTUAL) {
addIfSameSymbol(funcNode.symbol, funcNode.name.pos);
}
}
@Override
public void visit(BLangResourceFunction resourceFunction) {
visit((BLangFunction) resourceFunction);
}
@Override
public void visit(BLangBlockFunctionBody blockFuncBody) {
for (BLangStatement stmt : blockFuncBody.stmts) {
find(stmt);
}
}
@Override
public void visit(BLangExprFunctionBody exprFuncBody) {
find(exprFuncBody.expr);
}
@Override
public void visit(BLangExternalFunctionBody externFuncBody) {
find(externFuncBody.annAttachments);
}
@Override
public void visit(BLangService serviceNode) {
find(serviceNode.annAttachments);
find(serviceNode.serviceClass);
find(serviceNode.attachedExprs);
}
@Override
public void visit(BLangTypeDefinition typeDefinition) {
if (!typeDefinition.flagSet.contains(Flag.ENUM)) {
find(typeDefinition.typeNode);
}
find(typeDefinition.annAttachments);
addIfSameSymbol(typeDefinition.symbol, typeDefinition.name.pos);
}
@Override
public void visit(BLangConstant constant) {
find(constant.typeNode);
find(constant.expr);
addIfSameSymbol(constant.symbol, constant.name.pos);
}
@Override
public void visit(BLangSimpleVariable varNode) {
find(varNode.annAttachments);
find(varNode.typeNode);
find(varNode.expr);
addIfSameSymbol(varNode.symbol, varNode.name.pos);
}
@Override
public void visit(BLangAnnotation annotationNode) {
find(annotationNode.annAttachments);
find(annotationNode.typeNode);
addIfSameSymbol(annotationNode.symbol, annotationNode.name.pos);
}
@Override
public void visit(BLangAnnotationAttachment annAttachmentNode) {
find(annAttachmentNode.expr);
if (!annAttachmentNode.pkgAlias.value.isEmpty()
&& annAttachmentNode.annotationSymbol != null
&& addIfSameSymbol(annAttachmentNode.annotationSymbol.owner, annAttachmentNode.pkgAlias.pos)) {
return;
}
addIfSameSymbol(annAttachmentNode.annotationSymbol, annAttachmentNode.annotationName.pos);
}
@Override
public void visit(BLangTableKeySpecifier tableKeySpecifierNode) {
}
@Override
public void visit(BLangTableKeyTypeConstraint tableKeyTypeConstraint) {
find(tableKeyTypeConstraint.keyType);
}
@Override
public void visit(BLangBlockStmt blockNode) {
for (BLangStatement stmt : blockNode.stmts) {
find(stmt);
}
}
@Override
public void visit(BLangLock.BLangLockStmt lockStmtNode) {
}
@Override
public void visit(BLangLock.BLangUnLockStmt unLockNode) {
}
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
find(varDefNode.var);
}
@Override
public void visit(BLangAssignment assignNode) {
find(assignNode.expr);
find(assignNode.varRef);
}
@Override
public void visit(BLangCompoundAssignment compoundAssignNode) {
find(compoundAssignNode.expr);
find(compoundAssignNode.varRef);
}
@Override
public void visit(BLangRetry retryNode) {
find(retryNode.retrySpec);
find(retryNode.retryBody);
find(retryNode.onFailClause);
}
@Override
public void visit(BLangRetryTransaction retryTransaction) {
find(retryTransaction.retrySpec);
find(retryTransaction.transaction);
}
@Override
public void visit(BLangRetrySpec retrySpec) {
find(retrySpec.argExprs);
find(retrySpec.retryManagerType);
}
@Override
public void visit(BLangReturn returnNode) {
find(returnNode.expr);
}
@Override
public void visit(BLangPanic panicNode) {
find(panicNode.expr);
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
find(xmlnsStmtNode.xmlnsDecl);
}
@Override
public void visit(BLangExpressionStmt exprStmtNode) {
find(exprStmtNode.expr);
}
@Override
public void visit(BLangIf ifNode) {
find(ifNode.expr);
find(ifNode.body);
find(ifNode.elseStmt);
}
@Override
public void visit(BLangQueryAction queryAction) {
find(queryAction.doClause);
find(queryAction.queryClauseList);
}
@Override
public void visit(BLangMatchStatement matchStatementNode) {
find(matchStatementNode.expr);
find(matchStatementNode.matchClauses);
find(matchStatementNode.onFailClause);
}
@Override
public void visit(BLangMatchGuard matchGuard) {
find(matchGuard.expr);
}
@Override
public void visit(BLangConstPattern constMatchPattern) {
find(constMatchPattern.expr);
}
@Override
public void visit(BLangVarBindingPatternMatchPattern varBindingPattern) {
find(varBindingPattern.getBindingPattern());
}
@Override
public void visit(BLangErrorMatchPattern errorMatchPattern) {
find(errorMatchPattern.errorMessageMatchPattern);
find(errorMatchPattern.errorTypeReference);
find(errorMatchPattern.errorCauseMatchPattern);
find(errorMatchPattern.errorFieldMatchPatterns);
}
@Override
public void visit(BLangErrorMessageMatchPattern errorMessageMatchPattern) {
find(errorMessageMatchPattern.simpleMatchPattern);
}
@Override
public void visit(BLangErrorCauseMatchPattern errorCauseMatchPattern) {
find(errorCauseMatchPattern.simpleMatchPattern);
find(errorCauseMatchPattern.errorMatchPattern);
}
@Override
public void visit(BLangErrorFieldMatchPatterns errorFieldMatchPatterns) {
find(errorFieldMatchPatterns.namedArgMatchPatterns);
find(errorFieldMatchPatterns.restMatchPattern);
}
@Override
public void visit(BLangSimpleMatchPattern simpleMatchPattern) {
find(simpleMatchPattern.varVariableName);
find(simpleMatchPattern.constPattern);
}
@Override
public void visit(BLangNamedArgMatchPattern namedArgMatchPattern) {
find(namedArgMatchPattern.matchPattern);
}
@Override
public void visit(BLangCaptureBindingPattern captureBindingPattern) {
addIfSameSymbol(captureBindingPattern.symbol, captureBindingPattern.getIdentifier().getPosition());
}
@Override
public void visit(BLangListBindingPattern listBindingPattern) {
find(listBindingPattern.bindingPatterns);
find(listBindingPattern.restBindingPattern);
}
@Override
public void visit(BLangMappingBindingPattern mappingBindingPattern) {
find(mappingBindingPattern.fieldBindingPatterns);
find(mappingBindingPattern.restBindingPattern);
}
@Override
public void visit(BLangFieldBindingPattern fieldBindingPattern) {
find(fieldBindingPattern.bindingPattern);
}
@Override
public void visit(BLangRestBindingPattern restBindingPattern) {
addIfSameSymbol(restBindingPattern.symbol, restBindingPattern.getIdentifier().getPosition());
}
@Override
public void visit(BLangErrorBindingPattern errorBindingPattern) {
find(errorBindingPattern.errorMessageBindingPattern);
find(errorBindingPattern.errorTypeReference);
find(errorBindingPattern.errorCauseBindingPattern);
find(errorBindingPattern.errorFieldBindingPatterns);
}
@Override
public void visit(BLangErrorMessageBindingPattern errorMessageBindingPattern) {
find(errorMessageBindingPattern.simpleBindingPattern);
}
@Override
public void visit(BLangErrorCauseBindingPattern errorCauseBindingPattern) {
find(errorCauseBindingPattern.simpleBindingPattern);
find(errorCauseBindingPattern.errorBindingPattern);
}
@Override
public void visit(BLangErrorFieldBindingPatterns errorFieldBindingPatterns) {
find(errorFieldBindingPatterns.namedArgBindingPatterns);
find(errorFieldBindingPatterns.restBindingPattern);
}
@Override
public void visit(BLangSimpleBindingPattern simpleBindingPattern) {
find(simpleBindingPattern.captureBindingPattern);
}
@Override
public void visit(BLangNamedArgBindingPattern namedArgBindingPattern) {
find(namedArgBindingPattern.bindingPattern);
}
@Override
public void visit(BLangForeach foreach) {
find((BLangNode) foreach.variableDefinitionNode);
find(foreach.collection);
find(foreach.body);
find(foreach.onFailClause);
}
@Override
public void visit(BLangDo doNode) {
find(doNode.body);
find(doNode.onFailClause);
}
@Override
public void visit(BLangFail failNode) {
find(failNode.expr);
}
@Override
public void visit(BLangFromClause fromClause) {
find((BLangNode) fromClause.variableDefinitionNode);
find(fromClause.collection);
}
@Override
public void visit(BLangJoinClause joinClause) {
find((BLangNode) joinClause.variableDefinitionNode);
find((BLangOnClause) joinClause.onClause);
find(joinClause.collection);
}
@Override
public void visit(BLangLetClause letClause) {
for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
find((BLangNode) letVariable.definitionNode);
}
}
@Override
public void visit(BLangOnClause onClause) {
find(onClause.lhsExpr);
find(onClause.rhsExpr);
}
@Override
public void visit(BLangOrderKey orderKeyClause) {
find(orderKeyClause.expression);
}
@Override
public void visit(BLangOrderByClause orderByClause) {
for (OrderKeyNode orderKeyNode : orderByClause.orderByKeyList) {
find((BLangOrderKey) orderKeyNode);
}
}
@Override
public void visit(BLangSelectClause selectClause) {
find(selectClause.expression);
}
@Override
public void visit(BLangWhereClause whereClause) {
find(whereClause.expression);
}
@Override
public void visit(BLangDoClause doClause) {
find(doClause.body);
}
@Override
public void visit(BLangOnFailClause onFailClause) {
find((BLangNode) onFailClause.variableDefinitionNode);
find(onFailClause.body);
}
@Override
public void visit(BLangOnConflictClause onConflictClause) {
find(onConflictClause.expression);
}
@Override
public void visit(BLangLimitClause limitClause) {
find(limitClause.expression);
}
@Override
public void visit(BLangMatchClause matchClause) {
find(matchClause.matchPatterns);
find(matchClause.matchGuard);
find(matchClause.blockStmt);
}
@Override
public void visit(BLangWhile whileNode) {
find(whileNode.expr);
find(whileNode.body);
find(whileNode.onFailClause);
}
@Override
public void visit(BLangLock lockNode) {
find(lockNode.body);
find(lockNode.onFailClause);
}
@Override
public void visit(BLangTransaction transactionNode) {
find(transactionNode.transactionBody);
find(transactionNode.onFailClause);
}
@Override
public void visit(BLangTupleDestructure stmt) {
find(stmt.expr);
find(stmt.varRef);
}
@Override
public void visit(BLangRecordDestructure stmt) {
find(stmt.expr);
find(stmt.varRef);
}
@Override
public void visit(BLangErrorDestructure stmt) {
find(stmt.expr);
find(stmt.varRef);
}
@Override
public void visit(BLangForkJoin forkJoin) {
find(forkJoin.workers);
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
find(workerSendNode.expr);
addIfSameSymbol(workerSendNode.workerSymbol, workerSendNode.workerIdentifier.pos);
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
addIfSameSymbol(workerReceiveNode.workerSymbol, workerReceiveNode.workerIdentifier.pos);
}
@Override
public void visit(BLangRollback rollbackNode) {
find(rollbackNode.expr);
}
@Override
@Override
public void visit(BLangConstRef constRef) {
    // Guard against a null symbol (possible for semantically invalid code) before
    // dereferencing its owner; mirrors the null check in visit(BLangInvocation).
    if (constRef.symbol != null && !constRef.pkgAlias.value.isEmpty()) {
        addIfSameSymbol(constRef.symbol.owner, constRef.pkgAlias.pos);
    }
    // addIfSameSymbol itself tolerates a null symbol.
    addIfSameSymbol(constRef.symbol, constRef.variableName.pos);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
find((BLangNode) field);
}
}
@Override
public void visit(BLangTupleVarRef varRefExpr) {
find(varRefExpr.expressions);
find((BLangNode) varRefExpr.restParam);
}
@Override
public void visit(BLangRecordVarRef varRefExpr) {
for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
find(recordRefField.getBindingPattern());
}
find((BLangNode) varRefExpr.restParam);
}
@Override
public void visit(BLangErrorVarRef varRefExpr) {
find(varRefExpr.typeNode);
find(varRefExpr.message);
find(varRefExpr.cause);
find(varRefExpr.restVar);
if (varRefExpr.typeNode != null) {
find(varRefExpr.detail);
} else {
visitNamedArgWithoutAddingSymbol(varRefExpr.detail);
}
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
if (varRefExpr.symbol == null) {
return;
}
if (varRefExpr.pkgAlias != null && !varRefExpr.pkgAlias.value.isEmpty() &&
addIfSameSymbol(varRefExpr.symbol.owner, varRefExpr.pkgAlias.pos)) {
return;
}
addIfSameSymbol(varRefExpr.symbol, varRefExpr.variableName.pos);
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
find(fieldAccessExpr.expr);
addIfSameSymbol(fieldAccessExpr.symbol, fieldAccessExpr.field.pos);
}
@Override
public void visit(BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
find(nsPrefixedFieldBasedAccess.expr);
addIfSameSymbol(nsPrefixedFieldBasedAccess.nsSymbol, nsPrefixedFieldBasedAccess.nsPrefix.pos);
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
find(indexAccessExpr.expr);
if (indexAccessExpr.indexExpr instanceof BLangLiteral) {
addIfSameSymbol(indexAccessExpr.symbol, getLocationForLiteral(indexAccessExpr.indexExpr.pos));
} else {
find(indexAccessExpr.indexExpr);
}
}
@Override
public void visit(BLangInvocation invocationExpr) {
if (!invocationExpr.langLibInvocation) {
find(invocationExpr.expr);
}
find(invocationExpr.annAttachments);
find(invocationExpr.argExprs);
if (!invocationExpr.pkgAlias.value.isEmpty() && invocationExpr.symbol != null) {
addIfSameSymbol(invocationExpr.symbol.owner, invocationExpr.pkgAlias.pos);
}
addIfSameSymbol(invocationExpr.symbol, invocationExpr.name.pos);
}
@Override
public void visit(BLangTypeInit typeInit) {
find(typeInit.userDefinedType);
find(typeInit.argsExpr);
}
@Override
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocationExpr) {
    // Visit the receiver expression, arguments and annotation attachments first.
    find(actionInvocationExpr.expr);
    find(actionInvocationExpr.requiredArgs);
    find(actionInvocationExpr.annAttachments);
    find(actionInvocationExpr.restArgs);
    // Guard against a null symbol before dereferencing its owner; the sibling
    // visit(BLangInvocation) performs this null check and this visitor should too,
    // otherwise invalid code with an unresolved action can NPE here.
    if (!actionInvocationExpr.pkgAlias.value.isEmpty() && actionInvocationExpr.symbol != null) {
        addIfSameSymbol(actionInvocationExpr.symbol.owner, actionInvocationExpr.pkgAlias.pos);
    }
    addIfSameSymbol(actionInvocationExpr.symbol, actionInvocationExpr.name.pos);
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
find(ternaryExpr.expr);
find(ternaryExpr.thenExpr);
find(ternaryExpr.elseExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
find(waitExpr.exprList);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
find(trapExpr.expr);
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
find(binaryExpr.lhsExpr);
find(binaryExpr.rhsExpr);
}
@Override
public void visit(BLangElvisExpr elvisExpr) {
find(elvisExpr.lhsExpr);
find(elvisExpr.rhsExpr);
}
@Override
public void visit(BLangGroupExpr groupExpr) {
find(groupExpr.expression);
}
@Override
public void visit(BLangLetExpression letExpr) {
for (BLangLetVariable letVarDeclaration : letExpr.letVarDeclarations) {
find((BLangNode) letVarDeclaration.definitionNode);
}
find(letExpr.expr);
}
@Override
public void visit(BLangListConstructorExpr listConstructorExpr) {
find(listConstructorExpr.exprs);
}
@Override
public void visit(BLangListConstructorExpr.BLangListConstructorSpreadOpExpr spreadOpExpr) {
find(spreadOpExpr.expr);
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
find(tableConstructorExpr.recordLiteralList);
find(tableConstructorExpr.tableKeySpecifier);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
find(unaryExpr.expr);
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
find(typedescExpr.typeNode);
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
find(conversionExpr.annAttachments);
find(conversionExpr.typeNode);
find(conversionExpr.expr);
}
@Override
public void visit(BLangXMLQName xmlQName) {
addIfSameSymbol(xmlQName.nsSymbol, xmlQName.prefix.pos);
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
find(xmlAttribute.name);
find(xmlAttribute.value);
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
find(xmlElementLiteral.startTagName);
find(xmlElementLiteral.endTagName);
find(xmlElementLiteral.children);
find(xmlElementLiteral.attributes);
find(xmlElementLiteral.inlineNamespaces);
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
find(xmlTextLiteral.textFragments);
find(xmlTextLiteral.concatExpr);
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
find(xmlCommentLiteral.textFragments);
find(xmlCommentLiteral.concatExpr);
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
find(xmlProcInsLiteral.target);
find(xmlProcInsLiteral.dataFragments);
find(xmlProcInsLiteral.dataConcatExpr);
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
find(xmlQuotedString.textFragments);
find(xmlQuotedString.concatExpr);
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
find(stringTemplateLiteral.exprs);
}
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
find(rawTemplateLiteral.insertions);
find(rawTemplateLiteral.strings);
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
find(bLangLambdaFunction.function);
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
find(bLangArrowFunction.params);
find(bLangArrowFunction.body);
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
find(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
find(bLangNamedArgsExpression.expr);
addIfSameSymbol(bLangNamedArgsExpression.varSymbol, bLangNamedArgsExpression.name.pos);
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
find(assignableExpr.lhsExpr);
find(assignableExpr.typeNode);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
find(checkedExpr.expr);
}
@Override
public void visit(BLangCheckPanickedExpr checkPanickedExpr) {
find(checkPanickedExpr.expr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
find(serviceConstructorExpr.serviceNode);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
find(typeTestExpr.expr);
find(typeTestExpr.typeNode);
}
@Override
public void visit(BLangIsLikeExpr typeTestExpr) {
find(typeTestExpr.expr);
find(typeTestExpr.typeNode);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
find(annotAccessExpr.expr);
addIfSameSymbol(annotAccessExpr.annotationSymbol, annotAccessExpr.annotationName.pos);
}
@Override
public void visit(BLangQueryExpr queryExpr) {
find(queryExpr.queryClauseList);
}
@Override
public void visit(BLangObjectConstructorExpression objConstructor) {
find(objConstructor.classNode);
}
@Override
public void visit(BLangArrayType arrayType) {
find(arrayType.elemtype);
for (BLangExpression size : arrayType.sizes) {
find(size);
}
}
@Override
public void visit(BLangConstrainedType constrainedType) {
find(constrainedType.type);
find(constrainedType.constraint);
}
@Override
public void visit(BLangStreamType streamType) {
find(streamType.constraint);
find(streamType.error);
}
@Override
public void visit(BLangTableTypeNode tableType) {
find(tableType.constraint);
find(tableType.tableKeySpecifier);
find(tableType.tableKeyTypeConstraint);
}
@Override
public void visit(BLangUserDefinedType userDefinedType) {
if (userDefinedType.symbol == null) {
return;
}
if (!userDefinedType.pkgAlias.value.isEmpty()) {
addIfSameSymbol(userDefinedType.symbol.owner, userDefinedType.pkgAlias.pos);
}
addIfSameSymbol(userDefinedType.symbol, userDefinedType.typeName.pos);
}
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
find(functionTypeNode.params);
find(functionTypeNode.restParam);
find(functionTypeNode.returnTypeNode);
}
@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
find(unionTypeNode.memberTypeNodes);
}
@Override
public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
find(intersectionTypeNode.constituentTypeNodes);
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
find(objectTypeNode.fields);
find(objectTypeNode.functions);
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
find(recordTypeNode.typeRefs);
find(recordTypeNode.fields);
find(recordTypeNode.restFieldType);
}
@Override
public void visit(BLangFiniteTypeNode finiteTypeNode) {
find(finiteTypeNode.valueSpace);
}
@Override
public void visit(BLangTupleTypeNode tupleTypeNode) {
find(tupleTypeNode.memberTypeNodes);
find(tupleTypeNode.restParamType);
}
@Override
public void visit(BLangErrorType errorType) {
find(errorType.detailType);
}
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
find(errorConstructorExpr.errorTypeRef);
find(errorConstructorExpr.positionalArgs);
find(errorConstructorExpr.namedArgs);
}
@Override
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
find(bLangXMLSequenceLiteral.xmlItems);
}
@Override
public void visit(BLangTupleVariable bLangTupleVariable) {
find(bLangTupleVariable.annAttachments);
find(bLangTupleVariable.typeNode);
find(bLangTupleVariable.memberVariables);
find(bLangTupleVariable.restVariable);
find(bLangTupleVariable.expr);
}
@Override
public void visit(BLangTupleVariableDef bLangTupleVariableDef) {
find(bLangTupleVariableDef.var);
}
@Override
public void visit(BLangRecordVariable bLangRecordVariable) {
find(bLangRecordVariable.annAttachments);
find(bLangRecordVariable.typeNode);
for (BLangRecordVariable.BLangRecordVariableKeyValue variableKeyValue : bLangRecordVariable.variableList) {
find(variableKeyValue.valueBindingPattern);
}
find(bLangRecordVariable.expr);
find(bLangRecordVariable.restParam);
}
@Override
public void visit(BLangRecordVariableDef bLangRecordVariableDef) {
find(bLangRecordVariableDef.var);
}
@Override
public void visit(BLangErrorVariable bLangErrorVariable) {
find(bLangErrorVariable.annAttachments);
find(bLangErrorVariable.typeNode);
find(bLangErrorVariable.message);
find(bLangErrorVariable.restDetail);
find(bLangErrorVariable.cause);
find(bLangErrorVariable.reasonMatchConst);
find(bLangErrorVariable.expr);
for (BLangErrorVariable.BLangErrorDetailEntry errorDetailEntry : bLangErrorVariable.detail) {
find(errorDetailEntry.valueBindingPattern);
addIfSameSymbol(errorDetailEntry.keySymbol, errorDetailEntry.key.pos);
}
}
@Override
public void visit(BLangErrorVariableDef bLangErrorVariableDef) {
find(bLangErrorVariableDef.errorVariable);
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Visit the value being sent, then record the worker identifier as a reference
    // when it resolves to the target symbol.
    // NOTE: the original had a duplicated @Override annotation here; @Override is not
    // @Repeatable, so the duplicate is a compile error and has been removed.
    find(syncSendExpr.expr);
    addIfSameSymbol(syncSendExpr.workerSymbol, syncSendExpr.workerIdentifier.pos);
}
@Override
public void visit(BLangWaitForAllExpr waitForAllExpr) {
find(waitForAllExpr.keyValuePairs);
}
@Override
public void visit(BLangRecordLiteral.BLangRecordKeyValueField recordKeyValue) {
find(recordKeyValue.key);
find(recordKeyValue.valueExpr);
}
@Override
public void visit(BLangRecordLiteral.BLangRecordKey recordKey) {
find(recordKey.expr);
addIfSameSymbol(recordKey.fieldSymbol, recordKey.pos);
}
@Override
public void visit(BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField) {
find(spreadOperatorField.expr);
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitKeyValue waitKeyValue) {
find(waitKeyValue.keyExpr);
find(waitKeyValue.valueExpr);
}
@Override
public void visit(BLangXMLElementFilter xmlElementFilter) {
addIfSameSymbol(xmlElementFilter.namespaceSymbol, xmlElementFilter.nsPos);
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
find(xmlElementAccess.expr);
find(xmlElementAccess.filters);
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
find(xmlNavigation.childIndex);
find(xmlNavigation.filters);
find(xmlNavigation.expr);
}
@Override
public void visit(BLangClassDefinition classDefinition) {
find(classDefinition.annAttachments);
find(classDefinition.fields);
find(classDefinition.initFunction);
find(classDefinition.functions);
find(classDefinition.typeRefs);
addIfSameSymbol(classDefinition.symbol, classDefinition.name.pos);
}
@Override
public void visit(BLangListMatchPattern listMatchPattern) {
find(listMatchPattern.matchPatterns);
find(listMatchPattern.restMatchPattern);
}
@Override
public void visit(BLangMappingMatchPattern mappingMatchPattern) {
find(mappingMatchPattern.fieldMatchPatterns);
find(mappingMatchPattern.restMatchPattern);
}
@Override
public void visit(BLangFieldMatchPattern fieldMatchPattern) {
find(fieldMatchPattern.matchPattern);
}
@Override
public void visit(BLangRestMatchPattern restMatchPattern) {
addIfSameSymbol(restMatchPattern.symbol, restMatchPattern.variableName.pos);
}
@Override
@Override
public void visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {
    // Visit the client expression, arguments, annotations and the resource path segments.
    find(resourceAccessInvocation.expr);
    find(resourceAccessInvocation.requiredArgs);
    find(resourceAccessInvocation.annAttachments);
    find(resourceAccessInvocation.restArgs);
    find(resourceAccessInvocation.resourceAccessPathSegments);
    // Guard against a null symbol before dereferencing its owner, consistent with
    // visit(BLangInvocation); avoids an NPE on unresolved resource invocations.
    if (!resourceAccessInvocation.pkgAlias.value.isEmpty() && resourceAccessInvocation.symbol != null) {
        addIfSameSymbol(resourceAccessInvocation.symbol.owner, resourceAccessInvocation.pkgAlias.pos);
    }
    addIfSameSymbol(resourceAccessInvocation.symbol, resourceAccessInvocation.pos);
}
// Visits only the value expressions of the given named args, deliberately skipping the
// arg-name symbols so they are not reported as references.
private void visitNamedArgWithoutAddingSymbol(List<BLangNamedArgsExpression> args) {
    args.forEach(arg -> find(arg.expr));
}
// Records {@code location} as a reference iff {@code symbol} matches the target symbol.
// Matching compares name, package ID and declaration position rather than object identity,
// since distinct BSymbol instances may describe the same declaration.
// The location equal to the symbol's own declaration position is only recorded when
// this finder was created with withDefinition = true.
// Returns true when the location was added.
private boolean addIfSameSymbol(BSymbol symbol, Location location) {
if (symbol != null
&& this.targetSymbol.name.equals(symbol.name)
&& this.targetSymbol.pkgID.equals(symbol.pkgID)
&& this.targetSymbol.pos.equals(symbol.pos)
// Skip the definition site itself unless definitions were requested.
&& (this.withDefinition || !symbol.pos.equals(location))) {
this.referenceLocations.add(location);
return true;
}
return false;
}
// Heuristic: a class definition flagged both SERVICE and ANONYMOUS is treated as one
// the compiler generated for a service declaration (and is skipped by the finder).
private boolean isGeneratedClassDefForService(BLangClassDefinition clazz) {
    return clazz.flagSet.contains(Flag.SERVICE) && clazz.flagSet.contains(Flag.ANONYMOUS);
}
/**
 * This method is intended to be used for getting the location of a string value with the surrounding quotes
 * disregarded. If we give the original location, it'd be problematic for use cases such as renaming since we only
 * return a list of locations of references. Without further contextual info, it'll be hard to determine whether a
 * particular reference location is a string value.
 *
 * @param location Location of the string
 * @return The modified location with the quotes disregarded
 */
private Location getLocationForLiteral(Location location) {
LineRange lineRange = location.lineRange();
// Shrink the line range by one column on each side to drop the opening and closing
// quote characters; the underlying text-range offsets are kept unchanged.
return new BLangDiagnosticLocation(lineRange.filePath(),
lineRange.startLine().line(), lineRange.endLine().line(),
lineRange.startLine().offset() + 1, lineRange.endLine().offset() - 1,
location.textRange().startOffset(), location.textRange().length());
}
} | class ReferenceFinder extends BaseVisitor {
private final boolean withDefinition;
private List<Location> referenceLocations;
private BSymbol targetSymbol;
public ReferenceFinder(boolean withDefinition) {
this.withDefinition = withDefinition;
}
public List<Location> findReferences(BLangNode node, BSymbol symbol) {
this.referenceLocations = new ArrayList<>();
this.targetSymbol = symbol;
find(node);
return this.referenceLocations;
}
void find(BLangNode node) {
if (node == null) {
return;
}
node.accept(this);
}
void find(List<? extends BLangNode> nodes) {
for (BLangNode node : nodes) {
find(node);
}
}
@Override
public void visit(BLangPackage pkgNode) {
find(pkgNode.imports);
find(pkgNode.xmlnsList);
find(pkgNode.constants);
find(pkgNode.globalVars);
find(pkgNode.services);
find(pkgNode.annotations);
find(pkgNode.typeDefinitions);
find(pkgNode.classDefinitions.stream()
.filter(c -> !isGeneratedClassDefForService(c))
.collect(Collectors.toList()));
find(pkgNode.functions.stream()
.filter(f -> !f.flagSet.contains(Flag.LAMBDA))
.collect(Collectors.toList()));
if (!(pkgNode instanceof BLangTestablePackage)) {
find(pkgNode.getTestablePkg());
}
}
@Override
public void visit(BLangImportPackage importPkgNode) {
if (importPkgNode.symbol != null
&& this.targetSymbol.name.equals(importPkgNode.symbol.name)
&& this.targetSymbol.pkgID.equals(importPkgNode.symbol.pkgID)
&& this.targetSymbol.pos.equals(importPkgNode.symbol.pos)
&& this.withDefinition) {
this.referenceLocations.add(importPkgNode.alias.pos);
}
}
@Override
public void visit(BLangCompilationUnit unit) {
unit.getTopLevelNodes().forEach(topLevelNode -> find((BLangNode) topLevelNode));
}
@Override
public void visit(BLangXMLNS xmlnsNode) {
find(xmlnsNode.namespaceURI);
addIfSameSymbol(xmlnsNode.symbol, xmlnsNode.prefix.pos);
}
@Override
public void visit(BLangFunction funcNode) {
find(funcNode.annAttachments);
find(funcNode.requiredParams);
find(funcNode.restParam);
find(funcNode.returnTypeAnnAttachments);
find(funcNode.returnTypeNode);
find(funcNode.body);
if (funcNode.symbol.origin != VIRTUAL) {
addIfSameSymbol(funcNode.symbol, funcNode.name.pos);
}
}
@Override
public void visit(BLangResourceFunction resourceFunction) {
visit((BLangFunction) resourceFunction);
}
@Override
public void visit(BLangBlockFunctionBody blockFuncBody) {
for (BLangStatement stmt : blockFuncBody.stmts) {
find(stmt);
}
}
@Override
public void visit(BLangExprFunctionBody exprFuncBody) {
find(exprFuncBody.expr);
}
@Override
public void visit(BLangExternalFunctionBody externFuncBody) {
find(externFuncBody.annAttachments);
}
@Override
public void visit(BLangService serviceNode) {
find(serviceNode.annAttachments);
find(serviceNode.serviceClass);
find(serviceNode.attachedExprs);
}
@Override
public void visit(BLangTypeDefinition typeDefinition) {
if (!typeDefinition.flagSet.contains(Flag.ENUM)) {
find(typeDefinition.typeNode);
}
find(typeDefinition.annAttachments);
addIfSameSymbol(typeDefinition.symbol, typeDefinition.name.pos);
}
@Override
public void visit(BLangConstant constant) {
find(constant.typeNode);
find(constant.expr);
addIfSameSymbol(constant.symbol, constant.name.pos);
}
@Override
public void visit(BLangSimpleVariable varNode) {
find(varNode.annAttachments);
find(varNode.typeNode);
find(varNode.expr);
addIfSameSymbol(varNode.symbol, varNode.name.pos);
}
@Override
public void visit(BLangAnnotation annotationNode) {
find(annotationNode.annAttachments);
find(annotationNode.typeNode);
addIfSameSymbol(annotationNode.symbol, annotationNode.name.pos);
}
@Override
public void visit(BLangAnnotationAttachment annAttachmentNode) {
find(annAttachmentNode.expr);
if (!annAttachmentNode.pkgAlias.value.isEmpty()
&& annAttachmentNode.annotationSymbol != null
&& addIfSameSymbol(annAttachmentNode.annotationSymbol.owner, annAttachmentNode.pkgAlias.pos)) {
return;
}
addIfSameSymbol(annAttachmentNode.annotationSymbol, annAttachmentNode.annotationName.pos);
}
@Override
public void visit(BLangTableKeySpecifier tableKeySpecifierNode) {
}
@Override
public void visit(BLangTableKeyTypeConstraint tableKeyTypeConstraint) {
find(tableKeyTypeConstraint.keyType);
}
@Override
public void visit(BLangBlockStmt blockNode) {
for (BLangStatement stmt : blockNode.stmts) {
find(stmt);
}
}
@Override
public void visit(BLangLock.BLangLockStmt lockStmtNode) {
}
@Override
public void visit(BLangLock.BLangUnLockStmt unLockNode) {
}
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
find(varDefNode.var);
}
@Override
public void visit(BLangAssignment assignNode) {
find(assignNode.expr);
find(assignNode.varRef);
}
@Override
public void visit(BLangCompoundAssignment compoundAssignNode) {
find(compoundAssignNode.expr);
find(compoundAssignNode.varRef);
}
@Override
public void visit(BLangRetry retryNode) {
find(retryNode.retrySpec);
find(retryNode.retryBody);
find(retryNode.onFailClause);
}
@Override
public void visit(BLangRetryTransaction retryTransaction) {
find(retryTransaction.retrySpec);
find(retryTransaction.transaction);
}
@Override
public void visit(BLangRetrySpec retrySpec) {
find(retrySpec.argExprs);
find(retrySpec.retryManagerType);
}
@Override
public void visit(BLangReturn returnNode) {
find(returnNode.expr);
}
@Override
public void visit(BLangPanic panicNode) {
find(panicNode.expr);
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
find(xmlnsStmtNode.xmlnsDecl);
}
@Override
public void visit(BLangExpressionStmt exprStmtNode) {
find(exprStmtNode.expr);
}
@Override
public void visit(BLangIf ifNode) {
find(ifNode.expr);
find(ifNode.body);
find(ifNode.elseStmt);
}
@Override
public void visit(BLangQueryAction queryAction) {
find(queryAction.doClause);
find(queryAction.queryClauseList);
}
@Override
public void visit(BLangMatchStatement matchStatementNode) {
find(matchStatementNode.expr);
find(matchStatementNode.matchClauses);
find(matchStatementNode.onFailClause);
}
@Override
public void visit(BLangMatchGuard matchGuard) {
find(matchGuard.expr);
}
@Override
public void visit(BLangConstPattern constMatchPattern) {
find(constMatchPattern.expr);
}
@Override
public void visit(BLangVarBindingPatternMatchPattern varBindingPattern) {
find(varBindingPattern.getBindingPattern());
}
@Override
public void visit(BLangErrorMatchPattern errorMatchPattern) {
find(errorMatchPattern.errorMessageMatchPattern);
find(errorMatchPattern.errorTypeReference);
find(errorMatchPattern.errorCauseMatchPattern);
find(errorMatchPattern.errorFieldMatchPatterns);
}
@Override
public void visit(BLangErrorMessageMatchPattern errorMessageMatchPattern) {
find(errorMessageMatchPattern.simpleMatchPattern);
}
@Override
public void visit(BLangErrorCauseMatchPattern errorCauseMatchPattern) {
find(errorCauseMatchPattern.simpleMatchPattern);
find(errorCauseMatchPattern.errorMatchPattern);
}
@Override
public void visit(BLangErrorFieldMatchPatterns errorFieldMatchPatterns) {
find(errorFieldMatchPatterns.namedArgMatchPatterns);
find(errorFieldMatchPatterns.restMatchPattern);
}
@Override
public void visit(BLangSimpleMatchPattern simpleMatchPattern) {
find(simpleMatchPattern.varVariableName);
find(simpleMatchPattern.constPattern);
}
@Override
public void visit(BLangNamedArgMatchPattern namedArgMatchPattern) {
find(namedArgMatchPattern.matchPattern);
}
@Override
public void visit(BLangCaptureBindingPattern captureBindingPattern) {
addIfSameSymbol(captureBindingPattern.symbol, captureBindingPattern.getIdentifier().getPosition());
}
@Override
public void visit(BLangListBindingPattern listBindingPattern) {
find(listBindingPattern.bindingPatterns);
find(listBindingPattern.restBindingPattern);
}
@Override
public void visit(BLangMappingBindingPattern mappingBindingPattern) {
find(mappingBindingPattern.fieldBindingPatterns);
find(mappingBindingPattern.restBindingPattern);
}
@Override
public void visit(BLangFieldBindingPattern fieldBindingPattern) {
find(fieldBindingPattern.bindingPattern);
}
@Override
public void visit(BLangRestBindingPattern restBindingPattern) {
addIfSameSymbol(restBindingPattern.symbol, restBindingPattern.getIdentifier().getPosition());
}
@Override
public void visit(BLangErrorBindingPattern errorBindingPattern) {
find(errorBindingPattern.errorMessageBindingPattern);
find(errorBindingPattern.errorTypeReference);
find(errorBindingPattern.errorCauseBindingPattern);
find(errorBindingPattern.errorFieldBindingPatterns);
}
@Override
public void visit(BLangErrorMessageBindingPattern errorMessageBindingPattern) {
find(errorMessageBindingPattern.simpleBindingPattern);
}
@Override
public void visit(BLangErrorCauseBindingPattern errorCauseBindingPattern) {
find(errorCauseBindingPattern.simpleBindingPattern);
find(errorCauseBindingPattern.errorBindingPattern);
}
@Override
public void visit(BLangErrorFieldBindingPatterns errorFieldBindingPatterns) {
find(errorFieldBindingPatterns.namedArgBindingPatterns);
find(errorFieldBindingPatterns.restBindingPattern);
}
@Override
public void visit(BLangSimpleBindingPattern simpleBindingPattern) {
find(simpleBindingPattern.captureBindingPattern);
}
@Override
public void visit(BLangNamedArgBindingPattern namedArgBindingPattern) {
find(namedArgBindingPattern.bindingPattern);
}
@Override
public void visit(BLangForeach foreach) {
find((BLangNode) foreach.variableDefinitionNode);
find(foreach.collection);
find(foreach.body);
find(foreach.onFailClause);
}
@Override
public void visit(BLangDo doNode) {
find(doNode.body);
find(doNode.onFailClause);
}
@Override
public void visit(BLangFail failNode) {
find(failNode.expr);
}
@Override
public void visit(BLangFromClause fromClause) {
find((BLangNode) fromClause.variableDefinitionNode);
find(fromClause.collection);
}
@Override
public void visit(BLangJoinClause joinClause) {
find((BLangNode) joinClause.variableDefinitionNode);
find((BLangOnClause) joinClause.onClause);
find(joinClause.collection);
}
@Override
public void visit(BLangLetClause letClause) {
for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
find((BLangNode) letVariable.definitionNode);
}
}
@Override
public void visit(BLangOnClause onClause) {
find(onClause.lhsExpr);
find(onClause.rhsExpr);
}
@Override
public void visit(BLangOrderKey orderKeyClause) {
find(orderKeyClause.expression);
}
@Override
public void visit(BLangOrderByClause orderByClause) {
for (OrderKeyNode orderKeyNode : orderByClause.orderByKeyList) {
find((BLangOrderKey) orderKeyNode);
}
}
@Override
public void visit(BLangSelectClause selectClause) {
find(selectClause.expression);
}
@Override
public void visit(BLangWhereClause whereClause) {
find(whereClause.expression);
}
@Override
public void visit(BLangDoClause doClause) {
find(doClause.body);
}
@Override
public void visit(BLangOnFailClause onFailClause) {
find((BLangNode) onFailClause.variableDefinitionNode);
find(onFailClause.body);
}
@Override
public void visit(BLangOnConflictClause onConflictClause) {
find(onConflictClause.expression);
}
@Override
public void visit(BLangLimitClause limitClause) {
find(limitClause.expression);
}
@Override
public void visit(BLangMatchClause matchClause) {
find(matchClause.matchPatterns);
find(matchClause.matchGuard);
find(matchClause.blockStmt);
}
@Override
public void visit(BLangWhile whileNode) {
find(whileNode.expr);
find(whileNode.body);
find(whileNode.onFailClause);
}
@Override
public void visit(BLangLock lockNode) {
find(lockNode.body);
find(lockNode.onFailClause);
}
@Override
public void visit(BLangTransaction transactionNode) {
find(transactionNode.transactionBody);
find(transactionNode.onFailClause);
}
@Override
public void visit(BLangTupleDestructure stmt) {
find(stmt.expr);
find(stmt.varRef);
}
@Override
public void visit(BLangRecordDestructure stmt) {
find(stmt.expr);
find(stmt.varRef);
}
@Override
public void visit(BLangErrorDestructure stmt) {
find(stmt.expr);
find(stmt.varRef);
}
@Override
public void visit(BLangForkJoin forkJoin) {
find(forkJoin.workers);
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
find(workerSendNode.expr);
addIfSameSymbol(workerSendNode.workerSymbol, workerSendNode.workerIdentifier.pos);
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
addIfSameSymbol(workerReceiveNode.workerSymbol, workerReceiveNode.workerIdentifier.pos);
}
@Override
public void visit(BLangRollback rollbackNode) {
find(rollbackNode.expr);
}
@Override
public void visit(BLangConstRef constRef) {
if (!constRef.pkgAlias.value.isEmpty()) {
addIfSameSymbol(constRef.symbol.owner, constRef.pkgAlias.pos);
}
addIfSameSymbol(constRef.symbol, constRef.variableName.pos);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
find((BLangNode) field);
}
}
@Override
public void visit(BLangTupleVarRef varRefExpr) {
find(varRefExpr.expressions);
find((BLangNode) varRefExpr.restParam);
}
@Override
public void visit(BLangRecordVarRef varRefExpr) {
for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
find(recordRefField.getBindingPattern());
}
find((BLangNode) varRefExpr.restParam);
}
@Override
public void visit(BLangErrorVarRef varRefExpr) {
find(varRefExpr.typeNode);
find(varRefExpr.message);
find(varRefExpr.cause);
find(varRefExpr.restVar);
if (varRefExpr.typeNode != null) {
find(varRefExpr.detail);
} else {
visitNamedArgWithoutAddingSymbol(varRefExpr.detail);
}
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
if (varRefExpr.symbol == null) {
return;
}
if (varRefExpr.pkgAlias != null && !varRefExpr.pkgAlias.value.isEmpty() &&
addIfSameSymbol(varRefExpr.symbol.owner, varRefExpr.pkgAlias.pos)) {
return;
}
addIfSameSymbol(varRefExpr.symbol, varRefExpr.variableName.pos);
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
find(fieldAccessExpr.expr);
addIfSameSymbol(fieldAccessExpr.symbol, fieldAccessExpr.field.pos);
}
@Override
public void visit(BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
find(nsPrefixedFieldBasedAccess.expr);
addIfSameSymbol(nsPrefixedFieldBasedAccess.nsSymbol, nsPrefixedFieldBasedAccess.nsPrefix.pos);
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
find(indexAccessExpr.expr);
if (indexAccessExpr.indexExpr instanceof BLangLiteral) {
addIfSameSymbol(indexAccessExpr.symbol, getLocationForLiteral(indexAccessExpr.indexExpr.pos));
} else {
find(indexAccessExpr.indexExpr);
}
}
@Override
public void visit(BLangInvocation invocationExpr) {
if (!invocationExpr.langLibInvocation) {
find(invocationExpr.expr);
}
find(invocationExpr.annAttachments);
find(invocationExpr.argExprs);
if (!invocationExpr.pkgAlias.value.isEmpty() && invocationExpr.symbol != null) {
addIfSameSymbol(invocationExpr.symbol.owner, invocationExpr.pkgAlias.pos);
}
addIfSameSymbol(invocationExpr.symbol, invocationExpr.name.pos);
}
@Override
public void visit(BLangTypeInit typeInit) {
find(typeInit.userDefinedType);
find(typeInit.argsExpr);
}
@Override
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocationExpr) {
    // Visit the receiver expression, arguments and annotation attachments first.
    find(actionInvocationExpr.expr);
    find(actionInvocationExpr.requiredArgs);
    find(actionInvocationExpr.annAttachments);
    find(actionInvocationExpr.restArgs);
    // Guard against a null symbol before dereferencing its owner; the sibling
    // visit(BLangInvocation) performs this null check and this visitor should too.
    if (!actionInvocationExpr.pkgAlias.value.isEmpty() && actionInvocationExpr.symbol != null) {
        addIfSameSymbol(actionInvocationExpr.symbol.owner, actionInvocationExpr.pkgAlias.pos);
    }
    addIfSameSymbol(actionInvocationExpr.symbol, actionInvocationExpr.name.pos);
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
find(ternaryExpr.expr);
find(ternaryExpr.thenExpr);
find(ternaryExpr.elseExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
find(waitExpr.exprList);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
find(trapExpr.expr);
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
find(binaryExpr.lhsExpr);
find(binaryExpr.rhsExpr);
}
@Override
public void visit(BLangElvisExpr elvisExpr) {
find(elvisExpr.lhsExpr);
find(elvisExpr.rhsExpr);
}
@Override
public void visit(BLangGroupExpr groupExpr) {
find(groupExpr.expression);
}
@Override
public void visit(BLangLetExpression letExpr) {
for (BLangLetVariable letVarDeclaration : letExpr.letVarDeclarations) {
find((BLangNode) letVarDeclaration.definitionNode);
}
find(letExpr.expr);
}
@Override
public void visit(BLangListConstructorExpr listConstructorExpr) {
find(listConstructorExpr.exprs);
}
@Override
public void visit(BLangListConstructorExpr.BLangListConstructorSpreadOpExpr spreadOpExpr) {
find(spreadOpExpr.expr);
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
find(tableConstructorExpr.recordLiteralList);
find(tableConstructorExpr.tableKeySpecifier);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
find(unaryExpr.expr);
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
find(typedescExpr.typeNode);
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
find(conversionExpr.annAttachments);
find(conversionExpr.typeNode);
find(conversionExpr.expr);
}
@Override
public void visit(BLangXMLQName xmlQName) {
addIfSameSymbol(xmlQName.nsSymbol, xmlQName.prefix.pos);
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
find(xmlAttribute.name);
find(xmlAttribute.value);
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
find(xmlElementLiteral.startTagName);
find(xmlElementLiteral.endTagName);
find(xmlElementLiteral.children);
find(xmlElementLiteral.attributes);
find(xmlElementLiteral.inlineNamespaces);
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
find(xmlTextLiteral.textFragments);
find(xmlTextLiteral.concatExpr);
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
find(xmlCommentLiteral.textFragments);
find(xmlCommentLiteral.concatExpr);
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
find(xmlProcInsLiteral.target);
find(xmlProcInsLiteral.dataFragments);
find(xmlProcInsLiteral.dataConcatExpr);
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
find(xmlQuotedString.textFragments);
find(xmlQuotedString.concatExpr);
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
find(stringTemplateLiteral.exprs);
}
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
find(rawTemplateLiteral.insertions);
find(rawTemplateLiteral.strings);
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
find(bLangLambdaFunction.function);
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
find(bLangArrowFunction.params);
find(bLangArrowFunction.body);
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
find(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
find(bLangNamedArgsExpression.expr);
addIfSameSymbol(bLangNamedArgsExpression.varSymbol, bLangNamedArgsExpression.name.pos);
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
find(assignableExpr.lhsExpr);
find(assignableExpr.typeNode);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
find(checkedExpr.expr);
}
@Override
public void visit(BLangCheckPanickedExpr checkPanickedExpr) {
find(checkPanickedExpr.expr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
find(serviceConstructorExpr.serviceNode);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
find(typeTestExpr.expr);
find(typeTestExpr.typeNode);
}
@Override
public void visit(BLangIsLikeExpr typeTestExpr) {
find(typeTestExpr.expr);
find(typeTestExpr.typeNode);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
find(annotAccessExpr.expr);
addIfSameSymbol(annotAccessExpr.annotationSymbol, annotAccessExpr.annotationName.pos);
}
@Override
public void visit(BLangQueryExpr queryExpr) {
find(queryExpr.queryClauseList);
}
@Override
public void visit(BLangObjectConstructorExpression objConstructor) {
find(objConstructor.classNode);
}
@Override
public void visit(BLangArrayType arrayType) {
find(arrayType.elemtype);
for (BLangExpression size : arrayType.sizes) {
find(size);
}
}
@Override
public void visit(BLangConstrainedType constrainedType) {
find(constrainedType.type);
find(constrainedType.constraint);
}
@Override
public void visit(BLangStreamType streamType) {
find(streamType.constraint);
find(streamType.error);
}
@Override
public void visit(BLangTableTypeNode tableType) {
find(tableType.constraint);
find(tableType.tableKeySpecifier);
find(tableType.tableKeyTypeConstraint);
}
@Override
public void visit(BLangUserDefinedType userDefinedType) {
if (userDefinedType.symbol == null) {
return;
}
if (!userDefinedType.pkgAlias.value.isEmpty()) {
addIfSameSymbol(userDefinedType.symbol.owner, userDefinedType.pkgAlias.pos);
}
addIfSameSymbol(userDefinedType.symbol, userDefinedType.typeName.pos);
}
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
find(functionTypeNode.params);
find(functionTypeNode.restParam);
find(functionTypeNode.returnTypeNode);
}
@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
find(unionTypeNode.memberTypeNodes);
}
@Override
public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
find(intersectionTypeNode.constituentTypeNodes);
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
find(objectTypeNode.fields);
find(objectTypeNode.functions);
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
find(recordTypeNode.typeRefs);
find(recordTypeNode.fields);
find(recordTypeNode.restFieldType);
}
@Override
public void visit(BLangFiniteTypeNode finiteTypeNode) {
find(finiteTypeNode.valueSpace);
}
@Override
public void visit(BLangTupleTypeNode tupleTypeNode) {
find(tupleTypeNode.memberTypeNodes);
find(tupleTypeNode.restParamType);
}
@Override
public void visit(BLangErrorType errorType) {
find(errorType.detailType);
}
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
find(errorConstructorExpr.errorTypeRef);
find(errorConstructorExpr.positionalArgs);
find(errorConstructorExpr.namedArgs);
}
@Override
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
find(bLangXMLSequenceLiteral.xmlItems);
}
@Override
public void visit(BLangTupleVariable bLangTupleVariable) {
find(bLangTupleVariable.annAttachments);
find(bLangTupleVariable.typeNode);
find(bLangTupleVariable.memberVariables);
find(bLangTupleVariable.restVariable);
find(bLangTupleVariable.expr);
}
@Override
public void visit(BLangTupleVariableDef bLangTupleVariableDef) {
find(bLangTupleVariableDef.var);
}
@Override
public void visit(BLangRecordVariable bLangRecordVariable) {
find(bLangRecordVariable.annAttachments);
find(bLangRecordVariable.typeNode);
for (BLangRecordVariable.BLangRecordVariableKeyValue variableKeyValue : bLangRecordVariable.variableList) {
find(variableKeyValue.valueBindingPattern);
}
find(bLangRecordVariable.expr);
find(bLangRecordVariable.restParam);
}
@Override
public void visit(BLangRecordVariableDef bLangRecordVariableDef) {
find(bLangRecordVariableDef.var);
}
@Override
public void visit(BLangErrorVariable bLangErrorVariable) {
find(bLangErrorVariable.annAttachments);
find(bLangErrorVariable.typeNode);
find(bLangErrorVariable.message);
find(bLangErrorVariable.restDetail);
find(bLangErrorVariable.cause);
find(bLangErrorVariable.reasonMatchConst);
find(bLangErrorVariable.expr);
for (BLangErrorVariable.BLangErrorDetailEntry errorDetailEntry : bLangErrorVariable.detail) {
find(errorDetailEntry.valueBindingPattern);
addIfSameSymbol(errorDetailEntry.keySymbol, errorDetailEntry.key.pos);
}
}
@Override
public void visit(BLangErrorVariableDef bLangErrorVariableDef) {
find(bLangErrorVariableDef.errorVariable);
}
@Override
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
find(syncSendExpr.expr);
addIfSameSymbol(syncSendExpr.workerSymbol, syncSendExpr.workerIdentifier.pos);
}
@Override
public void visit(BLangWaitForAllExpr waitForAllExpr) {
find(waitForAllExpr.keyValuePairs);
}
@Override
public void visit(BLangRecordLiteral.BLangRecordKeyValueField recordKeyValue) {
find(recordKeyValue.key);
find(recordKeyValue.valueExpr);
}
@Override
public void visit(BLangRecordLiteral.BLangRecordKey recordKey) {
find(recordKey.expr);
addIfSameSymbol(recordKey.fieldSymbol, recordKey.pos);
}
@Override
public void visit(BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField) {
find(spreadOperatorField.expr);
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitKeyValue waitKeyValue) {
find(waitKeyValue.keyExpr);
find(waitKeyValue.valueExpr);
}
@Override
public void visit(BLangXMLElementFilter xmlElementFilter) {
addIfSameSymbol(xmlElementFilter.namespaceSymbol, xmlElementFilter.nsPos);
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
find(xmlElementAccess.expr);
find(xmlElementAccess.filters);
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
find(xmlNavigation.childIndex);
find(xmlNavigation.filters);
find(xmlNavigation.expr);
}
@Override
public void visit(BLangClassDefinition classDefinition) {
find(classDefinition.annAttachments);
find(classDefinition.fields);
find(classDefinition.initFunction);
find(classDefinition.functions);
find(classDefinition.typeRefs);
addIfSameSymbol(classDefinition.symbol, classDefinition.name.pos);
}
@Override
public void visit(BLangListMatchPattern listMatchPattern) {
find(listMatchPattern.matchPatterns);
find(listMatchPattern.restMatchPattern);
}
@Override
public void visit(BLangMappingMatchPattern mappingMatchPattern) {
find(mappingMatchPattern.fieldMatchPatterns);
find(mappingMatchPattern.restMatchPattern);
}
@Override
public void visit(BLangFieldMatchPattern fieldMatchPattern) {
find(fieldMatchPattern.matchPattern);
}
@Override
public void visit(BLangRestMatchPattern restMatchPattern) {
addIfSameSymbol(restMatchPattern.symbol, restMatchPattern.variableName.pos);
}
@Override
public void visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {
find(resourceAccessInvocation.expr);
find(resourceAccessInvocation.requiredArgs);
find(resourceAccessInvocation.annAttachments);
find(resourceAccessInvocation.restArgs);
find(resourceAccessInvocation.resourceAccessPathSegments);
if (!resourceAccessInvocation.pkgAlias.value.isEmpty()) {
addIfSameSymbol(resourceAccessInvocation.symbol.owner, resourceAccessInvocation.pkgAlias.pos);
}
addIfSameSymbol(resourceAccessInvocation.symbol, resourceAccessInvocation.resourceAccessPathSegments.pos);
}
private void visitNamedArgWithoutAddingSymbol(List<BLangNamedArgsExpression> args) {
for (BLangNamedArgsExpression arg : args) {
find(arg.expr);
}
}
private boolean addIfSameSymbol(BSymbol symbol, Location location) {
if (symbol != null
&& this.targetSymbol.name.equals(symbol.name)
&& this.targetSymbol.pkgID.equals(symbol.pkgID)
&& this.targetSymbol.pos.equals(symbol.pos)
&& (this.withDefinition || !symbol.pos.equals(location))) {
this.referenceLocations.add(location);
return true;
}
return false;
}
private boolean isGeneratedClassDefForService(BLangClassDefinition clazz) {
return clazz.flagSet.contains(Flag.ANONYMOUS) && clazz.flagSet.contains(Flag.SERVICE);
}
/**
* This method is intended to be used for getting the location of a string value with the surrounding quotes
* disregarded. If we give the original location, it'd be problematic for use cases such as renaming since we only
* return a list of locations of references. Without further contextual info, it'll be hard to determine whether a
* particular reference location is a string value.
*
* @param location Location of the string
* @return The modified location with the quotes diregarded
*/
private Location getLocationForLiteral(Location location) {
LineRange lineRange = location.lineRange();
return new BLangDiagnosticLocation(lineRange.filePath(),
lineRange.startLine().line(), lineRange.endLine().line(),
lineRange.startLine().offset() + 1, lineRange.endLine().offset() - 1,
location.textRange().startOffset(), location.textRange().length());
}
} |
Instead of just sleeping, could you poll some counter that you increment when returning server errors, and wait for a couple of errors to be returned? That would help ensure the error-handling path on the client actually runs. | public void testStreamingCommitClosedStream() throws Exception {
List<WorkItemCommitRequest> commitRequestList = new ArrayList<>();
List<CountDownLatch> latches = new ArrayList<>();
Map<Long, WorkItemCommitRequest> commitRequests = new ConcurrentHashMap<>();
AtomicBoolean shouldServerReturnError = new AtomicBoolean(true);
for (int i = 0; i < 500; ++i) {
WorkItemCommitRequest request = makeCommitRequest(i, i * (i < 480 ? 8 : 128));
commitRequestList.add(request);
commitRequests.put((long) i, request);
latches.add(new CountDownLatch(1));
}
Collections.shuffle(commitRequestList);
serviceRegistry.addService(
new CloudWindmillServiceV1Alpha1ImplBase() {
@Override
public StreamObserver<StreamingCommitWorkRequest> commitWorkStream(
StreamObserver<StreamingCommitResponse> responseObserver) {
StreamObserver<StreamingCommitWorkRequest> testCommitStreamObserver =
getTestCommitStreamObserver(responseObserver, commitRequests);
return new StreamObserver<StreamingCommitWorkRequest>() {
@Override
public void onNext(StreamingCommitWorkRequest request) {
if (shouldServerReturnError.get()) {
try {
responseObserver.onError(
new RuntimeException("shouldServerReturnError = true"));
} catch (IllegalStateException e) {
}
} else {
testCommitStreamObserver.onNext(request);
}
}
@Override
public void onError(Throwable throwable) {
testCommitStreamObserver.onError(throwable);
}
@Override
public void onCompleted() {
testCommitStreamObserver.onCompleted();
}
};
}
});
CommitWorkStream stream = client.commitWorkStream();
for (int i = 0; i < commitRequestList.size(); ) {
final CountDownLatch latch = latches.get(i);
if (stream.commitWorkItem(
"computation",
commitRequestList.get(i),
(CommitStatus status) -> {
assertEquals(status, CommitStatus.OK);
latch.countDown();
})) {
i++;
} else {
stream.flush();
}
}
stream.flush();
stream.close();
Thread.sleep(100);
shouldServerReturnError.set(false);
for (CountDownLatch latch : latches) {
assertTrue(latch.await(1, TimeUnit.MINUTES));
}
assertTrue(stream.awaitTermination(30, TimeUnit.SECONDS));
} | Thread.sleep(100); | public void testStreamingCommitClosedStream() throws Exception {
List<WorkItemCommitRequest> commitRequestList = new ArrayList<>();
List<CountDownLatch> latches = new ArrayList<>();
Map<Long, WorkItemCommitRequest> commitRequests = new ConcurrentHashMap<>();
AtomicBoolean shouldServerReturnError = new AtomicBoolean(true);
AtomicBoolean isClientClosed = new AtomicBoolean(false);
AtomicInteger errorsBeforeClose = new AtomicInteger();
AtomicInteger errorsAfterClose = new AtomicInteger();
for (int i = 0; i < 500; ++i) {
WorkItemCommitRequest request = makeCommitRequest(i, i * (i < 480 ? 8 : 128));
commitRequestList.add(request);
commitRequests.put((long) i, request);
latches.add(new CountDownLatch(1));
}
Collections.shuffle(commitRequestList);
serviceRegistry.addService(
new CloudWindmillServiceV1Alpha1ImplBase() {
@Override
public StreamObserver<StreamingCommitWorkRequest> commitWorkStream(
StreamObserver<StreamingCommitResponse> responseObserver) {
StreamObserver<StreamingCommitWorkRequest> testCommitStreamObserver =
getTestCommitStreamObserver(responseObserver, commitRequests);
return new StreamObserver<StreamingCommitWorkRequest>() {
@Override
public void onNext(StreamingCommitWorkRequest request) {
if (shouldServerReturnError.get()) {
try {
responseObserver.onError(
new RuntimeException("shouldServerReturnError = true"));
if (isClientClosed.get()) {
errorsAfterClose.incrementAndGet();
} else {
errorsBeforeClose.incrementAndGet();
}
} catch (IllegalStateException e) {
}
} else {
testCommitStreamObserver.onNext(request);
}
}
@Override
public void onError(Throwable throwable) {
testCommitStreamObserver.onError(throwable);
}
@Override
public void onCompleted() {
testCommitStreamObserver.onCompleted();
}
};
}
});
CommitWorkStream stream = client.commitWorkStream();
for (int i = 0; i < commitRequestList.size(); ) {
final CountDownLatch latch = latches.get(i);
if (stream.commitWorkItem(
"computation",
commitRequestList.get(i),
(CommitStatus status) -> {
assertEquals(status, CommitStatus.OK);
latch.countDown();
})) {
i++;
} else {
stream.flush();
}
}
stream.flush();
long deadline = System.currentTimeMillis() + 60_000;
while (true) {
Thread.sleep(100);
int tmpErrorsBeforeClose = errorsBeforeClose.get();
if (tmpErrorsBeforeClose > 0) {
break;
}
if (System.currentTimeMillis() > deadline) {
fail(
String.format(
"Expected errors not sent by server errorsBeforeClose: %s"
+ " \n Should not reach here if the test is working as expected.",
tmpErrorsBeforeClose));
}
}
stream.close();
isClientClosed.set(true);
deadline = System.currentTimeMillis() + 60_000;
while (true) {
Thread.sleep(100);
int tmpErrorsAfterClose = errorsAfterClose.get();
if (tmpErrorsAfterClose > 0) {
break;
}
if (System.currentTimeMillis() > deadline) {
fail(
String.format(
"Expected errors not sent by server errorsAfterClose: %s"
+ " \n Should not reach here if the test is working as expected.",
tmpErrorsAfterClose));
}
}
shouldServerReturnError.set(false);
for (CountDownLatch latch : latches) {
assertTrue(latch.await(1, TimeUnit.MINUTES));
}
assertTrue(stream.awaitTermination(30, TimeUnit.SECONDS));
} | class ResponseErrorInjector<Stream extends StreamObserver> {
private Stream stream;
private Thread errorThread;
private boolean cancelled = false;
public ResponseErrorInjector(Stream stream) {
this.stream = stream;
errorThread = new Thread(this::errorThreadBody);
errorThread.start();
}
private void errorThreadBody() {
int i = 0;
while (true) {
try {
Thread.sleep(ThreadLocalRandom.current().nextInt(++i * 10));
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
synchronized (this) {
if (cancelled) {
break;
}
}
maybeInjectError(stream);
}
}
public void cancel() {
LOG.info("Starting cancel of error injector.");
synchronized (this) {
cancelled = true;
}
errorThread.interrupt();
try {
errorThread.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
LOG.info("Done cancelling.");
}
} | class ResponseErrorInjector<Stream extends StreamObserver> {
private Stream stream;
private Thread errorThread;
private boolean cancelled = false;
public ResponseErrorInjector(Stream stream) {
this.stream = stream;
errorThread = new Thread(this::errorThreadBody);
errorThread.start();
}
private void errorThreadBody() {
int i = 0;
while (true) {
try {
Thread.sleep(ThreadLocalRandom.current().nextInt(++i * 10));
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
synchronized (this) {
if (cancelled) {
break;
}
}
maybeInjectError(stream);
}
}
public void cancel() {
LOG.info("Starting cancel of error injector.");
synchronized (this) {
cancelled = true;
}
errorThread.interrupt();
try {
errorThread.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
LOG.info("Done cancelling.");
}
} |
Maybe also check that `SomethingRemovable` can be looked up independently? | public void testBeanNotRemoved() {
List<String> list = Arc.container().instance(new TypeLiteral<List<String>>() {
}).get();
assertNotNull(list);
assertEquals(1, list.size());
assertEquals(SomethingRemovable.STR, list.get(0));
} | } | public void testBeanNotRemoved() {
List<String> list = Arc.container().instance(new TypeLiteral<List<String>>() {
}).get();
assertNotNull(list);
assertEquals(1, list.size());
assertEquals(SomethingRemovable.STR, list.get(0));
} | class SyntheticInjectionPointInstanceTest {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(SomethingRemovable.class)
.removeUnusedBeans(true)
.beanRegistrars(new TestRegistrar()).build();
@SuppressWarnings("serial")
@Test
static class TestRegistrar implements BeanRegistrar {
@Override
public void register(RegistrationContext context) {
context.configure(List.class)
.addType(Type.create(DotName.createSimple(List.class.getName()), Kind.CLASS))
.addType(ParameterizedType.create(DotName.createSimple(List.class.getName()),
new Type[] { Type.create(DotName.createSimple(String.class.getName()), Kind.CLASS) }, null))
.creator(ListCreator.class)
.addInjectionPoint(ParameterizedType.create(DotName.createSimple(Instance.class),
new Type[] { ClassType.create(DotName.createSimple(SomethingRemovable.class)) }, null))
.unremovable()
.done();
}
}
@Singleton
static class SomethingRemovable {
static final String STR = "I'm still here!";
@Override
public String toString() {
return STR;
}
}
public static class ListCreator implements BeanCreator<List<String>> {
@SuppressWarnings("serial")
@Override
public List<String> create(SyntheticCreationalContext<List<String>> context) {
return List.of(context.getInjectedReference(new TypeLiteral<Instance<SomethingRemovable>>() {
}).get().toString());
}
}
} | class SyntheticInjectionPointInstanceTest {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(SomethingRemovable.class)
.removeUnusedBeans(true)
.beanRegistrars(new TestRegistrar()).build();
@SuppressWarnings("serial")
@Test
static class TestRegistrar implements BeanRegistrar {
@Override
public void register(RegistrationContext context) {
context.configure(List.class)
.addType(Type.create(DotName.createSimple(List.class.getName()), Kind.CLASS))
.addType(ParameterizedType.create(DotName.createSimple(List.class.getName()),
new Type[] { Type.create(DotName.createSimple(String.class.getName()), Kind.CLASS) }, null))
.creator(ListCreator.class)
.addInjectionPoint(ParameterizedType.create(DotName.createSimple(Instance.class),
new Type[] { ClassType.create(DotName.createSimple(SomethingRemovable.class)) }, null))
.unremovable()
.done();
}
}
@Singleton
static class SomethingRemovable {
static final String STR = "I'm still here!";
@Override
public String toString() {
return STR;
}
}
public static class ListCreator implements BeanCreator<List<String>> {
@SuppressWarnings("serial")
@Override
public List<String> create(SyntheticCreationalContext<List<String>> context) {
return List.of(context.getInjectedReference(new TypeLiteral<Instance<SomethingRemovable>>() {
}).get().toString());
}
}
} |
Could these lines be moved outside of `assertThatThrownBy`? | void testCheckpointFailueOnClosedOperator() throws Exception {
ClosingOperator<Integer> operator = new ClosingOperator<>();
StreamTaskMailboxTestHarnessBuilder<Integer> builder =
new StreamTaskMailboxTestHarnessBuilder<>(
OneInputStreamTask::new, BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO);
try (StreamTaskMailboxTestHarness<Integer> harness =
builder.setupOutputForSingletonOperatorChain(operator).build()) {
assertThatThrownBy(
() -> {
harness.setAutoProcess(false);
harness.processElement(new StreamRecord<>(1));
harness.streamTask.operatorChain.finishOperators(
harness.streamTask.getActionExecutor(), StopMode.DRAIN);
harness.streamTask.operatorChain.closeAllOperators();
assertThat(ClosingOperator.closed.get()).isTrue();
harness.streamTask.triggerCheckpointOnBarrier(
new CheckpointMetaData(1, 0),
CheckpointOptions.forCheckpointWithDefaultLocation(),
new CheckpointMetricsBuilder());
})
.satisfies(
(Consumer<Throwable>)
throwable ->
ExceptionUtils.assertThrowableWithMessage(
throwable,
"OperatorChain and Task should never be closed at this point"));
}
} | assertThat(ClosingOperator.closed.get()).isTrue(); | void testCheckpointFailueOnClosedOperator() throws Exception {
ClosingOperator<Integer> operator = new ClosingOperator<>();
StreamTaskMailboxTestHarnessBuilder<Integer> builder =
new StreamTaskMailboxTestHarnessBuilder<>(
OneInputStreamTask::new, BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO);
try (StreamTaskMailboxTestHarness<Integer> harness =
builder.setupOutputForSingletonOperatorChain(operator).build()) {
harness.setAutoProcess(false);
harness.processElement(new StreamRecord<>(1));
harness.streamTask.operatorChain.finishOperators(
harness.streamTask.getActionExecutor(), StopMode.DRAIN);
harness.streamTask.operatorChain.closeAllOperators();
assertThat(ClosingOperator.closed.get()).isTrue();
assertThatThrownBy(
() ->
harness.streamTask.triggerCheckpointOnBarrier(
new CheckpointMetaData(1, 0),
CheckpointOptions.forCheckpointWithDefaultLocation(),
new CheckpointMetricsBuilder()))
.satisfies(
anyCauseMatches(
"OperatorChain and Task should never be closed at this point"));
}
} | class CloseException extends Exception {
public CloseException() {
super("Close Exception. This exception should be suppressed");
}
} | class CloseException extends Exception {
public CloseException() {
super("Close Exception. This exception should be suppressed");
}
} |
Ditto here and in other places for the new methods: the implementation is the same as the parent class's, so why are they needed in every child class? | public String getString(String propertyName) {
return super.getString(propertyName);
} | return super.getString(propertyName); | public String getString(String propertyName) {
return super.getString(propertyName);
} | class Offer extends Resource {
/**
* Initialize an new instance of the Offer object.
*
* @param offerThroughput the throughput value for this offer.
*/
public Offer(int offerThroughput) {
super();
this.setOfferVersion(Constants.Properties.OFFER_VERSION_V2);
this.setOfferType("");
ObjectNode content = Utils.getSimpleObjectMapper().createObjectNode();
content.put(Constants.Properties.OFFER_THROUGHPUT, offerThroughput);
this.setContent(content);
}
/**
* Initialize an offer object from json string.
*
* @param jsonString the json string that represents the offer.
*/
public Offer(String jsonString) {
super(jsonString);
}
/**
* Gets the self-link of a resource to which the resource offer applies.
*
* @return the resource link.
*/
public String getResourceLink() {
return super.getString(Constants.Properties.RESOURCE_LINK);
}
/**
* Sets the self-link of a resource to which the resource offer applies.
*
* @param resourceLink the resource link.
*/
void setResourceLink(String resourceLink) {
BridgeInternal.setProperty(this, Constants.Properties.RESOURCE_LINK, resourceLink);
}
/**
* Sets the target resource id of a resource to which this offer applies.
*
* @return the resource id.
*/
public String getOfferResourceId() {
return super.getString(Constants.Properties.OFFER_RESOURCE_ID);
}
/**
* Sets the target resource id of a resource to which this offer applies.
*
* @param resourceId the resource id.
*/
void setOfferResourceId(String resourceId) {
BridgeInternal.setProperty(this, Constants.Properties.OFFER_RESOURCE_ID, resourceId);
}
/**
* Gets the OfferType for the resource offer.
*
* @return the offer type.
*/
public String getOfferType() {
return super.getString(Constants.Properties.OFFER_TYPE);
}
/**
* Sets the OfferType for the resource offer.
*
* @param offerType the offer type.
*/
public void setOfferType(String offerType) {
BridgeInternal.setProperty(this, Constants.Properties.OFFER_TYPE, offerType);
}
/**
* Gets the version of the current offer.
*
* @return the offer version.
*/
public String getOfferVersion() {
return super.getString(Constants.Properties.OFFER_VERSION);
}
/**
* Sets the offer version.
*
* @param offerVersion the version of the offer.
*/
public void setOfferVersion(String offerVersion) {
BridgeInternal.setProperty(this, Constants.Properties.OFFER_VERSION, offerVersion);
}
/**
* Gets the offer throughput for this offer.
*
* @return the offer throughput.
*/
public int getThroughput() {
return this.getContent().get(Constants.Properties.OFFER_THROUGHPUT).asInt();
}
/**
* Sets the offer throughput for this offer.
*
* @param throughput the throughput of this offer.
*/
public void setThroughput(int throughput) {
this.getContent().put(Constants.Properties.OFFER_THROUGHPUT, throughput);
}
private ObjectNode getContent() {
return BridgeInternal.getObject(this, Constants.Properties.OFFER_CONTENT);
}
private void setContent(ObjectNode offerContent) {
BridgeInternal.setProperty(this, Constants.Properties.OFFER_CONTENT, offerContent);
}
@Override
@Override
public Integer getInt(String propertyName) {
return super.getInt(propertyName);
}
} | class Offer extends Resource {
/**
* Initialize an new instance of the Offer object.
*
* @param offerThroughput the throughput value for this offer.
*/
public Offer(int offerThroughput) {
super();
this.setOfferVersion(Constants.Properties.OFFER_VERSION_V2);
this.setOfferType("");
ObjectNode content = Utils.getSimpleObjectMapper().createObjectNode();
content.put(Constants.Properties.OFFER_THROUGHPUT, offerThroughput);
this.setContent(content);
}
/**
* Initialize an offer object from json string.
*
* @param jsonString the json string that represents the offer.
*/
public Offer(String jsonString) {
super(jsonString);
}
/**
* Gets the self-link of a resource to which the resource offer applies.
*
* @return the resource link.
*/
public String getResourceLink() {
return super.getString(Constants.Properties.RESOURCE_LINK);
}
/**
* Sets the self-link of a resource to which the resource offer applies.
*
* @param resourceLink the resource link.
*/
void setResourceLink(String resourceLink) {
BridgeInternal.setProperty(this, Constants.Properties.RESOURCE_LINK, resourceLink);
}
/**
* Sets the target resource id of a resource to which this offer applies.
*
* @return the resource id.
*/
public String getOfferResourceId() {
return super.getString(Constants.Properties.OFFER_RESOURCE_ID);
}
/**
* Sets the target resource id of a resource to which this offer applies.
*
* @param resourceId the resource id.
*/
void setOfferResourceId(String resourceId) {
BridgeInternal.setProperty(this, Constants.Properties.OFFER_RESOURCE_ID, resourceId);
}
/**
* Gets the OfferType for the resource offer.
*
* @return the offer type.
*/
public String getOfferType() {
return super.getString(Constants.Properties.OFFER_TYPE);
}
/**
* Sets the OfferType for the resource offer.
*
* @param offerType the offer type.
*/
public void setOfferType(String offerType) {
BridgeInternal.setProperty(this, Constants.Properties.OFFER_TYPE, offerType);
}
/**
* Gets the version of the current offer.
*
* @return the offer version.
*/
public String getOfferVersion() {
return super.getString(Constants.Properties.OFFER_VERSION);
}
/**
* Sets the offer version.
*
* @param offerVersion the version of the offer.
*/
public void setOfferVersion(String offerVersion) {
BridgeInternal.setProperty(this, Constants.Properties.OFFER_VERSION, offerVersion);
}
/**
* Gets the offer throughput for this offer.
*
* @return the offer throughput.
*/
public int getThroughput() {
return this.getContent().get(Constants.Properties.OFFER_THROUGHPUT).asInt();
}
/**
* Sets the offer throughput for this offer.
*
* @param throughput the throughput of this offer.
*/
public void setThroughput(int throughput) {
this.getContent().put(Constants.Properties.OFFER_THROUGHPUT, throughput);
}
private ObjectNode getContent() {
return BridgeInternal.getObject(this, Constants.Properties.OFFER_CONTENT);
}
private void setContent(ObjectNode offerContent) {
BridgeInternal.setProperty(this, Constants.Properties.OFFER_CONTENT, offerContent);
}
@Override
@Override
public Integer getInt(String propertyName) {
return super.getInt(propertyName);
}
} |
Missing "that" after "value" (more occurrences in other files). | public String getDescription() {
return "String value specifies the fully qualified name of the entry point class. " +
"Overrides the class defined in the jar file manifest.";
} | return "String value specifies the fully qualified name of the entry point class. " + | public String getDescription() {
return "String value that specifies the fully qualified name of the entry point class. " +
"Overrides the class defined in the jar file manifest.";
} | class EntryClassQueryParameter extends StringQueryParameter {
public EntryClassQueryParameter() {
super("entry-class", MessageParameterRequisiteness.OPTIONAL);
}
@Override
} | class EntryClassQueryParameter extends StringQueryParameter {
public EntryClassQueryParameter() {
super("entry-class", MessageParameterRequisiteness.OPTIONAL);
}
@Override
} |
enforce no dynamic destinations in batch? Or I think the BoundedWriter needs to group output by topic as publish is just all messages to single topic currently. | public PDone expand(PCollection<T> input) {
if (getTopicProvider() == null && !getDynamicDestinations()) {
throw new IllegalStateException(
"need to set the topic of a PubsubIO.Write transform if not using "
+ "dynamic topic destinations.");
}
SerializableFunction<ValueInSingleWindow<T>, PubsubIO.PubsubTopic> topicFunction =
getTopicFunction();
if (topicFunction == null && getTopicProvider() != null) {
topicFunction = v -> getTopicProvider().get();
}
int maxMessageSize = PUBSUB_MESSAGE_MAX_TOTAL_SIZE;
if (input.isBounded() == PCollection.IsBounded.BOUNDED) {
maxMessageSize =
Math.min(
maxMessageSize,
MoreObjects.firstNonNull(
getMaxBatchBytesSize(), MAX_PUBLISH_BATCH_BYTE_SIZE_DEFAULT));
}
PCollection<PubsubMessage> pubsubMessages =
input.apply(
ParDo.of(new PreparePubsubWriteDoFn<>(getFormatFn(), topicFunction, maxMessageSize)));
switch (input.isBounded()) {
case BOUNDED:
pubsubMessages.apply(
ParDo.of(
new PubsubBoundedWriter(
MoreObjects.firstNonNull(getMaxBatchSize(), MAX_PUBLISH_BATCH_SIZE),
MoreObjects.firstNonNull(
getMaxBatchBytesSize(), MAX_PUBLISH_BATCH_BYTE_SIZE_DEFAULT))));
return PDone.in(input.getPipeline());
case UNBOUNDED:
return pubsubMessages.apply(
new PubsubUnboundedSink(
getPubsubClientFactory(),
getTopicProvider() != null
? NestedValueProvider.of(getTopicProvider(), new TopicPathTranslator())
: null,
getTimestampAttribute(),
getIdAttribute(),
100 /* numShards */,
MoreObjects.firstNonNull(
getMaxBatchSize(), PubsubUnboundedSink.DEFAULT_PUBLISH_BATCH_SIZE),
MoreObjects.firstNonNull(
getMaxBatchBytesSize(), PubsubUnboundedSink.DEFAULT_PUBLISH_BATCH_BYTES),
getPubsubRootUrl()));
}
throw new RuntimeException();
} | } | public PDone expand(PCollection<T> input) {
if (getTopicProvider() == null && !getDynamicDestinations()) {
throw new IllegalStateException(
"need to set the topic of a PubsubIO.Write transform if not using "
+ "dynamic topic destinations.");
}
SerializableFunction<ValueInSingleWindow<T>, PubsubIO.PubsubTopic> topicFunction =
getTopicFunction();
if (topicFunction == null && getTopicProvider() != null) {
topicFunction = v -> getTopicProvider().get();
}
int maxMessageSize = PUBSUB_MESSAGE_MAX_TOTAL_SIZE;
if (input.isBounded() == PCollection.IsBounded.BOUNDED) {
maxMessageSize =
Math.min(
maxMessageSize,
MoreObjects.firstNonNull(
getMaxBatchBytesSize(), MAX_PUBLISH_BATCH_BYTE_SIZE_DEFAULT));
}
PCollection<PubsubMessage> pubsubMessages =
input
.apply(
ParDo.of(
new PreparePubsubWriteDoFn<>(getFormatFn(), topicFunction, maxMessageSize)))
.setCoder(new PubsubMessageWithTopicCoder());
switch (input.isBounded()) {
case BOUNDED:
pubsubMessages.apply(
ParDo.of(
new PubsubBoundedWriter(
MoreObjects.firstNonNull(getMaxBatchSize(), MAX_PUBLISH_BATCH_SIZE),
MoreObjects.firstNonNull(
getMaxBatchBytesSize(), MAX_PUBLISH_BATCH_BYTE_SIZE_DEFAULT))));
return PDone.in(input.getPipeline());
case UNBOUNDED:
return pubsubMessages.apply(
new PubsubUnboundedSink(
getPubsubClientFactory(),
getTopicProvider() != null
? NestedValueProvider.of(getTopicProvider(), new TopicPathTranslator())
: null,
getTimestampAttribute(),
getIdAttribute(),
100 /* numShards */,
MoreObjects.firstNonNull(
getMaxBatchSize(), PubsubUnboundedSink.DEFAULT_PUBLISH_BATCH_SIZE),
MoreObjects.firstNonNull(
getMaxBatchBytesSize(), PubsubUnboundedSink.DEFAULT_PUBLISH_BATCH_BYTES),
getPubsubRootUrl()));
}
throw new RuntimeException();
} | class Builder<T> {
abstract Builder<T> setTopicProvider(ValueProvider<PubsubTopic> topicProvider);
abstract Builder<T> setTopicFunction(
SerializableFunction<ValueInSingleWindow<T>, PubsubTopic> topicFunction);
abstract Builder<T> setDynamicDestinations(boolean dynamicDestinations);
abstract Builder<T> setPubsubClientFactory(PubsubClient.PubsubClientFactory factory);
abstract Builder<T> setMaxBatchSize(Integer batchSize);
abstract Builder<T> setMaxBatchBytesSize(Integer maxBatchBytesSize);
abstract Builder<T> setTimestampAttribute(String timestampAttribute);
abstract Builder<T> setIdAttribute(String idAttribute);
abstract Builder<T> setFormatFn(SerializableFunction<T, PubsubMessage> formatFn);
abstract Builder<T> setPubsubRootUrl(String pubsubRootUrl);
abstract Write<T> build();
} | class Builder<T> {
abstract Builder<T> setTopicProvider(ValueProvider<PubsubTopic> topicProvider);
abstract Builder<T> setTopicFunction(
SerializableFunction<ValueInSingleWindow<T>, PubsubTopic> topicFunction);
abstract Builder<T> setDynamicDestinations(boolean dynamicDestinations);
abstract Builder<T> setPubsubClientFactory(PubsubClient.PubsubClientFactory factory);
abstract Builder<T> setMaxBatchSize(Integer batchSize);
abstract Builder<T> setMaxBatchBytesSize(Integer maxBatchBytesSize);
abstract Builder<T> setTimestampAttribute(String timestampAttribute);
abstract Builder<T> setIdAttribute(String idAttribute);
abstract Builder<T> setFormatFn(SerializableFunction<T, PubsubMessage> formatFn);
abstract Builder<T> setPubsubRootUrl(String pubsubRootUrl);
abstract Write<T> build();
} |
I would instead do something like: ```java for (Handler h : InitialConfigurator.DELAYED_HANDLER.clearHandlers()) { try { h.close(); } catch (Throwable ignored) { } } ``` This way a subsequent dev mode iteration is more likely to work correctly. | public static void printStopTime(String name) {
final long stopTimeNanoSeconds = System.nanoTime() - bootStopTime;
final Logger logger = Logger.getLogger("io.quarkus");
final BigDecimal secondsRepresentation = convertToBigDecimalSeconds(stopTimeNanoSeconds);
logger.infof("%s stopped in %ss",
(UNSET_VALUE.equals(name) || name == null || name.trim().isEmpty()) ? "Quarkus" : name,
secondsRepresentation);
bootStopTime = -1;
/**
* We can safely close log handlers after stop time has been printed.
*/
InitialConfigurator.DELAYED_HANDLER.close();
} | InitialConfigurator.DELAYED_HANDLER.close(); | public static void printStopTime(String name) {
final long stopTimeNanoSeconds = System.nanoTime() - bootStopTime;
final Logger logger = Logger.getLogger("io.quarkus");
final BigDecimal secondsRepresentation = convertToBigDecimalSeconds(stopTimeNanoSeconds);
logger.infof("%s stopped in %ss",
(UNSET_VALUE.equals(name) || name == null || name.trim().isEmpty()) ? "Quarkus" : name,
secondsRepresentation);
bootStopTime = -1;
/**
* We can safely close log handlers after stop time has been printed.
*/
Handler[] handlers = InitialConfigurator.DELAYED_HANDLER.clearHandlers();
for (Handler handler : handlers) {
try {
handler.close();
} catch (Throwable ignored) {
}
}
} | class Timing {
private static volatile long bootStartTime = -1;
private static volatile long bootStopTime = -1;
private static volatile String httpServerInfo = "";
private static final String UNSET_VALUE = "<<unset>>";
public static void staticInitStarted() {
    // Record the boot start timestamp exactly once; later calls are no-ops.
    if (bootStartTime >= 0) {
        return;
    }
    bootStartTime = System.nanoTime();
}
public static void staticInitStopped() {
    // Record the boot stop timestamp exactly once; later calls are no-ops.
    if (bootStopTime >= 0) {
        return;
    }
    bootStopTime = System.nanoTime();
}
/**
* An extension providing the HTTP server should set the current info (port, host, etc.) in a recorder method of a
* RUNTIME_INIT build step. Note that it is not possible to inspect thee RUN_TIME config properties through MP Config.
*
* @param info
*/
public static void setHttpServer(String info) {
// Stored for inclusion in the startup banner emitted by printStartupTime().
httpServerInfo = info;
}
/**
* This method is replaced in native mode
*/
public static void mainStarted() {
// Intentionally empty: per the javadoc above, this method is substituted in native mode.
}
public static void restart() {
// Reset the boot clock so a subsequent startup is timed from this point.
bootStartTime = System.nanoTime();
}
/**
 * Logs the startup banner: elapsed boot time, application/Quarkus versions,
 * the HTTP server info previously registered via setHttpServer(), the active
 * profile, and the installed feature list. Resets the boot clock afterwards.
 */
public static void printStartupTime(String name, String version, String quarkusVersion, String features, String profile,
        boolean liveCoding) {
    // Elapsed wall-clock time since staticInitStarted()/restart() recorded the start.
    final long elapsedNanos = System.nanoTime() - bootStartTime;
    final BigDecimal seconds = convertToBigDecimalSeconds(elapsedNanos);
    final Logger logger = Logger.getLogger("io.quarkus");
    // A name/version is "unset" when null, blank, or the sentinel placeholder.
    final boolean nameUnset = name == null || name.trim().isEmpty() || UNSET_VALUE.equals(name);
    final boolean versionUnset = version == null || version.trim().isEmpty() || UNSET_VALUE.equals(version);
    if (nameUnset || versionUnset) {
        logger.infof("Quarkus %s started in %ss. %s", quarkusVersion, seconds, httpServerInfo);
    } else {
        logger.infof("%s %s (running on Quarkus %s) started in %ss. %s", name, version, quarkusVersion,
                seconds, httpServerInfo);
    }
    logger.infof("Profile %s activated. %s", profile, liveCoding ? "Live Coding activated." : "");
    logger.infof("Installed features: [%s]", features);
    bootStartTime = -1;
}
/**
 * Converts a nanosecond duration to seconds with three decimal places
 * (i.e. millisecond precision), rounding half-up.
 *
 * @param timeNanoSeconds the duration in nanoseconds.
 * @return the duration in seconds at scale 3.
 */
public static BigDecimal convertToBigDecimalSeconds(final long timeNanoSeconds) {
    // First round to whole milliseconds, then shift to seconds at scale 3.
    // RoundingMode.HALF_UP replaces the deprecated BigDecimal.ROUND_HALF_UP
    // int constant (deprecated since Java 9).
    return BigDecimal.valueOf(timeNanoSeconds)
            .divide(BigDecimal.valueOf(1_000_000), java.math.RoundingMode.HALF_UP)
            .divide(BigDecimal.valueOf(1_000), 3, java.math.RoundingMode.HALF_UP);
}
} | class Timing {
private static volatile long bootStartTime = -1;
private static volatile long bootStopTime = -1;
private static volatile String httpServerInfo = "";
private static final String UNSET_VALUE = "<<unset>>";
public static void staticInitStarted() {
if (bootStartTime < 0) {
bootStartTime = System.nanoTime();
}
}
public static void staticInitStopped() {
if (bootStopTime < 0) {
bootStopTime = System.nanoTime();
}
}
/**
* An extension providing the HTTP server should set the current info (port, host, etc.) in a recorder method of a
* RUNTIME_INIT build step. Note that it is not possible to inspect thee RUN_TIME config properties through MP Config.
*
* @param info
*/
public static void setHttpServer(String info) {
httpServerInfo = info;
}
/**
* This method is replaced in native mode
*/
public static void mainStarted() {
}
public static void restart() {
bootStartTime = System.nanoTime();
}
public static void printStartupTime(String name, String version, String quarkusVersion, String features, String profile,
boolean liveCoding) {
final long bootTimeNanoSeconds = System.nanoTime() - bootStartTime;
final Logger logger = Logger.getLogger("io.quarkus");
final BigDecimal secondsRepresentation = convertToBigDecimalSeconds(bootTimeNanoSeconds);
String safeAppName = (name == null || name.trim().isEmpty()) ? UNSET_VALUE : name;
String safeAppVersion = (version == null || version.trim().isEmpty()) ? UNSET_VALUE : version;
if (UNSET_VALUE.equals(safeAppName) || UNSET_VALUE.equals(safeAppVersion)) {
logger.infof("Quarkus %s started in %ss. %s", quarkusVersion, secondsRepresentation, httpServerInfo);
} else {
logger.infof("%s %s (running on Quarkus %s) started in %ss. %s", name, version, quarkusVersion,
secondsRepresentation, httpServerInfo);
}
logger.infof("Profile %s activated. %s", profile, liveCoding ? "Live Coding activated." : "");
logger.infof("Installed features: [%s]", features);
bootStartTime = -1;
}
public static BigDecimal convertToBigDecimalSeconds(final long timeNanoSeconds) {
final BigDecimal secondsRepresentation = BigDecimal.valueOf(timeNanoSeconds)
.divide(BigDecimal.valueOf(1_000_000), BigDecimal.ROUND_HALF_UP)
.divide(BigDecimal.valueOf(1_000), 3, BigDecimal.ROUND_HALF_UP);
return secondsRepresentation;
}
} |
Shall we use a meaningful variable name? | private String getClassPath() {
List<Path> dependencies = new ArrayList<>();
dependencies.add(ProjectUtils.getBallerinaRTJarPath());
dependencies.addAll(ProjectUtils.testDependencies());
StringJoiner cp = new StringJoiner(File.pathSeparator);
dependencies.stream().map(Path::toString).forEach(cp::add);
return cp.toString();
} | StringJoiner cp = new StringJoiner(File.pathSeparator); | private String getClassPath() {
List<Path> dependencies = new ArrayList<>();
dependencies.add(ProjectUtils.getBallerinaRTJarPath());
dependencies.addAll(ProjectUtils.testDependencies());
StringJoiner classPath = new StringJoiner(File.pathSeparator);
dependencies.stream().map(Path::toString).forEach(classPath::add);
return classPath.toString();
} | class RunTestsTask implements Task {
private final PrintStream out;
private final PrintStream err;
private final List<String> args;
private final String includesInCoverage;
private List<String> groupList;
private List<String> disableGroupList;
private boolean report;
private boolean coverage;
private boolean isSingleTestExecution;
private boolean isRerunTestExecution;
private List<String> singleExecTests;
TestReport testReport;
public RunTestsTask(PrintStream out, PrintStream err, String[] args, String includes) {
this.out = out;
this.err = err;
this.args = Lists.of(args);
this.includesInCoverage = includes;
}
public RunTestsTask(PrintStream out, PrintStream err, String[] args, boolean rerunTests, List<String> groupList,
List<String> disableGroupList, List<String> testList, String includes) {
this.out = out;
this.err = err;
this.args = Lists.of(args);
this.isSingleTestExecution = false;
this.isRerunTestExecution = rerunTests;
if (this.isRerunTestExecution) {
testList = new ArrayList<>();
}
if (disableGroupList != null) {
this.disableGroupList = disableGroupList;
} else if (groupList != null) {
this.groupList = groupList;
}
if (testList != null) {
isSingleTestExecution = true;
singleExecTests = testList;
}
this.includesInCoverage = includes;
}
@Override
public void execute(Project project) {
try {
ProjectUtils.checkExecutePermission(project.sourceRoot());
} catch (ProjectException e) {
throw createLauncherException(e.getMessage());
}
filterTestGroups();
report = project.buildOptions().testReport();
coverage = project.buildOptions().codeCoverage();
if (report || coverage) {
testReport = new TestReport();
}
Path cachesRoot;
Target target;
Path testsCachePath;
try {
if (project.kind() == ProjectKind.BUILD_PROJECT) {
cachesRoot = project.sourceRoot();
} else {
cachesRoot = Files.createTempDirectory("ballerina-test-cache" + System.nanoTime());
}
target = new Target(cachesRoot);
testsCachePath = target.getTestsCachePath();
} catch (IOException e) {
throw createLauncherException("error while creating target directory: ", e);
}
boolean hasTests = false;
PackageCompilation packageCompilation = project.currentPackage().getCompilation();
JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JvmTarget.JAVA_11);
JarResolver jarResolver = jBallerinaBackend.jarResolver();
TestProcessor testProcessor = new TestProcessor(jarResolver);
List<String> moduleNamesList = new ArrayList<>();
Map<String, TestSuite> testSuiteMap = new HashMap<>();
for (ModuleId moduleId : project.currentPackage().moduleIds()) {
Module module = project.currentPackage().module(moduleId);
ModuleName moduleName = module.moduleName();
TestSuite suite = testProcessor.testSuite(module).orElse(null);
if (suite == null) {
continue;
} else if (isRerunTestExecution && suite.getTests().isEmpty()) {
continue;
} else if (isSingleTestExecution && suite.getTests().isEmpty()) {
continue;
}
if (!hasTests) {
hasTests = true;
}
if (isRerunTestExecution) {
singleExecTests = readFailedTestsFromFile(target.path());
}
if (isSingleTestExecution || isRerunTestExecution) {
suite.setTests(TesterinaUtils.getSingleExecutionTests(suite, singleExecTests));
}
suite.setReportRequired(report || coverage);
String resolvedModuleName =
module.isDefaultModule() ? moduleName.toString() : module.moduleName().moduleNamePart();
testSuiteMap.put(resolvedModuleName, suite);
moduleNamesList.add(resolvedModuleName);
}
writeToTestSuiteJson(testSuiteMap, testsCachePath);
int testResult;
if (hasTests) {
try {
testResult = runTestSuit(testsCachePath, target,
project.currentPackage().packageName().toString(),
project.currentPackage().packageOrg().toString());
if (report || coverage) {
for (String moduleName : moduleNamesList) {
ModuleStatus moduleStatus = loadModuleStatusFromFile(
testsCachePath.resolve(moduleName).resolve(TesterinaConstants.STATUS_FILE));
if (!moduleName.equals(project.currentPackage().packageName().toString())) {
moduleName = ModuleName.from(project.currentPackage().packageName(), moduleName).toString();
}
testReport.addModuleStatus(moduleName, moduleStatus);
}
try {
generateCoverage(project, jarResolver, jBallerinaBackend);
generateHtmlReport(project, this.out, testReport, target);
} catch (IOException e) {
cleanTempCache(project, cachesRoot);
throw createLauncherException("error occurred while generating test report :", e);
}
}
} catch (IOException | InterruptedException e) {
cleanTempCache(project, cachesRoot);
throw createLauncherException("error occurred while running tests", e);
}
if (testResult != 0) {
cleanTempCache(project, cachesRoot);
throw createLauncherException("there are test failures");
}
}
cleanTempCache(project, cachesRoot);
}
/**
 * Generates per-module coverage reports and folds them into the aggregated
 * test report. No-op unless code coverage was requested.
 *
 * @param project the current project.
 * @param jarResolver resolves the jars needed by the coverage report.
 * @param jBallerinaBackend backend used when generating the report.
 * @throws IOException if the coverage report cannot be generated.
 */
private void generateCoverage(Project project, JarResolver jarResolver, JBallerinaBackend jBallerinaBackend)
        throws IOException {
    if (!coverage) {
        return;
    }
    // Seed one empty ModuleCoverage per module so every module appears in the report.
    Map<String, ModuleCoverage> moduleCoverageMap = initializeCoverageMap(project);
    for (ModuleId moduleId : project.currentPackage().moduleIds()) {
        Module module = project.currentPackage().module(moduleId);
        CoverageReport coverageReport = new CoverageReport(module);
        coverageReport.generateReport(jarResolver, moduleCoverageMap, jBallerinaBackend);
    }
    // Fix: iterate with a typed Map.Entry instead of the raw type, which
    // required unchecked casts of both key and value in the original.
    for (Map.Entry<String, ModuleCoverage> entry : moduleCoverageMap.entrySet()) {
        testReport.addCoverage(entry.getKey(), entry.getValue());
    }
}
private void filterTestGroups() {
    // Register group filters with the Testerina singleton; an explicit
    // disable-list takes precedence over an include-list, as in the original.
    final TesterinaRegistry registry = TesterinaRegistry.getInstance();
    if (disableGroupList != null) {
        registry.setGroups(disableGroupList);
        registry.setShouldIncludeGroups(false);
        return;
    }
    if (groupList != null) {
        registry.setGroups(groupList);
        registry.setShouldIncludeGroups(true);
    }
}
/**
* Write the test report content into a json file.
*
* @param out PrintStream object to print messages to console
* @param testReport Data that are parsed to the json
*/
private void generateHtmlReport(Project project, PrintStream out, TestReport testReport, Target target)
throws IOException {
if (!report && !coverage) {
return;
}
if (testReport.getModuleStatus().size() <= 0) {
return;
}
out.println();
out.println("Generating Test Report");
Path reportDir = target.getReportPath();
String projectName;
if (project.kind() == ProjectKind.SINGLE_FILE_PROJECT) {
projectName = ProjectUtils.getJarFileName(project.currentPackage().getDefaultModule())
+ ProjectConstants.BLANG_SOURCE_EXT;
} else {
projectName = project.currentPackage().packageName().toString();
}
testReport.setProjectName(projectName);
testReport.finalizeTestResults(coverage);
Gson gson = new Gson();
String json = gson.toJson(testReport).replaceAll("\\\\\\(", "(");
File jsonFile = new File(reportDir.resolve(RESULTS_JSON_FILE).toString());
try (Writer writer = new OutputStreamWriter(new FileOutputStream(jsonFile), StandardCharsets.UTF_8)) {
writer.write(new String(json.getBytes(StandardCharsets.UTF_8), StandardCharsets.UTF_8));
out.println("\t" + jsonFile.getAbsolutePath() + "\n");
}
Path reportZipPath = Paths.get(System.getProperty(BALLERINA_HOME)).resolve(BALLERINA_HOME_LIB).
resolve(TesterinaConstants.TOOLS_DIR_NAME).resolve(TesterinaConstants.COVERAGE_DIR).
resolve(REPORT_ZIP_NAME);
if (Files.exists(reportZipPath)) {
String content;
try {
CodeCoverageUtils.unzipReportResources(new FileInputStream(reportZipPath.toFile()),
reportDir.toFile());
content = Files.readString(reportDir.resolve(RESULTS_HTML_FILE));
content = content.replace(REPORT_DATA_PLACEHOLDER, json);
} catch (IOException e) {
throw createLauncherException("error occurred while preparing test report: " + e.toString());
}
File htmlFile = new File(reportDir.resolve(RESULTS_HTML_FILE).toString());
try (Writer writer = new OutputStreamWriter(new FileOutputStream(htmlFile), StandardCharsets.UTF_8)) {
writer.write(new String(content.getBytes(StandardCharsets.UTF_8), StandardCharsets.UTF_8));
out.println("\tView the test report at: " +
FILE_PROTOCOL + Paths.get(htmlFile.getPath()).toAbsolutePath().normalize().toString());
}
} else {
String reportToolsPath = "<" + BALLERINA_HOME + ">" + File.separator + BALLERINA_HOME_LIB +
File.separator + TOOLS_DIR_NAME + File.separator + COVERAGE_DIR + File.separator +
REPORT_ZIP_NAME;
out.println("warning: Could not find the required HTML report tools for code coverage at "
+ reportToolsPath);
}
}
// Builds the command line for, and launches, a separate JVM that runs the
// Testerina launcher over the generated test-suite JSON; returns the child
// process's exit code (0 on success).
private int runTestSuit(Path testCachePath, Target target, String packageName, String orgName) throws IOException,
InterruptedException {
List<String> cmdArgs = new ArrayList<>();
cmdArgs.add(System.getProperty("java.command"));
String mainClassName = TesterinaConstants.TESTERINA_LAUNCHER_CLASS_NAME;
if (coverage) {
// Attach the JaCoCo agent so the forked JVM records execution data into
// the exec file under the tests cache.
String jacocoAgentJarPath = Paths.get(System.getProperty(BALLERINA_HOME)).resolve(BALLERINA_HOME_BRE)
.resolve(BALLERINA_HOME_LIB).resolve(TesterinaConstants.AGENT_FILE_NAME).toString();
String agentCommand = "-javaagent:"
+ jacocoAgentJarPath
+ "=destfile="
+ target.getTestsCachePath().resolve(TesterinaConstants.COVERAGE_DIR)
.resolve(TesterinaConstants.EXEC_FILE_NAME).toString();
if (!TesterinaConstants.DOT.equals(packageName) && this.includesInCoverage == null) {
// Default include filter: classes under the package's organization.
agentCommand += ",includes=" + orgName + ".*";
} else {
// Caller-supplied include filter takes over when provided.
agentCommand += ",includes=" + this.includesInCoverage;
}
cmdArgs.add(agentCommand);
}
cmdArgs.addAll(Lists.of("-cp", getClassPath()));
if (isInDebugMode()) {
// Forward the debug agent argument so the forked JVM can be attached to.
cmdArgs.add(getDebugArgs(this.err));
}
cmdArgs.add(mainClassName);
// Positional arguments consumed by the Testerina launcher main class.
cmdArgs.add(testCachePath.toString());
cmdArgs.add(target.path().toString());
cmdArgs.add(Boolean.toString(report));
cmdArgs.add(Boolean.toString(coverage));
cmdArgs.addAll(args);
// inheritIO() streams the forked JVM's output directly to this console.
ProcessBuilder processBuilder = new ProcessBuilder(cmdArgs).inheritIO();
Process proc = processBuilder.start();
return proc.waitFor();
}
/**
* Loads the ModuleStatus object by reading a given Json.
*
* @param statusJsonPath file path of json file
* @return ModuleStatus object
* @throws IOException if file does not exist
*/
private ModuleStatus loadModuleStatusFromFile(Path statusJsonPath) throws IOException {
    Gson gson = new Gson();
    // Fix: the original never closed this reader, leaking a file handle on
    // every call; try-with-resources guarantees it is closed.
    try (BufferedReader bufferedReader = Files.newBufferedReader(statusJsonPath, StandardCharsets.UTF_8)) {
        return gson.fromJson(bufferedReader, ModuleStatus.class);
    }
}
// Reads the list of previously failed test names from the rerun-test JSON
// file located under the given directory.
private List<String> readFailedTestsFromFile(Path rerunTestJsonPath) {
    final Path failedTestsFile = Paths.get(rerunTestJsonPath.toString(), RERUN_TEST_JSON_FILE);
    try (BufferedReader reader = Files.newBufferedReader(failedTestsFile, StandardCharsets.UTF_8)) {
        return new Gson().fromJson(reader, ArrayList.class);
    } catch (IOException e) {
        throw createLauncherException("error while running failed tests : ", e);
    }
}
private void cleanTempCache(Project project, Path cachesRoot) {
    // Only single-file projects use a throwaway cache directory worth deleting.
    if (project.kind() != ProjectKind.SINGLE_FILE_PROJECT) {
        return;
    }
    FileUtils.deleteDirectory(cachesRoot);
}
/**
* Initialize coverage map used for aggregating module wise coverage.
*
* @param project Project
* @return Map<String, ModuleCoverage>
*/
private Map<String, ModuleCoverage> initializeCoverageMap(Project project) {
    final Map<String, ModuleCoverage> coverageByModule = new HashMap<>();
    // Seed an empty coverage entry for every module in the current package.
    project.currentPackage().moduleIds().forEach(moduleId -> {
        Module module = project.currentPackage().module(moduleId);
        coverageByModule.put(module.moduleName().toString(), new ModuleCoverage());
    });
    return coverageByModule;
}
/**
* Write the content of each test suite into a common json.
*/
private static void writeToTestSuiteJson(Map<String, TestSuite> testSuiteMap, Path testsCachePath) {
    // Make sure the cache directory exists before writing into it.
    if (!Files.exists(testsCachePath)) {
        try {
            Files.createDirectories(testsCachePath);
        } catch (IOException e) {
            throw LauncherUtils.createLauncherException("couldn't create test suite : " + e.toString());
        }
    }
    // Fix: resolve against the Path instead of string-concatenating via
    // Paths.get, and write through Files.newBufferedWriter rather than a
    // manual FileOutputStream/OutputStreamWriter stack.
    final Path jsonFilePath = testsCachePath.resolve(TesterinaConstants.TESTERINA_TEST_SUITE);
    try (Writer writer = Files.newBufferedWriter(jsonFilePath, StandardCharsets.UTF_8)) {
        // Fix: the original re-encoded the JSON string through UTF-8 bytes and
        // back (a no-op); write the string directly.
        writer.write(new Gson().toJson(testSuiteMap));
    } catch (IOException e) {
        throw LauncherUtils.createLauncherException("couldn't write data to test suite file : " + e.toString());
    }
}
} | class RunTestsTask implements Task {
private final PrintStream out;
private final PrintStream err;
private final List<String> args;
private final String includesInCoverage;
private List<String> groupList;
private List<String> disableGroupList;
private boolean report;
private boolean coverage;
private boolean isSingleTestExecution;
private boolean isRerunTestExecution;
private List<String> singleExecTests;
TestReport testReport;
public RunTestsTask(PrintStream out, PrintStream err, String[] args, String includes) {
this.out = out;
this.err = err;
this.args = Lists.of(args);
this.includesInCoverage = includes;
}
public RunTestsTask(PrintStream out, PrintStream err, String[] args, boolean rerunTests, List<String> groupList,
List<String> disableGroupList, List<String> testList, String includes) {
this.out = out;
this.err = err;
this.args = Lists.of(args);
this.isSingleTestExecution = false;
this.isRerunTestExecution = rerunTests;
if (this.isRerunTestExecution) {
testList = new ArrayList<>();
}
if (disableGroupList != null) {
this.disableGroupList = disableGroupList;
} else if (groupList != null) {
this.groupList = groupList;
}
if (testList != null) {
isSingleTestExecution = true;
singleExecTests = testList;
}
this.includesInCoverage = includes;
}
@Override
public void execute(Project project) {
try {
ProjectUtils.checkExecutePermission(project.sourceRoot());
} catch (ProjectException e) {
throw createLauncherException(e.getMessage());
}
filterTestGroups();
report = project.buildOptions().testReport();
coverage = project.buildOptions().codeCoverage();
if (report || coverage) {
testReport = new TestReport();
}
Path cachesRoot;
Target target;
Path testsCachePath;
try {
if (project.kind() == ProjectKind.BUILD_PROJECT) {
cachesRoot = project.sourceRoot();
} else {
cachesRoot = Files.createTempDirectory("ballerina-test-cache" + System.nanoTime());
}
target = new Target(cachesRoot);
testsCachePath = target.getTestsCachePath();
} catch (IOException e) {
throw createLauncherException("error while creating target directory: ", e);
}
boolean hasTests = false;
PackageCompilation packageCompilation = project.currentPackage().getCompilation();
JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JvmTarget.JAVA_11);
JarResolver jarResolver = jBallerinaBackend.jarResolver();
TestProcessor testProcessor = new TestProcessor(jarResolver);
List<String> moduleNamesList = new ArrayList<>();
Map<String, TestSuite> testSuiteMap = new HashMap<>();
for (ModuleId moduleId : project.currentPackage().moduleIds()) {
Module module = project.currentPackage().module(moduleId);
ModuleName moduleName = module.moduleName();
TestSuite suite = testProcessor.testSuite(module).orElse(null);
if (suite == null) {
continue;
} else if (isRerunTestExecution && suite.getTests().isEmpty()) {
continue;
} else if (isSingleTestExecution && suite.getTests().isEmpty()) {
continue;
}
if (!hasTests) {
hasTests = true;
}
if (isRerunTestExecution) {
singleExecTests = readFailedTestsFromFile(target.path());
}
if (isSingleTestExecution || isRerunTestExecution) {
suite.setTests(TesterinaUtils.getSingleExecutionTests(suite, singleExecTests));
}
suite.setReportRequired(report || coverage);
String resolvedModuleName =
module.isDefaultModule() ? moduleName.toString() : module.moduleName().moduleNamePart();
testSuiteMap.put(resolvedModuleName, suite);
moduleNamesList.add(resolvedModuleName);
}
writeToTestSuiteJson(testSuiteMap, testsCachePath);
if (hasTests) {
int testResult;
try {
testResult = runTestSuit(testsCachePath, target,
project.currentPackage().packageName().toString(),
project.currentPackage().packageOrg().toString());
if (report || coverage) {
for (String moduleName : moduleNamesList) {
ModuleStatus moduleStatus = loadModuleStatusFromFile(
testsCachePath.resolve(moduleName).resolve(TesterinaConstants.STATUS_FILE));
if (!moduleName.equals(project.currentPackage().packageName().toString())) {
moduleName = ModuleName.from(project.currentPackage().packageName(), moduleName).toString();
}
testReport.addModuleStatus(moduleName, moduleStatus);
}
try {
generateCoverage(project, jarResolver, jBallerinaBackend);
generateHtmlReport(project, this.out, testReport, target);
} catch (IOException e) {
cleanTempCache(project, cachesRoot);
throw createLauncherException("error occurred while generating test report :", e);
}
}
} catch (IOException | InterruptedException e) {
cleanTempCache(project, cachesRoot);
throw createLauncherException("error occurred while running tests", e);
}
if (testResult != 0) {
cleanTempCache(project, cachesRoot);
throw createLauncherException("there are test failures");
}
}
cleanTempCache(project, cachesRoot);
}
private void generateCoverage(Project project, JarResolver jarResolver, JBallerinaBackend jBallerinaBackend)
throws IOException {
if (!coverage) {
return;
}
Map<String, ModuleCoverage> moduleCoverageMap = initializeCoverageMap(project);
for (ModuleId moduleId : project.currentPackage().moduleIds()) {
Module module = project.currentPackage().module(moduleId);
CoverageReport coverageReport = new CoverageReport(module);
coverageReport.generateReport(jarResolver, moduleCoverageMap, jBallerinaBackend);
}
for (Map.Entry mapElement : moduleCoverageMap.entrySet()) {
String moduleName = (String) mapElement.getKey();
ModuleCoverage moduleCoverage = (ModuleCoverage) mapElement.getValue();
testReport.addCoverage(moduleName, moduleCoverage);
}
}
private void filterTestGroups() {
TesterinaRegistry testerinaRegistry = TesterinaRegistry.getInstance();
if (disableGroupList != null) {
testerinaRegistry.setGroups(disableGroupList);
testerinaRegistry.setShouldIncludeGroups(false);
} else if (groupList != null) {
testerinaRegistry.setGroups(groupList);
testerinaRegistry.setShouldIncludeGroups(true);
}
}
/**
* Write the test report content into a json file.
*
* @param out PrintStream object to print messages to console
* @param testReport Data that are parsed to the json
*/
private void generateHtmlReport(Project project, PrintStream out, TestReport testReport, Target target)
throws IOException {
if (!report && !coverage) {
return;
}
if (testReport.getModuleStatus().size() <= 0) {
return;
}
out.println();
out.println("Generating Test Report");
Path reportDir = target.getReportPath();
String projectName;
if (project.kind() == ProjectKind.SINGLE_FILE_PROJECT) {
projectName = ProjectUtils.getJarFileName(project.currentPackage().getDefaultModule())
+ ProjectConstants.BLANG_SOURCE_EXT;
} else {
projectName = project.currentPackage().packageName().toString();
}
testReport.setProjectName(projectName);
testReport.finalizeTestResults(coverage);
Gson gson = new Gson();
String json = gson.toJson(testReport).replaceAll("\\\\\\(", "(");
File jsonFile = new File(reportDir.resolve(RESULTS_JSON_FILE).toString());
try (Writer writer = new OutputStreamWriter(new FileOutputStream(jsonFile), StandardCharsets.UTF_8)) {
writer.write(new String(json.getBytes(StandardCharsets.UTF_8), StandardCharsets.UTF_8));
out.println("\t" + jsonFile.getAbsolutePath() + "\n");
}
Path reportZipPath = Paths.get(System.getProperty(BALLERINA_HOME)).resolve(BALLERINA_HOME_LIB).
resolve(TesterinaConstants.TOOLS_DIR_NAME).resolve(TesterinaConstants.COVERAGE_DIR).
resolve(REPORT_ZIP_NAME);
if (Files.exists(reportZipPath)) {
String content;
try {
CodeCoverageUtils.unzipReportResources(new FileInputStream(reportZipPath.toFile()),
reportDir.toFile());
content = Files.readString(reportDir.resolve(RESULTS_HTML_FILE));
content = content.replace(REPORT_DATA_PLACEHOLDER, json);
} catch (IOException e) {
throw createLauncherException("error occurred while preparing test report: " + e.toString());
}
File htmlFile = new File(reportDir.resolve(RESULTS_HTML_FILE).toString());
try (Writer writer = new OutputStreamWriter(new FileOutputStream(htmlFile), StandardCharsets.UTF_8)) {
writer.write(new String(content.getBytes(StandardCharsets.UTF_8), StandardCharsets.UTF_8));
out.println("\tView the test report at: " +
FILE_PROTOCOL + Paths.get(htmlFile.getPath()).toAbsolutePath().normalize().toString());
}
} else {
String reportToolsPath = "<" + BALLERINA_HOME + ">" + File.separator + BALLERINA_HOME_LIB +
File.separator + TOOLS_DIR_NAME + File.separator + COVERAGE_DIR + File.separator +
REPORT_ZIP_NAME;
out.println("warning: Could not find the required HTML report tools for code coverage at "
+ reportToolsPath);
}
}
/**
 * Launches the Testerina test suite in a forked JVM process and waits for it to finish.
 *
 * @param testCachePath path to the cached test suite json handed to the launcher
 * @param target        build target supplying cache and output locations
 * @param packageName   name of the package under test
 * @param orgName       organization name used to scope coverage includes
 * @return the exit code of the forked test-runner process
 * @throws IOException          if the process cannot be started
 * @throws InterruptedException if the wait for the process is interrupted
 */
private int runTestSuit(Path testCachePath, Target target, String packageName, String orgName) throws IOException,
        InterruptedException {
    List<String> command = new ArrayList<>();
    command.add(System.getProperty("java.command"));
    String launcherClass = TesterinaConstants.TESTERINA_LAUNCHER_CLASS_NAME;
    if (coverage) {
        // Attach the JaCoCo agent so execution data is written into the test cache.
        String agentJar = Paths.get(System.getProperty(BALLERINA_HOME)).resolve(BALLERINA_HOME_BRE)
                .resolve(BALLERINA_HOME_LIB).resolve(TesterinaConstants.AGENT_FILE_NAME).toString();
        StringBuilder agentArg = new StringBuilder("-javaagent:")
                .append(agentJar)
                .append("=destfile=")
                .append(target.getTestsCachePath().resolve(TesterinaConstants.COVERAGE_DIR)
                        .resolve(TesterinaConstants.EXEC_FILE_NAME).toString());
        // Default to the organization's classes unless explicit includes were configured.
        if (!TesterinaConstants.DOT.equals(packageName) && this.includesInCoverage == null) {
            agentArg.append(",includes=").append(orgName).append(".*");
        } else {
            agentArg.append(",includes=").append(this.includesInCoverage);
        }
        command.add(agentArg.toString());
    }
    command.addAll(Lists.of("-cp", getClassPath()));
    if (isInDebugMode()) {
        command.add(getDebugArgs(this.err));
    }
    command.add(launcherClass);
    command.add(testCachePath.toString());
    command.add(target.path().toString());
    command.add(Boolean.toString(report));
    command.add(Boolean.toString(coverage));
    command.addAll(args);
    // Inherit stdio so test output streams straight to the user's console.
    Process process = new ProcessBuilder(command).inheritIO().start();
    return process.waitFor();
}
/**
 * Loads the ModuleStatus object by reading a given Json.
 *
 * @param statusJsonPath file path of json file
 * @return ModuleStatus object
 * @throws IOException if the file does not exist or cannot be read
 */
private ModuleStatus loadModuleStatusFromFile(Path statusJsonPath) throws IOException {
    Gson gson = new Gson();
    // Fix: the previous implementation never closed the reader, leaking a file
    // handle on every call; try-with-resources guarantees it is released.
    try (BufferedReader bufferedReader = Files.newBufferedReader(statusJsonPath, StandardCharsets.UTF_8)) {
        return gson.fromJson(bufferedReader, ModuleStatus.class);
    }
}
/**
 * Reads the names of previously failed tests from the rerun-test json file.
 *
 * @param rerunTestJsonPath directory that contains the rerun-test json file
 * @return the failed test names recorded in the file
 */
private List<String> readFailedTestsFromFile(Path rerunTestJsonPath) {
    Path jsonFile = Paths.get(rerunTestJsonPath.toString(), RERUN_TEST_JSON_FILE);
    Gson gson = new Gson();
    try (BufferedReader reader = Files.newBufferedReader(jsonFile, StandardCharsets.UTF_8)) {
        return gson.fromJson(reader, ArrayList.class);
    } catch (IOException e) {
        throw createLauncherException("error while running failed tests : ", e);
    }
}
/**
 * Removes the temporary cache directory that was created for a single-file project.
 * Projects of any other kind keep their caches untouched.
 *
 * @param project    project whose kind decides whether the cache is temporary
 * @param cachesRoot root directory of the caches to delete
 */
private void cleanTempCache(Project project, Path cachesRoot) {
    boolean isSingleFileProject = project.kind() == ProjectKind.SINGLE_FILE_PROJECT;
    if (isSingleFileProject) {
        FileUtils.deleteDirectory(cachesRoot);
    }
}
/**
 * Initialize coverage map used for aggregating module wise coverage.
 *
 * @param project project whose modules are seeded with empty coverage entries
 * @return map from module name to a fresh ModuleCoverage
 */
private Map<String, ModuleCoverage> initializeCoverageMap(Project project) {
    Map<String, ModuleCoverage> coverageByModule = new HashMap<>();
    // Seed every module of the current package with an empty coverage record.
    project.currentPackage().moduleIds().forEach(moduleId -> {
        Module module = project.currentPackage().module(moduleId);
        coverageByModule.put(module.moduleName().toString(), new ModuleCoverage());
    });
    return coverageByModule;
}
/**
 * Write the content of each test suite into a common json.
 *
 * @param testSuiteMap   test suites keyed by suite name
 * @param testsCachePath directory the suite json is written into
 */
private static void writeToTestSuiteJson(Map<String, TestSuite> testSuiteMap, Path testsCachePath) {
    try {
        // createDirectories is a no-op when the directory already exists, so the
        // previous Files.exists() pre-check was redundant.
        Files.createDirectories(testsCachePath);
    } catch (IOException e) {
        throw LauncherUtils.createLauncherException("couldn't create test cache directories : " + e.toString());
    }
    Path jsonFilePath = testsCachePath.resolve(TesterinaConstants.TESTERINA_TEST_SUITE);
    try (Writer writer = new OutputStreamWriter(new FileOutputStream(jsonFilePath.toFile()),
            StandardCharsets.UTF_8)) {
        // Fix: write the json directly; the old code did a pointless
        // new String(json.getBytes(UTF_8), UTF_8) encode/decode round-trip.
        Gson gson = new Gson();
        writer.write(gson.toJson(testSuiteMap));
    } catch (IOException e) {
        throw LauncherUtils.createLauncherException("couldn't write data to test suite file : " + e.toString());
    }
}
} |
Let's address them here: https://github.com/ballerina-platform/ballerina-lang/issues/35757 | public static BString toExpString(double x, Object fractionDigits) {
BString str = FloatUtils.getBStringIfInfiniteOrNaN(x);
if (str != null) {
return str;
}
long noOfFractionDigits;
double xAbsValue = Math.abs(x);
if (fractionDigits == null) {
if (xAbsValue == 0) {
noOfFractionDigits = 1;
} else {
int integerPart = (int) (Math.log10(xAbsValue));
noOfFractionDigits = BigDecimal.valueOf(xAbsValue / Math.pow(10, integerPart)).scale();
}
} else {
noOfFractionDigits = (long) fractionDigits;
}
if (noOfFractionDigits < 0) {
throw ErrorUtils.createInvalidFractionDigitsError();
}
if (FloatUtils.checkFractionDigitsWithinRange(noOfFractionDigits)) {
noOfFractionDigits = 308;
}
int exponent = (int) noOfFractionDigits;
int tens = 0;
if (xAbsValue != 0 && xAbsValue < 1) {
double multipliedValue = xAbsValue;
while (multipliedValue < 1) {
multipliedValue = xAbsValue * Math.pow(10, tens + 1);
tens++;
}
}
BigDecimal numberBigDecimal = new BigDecimal(x);
if (xAbsValue != 0 && xAbsValue < 1) {
numberBigDecimal = numberBigDecimal.setScale(exponent + tens, RoundingMode.HALF_EVEN);
} else {
numberBigDecimal = numberBigDecimal.setScale(exponent, RoundingMode.HALF_EVEN);
}
String power = "0".repeat(exponent);
DecimalFormat decimalFormat = new DecimalFormat("0." + power + "E0");
decimalFormat.setRoundingMode(RoundingMode.HALF_EVEN);
String res = decimalFormat.format(numberBigDecimal);
int indexOfExp = res.lastIndexOf("E");
String firstSection = res.substring(0, indexOfExp);
int idxOfDecimalPoint = firstSection.lastIndexOf(".");
if (idxOfDecimalPoint == firstSection.length() - 1) {
firstSection = res.substring(0, idxOfDecimalPoint);
}
String secondSection = res.substring(indexOfExp + 1);
int p = Integer.parseInt(secondSection);
if (p >= 0) {
secondSection = "e+" + secondSection;
} else {
secondSection = "e" + secondSection;
}
return StringUtils.fromString(firstSection + secondSection);
} | throw ErrorUtils.createInvalidFractionDigitsError(); | public static BString toExpString(double x, Object fractionDigits) {
BString str = FloatUtils.getBStringIfInfiniteOrNaN(x);
if (str != null) {
return str;
}
long noOfFractionDigits;
double xAbsValue = Math.abs(x);
if (fractionDigits == null) {
if (xAbsValue == 0) {
noOfFractionDigits = 1;
} else {
int integerPart = (int) (Math.log10(xAbsValue));
noOfFractionDigits = BigDecimal.valueOf(xAbsValue / Math.pow(10, integerPart)).scale();
}
} else {
noOfFractionDigits = (long) fractionDigits;
}
if (noOfFractionDigits < 0) {
throw ErrorUtils.createInvalidFractionDigitsError();
}
if (FloatUtils.checkFractionDigitsWithinRange(noOfFractionDigits)) {
noOfFractionDigits = 308;
}
int exponent = (int) noOfFractionDigits;
int tens = 0;
if (xAbsValue != 0 && xAbsValue < 1) {
double multipliedValue = xAbsValue;
while (multipliedValue < 1) {
multipliedValue = xAbsValue * Math.pow(10, tens + 1);
tens++;
}
}
BigDecimal numberBigDecimal = new BigDecimal(x);
if (xAbsValue != 0 && xAbsValue < 1) {
numberBigDecimal = numberBigDecimal.setScale(exponent + tens, RoundingMode.HALF_EVEN);
} else {
numberBigDecimal = numberBigDecimal.setScale(exponent, RoundingMode.HALF_EVEN);
}
String power = "0".repeat(exponent);
DecimalFormat decimalFormat = new DecimalFormat("0." + power + "E0");
decimalFormat.setRoundingMode(RoundingMode.HALF_EVEN);
String res = decimalFormat.format(numberBigDecimal);
int indexOfExp = res.lastIndexOf("E");
String firstSection = res.substring(0, indexOfExp);
int idxOfDecimalPoint = firstSection.lastIndexOf(".");
if (idxOfDecimalPoint == firstSection.length() - 1) {
firstSection = res.substring(0, idxOfDecimalPoint);
}
String secondSection = res.substring(indexOfExp + 1);
int p = Integer.parseInt(secondSection);
if (p >= 0) {
secondSection = "e+" + secondSection;
} else {
secondSection = "e" + secondSection;
}
return StringUtils.fromString(firstSection + secondSection);
} | class ToExpString {
} | class ToExpString {
} |
why not just output to the `outputStream`? So that we don't need to buffer a lot of content in memory? | public void run() {
BufferedReader outReader = null;
String line = null;
long startTime = System.currentTimeMillis();
try {
outReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
while (!isStop && (line = outReader.readLine()) != null) {
outputBuffer.append(line + '\n');
SparkLoadAppHandle.State oldState = handle.getState();
SparkLoadAppHandle.State newState = oldState;
if (line.contains(STATE)) {
String state = regexGetState(line);
if (state != null) {
YarnApplicationState yarnState = YarnApplicationState.valueOf(state);
newState = fromYarnState(yarnState);
if (newState != oldState) {
handle.setState(newState);
}
}
String appId = regexGetAppId(line);
if (appId != null) {
if (!appId.equals(handle.getAppId())) {
handle.setAppId(appId);
}
}
LOG.debug("spark appId that handle get is {}, state: {}", handle.getAppId(), handle.getState().toString());
switch (newState) {
case UNKNOWN:
case CONNECTED:
case SUBMITTED:
if (System.currentTimeMillis() - startTime > submitTimeoutMs) {
isStop = true;
handle.kill();
}
break;
case RUNNING:
case FINISHED:
isStop = true;
break;
case KILLED:
case FAILED:
case LOST:
isStop = true;
handle.kill();
break;
default:
Preconditions.checkState(false, "wrong spark app state");
}
}
else if (line.contains(QUEUE) || line.contains(START_TIME) || line.contains(FINAL_STATUS) ||
line.contains(URL) || line.contains(USER)) {
String value = getValue(line);
if (!Strings.isNullOrEmpty(value)) {
try {
if (line.contains(QUEUE)) {
handle.setQueue(value);
} else if (line.contains(START_TIME)) {
handle.setStartTime(Long.parseLong(value));
} else if (line.contains(FINAL_STATUS)) {
handle.setFinalStatus(FinalApplicationStatus.valueOf(value));
} else if (line.contains(URL)) {
handle.setUrl(value);
} else if (line.contains(USER)) {
handle.setUser(value);
}
} catch (IllegalArgumentException e) {
LOG.warn("parse log encounter an error, line: {}, msg: {}", line, e.getMessage());
}
}
}
}
if (outputStream != null) {
outputStream.write(outputBuffer.toString().getBytes());
}
} catch (Exception e) {
LOG.warn("Exception monitoring process.", e);
} finally {
try {
if (outReader != null) {
outReader.close();
}
if (outputStream != null) {
outputStream.close();
}
} catch (IOException e) {
LOG.warn("close buffered reader error", e);
}
}
} | outputBuffer.append(line + '\n'); | public void run() {
BufferedReader outReader = null;
String line = null;
long startTime = System.currentTimeMillis();
try {
outReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
while (!isStop && (line = outReader.readLine()) != null) {
if (outputStream != null) {
outputStream.write((line + "\n").getBytes());
}
SparkLoadAppHandle.State oldState = handle.getState();
SparkLoadAppHandle.State newState = oldState;
if (line.contains(STATE)) {
String state = regexGetState(line);
if (state != null) {
YarnApplicationState yarnState = YarnApplicationState.valueOf(state);
newState = fromYarnState(yarnState);
if (newState != oldState) {
handle.setState(newState);
}
}
String appId = regexGetAppId(line);
if (appId != null) {
if (!appId.equals(handle.getAppId())) {
handle.setAppId(appId);
}
}
LOG.debug("spark appId that handle get is {}, state: {}", handle.getAppId(), handle.getState().toString());
switch (newState) {
case UNKNOWN:
case CONNECTED:
case SUBMITTED:
if (System.currentTimeMillis() - startTime > submitTimeoutMs) {
isStop = true;
handle.kill();
}
break;
case RUNNING:
case FINISHED:
isStop = true;
break;
case KILLED:
case FAILED:
case LOST:
isStop = true;
handle.kill();
break;
default:
Preconditions.checkState(false, "wrong spark app state");
}
}
else if (line.contains(QUEUE) || line.contains(START_TIME) || line.contains(FINAL_STATUS) ||
line.contains(URL) || line.contains(USER)) {
String value = getValue(line);
if (!Strings.isNullOrEmpty(value)) {
try {
if (line.contains(QUEUE)) {
handle.setQueue(value);
} else if (line.contains(START_TIME)) {
handle.setStartTime(Long.parseLong(value));
} else if (line.contains(FINAL_STATUS)) {
handle.setFinalStatus(FinalApplicationStatus.valueOf(value));
} else if (line.contains(URL)) {
handle.setUrl(value);
} else if (line.contains(USER)) {
handle.setUser(value);
}
} catch (IllegalArgumentException e) {
LOG.warn("parse log encounter an error, line: {}, msg: {}", line, e.getMessage());
}
}
}
}
} catch (Exception e) {
LOG.warn("Exception monitoring process.", e);
} finally {
try {
if (outReader != null) {
outReader.close();
}
if (outputStream != null) {
outputStream.close();
}
} catch (IOException e) {
LOG.warn("close buffered reader error", e);
}
}
} | class LogMonitor extends Thread {
private final Process process;
private SparkLoadAppHandle handle;
private long submitTimeoutMs;
private boolean isStop;
private StringBuffer outputBuffer;
private OutputStream outputStream;
private static final String STATE = "state";
private static final String QUEUE = "queue";
private static final String START_TIME = "start time";
private static final String FINAL_STATUS = "final status";
private static final String URL = "tracking URL";
private static final String USER = "user";
private static final long DEFAULT_SUBMIT_TIMEOUT_MS = 300000L;
public LogMonitor(SparkLoadAppHandle handle) {
this.handle = handle;
this.process = handle.getProcess();
this.isStop = false;
this.outputBuffer = new StringBuffer();
setSubmitTimeoutMs(DEFAULT_SUBMIT_TIMEOUT_MS);
}
public void setSubmitTimeoutMs(long submitTimeoutMs) {
this.submitTimeoutMs = submitTimeoutMs;
}
public void setRedirectLogPath(String redirectLogPath) throws IOException {
this.outputStream = new FileOutputStream(new File(redirectLogPath), false);
this.handle.setLogPath(redirectLogPath);
}
@Override
private static String getValue(String line) {
String result = null;
List<String> entry = Splitter.onPattern(":").trimResults().limit(2).splitToList(line);
if (entry.size() == 2) {
result = entry.get(1);
}
return result;
}
private static String regexGetState(String line) {
String result = null;
Matcher stateMatcher = Pattern.compile("(?<=\\(state: )(.+?)(?=\\))").matcher(line);
if (stateMatcher.find()) {
result = stateMatcher.group();
}
return result;
}
private static String regexGetAppId(String line) {
String result = null;
Matcher appIdMatcher = Pattern.compile("application_[0-9]+_[0-9]+").matcher(line);
if (appIdMatcher.find()) {
result = appIdMatcher.group();
}
return result;
}
} | class LogMonitor extends Thread {
private final Process process;
private SparkLoadAppHandle handle;
private long submitTimeoutMs;
private boolean isStop;
private OutputStream outputStream;
private static final String STATE = "state";
private static final String QUEUE = "queue";
private static final String START_TIME = "start time";
private static final String FINAL_STATUS = "final status";
private static final String URL = "tracking URL";
private static final String USER = "user";
private static final long DEFAULT_SUBMIT_TIMEOUT_MS = 300000L;
public LogMonitor(SparkLoadAppHandle handle) {
this.handle = handle;
this.process = handle.getProcess();
this.isStop = false;
setSubmitTimeoutMs(DEFAULT_SUBMIT_TIMEOUT_MS);
}
public void setSubmitTimeoutMs(long submitTimeoutMs) {
this.submitTimeoutMs = submitTimeoutMs;
}
public void setRedirectLogPath(String redirectLogPath) throws IOException {
this.outputStream = new FileOutputStream(new File(redirectLogPath), false);
this.handle.setLogPath(redirectLogPath);
}
@Override
private static String getValue(String line) {
String result = null;
List<String> entry = Splitter.onPattern(":").trimResults().limit(2).splitToList(line);
if (entry.size() == 2) {
result = entry.get(1);
}
return result;
}
private static String regexGetState(String line) {
String result = null;
Matcher stateMatcher = Pattern.compile("(?<=\\(state: )(.+?)(?=\\))").matcher(line);
if (stateMatcher.find()) {
result = stateMatcher.group();
}
return result;
}
private static String regexGetAppId(String line) {
String result = null;
Matcher appIdMatcher = Pattern.compile("application_[0-9]+_[0-9]+").matcher(line);
if (appIdMatcher.find()) {
result = appIdMatcher.group();
}
return result;
}
} |
use the min watermark as the initial watermark? | public void initializeState(StateInitializationContext context) throws Exception {
super.initializeState(context);
ListStateDescriptor<Long> watermarkStateDesc =
new ListStateDescriptor<>("watermark", LongSerializer.INSTANCE);
this.watermarkState = context.getOperatorStateStore().getListState(watermarkStateDesc);
if (context.isRestored()) {
this.currentWatermark = this.watermarkState.get().iterator().next();
}
} | this.currentWatermark = this.watermarkState.get().iterator().next(); | public void initializeState(StateInitializationContext context) throws Exception {
super.initializeState(context);
ListStateDescriptor<Long> watermarkStateDesc =
new ListStateDescriptor<>("watermark", LongSerializer.INSTANCE);
this.watermarkState = context.getOperatorStateStore().getUnionListState(watermarkStateDesc);
if (context.isRestored()) {
Iterable<Long> watermarks = watermarkState.get();
if (watermarks != null) {
Long minWatermark = Long.MAX_VALUE;
for (Long watermark : watermarks) {
minWatermark = Math.min(watermark, minWatermark);
}
if (minWatermark != Long.MAX_VALUE) {
this.currentWatermark = minWatermark;
}
}
}
} | class SlicingWindowOperator<K, W> extends TableStreamOperator<RowData>
implements OneInputStreamOperator<RowData, RowData>, Triggerable<K, W>, KeyContext {
private static final long serialVersionUID = 1L;
private static final String LATE_ELEMENTS_DROPPED_METRIC_NAME = "numLateRecordsDropped";
private static final String LATE_ELEMENTS_DROPPED_RATE_METRIC_NAME = "lateRecordsDroppedRate";
private static final String WATERMARK_LATENCY_METRIC_NAME = "watermarkLatency";
/** The concrete window operator implementation. */
private final SlicingWindowProcessor<W> windowProcessor;
/** This is used for emitting elements with a given timestamp. */
protected transient TimestampedCollector<RowData> collector;
/** The service to register timers. */
private transient InternalTimerService<W> internalTimerService;
/** The tracked processing time triggered last time. */
private transient long lastTriggeredProcessingTime;
/** The operator state to store watermark. */
private ListState<Long> watermarkState;
private transient Counter numLateRecordsDropped;
private transient Meter lateRecordsDroppedRate;
private transient Gauge<Long> watermarkLatency;
public SlicingWindowOperator(SlicingWindowProcessor<W> windowProcessor) {
this.windowProcessor = windowProcessor;
setChainingStrategy(ChainingStrategy.ALWAYS);
}
@Override
public void open() throws Exception {
super.open();
lastTriggeredProcessingTime = Long.MIN_VALUE;
collector = new TimestampedCollector<>(output);
collector.eraseTimestamp();
internalTimerService =
getInternalTimerService(
"window-timers", windowProcessor.createWindowSerializer(), this);
windowProcessor.open(
new WindowProcessorContext<>(
getContainingTask(),
getContainingTask().getEnvironment().getMemoryManager(),
computeMemorySize(),
internalTimerService,
getKeyedStateBackend(),
collector,
getRuntimeContext()));
windowProcessor.advanceProgress(currentWatermark);
this.numLateRecordsDropped = metrics.counter(LATE_ELEMENTS_DROPPED_METRIC_NAME);
this.lateRecordsDroppedRate =
metrics.meter(
LATE_ELEMENTS_DROPPED_RATE_METRIC_NAME,
new MeterView(numLateRecordsDropped));
this.watermarkLatency =
metrics.gauge(
WATERMARK_LATENCY_METRIC_NAME,
() -> {
long watermark = internalTimerService.currentWatermark();
if (watermark < 0) {
return 0L;
} else {
return internalTimerService.currentProcessingTime() - watermark;
}
});
}
@Override
@Override
public void snapshotState(StateSnapshotContext context) throws Exception {
super.snapshotState(context);
this.watermarkState.clear();
this.watermarkState.add(currentWatermark);
}
@Override
public void close() throws Exception {
super.close();
collector = null;
windowProcessor.close();
}
@Override
public void processElement(StreamRecord<RowData> element) throws Exception {
RowData inputRow = element.getValue();
RowData currentKey = (RowData) getCurrentKey();
boolean isElementDropped = windowProcessor.processElement(currentKey, inputRow);
if (isElementDropped) {
lateRecordsDroppedRate.markEvent();
}
}
@Override
/**
 * Handles an incoming watermark, guarding against event-time regression.
 * Only a strictly larger timestamp advances the window processor; otherwise the
 * current watermark is re-emitted so downstream operators never see time move backwards.
 */
public void processWatermark(Watermark mark) throws Exception {
if (mark.getTimestamp() > currentWatermark) {
windowProcessor.advanceProgress(mark.getTimestamp());
// super.processWatermark(mark) forwards the watermark downstream and presumably
// updates currentWatermark -- TODO confirm against TableStreamOperator.
super.processWatermark(mark);
} else {
// Late/duplicate watermark: re-emit the current one unchanged.
super.processWatermark(new Watermark(currentWatermark));
}
}
@Override
public void onEventTime(InternalTimer<K, W> timer) throws Exception {
onTimer(timer);
}
@Override
/**
 * Processing-time timer callback: advances progress at most once per distinct
 * timestamp, then fires and clears the window owned by the timer.
 */
public void onProcessingTime(InternalTimer<K, W> timer) throws Exception {
// Multiple timers may share a timestamp; only a strictly newer one advances
// progress, so windowProcessor.advanceProgress is not invoked redundantly.
if (timer.getTimestamp() > lastTriggeredProcessingTime) {
lastTriggeredProcessingTime = timer.getTimestamp();
windowProcessor.advanceProgress(timer.getTimestamp());
}
onTimer(timer);
}
private void onTimer(InternalTimer<K, W> timer) throws Exception {
setCurrentKey(timer.getKey());
W window = timer.getNamespace();
windowProcessor.fireWindow(window);
windowProcessor.clearWindow(window);
}
@Override
public void prepareSnapshotPreBarrier(long checkpointId) throws Exception {
windowProcessor.prepareCheckpoint();
}
/** Context implementation for {@link SlicingWindowProcessor.Context}. */
private static final class WindowProcessorContext<W>
implements SlicingWindowProcessor.Context<W> {
private final Object operatorOwner;
private final MemoryManager memoryManager;
private final long memorySize;
private final InternalTimerService<W> timerService;
private final KeyedStateBackend<RowData> keyedStateBackend;
private final Output<RowData> collector;
private final RuntimeContext runtimeContext;
private WindowProcessorContext(
Object operatorOwner,
MemoryManager memoryManager,
long memorySize,
InternalTimerService<W> timerService,
KeyedStateBackend<RowData> keyedStateBackend,
Output<RowData> collector,
RuntimeContext runtimeContext) {
this.operatorOwner = operatorOwner;
this.memoryManager = memoryManager;
this.memorySize = memorySize;
this.timerService = timerService;
this.keyedStateBackend = checkNotNull(keyedStateBackend);
this.collector = checkNotNull(collector);
this.runtimeContext = checkNotNull(runtimeContext);
}
@Override
public Object getOperatorOwner() {
return operatorOwner;
}
@Override
public MemoryManager getMemoryManager() {
return memoryManager;
}
@Override
public long getMemorySize() {
return memorySize;
}
@Override
public KeyedStateBackend<RowData> getKeyedStateBackend() {
return keyedStateBackend;
}
@Override
public InternalTimerService<W> getTimerService() {
return timerService;
}
@Override
public void output(RowData result) {
collector.collect(result);
}
@Override
public RuntimeContext getRuntimeContext() {
return runtimeContext;
}
}
@VisibleForTesting
public Counter getNumLateRecordsDropped() {
return numLateRecordsDropped;
}
@VisibleForTesting
public Gauge<Long> getWatermarkLatency() {
return watermarkLatency;
}
} | class SlicingWindowOperator<K, W> extends TableStreamOperator<RowData>
implements OneInputStreamOperator<RowData, RowData>, Triggerable<K, W>, KeyContext {
private static final long serialVersionUID = 1L;
private static final String LATE_ELEMENTS_DROPPED_METRIC_NAME = "numLateRecordsDropped";
private static final String LATE_ELEMENTS_DROPPED_RATE_METRIC_NAME = "lateRecordsDroppedRate";
private static final String WATERMARK_LATENCY_METRIC_NAME = "watermarkLatency";
/** The concrete window operator implementation. */
private final SlicingWindowProcessor<W> windowProcessor;
/** This is used for emitting elements with a given timestamp. */
protected transient TimestampedCollector<RowData> collector;
/** The service to register timers. */
private transient InternalTimerService<W> internalTimerService;
/** The tracked processing time triggered last time. */
private transient long lastTriggeredProcessingTime;
/** The operator state to store watermark. */
private transient ListState<Long> watermarkState;
private transient Counter numLateRecordsDropped;
private transient Meter lateRecordsDroppedRate;
private transient Gauge<Long> watermarkLatency;
public SlicingWindowOperator(SlicingWindowProcessor<W> windowProcessor) {
this.windowProcessor = windowProcessor;
setChainingStrategy(ChainingStrategy.ALWAYS);
}
@Override
public void open() throws Exception {
super.open();
lastTriggeredProcessingTime = Long.MIN_VALUE;
collector = new TimestampedCollector<>(output);
collector.eraseTimestamp();
internalTimerService =
getInternalTimerService(
"window-timers", windowProcessor.createWindowSerializer(), this);
windowProcessor.open(
new WindowProcessorContext<>(
getContainingTask(),
getContainingTask().getEnvironment().getMemoryManager(),
computeMemorySize(),
internalTimerService,
getKeyedStateBackend(),
collector,
getRuntimeContext()));
windowProcessor.initializeWatermark(currentWatermark);
this.numLateRecordsDropped = metrics.counter(LATE_ELEMENTS_DROPPED_METRIC_NAME);
this.lateRecordsDroppedRate =
metrics.meter(
LATE_ELEMENTS_DROPPED_RATE_METRIC_NAME,
new MeterView(numLateRecordsDropped));
this.watermarkLatency =
metrics.gauge(
WATERMARK_LATENCY_METRIC_NAME,
() -> {
long watermark = internalTimerService.currentWatermark();
if (watermark < 0) {
return 0L;
} else {
return internalTimerService.currentProcessingTime() - watermark;
}
});
}
@Override
@Override
public void snapshotState(StateSnapshotContext context) throws Exception {
super.snapshotState(context);
this.watermarkState.clear();
this.watermarkState.add(currentWatermark);
}
@Override
public void close() throws Exception {
super.close();
collector = null;
windowProcessor.close();
}
@Override
public void processElement(StreamRecord<RowData> element) throws Exception {
RowData inputRow = element.getValue();
RowData currentKey = (RowData) getCurrentKey();
boolean isElementDropped = windowProcessor.processElement(currentKey, inputRow);
if (isElementDropped) {
lateRecordsDroppedRate.markEvent();
}
}
@Override
public void processWatermark(Watermark mark) throws Exception {
if (mark.getTimestamp() > currentWatermark) {
windowProcessor.advanceProgress(mark.getTimestamp());
super.processWatermark(mark);
} else {
super.processWatermark(new Watermark(currentWatermark));
}
}
@Override
public void onEventTime(InternalTimer<K, W> timer) throws Exception {
onTimer(timer);
}
@Override
public void onProcessingTime(InternalTimer<K, W> timer) throws Exception {
if (timer.getTimestamp() > lastTriggeredProcessingTime) {
lastTriggeredProcessingTime = timer.getTimestamp();
windowProcessor.advanceProgress(timer.getTimestamp());
}
onTimer(timer);
}
private void onTimer(InternalTimer<K, W> timer) throws Exception {
setCurrentKey(timer.getKey());
W window = timer.getNamespace();
windowProcessor.fireWindow(window);
windowProcessor.clearWindow(window);
}
@Override
public void prepareSnapshotPreBarrier(long checkpointId) throws Exception {
windowProcessor.prepareCheckpoint();
}
/** Context implementation for {@link SlicingWindowProcessor.Context}. */
private static final class WindowProcessorContext<W>
implements SlicingWindowProcessor.Context<W> {
private final Object operatorOwner;
private final MemoryManager memoryManager;
private final long memorySize;
private final InternalTimerService<W> timerService;
private final KeyedStateBackend<RowData> keyedStateBackend;
private final Output<RowData> collector;
private final RuntimeContext runtimeContext;
private WindowProcessorContext(
Object operatorOwner,
MemoryManager memoryManager,
long memorySize,
InternalTimerService<W> timerService,
KeyedStateBackend<RowData> keyedStateBackend,
Output<RowData> collector,
RuntimeContext runtimeContext) {
this.operatorOwner = operatorOwner;
this.memoryManager = memoryManager;
this.memorySize = memorySize;
this.timerService = timerService;
this.keyedStateBackend = checkNotNull(keyedStateBackend);
this.collector = checkNotNull(collector);
this.runtimeContext = checkNotNull(runtimeContext);
}
@Override
public Object getOperatorOwner() {
return operatorOwner;
}
@Override
public MemoryManager getMemoryManager() {
return memoryManager;
}
@Override
public long getMemorySize() {
return memorySize;
}
@Override
public KeyedStateBackend<RowData> getKeyedStateBackend() {
return keyedStateBackend;
}
@Override
public InternalTimerService<W> getTimerService() {
return timerService;
}
@Override
public void output(RowData result) {
collector.collect(result);
}
@Override
public RuntimeContext getRuntimeContext() {
return runtimeContext;
}
}
@VisibleForTesting
public Counter getNumLateRecordsDropped() {
return numLateRecordsDropped;
}
@VisibleForTesting
public Gauge<Long> getWatermarkLatency() {
return watermarkLatency;
}
} |
What is the default timeout? Could we make sure it doesn't wait too much? | public void testContextPropagationInFaultTolerance() {
try {
RestAssured.defaultParser = Parser.TEXT;
Response response = RestAssured.when().get("/faultTolerance");
response.then().statusCode(200);
Assertions.assertEquals("fallback", response.body().asString());
Awaitility.await().until(() -> mockTracer.finishedSpans().size() == 5);
List<MockSpan> spans = mockTracer.finishedSpans();
Assertions.assertEquals(5, spans.size());
for (MockSpan mockSpan : spans) {
Assertions.assertEquals(spans.get(0).context().traceId(), mockSpan.context().traceId());
}
Assertions.assertEquals("ft", mockTracer.finishedSpans().get(0).operationName());
Assertions.assertEquals("ft", mockTracer.finishedSpans().get(1).operationName());
Assertions.assertEquals("ft", mockTracer.finishedSpans().get(2).operationName());
Assertions.assertEquals("io.quarkus.smallrye.opentracing.deployment.Service.fallback",
mockTracer.finishedSpans().get(3).operationName());
Assertions.assertEquals("io.quarkus.smallrye.opentracing.deployment.Service.faultTolerance",
mockTracer.finishedSpans().get(4).operationName());
} finally {
RestAssured.reset();
}
} | Awaitility.await().until(() -> mockTracer.finishedSpans().size() == 5); | public void testContextPropagationInFaultTolerance() {
try {
RestAssured.defaultParser = Parser.TEXT;
Response response = RestAssured.when().get("/faultTolerance");
response.then().statusCode(200);
Assertions.assertEquals("fallback", response.body().asString());
Awaitility.await().atMost(5, TimeUnit.SECONDS)
.until(() -> mockTracer.finishedSpans().size() == 5);
List<MockSpan> spans = mockTracer.finishedSpans();
Assertions.assertEquals(5, spans.size());
for (MockSpan mockSpan : spans) {
Assertions.assertEquals(spans.get(0).context().traceId(), mockSpan.context().traceId());
}
Assertions.assertEquals("ft", mockTracer.finishedSpans().get(0).operationName());
Assertions.assertEquals("ft", mockTracer.finishedSpans().get(1).operationName());
Assertions.assertEquals("ft", mockTracer.finishedSpans().get(2).operationName());
Assertions.assertEquals("io.quarkus.smallrye.opentracing.deployment.Service.fallback",
mockTracer.finishedSpans().get(3).operationName());
Assertions.assertEquals("io.quarkus.smallrye.opentracing.deployment.Service.faultTolerance",
mockTracer.finishedSpans().get(4).operationName());
} finally {
RestAssured.reset();
}
} | class TracingTest {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClass(TestResource.class)
.addClass(TracerRegistrar.class)
.addClass(Service.class)
.addClass(RestService.class)
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"));
static MockTracer mockTracer = new MockTracer();
@AfterEach
public void after() {
mockTracer.reset();
}
@AfterAll
public static void afterAll() {
GlobalTracerTestUtil.resetGlobalTracer();
}
@Test
public void testSingleServerRequest() {
try {
RestAssured.defaultParser = Parser.TEXT;
RestAssured.when().get("/hello")
.then()
.statusCode(200);
Assertions.assertEquals(1, mockTracer.finishedSpans().size());
Assertions.assertEquals("GET:io.quarkus.smallrye.opentracing.deployment.TestResource.hello",
mockTracer.finishedSpans().get(0).operationName());
} finally {
RestAssured.reset();
}
}
@Test
public void testCDI() {
try {
RestAssured.defaultParser = Parser.TEXT;
RestAssured.when().get("/cdi")
.then()
.statusCode(200);
Assertions.assertEquals(2, mockTracer.finishedSpans().size());
Assertions.assertEquals("io.quarkus.smallrye.opentracing.deployment.Service.foo",
mockTracer.finishedSpans().get(0).operationName());
Assertions.assertEquals("GET:io.quarkus.smallrye.opentracing.deployment.TestResource.cdi",
mockTracer.finishedSpans().get(1).operationName());
} finally {
RestAssured.reset();
}
}
@Test
public void testMPRestClient() {
try {
RestAssured.defaultParser = Parser.TEXT;
RestAssured.when().get("/restClient")
.then()
.statusCode(200);
Assertions.assertEquals(3, mockTracer.finishedSpans().size());
Assertions.assertEquals("GET:io.quarkus.smallrye.opentracing.deployment.TestResource.hello",
mockTracer.finishedSpans().get(0).operationName());
Assertions.assertEquals("GET", mockTracer.finishedSpans().get(1).operationName());
Assertions.assertEquals("GET:io.quarkus.smallrye.opentracing.deployment.TestResource.restClient",
mockTracer.finishedSpans().get(2).operationName());
} finally {
RestAssured.reset();
}
}
@Test
} | class TracingTest {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClass(TestResource.class)
.addClass(TracerRegistrar.class)
.addClass(Service.class)
.addClass(RestService.class)
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"));
static MockTracer mockTracer = new MockTracer();
@AfterEach
public void after() {
mockTracer.reset();
}
@AfterAll
public static void afterAll() {
GlobalTracerTestUtil.resetGlobalTracer();
}
@Test
public void testSingleServerRequest() {
try {
RestAssured.defaultParser = Parser.TEXT;
RestAssured.when().get("/hello")
.then()
.statusCode(200);
Assertions.assertEquals(1, mockTracer.finishedSpans().size());
Assertions.assertEquals("GET:io.quarkus.smallrye.opentracing.deployment.TestResource.hello",
mockTracer.finishedSpans().get(0).operationName());
} finally {
RestAssured.reset();
}
}
@Test
public void testCDI() {
try {
RestAssured.defaultParser = Parser.TEXT;
RestAssured.when().get("/cdi")
.then()
.statusCode(200);
Assertions.assertEquals(2, mockTracer.finishedSpans().size());
Assertions.assertEquals("io.quarkus.smallrye.opentracing.deployment.Service.foo",
mockTracer.finishedSpans().get(0).operationName());
Assertions.assertEquals("GET:io.quarkus.smallrye.opentracing.deployment.TestResource.cdi",
mockTracer.finishedSpans().get(1).operationName());
} finally {
RestAssured.reset();
}
}
@Test
public void testMPRestClient() {
try {
RestAssured.defaultParser = Parser.TEXT;
RestAssured.when().get("/restClient")
.then()
.statusCode(200);
Assertions.assertEquals(3, mockTracer.finishedSpans().size());
Assertions.assertEquals("GET:io.quarkus.smallrye.opentracing.deployment.TestResource.hello",
mockTracer.finishedSpans().get(0).operationName());
Assertions.assertEquals("GET", mockTracer.finishedSpans().get(1).operationName());
Assertions.assertEquals("GET:io.quarkus.smallrye.opentracing.deployment.TestResource.restClient",
mockTracer.finishedSpans().get(2).operationName());
} finally {
RestAssured.reset();
}
}
@Test
} |
I have reverted this and updated the logic 35169c467dc78f97ec3a6633f3fe9ded0aecd86c | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner, foreach.pos,
VIRTUAL);
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(
resultVariableDefinition, resultReferenceInWhile);
statementExpression.setBType(foreach.nillableResultType);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, statementExpression, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
BLangExpression expr = valueAccessExpr.expr;
valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(whileNode);
return blockNode;
}
private BLangType getUserDefineTypeNode(BType type) {
BLangUserDefinedType recordType =
new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
ASTBuilderUtil.createIdentifier(null, ""));
recordType.setBType(type);
return recordType;
}
@Override
public void visit(BLangWhile whileNode) {
if (whileNode.onFailClause != null) {
BLangOnFailClause onFailClause = whileNode.onFailClause;
whileNode.onFailClause = null;
whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);
result = rewrite(doStmt, env);
} else {
whileNode.expr = rewriteExpr(whileNode.expr);
whileNode.body = rewrite(whileNode.body, env);
result = whileNode;
}
}
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
BLangOnFailClause onFailClause) {
BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();
BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location);
doBlock.scope = new Scope(env.scope.owner);
bLDo.body = doBlock;
bLDo.pos = location;
bLDo.onFailClause = onFailClause;
bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
doBlock.stmts.add(statement);
return bLDo;
}
@Override
public void visit(BLangLock lockNode) {
BLangOnFailClause currentOnFailClause = this.onFailClause;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
if (lockNode.onFailClause != null) {
blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
rewrite(lockNode.onFailClause, env);
}
BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
blockStmt.addStatement(lockStmt);
enclLocks.push(lockStmt);
BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
statementExpression.setBType(symTable.nilType);
BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
trapExpr.setBType(nillableError);
trapExpr.expr = statementExpression;
BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
this.env.scope.owner.pkgID, nillableError,
this.env.scope.owner, lockNode.pos, VIRTUAL);
BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
nillableError, trapExpr,
nillableErrorVarSymbol);
BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
blockStmt.addStatement(simpleVariableDef);
BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
unLockStmt.relatedLock = lockStmt;
blockStmt.addStatement(unLockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = lockNode.pos;
panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
ifBody.addStatement(panicNode);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
isErrorTest.setBType(symTable.booleanType);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
blockStmt.addStatement(ifelse);
result = rewrite(blockStmt, env);
enclLocks.pop();
this.onFailClause = currentOnFailClause;
}
@Override
public void visit(BLangLockStmt lockStmt) {
result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
result = unLockStmt;
}
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
BLangSimpleVarRef shouldRetryRef) {
BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
trxOnFailClause.pos = pos;
trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
trxOnFailClause.body.scope = new Scope(env.scope.owner);
trxOnFailClause.isInternal = true;
BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
"$trxError$", symTable.errorType, null, trxOnFailErrorSym);
trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
trxOnFailError);
trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
trxBlockId, shouldRetryRef);
BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
shouldNotPanic.setBType(symTable.booleanType);
shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = pos;
panicNode.expr = caughtError;
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
trxOnFailClause.body.stmts.add(exitIf);
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = caughtError;
failBlock.stmts.add(failStmt);
trxOnFailClause.bodyContainsFail = true;
return trxOnFailClause;
}
@Override
public void visit(BLangTransaction transactionNode) {
if (transactionNode.onFailClause != null) {
BLangOnFailClause onFailClause = transactionNode.onFailClause;
transactionNode.onFailClause = null;
transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
result = rewrite(doStmt, env);
} else {
BLangLiteral currentTrxBlockId = this.trxBlockId;
String uniqueId = String.valueOf(++transactionBlockCount);
this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
boolean currShouldReturnErrors = this.shouldReturnErrors;
this.shouldReturnErrors = true;
BLangOnFailClause currOnFailClause = this.onFailClause;
BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
shouldPanicVarSymbol.closure = true;
BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
"$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
shouldPanicVariable);
BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
shouldPanicVarSymbol);
BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
this.shouldRetryRef);
enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
boolean userDefinedOnFailAvbl = this.onFailClause != null;
analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
BLangBlockStmt transactionStmtBlock =
transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
transactionStmtBlock.stmts.add(0, shouldPanicDef);
transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
result = rewrite(transactionStmtBlock, this.env);
this.shouldReturnErrors = currShouldReturnErrors;
this.trxBlockId = currentTrxBlockId;
swapAndResetEnclosingOnFail(currOnFailClause);
}
}
@Override
public void visit(BLangRollback rollbackNode) {
BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
result = rewrite(rollbackStmtExpr, env);
}
private BLangOnFailClause createRetryInternalOnFail(Location pos,
BLangSimpleVarRef retryResultRef,
BLangSimpleVarRef retryManagerRef,
BLangSimpleVarRef shouldRetryRef,
BLangSimpleVarRef continueLoopRef,
BLangSimpleVarRef returnResult) {
BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
internalOnFail.pos = pos;
internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
internalOnFail.body.scope = new Scope(env.scope.owner);
BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
"$caughtError$", symTable.errorType, null, caughtErrorSym);
internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
caughtError);
env.scope.define(caughtErrorSym.name, caughtErrorSym);
BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
internalOnFail.body.stmts.add(errorAssignment);
BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
internalOnFail.body.stmts.add(continueLoopTrue);
BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
retryManagerRef, caughtErrorRef);
BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
shouldRetryInvocation);
internalOnFail.body.stmts.add(shouldRetryAssignment);
BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
shouldNotRetryCheck.setBType(symTable.booleanType);
shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
BLangGroupExpr exitCheck = new BLangGroupExpr();
exitCheck.setBType(symTable.booleanType);
exitCheck.expression = shouldNotRetryCheck;
BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
if (this.onFailClause != null) {
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = retryResultRef;
exitLogicBlock.stmts.add(failStmt);
internalOnFail.bodyContainsFail = true;
internalOnFail.body.stmts.add(exitIf);
BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
loopContinueStmt.pos = pos;
internalOnFail.body.stmts.add(loopContinueStmt);
} else {
BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
exitLogicBlock.stmts.add(returnErrorTrue);
internalOnFail.body.stmts.add(exitIf);
}
return internalOnFail;
}
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
List<BType> paramTypes = new ArrayList<>();
paramTypes.add(symTable.booleanType);
BInvokableType type = new BInvokableType(paramTypes, symTable.booleanType,
null);
BOperatorSymbol notOperatorSymbol = new BOperatorSymbol(
names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol,
symTable.builtinPos, VIRTUAL);
return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,
OperatorKind.NOT, notOperatorSymbol);
}
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, BLangFunctionBody lambdaBody) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func =
ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
lambdaFunction.function = func;
func.requiredParams.addAll(lambdaFunctionVariable);
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
lambdaFunctionVariable = func.requiredParams;
func.body = lambdaBody;
func.desugared = false;
lambdaFunction.pos = pos;
List<BType> paramTypes = new ArrayList<>();
lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
null));
return lambdaFunction;
}
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, List<BLangStatement> fnBodyStmts,
SymbolEnv env, Scope bodyScope) {
BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
body.scope = bodyScope;
SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
}
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
final BPackageSymbol packageSymbol = targetPkg.symbol;
final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);
symbolEnter.defineNode(funcNode, packageEnv);
packageEnv.enclPkg.functions.add(funcNode);
packageEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
if (literalExpr.getBType().tag == TypeTags.ARRAY
&& ((BArrayType) literalExpr.getBType()).eType.tag == TypeTags.BYTE) {
result = rewriteBlobLiteral(literalExpr);
return;
}
result = literalExpr;
}
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
String[] result = getBlobTextValue((String) literalExpr.value);
byte[] values;
if (BASE_64.equals(result[0])) {
values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
} else {
values = hexStringToByteArray(result[1]);
}
BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteralNode.setBType(literalExpr.getBType());
arrayLiteralNode.pos = literalExpr.pos;
arrayLiteralNode.exprs = new ArrayList<>();
for (byte b : values) {
arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
}
return arrayLiteralNode;
}
private String[] getBlobTextValue(String blobLiteralNodeText) {
String nodeText = blobLiteralNodeText.replace("\t", "").replace("\n", "").replace("\r", "")
.replace(" ", "");
String[] result = new String[2];
result[0] = nodeText.substring(0, nodeText.indexOf('`'));
result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`'));
return result;
}
private static byte[] hexStringToByteArray(String str) {
int len = str.length();
byte[] data = new byte[len / 2];
for (int i = 0; i < len; i += 2) {
data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16));
}
return data;
}
@Override
public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {
listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);
result = listConstructorSpreadOpExpr;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
listConstructor.exprs = rewriteExprs(listConstructor.exprs);
BLangExpression expr;
BType listConstructorType = Types.getReferredType(listConstructor.getBType());
if (listConstructorType.tag == TypeTags.TUPLE) {
expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
} else if (listConstructorType.tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));
result = rewriteExpr(expr);
} else if (getElementType(listConstructorType).tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
} else if (listConstructorType.tag == TypeTags.TYPEDESC) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = listConstructor.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
} else {
expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
}
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
rewriteExprs(tableConstructorExpr.recordLiteralList);
result = tableConstructorExpr;
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
BType arrayLiteralType = Types.getReferredType(arrayLiteral.getBType());
if (arrayLiteralType.tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
return;
} else if (getElementType(arrayLiteralType).tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
return;
}
result = arrayLiteral;
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
if (tupleLiteral.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = tupleLiteral.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
return;
}
List<BLangExpression> exprs = tupleLiteral.exprs;
BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
List<BType> tupleMemberTypes = tupleType.tupleTypes;
int tupleMemberTypeSize = tupleMemberTypes.size();
int tupleExprSize = exprs.size();
boolean isInRestType = false;
int i = 0;
for (BLangExpression expr: exprs) {
if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();
spreadOpType = Types.getReferredType(spreadOpType);
if (spreadOpType.tag == TypeTags.ARRAY) {
BArrayType spreadOpBArray = (BArrayType) spreadOpType;
if (spreadOpBArray.size >= 0) {
i += spreadOpBArray.size;
continue;
}
} else {
BTupleType spreadOpTuple = (BTupleType) spreadOpType;
if (types.isFixedLengthTuple(spreadOpTuple)) {
i += spreadOpTuple.tupleTypes.size();
continue;
}
}
isInRestType = true;
continue;
}
BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
BType targetType = tupleType.restType;
if (!isInRestType && i < tupleMemberTypeSize) {
targetType = tupleMemberTypes.get(i);
}
types.setImplicitCastExpr(expr, expType, targetType);
i++;
}
tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
result = rewriteExpr(groupExpr.expression);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
BLangSimpleVarRef genVarRefExpr = varRefExpr;
if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
qnameExpr.localname = varRefExpr.variableName;
qnameExpr.prefix = varRefExpr.pkgAlias;
qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
qnameExpr.isUsedInXML = false;
qnameExpr.pos = varRefExpr.pos;
qnameExpr.setBType(symTable.stringType);
result = qnameExpr;
return;
}
if (varRefExpr.symbol == null) {
result = varRefExpr;
return;
}
if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
if (varSymbol.originalSymbol != null) {
varRefExpr.symbol = varSymbol.originalSymbol;
}
}
BType type = varRefExpr.getBType();
BSymbol ownerSymbol = varRefExpr.symbol.owner;
if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
Types.getReferredType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {
genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
!((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
if (varRefExpr.symbol.tag == SymTag.TYPE_DEF) {
type = ((BTypeDefinitionSymbol) varRefExpr.symbol).referenceType;
}
} else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
(ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
BType referredType = Types.getReferredType(constSymbol.literalType);
if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {
BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
constSymbol.value.value);
result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
return;
}
}
genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
if (!enclLocks.isEmpty()) {
BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
BLangLockStmt lockStmt = enclLocks.peek();
lockStmt.addLockVariable(symbol);
lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
}
}
genVarRefExpr.setBType(type);
genVarRefExpr.pos = varRefExpr.pos;
if ((varRefExpr.isLValue)
|| genVarRefExpr.symbol.name.equals(IGNORE)) {
genVarRefExpr.isLValue = varRefExpr.isLValue;
genVarRefExpr.setBType(varRefExpr.symbol.type);
result = genVarRefExpr;
return;
}
genVarRefExpr.isLValue = varRefExpr.isLValue;
BType targetType = genVarRefExpr.getBType();
genVarRefExpr.setBType(genVarRefExpr.symbol.type);
BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
if (safeNavigate(fieldAccessExpr)) {
result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
return;
}
BLangAccessExpression targetVarRef = fieldAccessExpr;
BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
}
BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
BType refType = Types.getReferredType(varRefType);
int varRefTypeTag = refType.tag;
if (varRefTypeTag == TypeTags.OBJECT ||
(varRefTypeTag == TypeTags.UNION &&
((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
return;
} else {
boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;
if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;
BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;
if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
(initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
isStoreOnCreation = true;
}
}
targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
(BVarSymbol) fieldAccessExpr.symbol, false,
isStoreOnCreation);
}
} else if (varRefTypeTag == TypeTags.RECORD ||
(varRefTypeTag == TypeTags.UNION &&
((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
&& ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
} else {
targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
(BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
}
} else if (types.isLax(refType)) {
if (!(refType.tag == TypeTags.XML || refType.tag == TypeTags.XML_ELEMENT)) {
if (refType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) refType).constraint.tag)) {
result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
return;
}
fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
} else {
BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
result = xmlAccessInvocation;
return;
}
} else if (varRefTypeTag == TypeTags.MAP) {
targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
fieldAccessExpr.isStoreOnCreation);
} else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
fieldAccessExpr.fieldKind);
}
targetVarRef.isLValue = fieldAccessExpr.isLValue;
targetVarRef.setBType(fieldAccessExpr.getBType());
targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
result = targetVarRef;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
rewriteFieldBasedAccess(fieldAccessExpr);
}
/**
 * Rewrites an object member-function access used as a value (e.g. {@code obj.method})
 * into a lambda that closes over the receiver and forwards its parameters to the
 * original method.
 *
 * @param fieldAccessExpr field access whose symbol is an attached invokable
 * @return the rewritten node: the lambda itself, or — when the receiver is not a
 *         simple variable reference — a statement expression that first evaluates
 *         the receiver into a temporary variable
 */
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;

    // Synthesize an anonymous delegate function carrying the same invokable type
    // as the referenced method.
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$anon$method$delegate$" + originalMemberFuncSymbol.name.value + "$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
                                                       Names.fromString(funcName), env.enclPkg.packageID,
                                                       originalMemberFuncSymbol.type, env.scope.owner, pos,
                                                       VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();

    // The receiver becomes a closure variable of the delegate. If it is not a
    // simple variable reference, evaluate it once into a temp variable so any
    // side effects of the receiver expression happen eagerly.
    BLangExpression receiver = fieldAccessExpr.expr;
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                                                     receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }

    // Mirror each required parameter of the method on the delegate and forward
    // it straight through as an argument.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                                       VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);
        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }

    // Mirror the rest parameter (if any) and forward it as a spread (...x) argument.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
                                          VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);
        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
        restArgExpr.expr = restArg;
        restArgExpr.pos = pos;
        restArgExpr.setBType(restSym.type);
        restArgExpr.expectedType = restArgExpr.getBType();
        restArgs.add(restArgExpr);
    }

    // Delegate body: `return receiver.method(args...);`
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(
            receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);

    // Hoist the function to the package level and wrap it in a lambda node.
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());

    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Evaluate the receiver first, then yield the lambda.
        BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}
/**
 * Builds an invocation node that calls the object method {@code field} on
 * {@code receiver}, wiring in the resolved symbol and both argument lists.
 * The node's static type is the callee's declared return type.
 */
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.name = field;
    invocation.symbol = invocableSymbol;
    invocation.expr = receiver;
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = restArgs;
    invocation.setBType(((BInvokableType) invocableSymbol.type).retType);
    return invocation;
}
/**
 * Desugars a lax field access on a map of XML (e.g. {@code m.key} where
 * {@code m} is {@code map<xml>}) into a statement expression equivalent to:
 * <pre>
 *   T|error $mapAccessResult$;
 *   T? $mapAccess = m["key"];
 *   if ($mapAccess is ()) {
 *       $mapAccessResult$ = error("{ballerina/lang.map}InvalidKey", key = "key");
 *   } else {
 *       $mapAccessResult$ = $mapAccess;
 *   }
 *   => $mapAccessResult$
 * </pre>
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    // The lax access yields either the field type or an error for a missing key.
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);

    // Perform the underlying map access; a missing key yields nil.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);

    // if (<access result is nil>) { result = InvalidKey error } else { result = value }
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;

    // Construct error("{ballerina/lang.map}InvalidKey", key = <field name>).
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;

    BLangSimpleVariableDef errorDef =
            createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;

    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;

    statementExpression.expr = resultRef;
    return statementExpression;
}
/**
 * Rewrites an XML attribute or element-name access (e.g. {@code x.attr},
 * {@code x._}) into a langlib invocation. The field named {@code _} reads the
 * element name; any other field reads an attribute. A namespace-prefixed field
 * is first expanded to its {@code {uri}local} form.
 */
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    ArrayList<BLangExpression> args = new ArrayList<>();
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
    }
    // The `_` field maps to the element-name getter (nil-lifting variant).
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    // Attribute access: pass the attribute name plus whether this was `x?.attr`.
    BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
    args.add(attributeNameLiteral);
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
/**
 * Produces a rewritten boolean literal stating whether the given field access
 * is optional ({@code x?.y}); used as an argument to the XML attribute getter.
 */
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    return rewrite(
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env);
}
/**
 * Produces the expanded XML qualified-name form {@code {namespaceURI}localName}.
 */
private String createExpandedQName(String nsURI, String localName) {
    StringBuilder expandedName = new StringBuilder();
    expandedName.append('{').append(nsURI).append('}').append(localName);
    return expandedName.toString();
}
/**
 * Desugars an index-based access (e.g. {@code v[i]}) into the specialized
 * access node for the container's type: map, mapping (record), list, XML,
 * string, or table. Safe-navigation chains are handled first via the generic
 * safe-navigation rewrite.
 */
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }

    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);

    // Dispatch on the effective (intersection-resolved, de-referenced) type,
    // inserting a conversion when the rewritten expr type differs from it.
    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }

    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Records and other mapping subtypes use struct field access.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr,
                (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String member access: force the receiver to the string type first.
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }

    // Carry over lvalue-ness and the statically determined type.
    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}
/**
 * Desugars a plain (non-async) function or method invocation.
 */
@Override
public void visit(BLangInvocation iExpr) {
    rewriteInvocation(iExpr, false);
}
/**
 * Desugars an error constructor, e.g. {@code error("msg", cause, k = v)}:
 * a missing cause argument is filled in with nil, and the named arguments are
 * folded into a record literal that becomes the error's detail value.
 */
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Positional args are (message, cause); supply an explicit nil cause when absent.
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);

    // Build the detail record literal from the named arguments.
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));

            // When the detail type resolves to a record type, widen the value
            // to `any`; otherwise keep the argument's own type.
            if (Types.getReferredType(recordLiteral.getBType()).tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}
/**
 * Desugars a remote/action invocation. A synchronous action inside a
 * transaction first ensures the transaction coordinator is started; an async
 * action on an isolated callee gets a strand annotation (thread "any").
 */
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }

    if (actionInvocation.async && Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {
        addStrandAnnotationWithThreadAny(actionInvocation);
    }
    rewriteInvocation(actionInvocation, actionInvocation.async);
}
/**
 * Attaches the strand (thread "any") annotation to the given async action
 * invocation and to its symbol. The annotation attachment is created lazily
 * on first use and cached in {@code strandAnnotAttachement} for reuse.
 */
private void addStrandAnnotationWithThreadAny(BLangInvocation.BLangActionInvocation actionInvocation) {
    if (this.strandAnnotAttachement == null) {
        // First use: create the annotation, then generate init functions for
        // any record type definitions its creation introduced.
        BLangPackage pkgNode = env.enclPkg;
        List<BLangTypeDefinition> prevTypeDefinitions = new ArrayList<>(pkgNode.typeDefinitions);
        this.strandAnnotAttachement =
                annotationDesugar.createStrandAnnotationWithThreadAny(actionInvocation.pos, env);
        addInitFunctionForRecordTypeNodeInTypeDef(pkgNode, prevTypeDefinitions);
    }
    actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);
    ((BInvokableSymbol) actionInvocation.symbol)
            .addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);
}
/**
 * Common desugaring for all invocation forms: reorders and rewrites arguments,
 * dispatches function-pointer calls, resolves parameterized return types, and
 * converts calls on objects/records into attached-function invocations.
 *
 * @param invocation the invocation to rewrite (mutated in place; `result` is set)
 * @param async      true for `start`/async invocations
 */
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    BLangInvocation invRef = invocation;

    // Inside a lock statement, the callee's global-variable dependencies must
    // be included in the set of locked variables.
    if (!enclLocks.isEmpty()) {
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }

    // Reorder named/defaultable arguments into positional order, then rewrite
    // them and patch stream / type-param argument casts.
    reorderArguments(invocation);

    rewriteExprs(invocation.requiredArgs);
    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);
    rewriteExprs(invocation.restArgs);

    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
                                                 invocation.symbol.pkgID, invocation.symbol.owner, env);

    if (invocation.functionPointerInvocation) {
        visitFunctionPointerInvocation(invocation);
        return;
    }
    invocation.expr = rewriteExpr(invocation.expr);
    result = invRef;

    // Resolve a parameterized (dependently-typed) return type to the concrete
    // type for this call site; async calls are wrapped in a future type.
    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }

    if (invocation.expr == null) {
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        // Re-create the receiver expression from its symbol.
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }

    switch (Types.getReferredType(invocation.expr.getBType()).tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!invocation.langLibInvocation) {
                // Method call: prepend the receiver as the first argument and
                // switch to an attached-function invocation node.
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                                            invocation.symbol, invocation.getBType(),
                                                            invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }

    // Record the init-method invocation of an object-constructor expression
    // that captures closure variables, for later closure desugaring.
    if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {
        BObjectType initializingObject = (BObjectType) invocation.expr.getBType();
        BLangClassDefinition classDef = initializingObject.classDef;
        if (classDef.hasClosureVars) {
            OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;
            if (oceEnvData.attachedFunctionInvocation == null) {
                oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) result;
            }
        }
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}
/**
 * For langlib invocations, inserts a conversion on every required argument so
 * that its type matches the declared type of the corresponding parameter.
 */
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BVarSymbol> paramSymbols = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int argIndex = 0; argIndex < args.size(); argIndex++) {
        BLangExpression converted =
                addConversionExprIfRequired(args.get(argIndex), paramSymbols.get(argIndex).type);
        args.set(argIndex, converted);
    }
}
/* This function is a workaround and needs improvement.
 * Notes for improvement:
 * 1. Both arguments are the same invocation.
 * 2. Due to the current type-param logic, the type-param flag is set on the original type.
 * 3. When an error type carries a Cloneable type with the type-param flag, this code changes the expression type.
 * 4. Using the error type is problematic because Cloneable is itself a type param (e.g. ExprBodiedFunctionTest);
 *    'never' was added to the CloneableType type param:
 * @typeParam type
 * CloneableType Cloneable|never;
 *
 */
/**
 * For langlib calls, or callees whose return type contains a type param,
 * resets the invocation's type to the declared (type-param) return type and
 * wraps the invocation in a conversion back to the originally inferred type.
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;
    if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {
        return;
    }

    // Preserve the inferred type via the outer conversion; async invocations
    // keep their (future) type untouched.
    BType originalInvType = genIExpr.getBType();
    if (!genIExpr.async) {
        genIExpr.setBType(returnTypeOfInvokable);
    }
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}
/**
 * Inserts a conversion for every required argument whose corresponding
 * parameter is declared as a stream type, so the argument matches it.
 */
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BVarSymbol> paramSymbols = ((BInvokableSymbol) iExpr.symbol).params;
    if (paramSymbols.isEmpty()) {
        return;
    }
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int i = 0; i < args.size(); i++) {
        BVarSymbol paramSymbol = paramSymbols.get(i);
        if (Types.getReferredType(paramSymbol.type).tag != TypeTags.STREAM) {
            continue;
        }
        args.set(i, addConversionExprIfRequired(args.get(i), paramSymbol.type));
    }
}
/**
 * Creates a literal node representing nil ({@code ()}).
 */
private BLangLiteral createNilLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
/**
 * Desugars a `new` expression: stream construction maps to the internal
 * constructStream call; object construction becomes a statement expression
 * that invokes the generated init method.
 */
public void visit(BLangTypeInit typeInitExpr) {
    if (Types.getReferredType(typeInitExpr.getBType()).tag == TypeTags.STREAM) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    } else {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}
/**
 * Desugars an object `new` expression into a statement expression roughly:
 * <pre>
 *   T $obj$ = <typeInit>;
 *   var $temp$ = $obj$.$init$(...);                 // init can return error
 *   T|error $result$ = $temp$ is error ? $temp$ : $obj$;
 *   => $result$
 * </pre>
 * When the init method's type is nil, the init call becomes a bare statement
 * and the object reference itself is the result.
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);

    // Define the object variable and point the init invocation at it.
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);

    BLangInvocation typeInitInvocation = typeInitExpr.initInvocation;
    typeInitInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    typeInitInvocation.objectInitMethod = true;

    // init returns nil: invoke it as a statement; the result is the object.
    if (Types.getReferredType(typeInitInvocation.getBType()).tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitInvocation;
        typeInitInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }

    // init may return an error: capture its return value in $temp$ ...
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitInvocation.getBType(),
                                                              typeInitInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);

    // ... and select either the error or the object into $result$.
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);

    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);

    // then branch: $result$ = $temp$ (the error)
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);

    // else branch: $result$ = $obj$
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);

    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);

    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}
/**
 * Desugars {@code new stream<C, E>(...)} into an invocation of the internal
 * constructStream function, passing typedescs for the constraint and
 * completion types plus the optional iterator argument.
 */
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol constructStreamSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    BStreamType streamType = (BStreamType) typeInitExpr.getBType();

    // typedesc argument describing the stream's constraint (element) type.
    BLangTypedescExpr constraintTd = new BLangTypedescExpr();
    constraintTd.resolvedType = streamType.constraint;
    constraintTd.setBType(new BTypedescType(streamType.constraint, symTable.typeDesc.tsymbol));

    // typedesc argument describing the stream's completion type.
    BLangTypedescExpr completionTd = new BLangTypedescExpr();
    completionTd.resolvedType = streamType.completionType;
    completionTd.setBType(new BTypedescType(streamType.completionType, symTable.typeDesc.tsymbol));

    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTd, completionTd));
    // Optional iterator object supplied at the `new` expression.
    if (!typeInitExpr.argsExpr.isEmpty()) {
        args.add(typeInitExpr.argsExpr.get(0));
    }

    BLangInvocation constructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, constructStreamSymbol, args, symResolver);
    constructInvocation.setBType(
            new BStreamType(TypeTags.STREAM, streamType.constraint, streamType.completionType, null));
    return constructInvocation;
}
/**
 * Creates a variable-definition statement for a local with the given name,
 * type and (possibly null) initializer. An existing symbol with the same name
 * in the current scope is reused; otherwise a fresh var symbol is created.
 */
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    BSymbol varSymbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    boolean symbolMissing = varSymbol == null || varSymbol == symTable.notFoundSymbol;
    if (symbolMissing) {
        varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSymbol);
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(location);
    variableDef.var = variable;
    variableDef.setBType(variable.getBType());
    return variableDef;
}
/**
 * Extracts the object type from a type-init context: the type itself when it
 * is an object, or the first object member of a union (e.g. {@code Obj|error});
 * a union without an object member yields {@code symTable.noType}.
 *
 * @throws IllegalStateException when the type is neither object nor union
 */
private BType getObjectType(BType bType) {
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
/**
 * Creates a type node for the built-in {@code error} type, positioned at the
 * builtin location.
 */
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorType);
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
/**
 * Creates a type node for {@code error?} (error or nil), positioned at the
 * builtin location.
 */
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    // Consistency fix: set the position like the sibling getErrorTypeNode()
    // does; otherwise the node is left without a location.
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
/**
 * Desugars a ternary (conditional) expression into an if-else wrapped in a
 * statement expression.
 */
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * First desugar to if-else:
     *
     * T $result$;
     * if (condition) {
     *      $result$ = thenExpr;
     * } else {
     *      $result$ = elseExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);

    // then body: $result$ = thenExpr
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);

    // else body: $result$ = elseExpr
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);

    // Assemble the block and expose $result$ as the expression value.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a wait expression. A multi-wait such as {@code wait f1 | f2}
 * arrives as a binary expression tree and is flattened into a list of
 * individual future expressions; a single wait becomes a singleton list.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() == NodeKind.BINARY_EXPR) {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    }
    result = waitExpr;
}
/**
 * Recursively flattens a multi-wait binary expression tree (e.g.
 * {@code f1 | f2 | f3}) into the given list, left to right.
 *
 * @return the same {@code exprs} list, for chaining
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
    return exprs;
}
/**
 * Helper for flattening a multi-wait tree: recurses into nested binary
 * expressions and rewrites/collects each leaf expression.
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        // Leaf: rewrite it and record it as one waited-on expression.
        exprs.add(rewriteExpr(expr));
        return;
    }
    collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
}
/**
 * Desugars a multiple-wait expression ({@code wait {a: f1, f2}}): each
 * key-value pair is rewritten (the value when present, otherwise the key
 * shorthand), then the whole expression is replaced by a wait literal node.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    for (var keyValue : waitExpr.keyValuePairs) {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    }
    BLangExpression waitLiteral =
            new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    waitLiteral.pos = waitExpr.pos;
    result = rewriteExpr(waitLiteral);
}
/**
 * Desugars a trap expression: the trapped expression is rewritten and, unless
 * its type is nil, converted to the trap expression's own (error-inclusive) type.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    BType trappedType = Types.getReferredType(trapExpr.expr.getBType());
    if (trappedType.tag != TypeTags.NIL) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());
    }
    result = trapExpr;
}
/**
 * Desugars a binary expression: nullable-operand lifting, int ranges,
 * logical AND/OR short-circuiting, byte/int compatibility checks, and the
 * implicit numeric/string/xml conversions required by the operator.
 * The early-return cascade below is order-sensitive.
 */
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Operands that may be nil are lifted into an if-else statement expression.
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }

    // `a ..< b` / `a ... b` become int-range constructions.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);
        rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            // Half-open range excludes the end value.
            rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));
        return;
    }

    // Logical AND/OR need short-circuit desugaring.
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }

    OperatorKind binaryOpKind = binaryExpr.opKind;
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }

    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;

    int rhsExprTypeTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    int lhsExprTypeTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;

    // For (in)equality between byte and int types, widen the byte side to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
                                             binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
                                             binaryExpr.opKind == OperatorKind.REF_EQUAL ||
                                             binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }

        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }

    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);

    // Same-tag operands need casts only for shift/arithmetic on non-value types.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }

    // `string + xml`: wrap the string side in an xml text literal.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        // string + xml ==> (xml string) + xml
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }

    // `xml + string`: wrap the string side in an xml text literal.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        // xml + string ==> xml + (xml string)
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }

    // Mixed numeric operands: cast the other side to decimal or float.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }

    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }

    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }

    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }

    // Remaining operator-specific cast insertion.
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }

    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }

    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
    }
}
/**
 * Desugars a binary expression whose static type is nullable (e.g. {@code int? z = x + y;})
 * into a block statement-expression that evaluates each operand exactly once, yields nil
 * when either operand is nil, and otherwise applies the operator to the nil-lifted values.
 *
 * @param binaryExpr the nullable binary expression to desugar
 * @return a statement expression whose trailing var-ref carries the result
 */
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
/*
* int? x = 3;
* int? y = 5;
* int? z = x + y;
* Above is desugared to
* int? $result$;
*
* int? $lhsExprVar$ = x;
* int? $rhsExprVar$ = y;
* if (lhsVar is () or rhsVar is ()) {
* $result$ = ();
* } else {
* $result$ = $lhsExprVar$ + $rhsExprVar$;
* }
* int z = $result$;
*/
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BUnionType exprBType = (BUnionType) binaryExpr.getBType();
// NOTE(review): assumes the first member of the union is the non-nil component —
// appears to hold for the nullable unions produced upstream; TODO confirm.
BType nonNilType = exprBType.getMemberTypes().iterator().next();
boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
// For bitwise &, | and ^ each operand is cast to its own nil-lifted type; arithmetic
// and shift operators cast both sides to the common non-nil result type instead.
BType rhsType = nonNilType;
if (isBitWiseOperator) {
if (binaryExpr.rhsExpr.getBType().isNullable()) {
rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
} else {
rhsType = binaryExpr.rhsExpr.getBType();
}
}
BType lhsType = nonNilType;
if (isBitWiseOperator) {
if (binaryExpr.lhsExpr.getBType().isNullable()) {
lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
} else {
lhsType = binaryExpr.lhsExpr.getBType();
}
}
if (binaryExpr.lhsExpr.getBType().isNullable()) {
binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
}
// $result$ holds the final value of the whole statement expression.
BLangSimpleVariableDef tempVarDef = createVarDef("result",
binaryExpr.getBType(), null, binaryExpr.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
blockStmt.addStatement(tempVarDef);
// Bind each operand to a temp var so it is evaluated only once.
BLangSimpleVariableDef lhsVarDef = createVarDef("$lhsExprVar$", binaryExpr.lhsExpr.getBType(),
binaryExpr.lhsExpr, binaryExpr.pos);
BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);
blockStmt.addStatement(lhsVarDef);
BLangSimpleVariableDef rhsVarDef = createVarDef("$rhsExprVar$", binaryExpr.rhsExpr.getBType(),
binaryExpr.rhsExpr, binaryExpr.pos);
BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);
blockStmt.addStatement(rhsVarDef);
// Condition: ($lhsExprVar$ is ()) || ($rhsExprVar$ is ())
BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, lhsVarRef, getNillTypeNode());
typeTestExprOne.setBType(symTable.booleanType);
BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos, rhsVarRef, getNillTypeNode());
typeTestExprTwo.setBType(symTable.booleanType);
BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
// if-branch: $result$ = ();
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
bLangAssignmentIf.varRef = tempVarRef;
bLangAssignmentIf.expr = createNilLiteral();
// else-branch: $result$ = <lhsType>$lhsExprVar$ <op> <rhsType>$rhsExprVar$;
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
bLangAssignmentElse.varRef = tempVarRef;
BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,
nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);
newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);
bLangAssignmentElse.expr = newBinaryExpr;
BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
ifStatement.expr = ifBlockCondition;
ifStatement.body = ifBody;
ifStatement.elseStmt = elseBody;
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
stmtExpr.setBType(binaryExpr.getBType());
return stmtExpr;
}
/**
 * Returns {@code true} when this binary expression needs nil-lifting desugaring:
 * both operand types are resolved, at least one of them is nullable, and the operator
 * is one of the arithmetic, shift or bitwise operators.
 *
 * @param binaryExpr the binary expression to inspect
 * @return whether the expression should go through the nullable desugar path
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts the implicit conversions needed by an arithmetic binary expression whose
 * operand types differ.
 * <p>
 * Matching int/int, string/string and xml/xml pairs are left untouched. When exactly
 * one operand is XML, a string-typed other operand is wrapped in an XML text literal,
 * otherwise it is cast to xml. In the remaining numeric cases both operands are cast
 * to the result type of the whole expression.
 *
 * @param binaryExpr the arithmetic binary expression being desugared (mutated in place)
 * @param lhsExprTypeTag type tag of the left operand
 * @param rhsExprTypeTag type tag of the right operand
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Fix: the generated text literal wraps the LHS, so it must carry the LHS
            // position. The original passed binaryExpr.rhsExpr.pos here (copy-paste from
            // the mirrored branch above), producing wrong diagnostic locations.
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    // Mixed numeric operands: lift both sides to the expression's own (wider) type.
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Normalizes the operands of a shift expression: any operand that is neither an
 * integer subtype nor byte is cast to {@code int}; int-like operands are kept as-is.
 *
 * @param binaryExpr the shift expression being desugared (mutated in place)
 * @param lhsExprTypeTag type tag of the left operand
 * @param rhsExprTypeTag type tag of the right operand
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIsIntLike = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsIntLike = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIsIntLike) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIsIntLike) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
/**
 * Normalizes the operands of a relational (comparison) expression so both sides share a
 * comparable representation: mismatched int/byte combinations are lifted to {@code int},
 * and a string subtype compared against plain string is lifted to {@code string}.
 *
 * @param binaryExpr the relational expression being desugared (mutated in place)
 * @param lhsExprTypeTag type tag of the left operand
 * @param rhsExprTypeTag type tag of the right operand
 */
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
int rhsExprTypeTag) {
boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
// Already comparable: both integer subtypes, or both exactly byte.
if ((isLhsIntegerType && isRhsIntegerType) || (lhsExprTypeTag == TypeTags.BYTE &&
rhsExprTypeTag == TypeTags.BYTE)) {
return;
}
// One integer side: lift the other side to int.
if (isLhsIntegerType && !isRhsIntegerType) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
return;
}
if (!isLhsIntegerType && isRhsIntegerType) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
return;
}
// Neither side is an integer subtype but one is byte: lift both sides to int.
if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
return;
}
boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);
boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);
if (isLhsStringType && isRhsStringType) {
return;
}
// Exactly one string-typed side: lift the other side to string.
if (isLhsStringType && !isRhsStringType) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
return;
}
if (!isLhsStringType && isRhsStringType) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
}
}
/**
 * Builds an invocation of the lang.internal {@code createIntRange} function over the
 * given lower and upper bound expressions.
 *
 * @param location source position for the generated invocation
 * @param lhsExpr lower bound expression
 * @param rhsExpr upper bound expression
 * @return the typed {@code createIntRange(lhsExpr, rhsExpr)} invocation node
 */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol rangeFnSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    BLangInvocation rangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            location, rangeFnSymbol, new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
    rangeInvocation.setBType(symTable.intRangeType);
    return rangeInvocation;
}
/**
 * When a binary expression with an expected type of {@code int} has a byte-typed
 * operand, wraps that operand in a byte-to-int conversion so both sides agree with
 * the expected result type. Does nothing when no byte operand is involved or the
 * expected type is not int.
 *
 * @param binaryExpr the binary expression to patch (mutated in place)
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    int rhsTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    int lhsTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    boolean hasByteOperand = rhsTag == TypeTags.BYTE || lhsTag == TypeTags.BYTE;
    if (!hasByteOperand || binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression uses one of the bitwise shift operators
 * ({@code <<}, {@code >>} or {@code >>>}).
 * <p>
 * Callers use this to decide whether both operands must be normalized to {@code int},
 * e.g. for {@code int i = a >> b;} where {@code a} and {@code b} may be byte or int.
 *
 * @param binaryExpr the binary expression to inspect
 * @return {@code true} for a shift operation, {@code false} otherwise
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars an elvis expression {@code lhs ?: rhs} into a block statement-expression:
 * the LHS is bound to a temp var; if it is nil the RHS is assigned to the result,
 * otherwise the LHS value (cast to the result type) is assigned.
 */
public void visit(BLangElvisExpr elvisExpr) {
Location pos = elvisExpr.pos;
String resultVarName = "_$result$_";
BType resultType = elvisExpr.getBType();
// $result$ carries the value of the whole elvis expression.
BLangSimpleVariable resultVar =
ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
new BVarSymbol(0, names.fromString(resultVarName),
this.env.scope.owner.pkgID, resultType,
this.env.scope.owner, pos, VIRTUAL));
BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
resultVarDef.desugared = true;
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
// Bind the LHS to a temp var so it is evaluated exactly once.
String lhsResultVarName = GEN_VAR_PREFIX.value;
BLangSimpleVariable lhsResultVar =
ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,
new BVarSymbol(0, names.fromString(lhsResultVarName),
this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),
this.env.scope.owner, elvisExpr.pos, VIRTUAL));
BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);
BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);
// if (lhs is ()) { result = rhs; } else { result = <resultType>lhs; }
BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
ifBody.addStatement(nilAssignment);
BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
createTypeCastExpr(lhsResultVarRef, resultType));
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
elseBody.addStatement(notNilAssignment);
BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
createTypeCheckExpr(pos, lhsResultVarRef, getNillTypeNode()), ifBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
add(resultVarDef);
add(lhsResultVarDef);
add(ifStmt);
}});
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(resultType);
result = rewriteExpr(stmtExpr);
}
@Override
/**
 * Desugars a unary expression: nullable operands go through the nil-lifting path,
 * {@code ~} is rewritten as an XOR binary expression, and unary {@code +}/{@code -}
 * get an operand cast when the operand is not an integer subtype.
 */
public void visit(BLangUnaryExpr unaryExpr) {
// Nullable operand: desugar to an if/else statement expression that yields nil.
if (isNullableUnaryExpr(unaryExpr)) {
BLangStatementExpression statementExpression = createStmtExprForNilableUnaryExpr(unaryExpr);
result = rewrite(statementExpression, env);
return;
}
// Bitwise complement has no direct runtime operator; rewrite as `expr ^ -1` (or ^0xff).
if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {
rewriteBitwiseComplementOperator(unaryExpr);
return;
}
OperatorKind opKind = unaryExpr.operator;
if (opKind == OperatorKind.ADD || opKind == OperatorKind.SUB) {
createTypeCastExprForUnaryPlusAndMinus(unaryExpr);
}
unaryExpr.expr = rewriteExpr(unaryExpr.expr);
result = unaryExpr;
}
/**
 * For unary {@code +}/{@code -}, casts a non-integer operand up to the unary
 * expression's own type; integer operands already match and are left untouched.
 *
 * @param unaryExpr the unary plus/minus expression (mutated in place)
 */
private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {
    BLangExpression operand = unaryExpr.expr;
    if (!TypeTags.isIntegerTypeTag(operand.getBType().tag)) {
        unaryExpr.expr = createTypeCastExpr(operand, unaryExpr.getBType());
    }
}
/**
 * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.
 * Example : ~a -> a ^ -1;
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
final Location pos = unaryExpr.pos;
final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
binaryExpr.pos = pos;
binaryExpr.opKind = OperatorKind.BITWISE_XOR;
binaryExpr.lhsExpr = unaryExpr.expr;
// byte complement: XOR with 0xff keeps the result in the 8-bit byte range.
if (TypeTags.BYTE == Types.getReferredType(unaryExpr.getBType()).tag) {
binaryExpr.setBType(symTable.byteType);
binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
symTable.byteType, symTable.byteType);
} else {
// int complement: XOR with -1 (all bits set).
binaryExpr.setBType(symTable.intType);
binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
symTable.intType, symTable.intType);
}
result = rewriteExpr(binaryExpr);
}
/**
 * Desugars a unary expression whose operand type is nullable into a statement
 * expression that yields nil when the operand is nil and otherwise applies the
 * operator to the nil-lifted operand value.
 *
 * @param unaryExpr the nullable unary expression to desugar
 * @return a statement expression whose trailing var-ref carries the result
 */
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
/*
* int? x = 3;
* int? y = +x;
*
*
* Above is desugared to
* int? $result$;
* if (x is ()) {
* $result$ = ();
* } else {
* $result$ = +x;
* }
* int y = $result$
*/
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
BUnionType exprBType = (BUnionType) unaryExpr.getBType();
// NOTE(review): assumes the first union member is the non-nil component — TODO confirm.
BType nilLiftType = exprBType.getMemberTypes().iterator().next();
unaryExpr.expr = rewriteExpr(unaryExpr.expr);
BLangSimpleVariableDef tempVarDef = createVarDef("$result",
unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
blockStmt.addStatement(tempVarDef);
// Condition: operand is ()
BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
getNillTypeNode());
typeTestExpr.setBType(symTable.booleanType);
// if-branch: $result$ = ();
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
bLangAssignmentIf.varRef = tempVarRef;
bLangAssignmentIf.expr = createNilLiteral();
// else-branch: $result$ = <op> <nilLiftType>operand;
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
bLangAssignmentElse.varRef = tempVarRef;
BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
ifStatement.expr = typeTestExpr;
ifStatement.body = ifBody;
ifStatement.elseStmt = elseBody;
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
stmtExpr.setBType(unaryExpr.getBType());
return stmtExpr;
}
/**
 * Returns {@code true} when this unary expression needs nil-lifting desugaring:
 * the expression type is resolved and nullable, and the operator is unary plus,
 * unary minus or bitwise complement.
 *
 * @param unaryExpr the unary expression to inspect
 * @return whether the expression should go through the nullable desugar path
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    switch (unaryExpr.operator) {
        case ADD:
        case SUB:
        case BITWISE_COMPLEMENT:
            return true;
        default:
            return false;
    }
}
@Override
/**
 * Desugars a type conversion expression. A conversion node that has no type node but
 * carries annotation attachments is only an annotation carrier, so it is replaced by
 * its rewritten inner expression; otherwise both the type node and the inner
 * expression are rewritten in place.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // Note: the original declared an unused local `targetType` here; removed as dead code.
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
/**
 * Registers the lambda with the enclosing package (once) and passes it through.
 */
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    boolean alreadyRegistered = env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction);
    if (!alreadyRegistered) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
@Override
/**
 * Desugars an arrow function (`(x) => expr`) into a full lambda function: builds a
 * BLangFunction with a synthesized body and return type, creates and defines its
 * invokable symbol, registers the parameters in the new function scope, and adds the
 * resulting function to the enclosing package before rewriting the lambda wrapper.
 */
public void visit(BLangArrowFunction bLangArrowFunction) {
BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
bLangFunction.setName(bLangArrowFunction.functionName);
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaFunction.pos = bLangArrowFunction.pos;
bLangFunction.addFlag(Flag.LAMBDA);
lambdaFunction.function = bLangFunction;
// The return type is the static type of the arrow body expression.
BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
returnType.setBType(bLangArrowFunction.body.expr.getBType());
bLangFunction.setReturnTypeNode(returnType);
bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
bLangArrowFunction.params.forEach(bLangFunction::addParameter);
lambdaFunction.parent = bLangArrowFunction.parent;
lambdaFunction.setBType(bLangArrowFunction.funcType);
// Create the function symbol and a fresh function env/scope for it.
BLangFunction funcNode = lambdaFunction.function;
BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
new Name(funcNode.name.value),
new Name(funcNode.name.originalValue),
env.enclPkg.symbol.pkgID,
bLangArrowFunction.funcType,
env.enclEnv.enclVarSym, true,
bLangArrowFunction.pos, VIRTUAL);
funcSymbol.originalName = new Name(funcNode.name.originalValue);
SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
// Define each required parameter in the function scope and collect its symbol.
List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
Scope enclScope = invokableEnv.scope;
varNode.symbol.kind = SymbolKind.FUNCTION;
varNode.symbol.owner = invokableEnv.scope.owner;
enclScope.define(varNode.symbol.name, varNode.symbol);
}).map(varNode -> varNode.symbol).collect(Collectors.toList());
funcSymbol.params = paramSymbols;
funcSymbol.restParam = getRestSymbol(funcNode);
funcSymbol.retType = funcNode.returnTypeNode.getBType();
List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
funcNode.setBType(
new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
lambdaFunction.function.pos = bLangArrowFunction.pos;
lambdaFunction.function.body.pos = bLangArrowFunction.pos;
// Remember the env so closure variables captured by the arrow body can be resolved later.
lambdaFunction.capturedClosureEnv = env;
rewrite(lambdaFunction.function, env);
env.enclPkg.addFunction(lambdaFunction.function);
result = rewriteExpr(lambdaFunction);
}
/**
 * Wires an invokable symbol to its AST node and installs a fresh scope that is shared
 * by both the symbol and the given function environment.
 *
 * @param invokableNode the function/method node to attach the symbol to
 * @param funcSymbol the symbol being defined
 * @param invokableEnv the environment whose scope should be the symbol's new scope
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    Scope fnScope = new Scope(funcSymbol);
    funcSymbol.scope = fnScope;
    invokableEnv.scope = fnScope;
    invokableNode.symbol = funcSymbol;
}
@Override
// Pass-through: XML qualified names need no desugaring.
public void visit(BLangXMLQName xmlQName) {
result = xmlQName;
}
@Override
// Rewrite both the attribute name and its value expression in place.
public void visit(BLangXMLAttribute xmlAttribute) {
xmlAttribute.name = rewriteExpr(xmlAttribute.name);
xmlAttribute.value = rewriteExpr(xmlAttribute.value);
result = xmlAttribute;
}
@Override
/**
 * Desugars an XML element literal: rewrites its attributes, converts `xmlns` attribute
 * declarations into inline namespace nodes (package- or local-scoped depending on the
 * owner), then rewrites the tag names and children with the element's namespaces in
 * scope for nested literals.
 */
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
// Turn each namespace-declaration attribute into an inline BLangXMLNS node.
Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
while (attributesItr.hasNext()) {
BLangXMLAttribute attribute = attributesItr.next();
if (!attribute.isNamespaceDeclr) {
continue;
}
BLangXMLNS xmlns;
// Package-owned scopes get a package-level namespace node; otherwise a local one.
if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
xmlns = new BLangPackageXMLNS();
} else {
xmlns = new BLangLocalXMLNS();
}
xmlns.namespaceURI = attribute.value.concatExpr;
xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
xmlns.symbol = attribute.symbol;
xmlElementLiteral.inlineNamespaces.add(xmlns);
}
// Make this element's namespaces visible while rewriting children, restoring the
// previous set afterwards (queries additionally inherit the outer namespaces).
List<BLangXMLNS> prevInlineNamespaces = this.inlineXMLNamespaces;
if (isVisitingQuery && this.inlineXMLNamespaces != null) {
xmlElementLiteral.inlineNamespaces.addAll(this.inlineXMLNamespaces);
}
this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;
xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
this.inlineXMLNamespaces = prevInlineNamespaces;
result = xmlElementLiteral;
}
@Override
// Rewrite each item of the XML sequence.
// NOTE(review): rewriteExpr's return value is discarded here, so a replacement node
// produced for an item would be dropped — verify items are rewritten in place.
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
rewriteExpr(xmlItem);
}
result = xmlSequenceLiteral;
}
@Override
// Fold the text fragments into a single string-concatenation expression.
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
result = xmlTextLiteral;
}
@Override
// Fold the comment's text fragments into a single string-concatenation expression.
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
xmlCommentLiteral.concatExpr = rewriteExpr(
constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
result = xmlCommentLiteral;
}
@Override
// Rewrite the PI target and fold its data fragments into one concatenation expression.
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
xmlProcInsLiteral.dataConcatExpr =
rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
result = xmlProcInsLiteral;
}
@Override
// Fold the quoted string's text fragments into a single concatenation expression.
public void visit(BLangXMLQuotedString xmlQuotedString) {
xmlQuotedString.concatExpr = rewriteExpr(
constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
result = xmlQuotedString;
}
@Override
// Desugar the string template into a chain of string concatenations.
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 * string name = "Pubudu";
 * 'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 * type RawTemplate$Impl$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 *
 *
 * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
Location pos = rawTemplateLiteral.pos;
BObjectType objType = (BObjectType) Types.getReferredType(rawTemplateLiteral.getBType());
// Generate the concrete object class for this literal's abstract raw-template type.
BLangClassDefinition objClassDef =
desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
BObjectType classObjType = (BObjectType) objClassDef.getBType();
// Build the `insertions` list constructor passed to the generated init function.
BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
insertionsList.expectedType = insertionsSym.type;
BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
// The argument is registered in all three arg lists of the init invocation;
// presumably later desugar stages read different ones — keep them in sync.
typeNewExpr.argsExpr.add(insertionsList);
typeNewExpr.initInvocation.argExprs.add(insertionsList);
typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 * type $anonType$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 * @param strings The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
Location pos) {
BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
// Generate a unique class name for this template literal within the package.
Name objectClassName = names.fromString(
anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
env.enclPkg.packageID, null, env.enclPkg.symbol,
pos, VIRTUAL, false);
classTSymbol.flags |= Flags.CLASS;
// The concrete class mirrors the abstract object type's fields and type-id set.
BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
objectClassType.fields = objectType.fields;
classTSymbol.type = objectClassType;
objectClassType.typeIdSet.add(objectType.typeIdSet);
BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
// Initialize the `strings` field with the literal's string fragments.
BType stringsType = objectClassType.fields.get("strings").symbol.type;
BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
stringsList.exprs.addAll(strings);
stringsList.expectedType = stringsType;
classDef.fields.get(0).expr = stringsList;
// Attach the user-defined init(insertions) function to the class and the package.
BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
classDef.initFunction = userDefinedInitFunction;
env.enclPkg.functions.add(userDefinedInitFunction);
env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
// Also create and analyze the compiler-generated initializer.
BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
tempGeneratedInitFunction.symbol.scope, env);
SemanticAnalyzer.AnalyzerData data = new SemanticAnalyzer.AnalyzerData(env);
this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, data);
classDef.generatedInitFunction = tempGeneratedInitFunction;
env.enclPkg.functions.add(classDef.generatedInitFunction);
env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
BLangFunction initFunction =
TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
names, Names.USER_DEFINED_INIT_SUFFIX,
symTable, classDefn.getBType());
// Register the init function as the class's initializer.
BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
(BInvokableType) initFunction.getBType(), classDefn.pos);
classDefn.initFunction = initFunction;
initFunction.returnTypeNode.setBType(symTable.nilType);
BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
BInvokableType initFnType = (BInvokableType) initFunction.getBType();
// For every field with no default value: add an init() parameter of the field's type
// and a `self.<field> = <param>;` assignment in the init body.
for (BLangSimpleVariable field : classDefn.fields) {
if (field.expr != null) {
continue;
}
BVarSymbol fieldSym = field.symbol;
BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
initFunction.symbol, classDefn.pos, VIRTUAL);
BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
fieldSym.type, null, paramSym);
param.flagSet.add(Flag.FINAL);
// Keep symbol, function node and invokable type in sync with the new parameter.
initFunction.symbol.scope.define(paramSym.name, paramSym);
initFunction.symbol.params.add(paramSym);
initFnType.paramTypes.add(param.getBType());
initFunction.requiredParams.add(param);
BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
initFunction.receiver.symbol, field.name);
initFuncBody.addStatement(fieldInit);
}
return initFunction;
}
@Override
// Clone the sent value so the receiving worker gets an isolated copy (message passing is by value).
public void visit(BLangWorkerSend workerSendNode) {
workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
result = workerSendNode;
}
@Override
// Synchronous send: same cloning treatment as an async worker send.
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
result = syncSendExpr;
}
@Override
// Pass-through: worker receive nodes need no desugaring here.
public void visit(BLangWorkerReceive workerReceiveNode) {
result = workerReceiveNode;
}
@Override
// Derive the distinct worker identifiers (first-seen order) from the send statements
// cached on this flush expression.
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
.stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
result = workerFlushExpr;
}
@Override
// Desugar the `transactional` expression into a call to the internal transaction
// module's isTransactional function.
public void visit(BLangTransactionalExpr transactionalExpr) {
BInvokableSymbol isTransactionalSymbol =
(BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
result = ASTBuilderUtil
.createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
Collections.emptyList(), symResolver);
}
@Override
// Delegate commit desugaring to the transaction desugar, then rewrite its result.
public void visit(BLangCommitExpr commitExpr) {
BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
result = rewriteExpr(stmtExpr);
}
@Override
/**
 * Desugars a {@code fail} statement. With no enclosing on-fail clause (or while
 * desugaring a query), the fail simply becomes a return of the error value; otherwise
 * the error is routed through the on-fail clause, with nested fails handled specially.
 */
public void visit(BLangFail failNode) {
    if (this.onFailClause == null || this.isVisitingQuery) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        returnStmt.desugared = true;
        result = returnStmt;
        return;
    }
    if (this.onFailClause.bodyContainsFail) {
        result = rewriteNestedOnFail(this.onFailClause, failNode);
    } else {
        result = createOnFailInvocation(onFailClause, failNode);
    }
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangLocalVarRef localVarRef) {
result = localVarRef;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangFieldVarRef fieldVarRef) {
result = fieldVarRef;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangPackageVarRef packageVarRef) {
result = packageVarRef;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangFunctionVarRef functionVarRef) {
result = functionVarRef;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
result = fieldAccessExpr;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangStructFunctionVarRef functionVarRef) {
result = functionVarRef;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
result = mapKeyAccessExpr;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
result = arrayIndexAccessExpr;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
result = arrayIndexAccessExpr;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
result = tableKeyAccessExpr;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangMapLiteral mapLiteral) {
result = mapLiteral;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangStructLiteral structLiteral) {
result = structLiteral;
}
@Override
// Pass-through: already in desugared form.
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
result = waitLiteral;
}
@Override
// Desugar an XML element access (`x.<elem>`) into a lang.xml getElements() invocation,
// passing the namespace-expanded filter names as rest arguments.
public void visit(BLangXMLElementAccess xmlElementAccess) {
xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
xmlElementAccess.expr, new ArrayList<>(), filters);
result = rewriteExpr(invocationNode);
}
/**
 * Expands each XML element filter into a string literal holding the fully qualified
 * element name. A filter whose prefix resolves to a namespace symbol is expanded with
 * that namespace's URI; an unresolved prefix falls back to the default namespace
 * (unless the name is the wildcard {@code "*"}), and otherwise the bare name is used.
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNsSym = visibleNamespaces.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNsUri = (defaultNsSym == null) ? null : defaultNsSym.namespaceURI;
    ArrayList<BLangExpression> expandedNames = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol prefixSym = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        String literalValue;
        if (prefixSym != symTable.notFoundSymbol) {
            // Explicit prefix resolved: qualify with its namespace URI.
            literalValue = createExpandedQName(((BXMLNSSymbol) prefixSym).namespaceURI, filter.name);
        } else if (defaultNsUri != null && !filter.name.equals("*")) {
            // No prefix match: qualify non-wildcard names with the default namespace.
            literalValue = createExpandedQName(defaultNsUri, filter.name);
        } else {
            literalValue = filter.name;
        }
        expandedNames.add(createStringLiteral(filter.elemNamePos, literalValue));
    }
    return expandedNames;
}
/**
 * Builds an invocation of the named xml langlib function on {@code invokeOnExpr}.
 * The receiver expression is rewritten and also prepended as the first required
 * argument (langlib calling convention); {@code restArgs} are rewritten in place.
 */
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangIdentifier funcNameId = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcNameId.setLiteral(false);
    funcNameId.setValue(functionName);
    funcNameId.pos = pos;
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = pos;
    invocation.name = funcNameId;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.expr = invokeOnExpr;
    invocation.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName), env);
    ArrayList<BLangExpression> required = new ArrayList<>();
    required.add(invokeOnExpr);
    required.addAll(args);
    invocation.requiredArgs = required;
    invocation.restArgs = rewriteExprs(restArgs);
    invocation.setBType(((BInvokableType) invocation.symbol.type).getReturnType());
    invocation.langLibInvocation = true;
    return invocation;
}
/**
 * Desugars XML navigation ({@code x/<e>}, {@code x/**}/{@code <e>}, {@code x/*}) into the
 * matching internal langlib invocation. DESCENDANTS and the filtered-children case pass
 * the expanded filters; the CHILDREN case takes no filters. A missing child index is
 * represented by {@code -1}.
 */
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    BLangInvocation desugared;
    switch (xmlNavigation.navAccessType) {
        case DESCENDANTS:
            desugared = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_SELECT_DESCENDANTS,
                    xmlNavigation.expr, new ArrayList<>(), filters);
            break;
        case CHILDREN:
            desugared = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                    xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
            break;
        default:
            BLangExpression childIndexExpr = xmlNavigation.childIndex != null
                    ? xmlNavigation.childIndex
                    : new BLangLiteral(Long.valueOf(-1), symTable.intType);
            ArrayList<BLangExpression> args = new ArrayList<>();
            args.add(rewriteExpr(childIndexExpr));
            desugared = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT,
                    xmlNavigation.expr, args, filters);
            break;
    }
    result = rewriteExpr(desugared);
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the LHS expression needs rewriting; the tested type is static.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Already in desugared form; returned unchanged.
    result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // A rest-args (`...x`) wrapper desugars to its underlying expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named argument (`name = expr`) desugars to just its value expression.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // `check expr`: on error the error is propagated (not panicked).
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // `checkpanic expr`: on error a panic is raised.
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Desugars a {@code check}/{@code checkpanic} expression into a statement expression:
 * <pre>
 *   { _$result$_; $temp$ = expr;
 *     if ($temp$ is &lt;result type&gt;) { _$result$_ = &lt;cast&gt;$temp$; }
 *     else { &lt;error handling&gt; } } =&gt; _$result$_
 * </pre>
 * The error branch is produced by {@code getSafeErrorAssignment}; {@code isCheckPanic}
 * selects panic behaviour instead of error propagation.
 *
 * @param checkedExpr  the checked expression to desugar
 * @param isCheckPanic true for {@code checkpanic}, false for {@code check}
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    if (checkedExpr.isRedundantChecking) {
        // The checking is redundant (flagged by an earlier phase): drop the wrapper.
        result = rewriteExpr(checkedExpr.expr);
        return;
    }
    Location pos = checkedExpr.pos;
    // Temp holding the final (non-error) value of the whole expression.
    String resultVarName = "_$result$_";
    BType resultType = checkedExpr.getBType();
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Temp capturing the checked expression so it is referenced (not re-evaluated)
    // by both the type test and the success/error branches.
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BType checkedExprType = checkedExpr.expr.getBType();
    BLangSimpleVariable checkedExprVar =
            ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,
                    checkedExpr.expr, new BVarSymbol(0, names.fromString(checkedExprVarName),
                            this.env.scope.owner.pkgID, checkedExprType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(pos, checkedExprVar);
    BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);
    // Success path: cast the captured value to the expression's type and store it.
    BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(checkedExprVarRef, resultType));
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(successAssignment);
    // Error path: propagate the error (check) or panic (checkpanic).
    BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,
            checkedExpr.equivalentErrorTypeList, isCheckPanic);
    BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    checkedExprTypeNode.setBType(resultType);
    checkedExprTypeNode.typeKind = resultType.getKind();
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);
    // Plain ArrayList instead of double-brace initialization: the anonymous subclass
    // created by `new ArrayList<>() {{...}}` captures this Desugar instance and would
    // be retained inside the AST.
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos,
            new ArrayList<>(List.of(resultVarDef, checkedExprVarDef, ifStmt)));
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor desugars to an empty type-init (`new`) of the
    // service's generated class; annotation attachments are rewritten for
    // their side effects only.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {
    // Desugar the generated class first, then its annotation attachments, and
    // finally use the associated type-init (`new`) expression as the result.
    visit(bLangObjectConstructorExpression.classNode);
    bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(bLangObjectConstructorExpression.typeInit);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Annotation access is desugared into a synthetic binary expression with the
    // ANNOT_ACCESS operator: LHS is the annotated value, RHS is the annotation's
    // name (bvmAlias) as a string literal.
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.setBType(annotAccessExpr.getBType());
    // Synthetic operator symbol: (lhsType, rhsType) -> annot access result type.
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
                                              new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
                                                                          binaryExpr.rhsExpr.getBType()),
                                                                 annotAccessExpr.getBType(), null), null,
                                              symTable.builtinPos, VIRTUAL);
    result = rewriteExpr(binaryExpr);
}
/**
 * Desugars a type-test expression. Value-typed operands are first widened to {@code any}.
 * A negated test ({@code expr !is T}) is rewritten as {@code !(expr is T)} — a NOT unary
 * over a grouped positive test; a plain test is rewritten in place.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression testedExpr = typeTestExpr.expr;
    if (types.isValueType(testedExpr.getBType())) {
        testedExpr = addConversionExprIfRequired(testedExpr, symTable.anyType);
    }
    if (!typeTestExpr.isNegation) {
        typeTestExpr.expr = rewriteExpr(testedExpr);
        typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
        result = typeTestExpr;
        return;
    }
    // Negation: build the positive test from the ORIGINAL expression/type node,
    // group it, and wrap it in a NOT unary expression.
    BLangTypeTestExpr positiveTest = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
            typeTestExpr.expr, typeTestExpr.typeNode);
    BLangGroupExpr grouped = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
    grouped.expression = positiveTest;
    grouped.setBType(typeTestExpr.getBType());
    BLangUnaryExpr notExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, grouped,
                                                            typeTestExpr.getBType(),
                                                            OperatorKind.NOT, null);
    result = rewriteExpr(notExpr);
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    // Only the tested expression needs rewriting.
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    // Rewrite both the resulting expression and the preceding statement block.
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Flag that we are inside a query so other desugar logic (e.g. fail handling)
    // can branch on it; the previous value is restored before returning.
    boolean prevIsVisitingQuery = this.isVisitingQuery;
    this.isVisitingQuery = true;
    BLangStatementExpression stmtExpr;
    if (queryExpr.stmtExpr == null) {
        // First visit: desugar the query, propagating visible XMLNS declarations,
        // and cache the produced statement expression on the node.
        stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));
        queryExpr.stmtExpr = stmtExpr;
    } else {
        // Reuse the cached desugared form from a previous visit.
        stmtExpr = queryExpr.stmtExpr;
    }
    result = rewrite(stmtExpr, env);
    this.isVisitingQuery = prevIsVisitingQuery;
}
/**
 * Returns the statements recorded in {@code stmtsToBePropagatedToQuery} for every
 * XML namespace visible in the given environment; namespaces without a recorded
 * statement are skipped.
 */
List<BLangStatement> getVisibleXMLNSStmts(SymbolEnv env) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = symResolver.resolveAllNamespaces(env);
    List<BLangStatement> propagatedStmts = new ArrayList<>();
    for (Name nsName : visibleNamespaces.keySet()) {
        BLangStatement nsStmt = this.stmtsToBePropagatedToQuery.get(nsName);
        if (nsStmt != null) {
            propagatedStmts.add(nsStmt);
        }
    }
    return propagatedStmts;
}
@Override
public void visit(BLangQueryAction queryAction) {
    // Same in-query flag handling as for query expressions; the desugared
    // statement expression is not cached on the node here.
    boolean prevIsVisitingQuery = this.isVisitingQuery;
    this.isVisitingQuery = true;
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));
    result = rewrite(stmtExpr, env);
    this.isVisitingQuery = prevIsVisitingQuery;
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    // Rewrite the member expressions in place.
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    BType refType = Types.getReferredType(constSymbol.literalType);
    // NOTE(review): relies on TypeTags ordering placing the simple value types at
    // or below BOOLEAN; such constants (and nil) are replaced by a literal holding
    // the resolved constant value.
    if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {
        if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||
                constSymbol.value.value == null)) {
            // A non-nil simple constant must have a resolved value by this phase.
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        // Structured or other constants: rewrite the original initializer.
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    // Placeholder for an intentionally ignored value; nothing to desugar.
    result = ignoreExpr;
}
@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
    // Rewrite both the conditionally-passed argument and its guard condition.
    dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
    dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
    result = dynamicParamExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
    // A constant reference is replaced by a literal carrying its resolved value.
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}
/**
 * Creates {@code $iterator$ = <collection>.<iterator-func>()}: a variable definition
 * capturing the iterator object obtained by invoking the given iterator function on
 * the collection.
 */
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
    // The collection itself is passed as the (only) required argument.
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
                                               iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Creates {@code $result$ = $iterator$.next()}: a variable definition capturing the
 * (nillable) result of one iterator step.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}
/**
 * Builds the invocation {@code $iterator$.next()} using the iterator object type's
 * attached {@code next} function.
 */
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    // NOTE(review): getNextFunc may return null for a malformed iterator type,
    // which would NPE here; callers are expected to pass a valid iterator object.
    BInvokableSymbol nextFuncSymbol =
            getNextFunc((BObjectType) Types.getReferredType(iteratorSymbol.type)).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.setBType(nextFuncSymbol.retType);
    return nextInvocation;
}
/**
 * Finds the attached function named {@code next} on the given iterator object type,
 * or {@code null} if the type has none.
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
        if (bAttachedFunction.funcName.value.equals("next")) {
            return bAttachedFunction;
        }
    }
    return null;
}
/**
 * Convenience wrapper: builds {@code <result>.value} access of the given type.
 */
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
                                               BVarSymbol resultSymbol) {
    return getFieldAccessExpression(location, "value", varType, resultSymbol);
}
/**
 * Builds a field access {@code <resultSymbol>.<fieldName>} with the given static type.
 */
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.setBType(varType);
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
    return fieldBasedAccessExpression;
}
/**
 * Converts an expression-bodied arrow function body into a block body containing a
 * single {@code return <expr>;} statement.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}
/**
 * Builds an invocation of a function resolved from the root scope by name,
 * with the given arguments and an explicitly supplied return type.
 * No position is set on the created node.
 */
protected BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Resolved against the root (built-in) scope, not the current environment.
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.setBType(retType);
    invocationNode.requiredArgs = args;
    return invocationNode;
}
/**
 * Builds an invocation of a langlib method on {@code onExpr}. The method is resolved
 * from {@code onExpr}'s type; the receiver is also prepended as the first required
 * argument. When {@code retType} is null the symbol's declared return type is used.
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName), env);
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Overload without a receiver expression: the function is resolved from the
 * lang internal module ({@code symTable.langInternalModuleSymbol}) instead of
 * being dispatched on an expression's type.
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
            names.fromString(functionName), env);
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Creates an empty array literal node typed as {@code any[]}.
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.setBType(new BArrayType(symTable.anyType));
    return expr;
}
/**
 * Desugars a function-pointer call: the callee is materialized as a value
 * expression (a field access when there is a receiver, a bare var ref otherwise),
 * rewritten, and wrapped in a {@code BFunctionPointerInvocation}.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangValueExpression calleeExpr;
    if (iExpr.expr != null) {
        BLangFieldBasedAccess fieldAccess = new BLangFieldBasedAccess();
        fieldAccess.expr = iExpr.expr;
        fieldAccess.field = iExpr.name;
        calleeExpr = fieldAccess;
    } else {
        calleeExpr = new BLangSimpleVarRef();
    }
    calleeExpr.symbol = iExpr.symbol;
    calleeExpr.setBType(iExpr.symbol.type);
    result = new BFunctionPointerInvocation(iExpr, rewriteExpr(calleeExpr));
}
/**
 * Wraps {@code expr} in a langlib {@code clone()} call, converting the result to
 * {@code lhsType} if needed. Value-typed and error-typed expressions are returned
 * unchanged (they need no cloning).
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Wraps {@code expr} in a langlib {@code cloneReadOnly()} call, converting the
 * result to {@code lhsType} if needed. Value-typed and error-typed expressions
 * are returned unchanged.
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
                                                            expr.getBType(), expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Rewrites (desugars) the given node under the given environment and returns the
 * replacement produced by the visitor via {@code this.result}. Nodes already marked
 * {@code desugared} are returned untouched, making rewriting idempotent. The current
 * environment is saved and restored around the visit.
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
/**
 * Expression variant of {@link #rewrite}: uses the current environment. When an
 * implicit conversion is attached to the expression, the conversion wrapper is
 * desugared instead of the bare expression (and detached to avoid re-processing).
 */
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
/**
 * Statement variant of {@link #rewrite}: delegates to the node-level rewrite and
 * casts the result back to a statement.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    return (E) stmt;
}
/**
 * Rewrites every statement in the list in place and returns the same list.
 */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
/**
 * Rewrites every node in the list in place and returns the same list.
 */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
/**
 * Rewrites every expression in the list in place (using the current environment)
 * and returns the same list.
 */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
/**
 * Creates a string literal node at the given position.
 */
private BLangLiteral createStringLiteral(Location pos, String value) {
    BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
    stringLit.pos = pos;
    return stringLit;
}
/**
 * Creates an int literal node. Note: no position is set on the created node.
 */
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.setBType(symTable.intType);
    return literal;
}
/**
 * Creates a byte literal node; the signed byte is widened to its unsigned int value.
 */
private BLangLiteral createByteLiteral(Location pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}
/**
 * Wraps {@code expr} in a synthetic type-conversion node targeting {@code targetType};
 * returns {@code expr} unchanged when its type already equals the target type.
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    if (types.isSameType(expr.getBType(), targetType)) {
        return expr;
    }
    BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    castExpr.expr = expr;
    castExpr.pos = expr.pos;
    castExpr.targetType = targetType;
    castExpr.setBType(targetType);
    castExpr.internal = true;
    return castExpr;
}
/**
 * Strips away any (possibly nested) array dimensions and returns the innermost
 * element type. The returned type is given back as-is, without resolving type
 * references on it.
 */
private BType getElementType(BType bType) {
    BType current = bType;
    while (true) {
        BType referred = Types.getReferredType(current);
        if (referred.tag != TypeTags.ARRAY) {
            return current;
        }
        current = ((BArrayType) referred).getElementType();
    }
}
/**
 * Appends an implicit {@code return ();} to a block-bodied function whose return
 * type is nillable and whose last statement is not already a return. Native
 * functions and non-block bodies are left untouched.
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        Location invPos = invokableNode.pos;
        Location returnStmtPos;
        if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
            // Generated init functions get no source position for the implicit return.
            returnStmtPos = null;
        } else {
            // Point the implicit return at the function's closing location.
            returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                                            invPos.lineRange().endLine().line(),
                                            invPos.lineRange().endLine().line(),
                                            invPos.lineRange().startLine().offset(),
                                            invPos.lineRange().startLine().offset(), 0, 0);
        }
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorders the invocation arguments to match the original function signature,
 * packing positional rest arguments and spread (`...x`) varargs into the array
 * expected by the callee's rest parameter, and routing named arguments to their
 * parameters (via {@code reorderNamedArgs}).
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || Types.getReferredType(symbol.type).tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // Holds a temporary variable and its enclosing block when a vararg expression
    // (`...x`) is supplied and some required/defaultable params are still unfilled:
    // the vararg must be captured once so its members can feed multiple params.
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        // Args are provided for the param, no rest params are given as args.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                                                    varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Re-order the named args and fill defaultable/included-record params.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no vararg expression — pack any individual rest args into one
    // array literal matching the callee's rest parameter type.
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: a single vararg expression (`...x`) — if members of it were consumed
    // by required params, attach the captured-vararg block to the first required arg,
    // then build a new array holding only the remaining members for the rest param.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // No parts of the vararg were used for required params; leave as-is.
            return;
        }
        // Prepend the vararg-capture block to the first non-rest argument so the
        // temp variable is defined before any of its members are read.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }
        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        if (Types.getReferredType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {
            // A record-typed vararg feeds only named params; rest param gets [].
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        Location pos = restArgsExpression.pos;
        // Build: { var $newArray = []; foreach i in start..<length { $newArray.push(vararg[i]); } } => $newArray
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                                              restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        // Iterate from the first vararg member NOT consumed by required params.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                                                                 getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                                                                                  foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                                                this.env.scope.owner.pkgID, foreachVariable.getBType(),
                                                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
        BType refType = Types.getReferredType(varargVarType);
        if (refType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) refType;
            if (arrayType.state == BArrayState.CLOSED &&
                    arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                // Exactly as many members as were consumed: nothing left to iterate.
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            valueExpr.setBType(symTable.anyOrErrorType);
        }
        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                                                                     List.of(pushExpr),
                                                                     restParamType, pos);
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);
        BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);
        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }
    // Case 3: individual rest args followed by a vararg expression — seed an array
    // literal with the leading args, then push the vararg's members onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);
    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
                                          this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                                                                 new ArrayList<BLangExpression>() {{
                                                                     add(pushRestArgsExpr);
                                                                 }}, restParamType, pos);
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Reorders the arguments of an invocation so that {@code iExpr.requiredArgs} ends up with exactly
 * one positional expression per parameter of {@code invokableSymbol}, in parameter declaration order.
 * Named arguments are moved into their parameter's position; included-record parameters receive a
 * synthesized record literal; remaining parameters are filled from {@code varargRef} (record, tuple
 * or array typed) or with ignore expressions when there is no vararg.
 *
 * @param iExpr           the invocation whose args are reordered (mutated in place)
 * @param invokableSymbol symbol providing the parameter list
 * @param varargRef       reference to a desugared rest/vararg value, or null when none was passed
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
List<BLangExpression> args = new ArrayList<>();
// Index every named argument by its name, preserving source order.
Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
iExpr.requiredArgs.stream()
.filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
.forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
List<BVarSymbol> params = invokableSymbol.params;
// Record literals synthesized for included-record params; leftover named args are folded into them.
List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
int varargIndex = 0;
BType varargType = null;
boolean tupleTypedVararg = false;
if (varargRef != null) {
varargType = Types.getReferredType(varargRef.getBType());
tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
}
// Walk the parameter list and pick the matching argument source for each parameter.
for (int i = 0; i < params.size(); i++) {
BVarSymbol param = params.get(i);
if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
// Positional argument already in the right slot.
args.add(iExpr.requiredArgs.get(i));
} else if (namedArgs.containsKey(param.name.value)) {
// Named argument for this parameter; remove so leftovers can go to included records.
args.add(namedArgs.remove(param.name.value));
} else if (param.getFlags().contains(Flag.INCLUDED)) {
// Included-record parameter: create an (initially empty) record literal for it.
BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
BType paramType = param.type;
recordLiteral.setBType(paramType);
args.add(recordLiteral);
incRecordLiterals.add(recordLiteral);
// An open record (restFieldType != noType) may absorb additional named args.
if (((BRecordType) Types.getReferredType(paramType)).restFieldType != symTable.noType) {
incRecordParamAllowAdditionalFields = recordLiteral;
}
} else if (varargRef == null) {
// No value supplied at all: placeholder that the backend treats as "use the default".
BLangExpression expr = new BLangIgnoreExpr();
expr.setBType(param.type);
args.add(expr);
} else {
if (Types.getReferredType(varargRef.getBType()).tag == TypeTags.RECORD) {
if (param.isDefaultable) {
// record vararg + defaultable param: varargRef.hasKey(name) ? varargRef[name] : <ignore>
BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
BLangIndexBasedAccess memberAccessExpr =
ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
hasKeyInvocation, memberAccessExpr, ignoreExpr);
args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
} else {
// Required param always present in the record: plain field access.
BLangFieldBasedAccess fieldBasedAccessExpression =
ASTBuilderUtil.createFieldAccessExpr(varargRef,
ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
fieldBasedAccessExpression.setBType(param.type);
args.add(fieldBasedAccessExpression);
}
} else {
// Tuple/array vararg: pull the next positional member out of it.
BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
BType memberAccessExprType = tupleTypedVararg ?
((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
varargRef, indexExpr), param.type));
varargIndex++;
}
}
}
// Any named args left over belong to included-record parameters.
if (namedArgs.size() > 0) {
setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
}
iExpr.requiredArgs = args;
}
/**
 * Distributes leftover named arguments into the record literals synthesized for
 * included-record parameters. A named arg goes to the first literal whose record type
 * declares a field of that name (with a non-`never` type); otherwise it is treated as an
 * additional field of the open included-record parameter, if any.
 *
 * @param namedArgs                          named args not matched to an explicit parameter
 * @param incRecordLiterals                  literals created for included-record parameters
 * @param incRecordParamAllowAdditionalFields literal of the open included-record param, or null
 */
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                           List<BLangRecordLiteral> incRecordLiterals,
                                           BLangRecordLiteral incRecordParamAllowAdditionalFields) {
    for (Map.Entry<String, BLangExpression> namedArgEntry : namedArgs.entrySet()) {
        String argName = namedArgEntry.getKey();
        BLangNamedArgsExpression namedArgExpr = (BLangNamedArgsExpression) namedArgEntry.getValue();
        BLangRecordLiteral targetLiteral = null;
        for (BLangRecordLiteral incRecordLiteral : incRecordLiterals) {
            LinkedHashMap<String, BField> recordFields =
                    ((BRecordType) Types.getReferredType(incRecordLiteral.getBType())).fields;
            BField declaredField = recordFields.get(argName);
            // A `never`-typed field cannot hold a value, so it does not count as a match.
            if (declaredField != null && Types.getReferredType(declaredField.type).tag != TypeTags.NEVER) {
                targetLiteral = incRecordLiteral;
                break;
            }
        }
        if (targetLiteral == null) {
            // Not declared anywhere: becomes a rest field of the open included-record param.
            createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, namedArgExpr);
        } else {
            createAndAddRecordFieldForIncRecordLiteral(targetLiteral, namedArgExpr);
        }
    }
}
/**
 * Appends a key-value field, built from a named argument, to the given record literal.
 * The key is a simple var-ref carrying the named arg's name; the value is its expression.
 */
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
                                                        BLangNamedArgsExpression expr) {
    BLangSimpleVarRef fieldKeyRef = new BLangSimpleVarRef();
    fieldKeyRef.variableName = expr.name;
    recordLiteral.fields.add(ASTBuilderUtil.createBLangRecordKeyValue(fieldKeyRef, expr.expr));
}
/**
 * Builds the failure-handling block for a checked expression that evaluated to an error:
 * declares `$t_failure$` initialized from a cast of {@code ref} to `error`, then either a
 * `fail` statement (possibly carrying a desugared `return` of the error) or a `panic`.
 *
 * @param location             position for the generated nodes
 * @param ref                  var-ref holding the value known to be an error
 * @param invokableSymbol      enclosing invokable, used to inspect its return type
 * @param equivalentErrorTypes the error types the checked expression may produce
 * @param isCheckPanicExpr     true for `checkpanic`, which always panics
 * @return the generated block statement
 */
private BLangBlockStmt getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,
BSymbol invokableSymbol,
List<BType> equivalentErrorTypes,
boolean isCheckPanicExpr) {
// returnOnError: every possible error type is assignable to some member of the
// enclosing function's return type, so the error can be propagated via return/fail.
BType enclosingFuncReturnType = Types.getReferredType(((BInvokableType) invokableSymbol.type).retType);
Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
((BUnionType) enclosingFuncReturnType).getMemberTypes() :
new LinkedHashSet<>() {{
add(enclosingFuncReturnType);
}};
boolean returnOnError = equivalentErrorTypes.stream()
.allMatch(errorType -> returnTypeSet.stream()
.anyMatch(retType -> types.isAssignable(errorType, retType)));
// $t_failure$ error = <error> ref;
String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
BLangSimpleVariable errorVar =
ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,
createTypeCastExpr(ref, symTable.errorType),
new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
this.env.scope.owner.pkgID, symTable.errorType,
this.env.scope.owner, location, VIRTUAL));
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);
BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVar);
blockStmt.addStatement(errorVarDef);
BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);
if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
// `fail $t_failure$` — lets an enclosing on-fail clause or the caller handle it.
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = location;
failStmt.expr = errorVarRef;
blockStmt.addStatement(failStmt);
if (returnOnError && this.shouldReturnErrors) {
// Attach the desugared `return $t_failure$` the fail statement lowers to.
BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));
errorReturn.desugared = true;
failStmt.exprStmt = errorReturn;
}
} else {
// Error cannot be propagated (or `checkpanic` was used): panic with it.
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = location;
panicNode.expr = errorVarRef;
blockStmt.addStatement(panicNode);
}
return blockStmt;
}
/**
 * Wraps {@code expr} in a conversion to {@code lhsType} when one is required.
 * Returns the expression untouched when the types already match, when the type checker
 * supplies an implicit cast, or for the nil/json, nullable/nil and tuple/array cases
 * that need no conversion node. Otherwise builds an unchecked, compiler-internal
 * type-conversion expression targeting {@code lhsType}.
 *
 * @param expr    expression to convert
 * @param lhsType target type (TypeTags.NONE means "no target"; returned as-is)
 * @return the original expression or a conversion wrapping it
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.getBType();
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }

    // Prefer an implicit cast when the type checker can attach one.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        BLangExpression implicitConversion = expr.impConversionExpr;
        expr.impConversionExpr = null;
        return implicitConversion;
    }

    // Combinations that are assignable without any conversion node.
    boolean nilToJson = lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL;
    boolean nullableToNil = lhsType.tag == TypeTags.NIL && rhsType.isNullable();
    boolean tupleToArray = lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE;
    if (nilToJson || nullableToNil || tupleToArray) {
        return expr;
    }

    // Fall back to an unchecked internal conversion node.
    BLangTypeConversionExpr conversionExpr =
            (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.setBType(lhsType);
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Computes the semantic type implied by a structured binding pattern, recursing into
 * member patterns. Tuple variables yield a BTupleType (with rest type when present),
 * record variables yield a freshly synthesized anonymous record type (including its
 * type-definition and init function), and error variables yield a synthesized error
 * type with a detail record type. Any other variable kind returns its own BType.
 *
 * Note: the record and error branches register new symbols and type definitions in
 * the current environment as a side effect.
 *
 * @param bindingPatternVariable the binding-pattern variable to derive a type for
 * @return the derived type
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
// [T1, T2, ...Trest] — member types derived recursively.
BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
List<BType> memberTypes = new ArrayList<>();
for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
}
BTupleType tupleType = new BTupleType(memberTypes);
if (tupleVariable.restVariable != null) {
// Rest pattern derives an array type; the tuple rest type is its element type.
BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
tupleType.restType = restArrayType.eType;
}
return tupleType;
}
if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
// Synthesize $anonRecordType$_N with one field per binding entry.
BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
BRecordTypeSymbol recordSymbol =
Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
VIRTUAL);
recordSymbol.initializerFunc = createRecordInitFunc();
recordSymbol.scope = new Scope(recordSymbol);
recordSymbol.scope.define(
names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
recordSymbol.initializerFunc.symbol);
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
for (int i = 0; i < recordVariable.variableList.size(); i++) {
String fieldNameStr = recordVariable.variableList.get(i).key.value;
Name fieldName = names.fromString(fieldNameStr);
// Field type comes from the entry's own (possibly nested) binding pattern.
BType fieldType = getStructuredBindingPatternType(
recordVariable.variableList.get(i).valueBindingPattern);
BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
recordSymbol, bindingPatternVariable.pos, VIRTUAL);
fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
recordSymbol.scope.define(fieldName, fieldSymbol);
}
BRecordType recordVarType = new BRecordType(recordSymbol);
recordVarType.fields = fields;
// Rest type: taken from the rest pattern's record type when present, else anydata.
recordVarType.restFieldType = recordVariable.restParam != null ?
((BRecordType) recordVariable.restParam.getBType()).restFieldType :
symTable.anydataType;
recordSymbol.type = recordVarType;
recordVarType.tsymbol = recordSymbol;
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
recordVarType,
bindingPatternVariable.pos);
recordTypeNode.initFunction =
rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
env);
TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);
return recordVarType;
}
if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
// Synthesize $anonErrorType$_N with a detail record derived from the detail entries.
BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
SymTag.ERROR,
Flags.PUBLIC,
names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
env.enclPkg.symbol.pkgID,
null, null, errorVariable.pos, VIRTUAL);
BType detailType;
if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
// Only a rest detail binding: the generic detail type suffices.
detailType = symTable.detailType;
} else {
detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
errorVariable.pos);
BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
recordTypeNode.initFunction = TypeDefBuilderHelper
.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,
recordTypeNode, env);
}
BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
errorTypeSymbol.type = errorType;
TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,
createErrorTypeNode(errorType), env);
return errorType;
}
// Simple variable (or other kinds): its declared/inferred type is used directly.
return bindingPatternVariable.getBType();
}
/**
 * Builds the record-type AST node describing an error variable's detail record, with one
 * field per detail entry. Entries whose binding pattern carries no symbol get a synthesized
 * public symbol (name suffixed with '$', pure type).
 *
 * @param errorVariable error binding pattern supplying the detail entries
 * @param detailType    the semantic detail record type the node describes
 * @return the record type node
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> detailFieldVars = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry entry : errorVariable.detail) {
        BVarSymbol fieldSymbol = entry.valueBindingPattern.symbol;
        if (fieldSymbol == null) {
            // Entry was not bound to a symbol; synthesize one for the field.
            fieldSymbol = new BVarSymbol(Flags.PUBLIC, names.fromString(entry.key.value + "$"),
                    this.env.enclPkg.packageID, symTable.pureType, null,
                    entry.valueBindingPattern.pos, VIRTUAL);
        }
        detailFieldVars.add(ASTBuilderUtil.createVariable(
                entry.valueBindingPattern.pos,
                fieldSymbol.name.value,
                entry.valueBindingPattern.getBType(),
                entry.valueBindingPattern.expr,
                fieldSymbol));
    }
    return TypeDefBuilderHelper.createRecordTypeNode(detailFieldVars, detailType, errorVariable.pos);
}
/**
 * Creates the semantic record type for an error binding pattern's detail mapping: an
 * anonymous record with one field per detail entry. The record is sealed only when there
 * is no rest-detail binding.
 *
 * @param detail     the detail entries of the error binding pattern
 * @param restDetail the rest-detail variable, or null
 * @param errorNo    sequence number of the synthesized error type (unused here)
 * @param pos        position for the generated type
 * @return the detail record type
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo, Location pos) {
    BRecordType detailRecordType = createAnonRecordType(pos);
    if (restDetail == null) {
        // No rest binding: the detail record is closed.
        detailRecordType.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry entry : detail) {
        Name entryName = names.fromIdNode(entry.key);
        // Field type is derived from the entry's (possibly nested) binding pattern.
        BType entryType = getStructuredBindingPatternType(entry.valueBindingPattern);
        BVarSymbol entrySymbol = new BVarSymbol(Flags.PUBLIC, entryName, detailRecordType.tsymbol.pkgID,
                entryType, detailRecordType.tsymbol, entry.key.pos, VIRTUAL);
        detailRecordType.fields.put(entryName.value, new BField(entryName, entry.key.pos, entrySymbol));
        detailRecordType.tsymbol.scope.define(entryName, entrySymbol);
    }
    return detailRecordType;
}
/**
 * Creates a fresh anonymous open record type (rest field type anydata) backed by a public
 * record type symbol with its own scope and a generated init function.
 *
 * @param pos position for the generated symbol
 * @return the new record type
 */
private BRecordType createAnonRecordType(Location pos) {
    BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
            env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
    recordTypeSymbol.initializerFunc = createRecordInitFunc();
    recordTypeSymbol.scope = new Scope(recordTypeSymbol);
    // Register the init function in the record's scope under "<record>.<initFunc>".
    String initFuncScopedName =
            recordTypeSymbol.name.value + "." + recordTypeSymbol.initializerFunc.funcName.value;
    recordTypeSymbol.scope.define(names.fromString(initFuncScopedName),
            recordTypeSymbol.initializerFunc.symbol);

    BRecordType anonRecordType = new BRecordType(recordTypeSymbol);
    anonRecordType.restFieldType = symTable.anydataType;
    return anonRecordType;
}
/**
 * Creates the synthetic attached `init` function (no params, nil return) used by
 * generated record type symbols.
 *
 * @return the attached init function
 */
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner,
            false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, initFuncType,
            symTable.builtinPos);
}
/**
 * Wraps a semantic error type in an error-type AST node.
 *
 * @param errorType the semantic type to wrap
 * @return the AST node carrying that type
 */
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType typeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    typeNode.setBType(errorType);
    return typeNode;
}
/**
 * Recursively builds the boolean condition that tests {@code varRef} against a match
 * pattern expression: group expressions are unwrapped, binary expressions become an OR
 * of the conditions for both operands, the `_` var-ref becomes `varRef is any`, and any
 * other expression becomes `varRef == expression` (resolving an equality operator, with
 * a type-set fallback when the direct lookup fails).
 *
 * @param pos        position for generated nodes
 * @param varRef     the value being matched
 * @param expression the pattern expression to compare against
 * @return a boolean-typed expression implementing the test
 */
private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
BLangExpression expression) {
BLangBinaryExpr binaryExpr;
if (NodeKind.GROUP_EXPR == expression.getKind()) {
// Parenthesized pattern: test against its inner expression.
return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
}
if (NodeKind.BINARY_EXPR == expression.getKind()) {
// `p1 | p2`-style pattern: OR the conditions of both sides.
binaryExpr = (BLangBinaryExpr) expression;
BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
} else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
&& ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
// `_` matches anything: desugar to `varRef is any`.
BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
anyType.setBType(symTable.anyType);
anyType.typeKind = TypeKind.ANY;
return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
} else {
// Constant/value pattern: `varRef == expression`.
binaryExpr = ASTBuilderUtil
.createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
expression.getBType());
if (opSymbol == symTable.notFoundSymbol) {
// No direct operator for the operand types: fall back to type-set equality.
opSymbol = symResolver
.getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
binaryExpr, env);
}
binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
}
return binaryExpr;
}
/**
 * Builds an is-like type-test expression (`expr is-like type`) whose static type is boolean.
 */
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
/**
 * Turns a variable declaration with an initializer into the assignment statement
 * `<var> = <init-expr>`, targeting a fresh var-ref bound to the same symbol.
 *
 * @param variable the declared variable supplying symbol, type, position and initializer
 * @return the assignment statement
 */
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    // Fresh reference to the declared variable, used as the assignment target.
    BLangSimpleVarRef targetRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    targetRef.pos = variable.pos;
    targetRef.variableName = variable.name;
    targetRef.symbol = variable.symbol;
    targetRef.setBType(variable.getBType());

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.setVariable(targetRef);
    assignment.expr = variable.expr;
    return assignment;
}
/**
 * Convenience overload: builds the `self.<field> = <expr>` update for {@code variable},
 * taking the expression, symbol, type and name from the variable itself.
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
BVarSymbol selfSymbol) {
return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,
variable.name);
}
/**
 * Builds, and rewrites within the init function's environment, the assignment
 * `self.<fieldName> = expr` used to initialize an object field from its generated
 * init function.
 *
 * @param function    the init function the assignment belongs to
 * @param expr        the value being assigned
 * @param fieldSymbol symbol of the field being written
 * @param fieldType   static type of the field access
 * @param selfSymbol  symbol of the `self` receiver
 * @param fieldName   identifier of the field
 * @return the rewritten assignment statement
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                BLangIdentifier fieldName) {
    // self.<fieldName> — marked as a store happening during object creation.
    BLangSimpleVarRef selfRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
    BLangFieldBasedAccess selfFieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfRef, fieldName);
    selfFieldAccess.symbol = fieldSymbol;
    selfFieldAccess.setBType(fieldType);
    selfFieldAccess.isStoreOnCreation = true;

    BLangAssignment fieldAssignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    fieldAssignment.pos = function.pos;
    fieldAssignment.setVariable(selfFieldAccess);
    fieldAssignment.expr = expr;

    // Rewrite in the function's own environment so nested nodes are desugared correctly.
    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(fieldAssignment, initFuncEnv);
}
/**
 * Decides whether an access expression participates in safe navigation: true when the
 * expression itself uses `?.`/error-lifting, or when any access in its receiver chain does.
 * L-values and accesses without a receiver never safe-navigate.
 *
 * @param accessExpr the access expression to inspect
 * @return true if safe-navigation desugaring applies
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.isLValue || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Otherwise, safe navigation can still be inherited from a chained receiver access.
    NodeKind receiverKind = accessExpr.expr.getKind();
    boolean receiverIsAccess = receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR;
    return receiverIsAccess && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Desugars a safe-navigation access chain into a statement expression:
 * `{ T $temp_result$; <match statement tree built by handleSafeNavigation>; } $temp_result$`.
 * handleSafeNavigation populates {@code matchStmtStack}; the outermost match statement
 * (bottom of the stack) becomes the generated block's second statement. The per-chain
 * state fields are reset before returning.
 *
 * @param accessExpr the safe-navigation access expression to rewrite
 * @return the replacement statement expression, typed as the original access
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
BType originalExprType = accessExpr.getBType();
// Temp variable that every generated clause assigns its result into.
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar =
ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
new BVarSymbol(0, names.fromString(matchTempResultVarName),
this.env.scope.owner.pkgID, accessExpr.getBType(),
this.env.scope.owner, accessExpr.pos, VIRTUAL));
BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
// Builds the nested match statements and pushes them onto matchStmtStack.
handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
// The first (outermost) match statement contains all the nested ones.
BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();
BLangBlockStmt blockStmt =
ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matchStmt));
BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
stmtExpression.setBType(originalExprType);
// Reset the per-chain desugaring state for the next safe-navigation expression.
this.matchStmtStack = new Stack<>();
this.accessExprStack = new Stack<>();
this.successClause = null;
this.safeNavigationAssignment = null;
return stmtExpression;
}
/**
 * Recursively desugars one safe-navigation access chain, innermost receiver first.
 * For each access that uses `?.` or error lifting, a match statement is created with
 * clauses for nil and/or error plus a success clause for the actual access; the match
 * statements are chained via {@code pushToMatchStatementStack}. Accesses without safe
 * navigation only have their static type adjusted.
 *
 * @param accessExpr    the access currently being processed
 * @param type          the overall result type the match statements produce
 * @param tempResultVar the temp variable all clauses assign into
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
if (accessExpr.expr == null) {
return;
}
// Process the receiver chain first so inner match statements are created before outer ones.
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
}
if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
// Not a safe-navigation access: just fix up its static type (xml and map<json>
// member access may also produce an error) and patch the pending assignment.
BType originalType = Types.getReferredType(accessExpr.originalType);
if (TypeTags.isXMLTypeTag(originalType.tag) || isMapJson(originalType)) {
accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
} else {
accessExpr.setBType(originalType);
}
if (this.safeNavigationAssignment != null) {
this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
}
return;
}
/*
* If the field access is a safe navigation, create a match expression.
* Then chain the current expression as the success-pattern of the parent
* match expr, if available.
* eg:
* x but { <--- parent match expr
* error e => e,
* T t => t.y but { <--- current expr
* error e => e,
* R r => r.z
* }
* }
*/
BLangExpression matchExpr = accessExpr.expr;
BType matchExprType = accessExpr.expr.getBType();
Location pos = accessExpr.pos;
BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(matchExpr, pos);
boolean isAllTypesRecords = false;
LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
BType referredType = Types.getReferredType(matchExpr.getBType());
if (referredType.tag == TypeTags.UNION) {
memTypes = new LinkedHashSet<>(((BUnionType) referredType).getMemberTypes());
isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
}
// `?.`: add the nil clause and drop nil from the remaining member types.
if (accessExpr.nilSafeNavigation) {
matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));
matchStmt.setBType(type);
memTypes.remove(symTable.nilType);
}
// Error lifting: add the error clause and drop error from the remaining member types.
if (accessExpr.errorSafeNavigation) {
matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));
matchStmt.setBType(type);
matchStmt.pos = pos;
memTypes.remove(symTable.errorType);
}
BLangMatchClause successClause = null;
Name field = getFieldName(accessExpr);
if (field == Names.EMPTY) {
// No statically-known field name (non-literal index): single generic success clause.
successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
pushToMatchStatementStack(matchStmt, successClause, pos);
return;
}
if (isAllTypesRecords) {
// Union of records: one success clause per member that can carry the field,
// plus a wildcard clause mapping everything else to nil.
for (BType memberType : memTypes) {
BRecordType recordType = (BRecordType) Types.getReferredType(memberType);
if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
}
}
matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));
pushToMatchStatementStack(matchStmt, successClause, pos);
return;
}
// Default: single success clause over the whole receiver type.
successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
pushToMatchStatementStack(matchStmt, successClause, pos);
}
/**
 * Returns true when the given type is exactly a map whose constraint is json.
 */
private boolean isMapJson(BType originalType) {
    if (originalType.tag != TypeTags.MAP) {
        return false;
    }
    return ((BMapType) originalType).getConstraint().tag == TypeTags.JSON;
}
/**
 * Records a newly created safe-navigation match statement: pushes it onto the stack and,
 * when an outer success clause already exists, nests the new match statement inside that
 * clause's body so consecutive accesses in the chain nest their match statements.
 */
private void pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,
Location pos) {
this.matchStmtStack.push(matchStmt);
if (this.successClause != null) {
// Replace the outer success clause's body with a block containing this match statement.
this.successClause.blockStmt = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));
}
this.successClause = successClause;
}
/**
 * Extracts the statically-known field name from an access expression: the field identifier
 * for field access, the literal value for index access with a literal index, and
 * {@link Names#EMPTY} otherwise.
 *
 * @param accessExpr the access expression to inspect
 * @return the field name, or Names.EMPTY when it cannot be determined statically
 */
private Name getFieldName(BLangAccessExpression accessExpr) {
    switch (accessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            return new Name(((BLangFieldBasedAccess) accessExpr).field.value);
        case INDEX_BASED_ACCESS_EXPR:
            BLangExpression indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
            if (indexExpr.getKind() == NodeKind.LITERAL) {
                return new Name(((BLangLiteral) indexExpr).value.toString());
            }
            // Dynamic index: no static field name.
            return Names.EMPTY;
        default:
            return Names.EMPTY;
    }
}
/**
 * Returns true when every member of the union is a record, error or nil — i.e. the
 * union can be handled by the per-record safe-navigation clauses.
 *
 * @param memTypes the union's member types
 * @return true if no member falls outside record/error/nil
 */
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
    return memTypes.stream()
            .map(memType -> Types.getReferredType(memType).tag)
            .allMatch(tag -> tag == TypeTags.RECORD || tag == TypeTags.ERROR || tag == TypeTags.NIL);
}
/**
 * Builds the `var $t_match_error$ if $t_match_error$ is error => tempResultVar = $t_match_error$`
 * clause of a safe-navigation match statement.
 *
 * @param matchExpr     the expression being matched
 * @param tempResultVar the temp variable the clause assigns the error into
 * @return the match clause
 */
private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
    Location clausePos = matchExpr.pos;
    String bindingVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol bindingVarSymbol = new BVarSymbol(0, Names.fromString(bindingVarName),
            this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, clausePos, VIRTUAL);

    // `var $t_match_error$` capture pattern over the matched expression.
    BLangCaptureBindingPattern capturePattern =
            ASTBuilderUtil.createCaptureBindingPattern(bindingVarSymbol, bindingVarName);
    BLangVarBindingPatternMatchPattern bindingPattern =
            ASTBuilderUtil.createVarBindingPatternMatchPattern(capturePattern, matchExpr);

    // Clause body: tempResultVar = <captured value>;
    BLangSimpleVarRef capturedValueRef = ASTBuilderUtil.createVariableRef(clausePos, bindingVarSymbol);
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(clausePos, tempResultVar.symbol);
    BLangAssignment assignResult =
            ASTBuilderUtil.createAssignmentStmt(clausePos, resultVarRef, capturedValueRef);
    BLangBlockStmt clauseBody =
            ASTBuilderUtil.createBlockStmt(clausePos, this.env.scope, Lists.of(assignResult));

    // Guard: only take this clause when the captured value is an error.
    BLangExpression errorGuard =
            ASTBuilderUtil.createTypeTestExpr(clausePos, capturedValueRef, getErrorTypeNode());
    errorGuard.setBType(symTable.booleanType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, errorGuard, bindingPattern);
}
/**
 * Builds the `var $t_match_null$ if $t_match_null$ is () => tempResultVar = $t_match_null$`
 * clause of a safe-navigation match statement (the `?.` nil case).
 *
 * @param matchExpr     the expression being matched
 * @param tempResultVar the temp variable the clause assigns nil into
 * @return the match clause
 */
private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
    Location clausePos = matchExpr.pos;
    String bindingVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol bindingVarSymbol = new BVarSymbol(0, Names.fromString(bindingVarName),
            this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, clausePos, VIRTUAL);

    // `var $t_match_null$` capture pattern over the matched expression.
    BLangCaptureBindingPattern capturePattern =
            ASTBuilderUtil.createCaptureBindingPattern(bindingVarSymbol, bindingVarName);
    BLangVarBindingPatternMatchPattern bindingPattern =
            ASTBuilderUtil.createVarBindingPatternMatchPattern(capturePattern, matchExpr);

    // Clause body: tempResultVar = <captured value>;
    BLangSimpleVarRef capturedValueRef = ASTBuilderUtil.createVariableRef(clausePos, bindingVarSymbol);
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(clausePos, tempResultVar.symbol);
    BLangAssignment assignResult =
            ASTBuilderUtil.createAssignmentStmt(clausePos, resultVarRef, capturedValueRef);
    BLangBlockStmt clauseBody =
            ASTBuilderUtil.createBlockStmt(clausePos, this.env.scope, Lists.of(assignResult));

    // Guard: only take this clause when the captured value is nil.
    BLangExpression nilGuard =
            ASTBuilderUtil.createTypeTestExpr(clausePos, capturedValueRef, getNillTypeNode());
    nilGuard.setBType(symTable.booleanType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, nilGuard, bindingPattern);
}
/**
 * Builds the wildcard `_ => tempResultVar = ()` clause used when navigating over a union
 * of records: any value that matched no field-bearing record maps the result to nil.
 *
 * @param matchExpr     the expression being matched
 * @param tempResultVar the temp variable the clause assigns nil into
 * @return the match clause
 */
private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,
                                                       BLangSimpleVariable tempResultVar) {
    Location clausePos = matchExpr.pos;
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(clausePos, tempResultVar.symbol);
    // Clause body: tempResultVar = ();
    BLangAssignment assignNil = ASTBuilderUtil.createAssignmentStmt(clausePos, resultVarRef,
            createLiteral(clausePos, symTable.nilType, Names.NIL_VALUE));
    BLangBlockStmt clauseBody =
            ASTBuilderUtil.createBlockStmt(clausePos, this.env.scope, Lists.of(assignNil));
    BLangWildCardMatchPattern wildCardPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);
    wildCardPattern.setBType(symTable.anyType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardPattern);
}
/**
 * Builds the success clause of a safe-navigation match statement:
 * `var $t_match_success$ if $t_match_success$ is <type> => tempResultVar = <cloned access>`.
 * The original access expression is cloned, re-rooted on the captured success variable,
 * stripped of its safe-navigation flags, and its result assigned (with conversion if
 * needed) into the temp result variable.
 *
 * @param type          static type of the success value (error/nil lifted per liftError)
 * @param matchExpr     the expression being matched
 * @param accessExpr    the access being desugared
 * @param tempResultVar the temp variable the clause assigns into
 * @param liftError     whether error is lifted out of the success type
 * @return the match clause
 */
private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,
BLangAccessExpression accessExpr,
BLangSimpleVariable tempResultVar, boolean liftError) {
// Success type: the receiver type with nil (and, when liftError, error) removed.
type = types.getSafeType(type, true, liftError);
String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
Location pos = accessExpr.pos;
// Function-typed success values need an invokable symbol so calls through them resolve.
BVarSymbol successPatternSymbol;
if (Types.getReferredType(type).tag == TypeTags.INVOKABLE) {
successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
} else {
successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
}
BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
type, null, successPatternSymbol);
BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,
successPatternVar.symbol);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
// Clone the access and preserve pieces the cloner does not carry over.
BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
}
if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
}
// Re-root the clone on the captured success value and clear its safe-navigation flags.
tempAccessExpr.expr = addConversionExprIfRequired(successPatternVarRef, type);
tempAccessExpr.errorSafeNavigation = false;
tempAccessExpr.nilSafeNavigation = false;
accessExpr.cloneRef = null;
// xml member access may also yield error or nil.
if (TypeTags.isXMLTypeTag(Types.getReferredType(tempAccessExpr.expr.getBType()).tag)) {
tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
symTable.nilType));
} else {
tempAccessExpr.setBType(accessExpr.originalType);
}
tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
// Clause body: tempResultVar = <cloned access>, converting to the temp var's type if needed.
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,
Lists.of(assignmentStmt));
// Guard: only take this clause when the captured value has the success type.
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Returns a fresh AST node for the nil type `()`.
 */
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.setBType(symTable.nilType);
    nilTypeNode.typeKind = TypeKind.NIL;
    return nilTypeNode;
}
/**
 * Wraps a semantic type in a value-type AST node carrying the same type kind.
 *
 * @param type the semantic type to wrap
 * @return the AST node
 */
BLangValueType createTypeNode(BType type) {
    BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    valueTypeNode.setBType(type);
    valueTypeNode.typeKind = type.getKind();
    return valueTypeNode;
}
/**
 * Clones a value expression for safe-navigation rewriting: simple var-refs get a fresh
 * reference to the same symbol; field/index accesses are cloned via
 * {@link #cloneAccessExpr}. Any other kind is unexpected here.
 *
 * @param expr the expression to clone
 * @return the cloned expression
 * @throws IllegalStateException for unsupported expression kinds
 */
private BLangValueExpression cloneExpression(BLangExpression expr) {
    NodeKind exprKind = expr.getKind();
    if (exprKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (exprKind == NodeKind.FIELD_BASED_ACCESS_EXPR || exprKind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Deep-clones a field/index access expression chain, disabling safe-navigation
 * flags on the clone and typing the receiver with its nil-lifted ("safe") type.
 *
 * @param original the access expression to clone; returned unchanged if it has no receiver
 * @return the cloned access expression typed with the original's {@code originalType}
 * @throws IllegalStateException if the access expression kind is unexpected
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression original) {
    if (original.expr == null) {
        return original;
    }

    // Clone the receiver first, recursing through nested access expressions.
    NodeKind receiverKind = original.expr.getKind();
    BLangExpression receiver;
    if (receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        receiver = cloneAccessExpr((BLangAccessExpression) original.expr);
    } else {
        receiver = cloneExpression(original.expr);
    }
    // Receiver is typed with the nil-lifted variant of its original type.
    receiver.setBType(types.getSafeType(original.expr.getBType(), true, false));

    BLangAccessExpression clone;
    switch (original.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            clone = ASTBuilderUtil.createFieldAccessExpr(receiver, ((BLangFieldBasedAccess) original).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            clone = ASTBuilderUtil.createIndexAccessExpr(receiver, ((BLangIndexBasedAccess) original).indexExpr);
            break;
        default:
            throw new IllegalStateException();
    }

    clone.originalType = original.originalType;
    clone.pos = original.pos;
    clone.isLValue = original.isLValue;
    clone.symbol = original.symbol;
    // Safe navigation has already been desugared away for the clone.
    clone.errorSafeNavigation = false;
    clone.nilSafeNavigation = false;
    clone.setBType(original.originalType);
    return clone;
}
/**
 * Builds {@code expr + 1} as an int binary expression, used to turn an
 * exclusive range start into an inclusive one.
 *
 * @param expr the original range-start expression
 * @return a binary ADD expression adding the int literal 1 to {@code expr}
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
                                                                               symTable.intType,
                                                                               symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOp);
}
/**
 * Builds {@code expr - 1} as an int binary expression, used to turn an
 * exclusive range end into an inclusive one.
 *
 * @param expr the original range-end expression
 * @return a binary SUB expression subtracting the int literal 1 from {@code expr}
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
                                                                               symTable.intType,
                                                                               symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOp);
}
/**
 * Creates a boolean literal node at the built-in position.
 *
 * @param value the boolean value the literal holds
 * @return a boolean-typed {@link BLangLiteral}
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral boolLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    boolLiteral.value = value;
    boolLiteral.setBType(symTable.booleanType);
    boolLiteral.pos = symTable.builtinPos;
    return boolLiteral;
}
/**
 * Reports whether the given type is a mapping type that has a default value
 * (json, map, or record), looking through nil-lifting and type references.
 *
 * @param type the type to inspect
 * @return true for json/map/record (possibly behind a type reference), false otherwise
 */
private boolean isDefaultableMappingType(BType type) {
    int tag = types.getSafeType(type, true, false).tag;
    if (tag == TypeTags.TYPEREFDESC) {
        // Unwrap the reference and re-check the referred type.
        return isDefaultableMappingType(Types.getReferredType(type));
    }
    return tag == TypeTags.JSON || tag == TypeTags.MAP || tag == TypeTags.RECORD;
}
/**
 * Creates the generated initializer function for a class definition, registers it
 * on the class's object type symbol, and rewrites it in the given environment.
 *
 * @param classDefinition the class to create the generated init function for
 * @param env             the symbol environment the function is defined and rewritten in
 * @return the rewritten generated init function
 */
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
    // Default the return type to nil; reuse the user-defined init's return type if present.
    BType returnType = symTable.nilType;
    if (classDefinition.initFunction != null) {
        returnType = classDefinition.initFunction.getBType().getReturnType();
    }
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(null, classDefinition.symbol,
                                                                    env, names, GENERATED_INIT_SUFFIX,
                                                                    classDefinition.getBType(), returnType);
    // Attach the generated initializer to the object type symbol so callers can find it.
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
    typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
                                                                (BInvokableType) initFunction.getBType(), null);
    classDefinition.generatedInitFunction = initFunction;
    initFunction.returnTypeNode.setBType(returnType);
    return rewrite(initFunction, env);
}
/**
 * Desugars a logical binary expression into an if/else over a temp variable:
 *
 * <pre>
 *   T $result$;
 *   // AND: if (lhs) { $result$ = rhs;  } else { $result$ = false; }
 *   // OR : if (lhs) { $result$ = true; } else { $result$ = rhs;   }
 * </pre>
 *
 * The whole construct becomes a statement expression yielding {@code $result$}.
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    boolean isAnd = binaryExpr.opKind == OperatorKind.AND;
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
                                                       symTable.builtinPos);

    // then-branch: rhs for AND, literal true for OR
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
                                                                          resultVarDef.var.symbol);
    BLangExpression thenResult = isAnd ? binaryExpr.rhsExpr : getBooleanLiteral(true);
    thenBody.addStatement(ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult));

    // else-branch: literal false for AND, rhs for OR
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
                                                                          resultVarDef.var.symbol);
    BLangExpression elseResult = isAnd ? getBooleanLiteral(false) : binaryExpr.rhsExpr;
    elseBody.addStatement(ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult));

    // Assemble { T $result$; if (lhs) {...} else {...} } yielding $result$.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Reports whether the expression is (or wraps, via check/type-conversion
 * expressions) a mapping constructor, object constructor, or object init.
 *
 * @param expression the expression to inspect
 * @return true if the unwrapped expression is a constructor/init expression
 */
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
    BLangExpression current = expression;
    // Iteratively unwrap check and type-conversion expressions instead of recursing.
    while (true) {
        switch (current.getKind()) {
            case TYPE_INIT_EXPR:
            case RECORD_LITERAL_EXPR:
            case OBJECT_CTOR_EXPRESSION:
                return true;
            case CHECK_EXPR:
                current = ((BLangCheckedExpr) current).expr;
                break;
            case TYPE_CONVERSION_EXPR:
                current = ((BLangTypeConversionExpr) current).expr;
                break;
            default:
                return false;
        }
    }
}
/**
 * Returns the rest-parameter type of the invokable symbol, or null when the
 * symbol is null or has no rest parameter.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    return (invokableSymbol == null || invokableSymbol.restParam == null)
            ? null
            : invokableSymbol.restParam.type;
}
/**
 * Returns the rest-parameter type of the function node, or null when the
 * function is null or has no rest parameter.
 */
private BType getRestType(BLangFunction function) {
    return (function == null || function.restParam == null)
            ? null
            : function.restParam.getBType();
}
/**
 * Returns the rest-parameter symbol of the function node, or null when the
 * function is null or has no rest parameter.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    return (function == null || function.restParam == null)
            ? null
            : function.restParam.symbol;
}
/**
 * Reports whether the record field is a key-value field with a computed key.
 *
 * @param field the record field to inspect
 * @return true only for key-value fields whose key is computed
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Rewrites a mapping constructor into either a struct literal (for record types)
 * or a map literal, normalizing each field: non-computed identifier keys become
 * string literals, shorthand var-ref fields become explicit key-value pairs, and
 * spread fields have their expression rewritten in place.
 *
 * @param mappingConstructorExpr the mapping constructor to rewrite
 * @return a {@code BLangStructLiteral} when the (referred) type is a record,
 *         otherwise a {@code BLangMapLiteral}
 */
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.getBType();
    Location pos = mappingConstructorExpr.pos;

    List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());

    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;

            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression origKey = key.expr;
            BLangExpression keyExpr;
            if (key.computedKey) {
                // Computed keys are evaluated at runtime; keep the expression as-is.
                keyExpr = origKey;
            } else {
                // Identifier keys become string literals (unescaped); otherwise it is
                // already a literal key.
                keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
                        StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
                        ((BLangLiteral) origKey);
            }

            BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
                    ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
                                                             rewriteExpr(keyValueField.valueExpr));
            rewrittenField.pos = keyValueField.pos;
            rewrittenField.key.pos = key.pos;
            rewrittenFields.add(rewrittenField);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field {x} desugars to {"x": x}.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
                    rewriteExpr(createStringLiteral(pos,
                                                    StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
                    rewriteExpr(varRefField)));
        } else { // Spread field: rewrite the spread expression in place.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            spreadOpField.expr = rewriteExpr(spreadOpField.expr);
            rewrittenFields.add(spreadOpField);
        }
    }

    // The original field list is consumed; the rewritten literal owns the new fields.
    fields.clear();
    BType refType = Types.getReferredType(type);
    return refType.tag == TypeTags.RECORD ?
            new BLangStructLiteral(pos, type, refType.tsymbol, rewrittenFields) :
            new BLangMapLiteral(pos, type, rewrittenFields);
}
/**
 * Adds an import of the internal transaction module (aliased {@code trx}) to the
 * enclosing package, unless the enclosing package is that module itself.
 * Registers the import on both the package node and its symbol.
 */
protected void addTransactionInternalModuleImport() {
    // The transaction-internal module must not import itself.
    if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
        BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
        List<BLangIdentifier> pkgNameComps = new ArrayList<>();
        pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
        importDcl.pkgNameComps = pkgNameComps;
        importDcl.pos = env.enclPkg.symbol.pos;
        importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
        importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
        // Empty version: resolve against whatever version the internal module symbol carries.
        importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
        importDcl.symbol = symTable.internalTransactionModuleSymbol;
        env.enclPkg.imports.add(importDcl);
        env.enclPkg.symbol.imports.add(importDcl.symbol);
    }
}
} | stmtExpr = queryExpr.stmtExpr; | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner, foreach.pos,
VIRTUAL);
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(
resultVariableDefinition, resultReferenceInWhile);
statementExpression.setBType(foreach.nillableResultType);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, statementExpression, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
BLangExpression expr = valueAccessExpr.expr;
valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(whileNode);
return blockNode;
}
private BLangType getUserDefineTypeNode(BType type) {
BLangUserDefinedType recordType =
new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
ASTBuilderUtil.createIdentifier(null, ""));
recordType.setBType(type);
return recordType;
}
@Override
public void visit(BLangWhile whileNode) {
if (whileNode.onFailClause != null) {
BLangOnFailClause onFailClause = whileNode.onFailClause;
whileNode.onFailClause = null;
whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);
result = rewrite(doStmt, env);
} else {
whileNode.expr = rewriteExpr(whileNode.expr);
whileNode.body = rewrite(whileNode.body, env);
result = whileNode;
}
}
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
BLangOnFailClause onFailClause) {
BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();
BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location);
doBlock.scope = new Scope(env.scope.owner);
bLDo.body = doBlock;
bLDo.pos = location;
bLDo.onFailClause = onFailClause;
bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
doBlock.stmts.add(statement);
return bLDo;
}
@Override
public void visit(BLangLock lockNode) {
BLangOnFailClause currentOnFailClause = this.onFailClause;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
if (lockNode.onFailClause != null) {
blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
rewrite(lockNode.onFailClause, env);
}
BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
blockStmt.addStatement(lockStmt);
enclLocks.push(lockStmt);
BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
statementExpression.setBType(symTable.nilType);
BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
trapExpr.setBType(nillableError);
trapExpr.expr = statementExpression;
BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
this.env.scope.owner.pkgID, nillableError,
this.env.scope.owner, lockNode.pos, VIRTUAL);
BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
nillableError, trapExpr,
nillableErrorVarSymbol);
BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
blockStmt.addStatement(simpleVariableDef);
BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
unLockStmt.relatedLock = lockStmt;
blockStmt.addStatement(unLockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = lockNode.pos;
panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
ifBody.addStatement(panicNode);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
isErrorTest.setBType(symTable.booleanType);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
blockStmt.addStatement(ifelse);
result = rewrite(blockStmt, env);
enclLocks.pop();
this.onFailClause = currentOnFailClause;
}
@Override
public void visit(BLangLockStmt lockStmt) {
result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
result = unLockStmt;
}
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
BLangSimpleVarRef shouldRetryRef) {
BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
trxOnFailClause.pos = pos;
trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
trxOnFailClause.body.scope = new Scope(env.scope.owner);
trxOnFailClause.isInternal = true;
BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
"$trxError$", symTable.errorType, null, trxOnFailErrorSym);
trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
trxOnFailError);
trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
trxBlockId, shouldRetryRef);
BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
shouldNotPanic.setBType(symTable.booleanType);
shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = pos;
panicNode.expr = caughtError;
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
trxOnFailClause.body.stmts.add(exitIf);
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = caughtError;
failBlock.stmts.add(failStmt);
trxOnFailClause.bodyContainsFail = true;
return trxOnFailClause;
}
@Override
public void visit(BLangTransaction transactionNode) {
if (transactionNode.onFailClause != null) {
BLangOnFailClause onFailClause = transactionNode.onFailClause;
transactionNode.onFailClause = null;
transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
result = rewrite(doStmt, env);
} else {
BLangLiteral currentTrxBlockId = this.trxBlockId;
String uniqueId = String.valueOf(++transactionBlockCount);
this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
boolean currShouldReturnErrors = this.shouldReturnErrors;
this.shouldReturnErrors = true;
BLangOnFailClause currOnFailClause = this.onFailClause;
BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
shouldPanicVarSymbol.closure = true;
BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
"$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
shouldPanicVariable);
BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
shouldPanicVarSymbol);
BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
this.shouldRetryRef);
enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
boolean userDefinedOnFailAvbl = this.onFailClause != null;
analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
BLangBlockStmt transactionStmtBlock =
transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
transactionStmtBlock.stmts.add(0, shouldPanicDef);
transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
result = rewrite(transactionStmtBlock, this.env);
this.shouldReturnErrors = currShouldReturnErrors;
this.trxBlockId = currentTrxBlockId;
swapAndResetEnclosingOnFail(currOnFailClause);
}
}
@Override
public void visit(BLangRollback rollbackNode) {
BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
result = rewrite(rollbackStmtExpr, env);
}
private BLangOnFailClause createRetryInternalOnFail(Location pos,
BLangSimpleVarRef retryResultRef,
BLangSimpleVarRef retryManagerRef,
BLangSimpleVarRef shouldRetryRef,
BLangSimpleVarRef continueLoopRef,
BLangSimpleVarRef returnResult) {
BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
internalOnFail.pos = pos;
internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
internalOnFail.body.scope = new Scope(env.scope.owner);
BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
"$caughtError$", symTable.errorType, null, caughtErrorSym);
internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
caughtError);
env.scope.define(caughtErrorSym.name, caughtErrorSym);
BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
internalOnFail.body.stmts.add(errorAssignment);
BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
internalOnFail.body.stmts.add(continueLoopTrue);
BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
retryManagerRef, caughtErrorRef);
BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
shouldRetryInvocation);
internalOnFail.body.stmts.add(shouldRetryAssignment);
BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
shouldNotRetryCheck.setBType(symTable.booleanType);
shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
BLangGroupExpr exitCheck = new BLangGroupExpr();
exitCheck.setBType(symTable.booleanType);
exitCheck.expression = shouldNotRetryCheck;
BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
if (this.onFailClause != null) {
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = retryResultRef;
exitLogicBlock.stmts.add(failStmt);
internalOnFail.bodyContainsFail = true;
internalOnFail.body.stmts.add(exitIf);
BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
loopContinueStmt.pos = pos;
internalOnFail.body.stmts.add(loopContinueStmt);
} else {
BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
exitLogicBlock.stmts.add(returnErrorTrue);
internalOnFail.body.stmts.add(exitIf);
}
return internalOnFail;
}
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
List<BType> paramTypes = new ArrayList<>();
paramTypes.add(symTable.booleanType);
BInvokableType type = new BInvokableType(paramTypes, symTable.booleanType,
null);
BOperatorSymbol notOperatorSymbol = new BOperatorSymbol(
names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol,
symTable.builtinPos, VIRTUAL);
return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,
OperatorKind.NOT, notOperatorSymbol);
}
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, BLangFunctionBody lambdaBody) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func =
ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
lambdaFunction.function = func;
func.requiredParams.addAll(lambdaFunctionVariable);
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
lambdaFunctionVariable = func.requiredParams;
func.body = lambdaBody;
func.desugared = false;
lambdaFunction.pos = pos;
List<BType> paramTypes = new ArrayList<>();
lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
null));
return lambdaFunction;
}
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, List<BLangStatement> fnBodyStmts,
SymbolEnv env, Scope bodyScope) {
BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
body.scope = bodyScope;
SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
}
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
final BPackageSymbol packageSymbol = targetPkg.symbol;
final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);
symbolEnter.defineNode(funcNode, packageEnv);
packageEnv.enclPkg.functions.add(funcNode);
packageEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
if (literalExpr.getBType().tag == TypeTags.ARRAY
&& ((BArrayType) literalExpr.getBType()).eType.tag == TypeTags.BYTE) {
result = rewriteBlobLiteral(literalExpr);
return;
}
result = literalExpr;
}
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
String[] result = getBlobTextValue((String) literalExpr.value);
byte[] values;
if (BASE_64.equals(result[0])) {
values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
} else {
values = hexStringToByteArray(result[1]);
}
BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteralNode.setBType(literalExpr.getBType());
arrayLiteralNode.pos = literalExpr.pos;
arrayLiteralNode.exprs = new ArrayList<>();
for (byte b : values) {
arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
}
return arrayLiteralNode;
}
private String[] getBlobTextValue(String blobLiteralNodeText) {
String nodeText = blobLiteralNodeText.replace("\t", "").replace("\n", "").replace("\r", "")
.replace(" ", "");
String[] result = new String[2];
result[0] = nodeText.substring(0, nodeText.indexOf('`'));
result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`'));
return result;
}
private static byte[] hexStringToByteArray(String str) {
int len = str.length();
byte[] data = new byte[len / 2];
for (int i = 0; i < len; i += 2) {
data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16));
}
return data;
}
@Override
public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {
listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);
result = listConstructorSpreadOpExpr;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
listConstructor.exprs = rewriteExprs(listConstructor.exprs);
BLangExpression expr;
BType listConstructorType = Types.getReferredType(listConstructor.getBType());
if (listConstructorType.tag == TypeTags.TUPLE) {
expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
} else if (listConstructorType.tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));
result = rewriteExpr(expr);
} else if (getElementType(listConstructorType).tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
} else if (listConstructorType.tag == TypeTags.TYPEDESC) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = listConstructor.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
} else {
expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
}
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
rewriteExprs(tableConstructorExpr.recordLiteralList);
result = tableConstructorExpr;
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
BType arrayLiteralType = Types.getReferredType(arrayLiteral.getBType());
if (arrayLiteralType.tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
return;
} else if (getElementType(arrayLiteralType).tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
return;
}
result = arrayLiteral;
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
if (tupleLiteral.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = tupleLiteral.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
return;
}
List<BLangExpression> exprs = tupleLiteral.exprs;
BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
List<BType> tupleMemberTypes = tupleType.tupleTypes;
int tupleMemberTypeSize = tupleMemberTypes.size();
int tupleExprSize = exprs.size();
boolean isInRestType = false;
int i = 0;
for (BLangExpression expr: exprs) {
if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();
spreadOpType = Types.getReferredType(spreadOpType);
if (spreadOpType.tag == TypeTags.ARRAY) {
BArrayType spreadOpBArray = (BArrayType) spreadOpType;
if (spreadOpBArray.size >= 0) {
i += spreadOpBArray.size;
continue;
}
} else {
BTupleType spreadOpTuple = (BTupleType) spreadOpType;
if (types.isFixedLengthTuple(spreadOpTuple)) {
i += spreadOpTuple.tupleTypes.size();
continue;
}
}
isInRestType = true;
continue;
}
BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
BType targetType = tupleType.restType;
if (!isInRestType && i < tupleMemberTypeSize) {
targetType = tupleMemberTypes.get(i);
}
types.setImplicitCastExpr(expr, expType, targetType);
i++;
}
tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
result = rewriteExpr(groupExpr.expression);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    // Desugars a simple variable reference into the concrete var-ref kind
    // (XML qname, function ref, type load, local, field, or package var)
    // based on the resolved symbol and the symbol's owner.
    BLangSimpleVarRef genVarRefExpr = varRefExpr;

    // An XML namespace prefix used as a value becomes a string-typed QName.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.setBType(symTable.stringType);
        result = qnameExpr;
        return;
    }

    // Leave unresolved references (e.g. after earlier semantic errors) as-is.
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }

    // If this symbol is a narrowed clone, switch back to the original symbol.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }

    BType type = varRefExpr.getBType();
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
            Types.getReferredType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {
        // A function referenced as a value (function pointer).
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
            !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        // A type referenced as a value becomes a type load; constants are
        // handled below, not here.
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
        if (varRefExpr.symbol.tag == SymTag.TYPE_DEF) {
            type = ((BTypeDefinitionSymbol) varRefExpr.symbol).referenceType;
        }
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        // Owned by a function or let scope: a local variable.
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        // Owned by an object/record type: a field reference.
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Module-level variable or constant.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            // Constants whose literal type tag is at most BOOLEAN, or NIL,
            // are inlined directly as literals of the resolved value.
            BType referredType = Types.getReferredType(constSymbol.literalType);
            if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);

        // A global accessed inside a lock: register the variable (and the
        // globals it depends on) with the enclosing lock statement.
        if (!enclLocks.isEmpty()) {
            BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
            BLangLockStmt lockStmt = enclLocks.peek();
            lockStmt.addLockVariable(symbol);
            lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
        }
    }

    genVarRefExpr.setBType(type);
    genVarRefExpr.pos = varRefExpr.pos;

    // LValues (and the `_` ignore name) keep the symbol's declared type and
    // get no conversion.
    if ((varRefExpr.isLValue)
            || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.isLValue = varRefExpr.isLValue;
        genVarRefExpr.setBType(varRefExpr.symbol.type);
        result = genVarRefExpr;
        return;
    }

    // RValues: reset to the declared symbol type and insert an implicit
    // conversion back to the (possibly narrowed) expression type if needed.
    genVarRefExpr.isLValue = varRefExpr.isLValue;
    BType targetType = genVarRefExpr.getBType();
    genVarRefExpr.setBType(genVarRefExpr.symbol.type);
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    // Namespace-prefixed field access (e.g. `x.ns:attr`) shares the common
    // field-access rewrite.
    rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
    // Desugars `expr.field` into the kind-specific access node (struct, JSON,
    // map or XML access) chosen from the static type of the receiver.
    if (safeNavigate(fieldAccessExpr)) {
        // Safe-navigation chains (`a?.b...`) get their own rewrite.
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }

    BLangAccessExpression targetVarRef = fieldAccessExpr;

    // Rewrite the receiver first; cast it back if the rewrite changed its type.
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }

    // The field name becomes a string key literal (unescaped).
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    BType refType = Types.getReferredType(varRefType);
    int varRefTypeTag = refType.tag;
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // An attached method accessed as a value: wrap it in a closure.
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;

            // Field writes inside the (generated) initializer also count as
            // store-on-creation.
            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;
                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
                        (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }

            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false,
                    isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Referencing an attached function through a record value.
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(refType)) {
        // Lax-typed receivers (json-like, map<xml...>, xml).
        if (!(refType.tag == TypeTags.XML || refType.tag == TypeTags.XML_ELEMENT)) {
            if (refType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) refType).constraint.tag)) {
                // Lax access on map<xml> yields value-or-error; dedicated rewrite.
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            // Otherwise normalize the receiver to json and use JSON access.
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            // XML attribute or element-name access via a langlib call.
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }

    // Propagate lvalue-ness, type and optional-access flag to the new node.
    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Plain (non ns-prefixed) field access shares the common rewrite.
    rewriteFieldBasedAccess(fieldAccessExpr);
}
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    // Desugars `obj.method` (an attached method accessed as a value) into a
    // lambda that closes over the receiver and delegates to the method:
    //     function (p...) returns R => obj.method(p...);
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;

    // Synthesize the delegate function node and its symbol.
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$anon$method$delegate$" + originalMemberFuncSymbol.name.value + "$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
            Names.fromString(funcName), env.enclPkg.packageID,
            originalMemberFuncSymbol.type, env.scope.owner, pos,
            VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();

    // Capture the receiver as a closure variable. A non-var-ref receiver is
    // first evaluated into a temp so its side effects run exactly once.
    BLangExpression receiver = fieldAccessExpr.expr;
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }

    // Mirror each required parameter of the target method onto the delegate
    // and forward it to the inner call.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);

        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }

    // Likewise for the rest parameter, forwarded as a spread (`...rest`) arg.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
                VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);

        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
        restArgExpr.expr = restArg;
        restArgExpr.pos = pos;
        restArgExpr.setBType(restSym.type);
        restArgExpr.expectedType = restArgExpr.getBType();
        restArgs.add(restArgExpr);
    }

    // Delegate body: `return receiver.method(args...);`
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(
            receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);

    // Lift the synthesized function to package level and wrap it in a lambda.
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());

    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Evaluate the receiver temp first, then yield the lambda.
        BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    // Builds an invocation node calling `field` on `receiver` with the given
    // arguments; the node's type is the invokable's return type.
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.name = field;
    invocation.symbol = invocableSymbol;
    invocation.expr = receiver;
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = restArgs;
    invocation.setBType(((BInvokableType) invocableSymbol.type).retType);
    return invocation;
}
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    // Desugars lax field access on a map<xml...> receiver (`m.key`) into:
    //     T|error $mapAccessResult$;
    //     T? $mapAccess = m["key"];
    //     if ($mapAccess is ()) { $mapAccessResult$ = error("{map}InvalidKey", key = "key"); }
    //     else                  { $mapAccessResult$ = $mapAccess; }
    //     => $mapAccessResult$
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    // Holder for the overall value-or-error result.
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);

    // The raw map lookup yields the member value or nil.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);

    // Branch on whether the lookup produced nil.
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;

    // Nil branch: construct error("{<map-langlib>}InvalidKey", key = "<field>").
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;

    BLangSimpleVariableDef errorDef =
            createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);

    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;

    // Value branch: pass the looked-up value through.
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;

    statementExpression.expr = resultRef;
    return statementExpression;
}
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    // Resolve the attribute name first, expanding it when ns-qualified.
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsAccess.nsSymbol.namespaceURI, fieldName);
    }

    // `x._` reads the element name (nil-lifting variant), not an attribute.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }

    // Attribute access: getAttribute(name, isOptionalAccess).
    ArrayList<BLangExpression> args = new ArrayList<>();
    args.add(createStringLiteral(fieldAccessExpr.field.pos, fieldName));
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    // Lower the optional-access flag into a rewritten boolean literal.
    boolean isOptional = fieldAccessExpr.isOptionalFieldAccess();
    return rewrite(createLiteral(fieldAccessExpr.pos, symTable.booleanType, isOptional), env);
}
private String createExpandedQName(String nsURI, String localName) {
    // Expanded-name form for namespace-qualified XML names: {uri}local.
    return new StringBuilder("{").append(nsURI).append('}').append(localName).toString();
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Desugars `expr[index]` into the container-specific access node chosen
    // from the static type of the receiver.
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }

    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);

    // Use the effective type of intersections; cast the rewritten receiver
    // back if its type changed during the rewrite.
    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }

    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Record-like receivers (including error-lifted ones): field access.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr,
                (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        // Arrays and tuples.
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String member access; normalize the receiver to string first.
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }

    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
    // Plain function/method calls are never async; delegate to the shared
    // invocation rewrite.
    rewriteInvocation(iExpr, false);
}
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Desugars `error(msg[, cause], k = v, ...)`: normalizes the cause
    // argument and materializes named args as an immutable detail record.
    // NOTE(review): assumes the analyzer guarantees at least one positional
    // arg (the message) — a size of 0 would fail at set(1) below; confirm.
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        // No explicit cause: default it to nil.
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);

    // Build the detail record from the named args and make it readonly.
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));

            // Record-typed details upcast each value to any; otherwise keep
            // the value's own type.
            if (Types.getReferredType(recordLiteral.getBType()).tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    // A non-async action inside a transaction needs the transaction
    // coordinator started before the call runs.
    if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }

    // `start` on an isolated function may run on any thread; attach the
    // strand annotation saying so.
    if (actionInvocation.async && Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {
        addStrandAnnotationWithThreadAny(actionInvocation);
    }
    rewriteInvocation(actionInvocation, actionInvocation.async);
}
private void addStrandAnnotationWithThreadAny(BLangInvocation.BLangActionInvocation actionInvocation) {
    // Lazily create one shared `@strand {thread: "any"}` attachment for the
    // package, then attach it to both the invocation node and its symbol.
    if (this.strandAnnotAttachement == null) {
        BLangPackage pkgNode = env.enclPkg;
        // Creating the annotation may add new record type defs; snapshot the
        // existing ones so init functions are generated only for the new defs.
        List<BLangTypeDefinition> prevTypeDefinitions = new ArrayList<>(pkgNode.typeDefinitions);
        this.strandAnnotAttachement =
                annotationDesugar.createStrandAnnotationWithThreadAny(actionInvocation.pos, env);
        addInitFunctionForRecordTypeNodeInTypeDef(pkgNode, prevTypeDefinitions);
    }
    actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);
    ((BInvokableSymbol) actionInvocation.symbol)
            .addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);
}
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    // Common rewrite for all call forms: reorders/rewrites arguments,
    // resolves attached-method calls, and patches parameterized and
    // type-param return types.
    BLangInvocation invRef = invocation;

    // Calls made under a lock add the callee's global-var dependencies to
    // the enclosing lock's variable set.
    if (!enclLocks.isEmpty()) {
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }

    // Reorder named/defaultable args into positional order, then rewrite.
    reorderArguments(invocation);

    rewriteExprs(invocation.requiredArgs);
    if (invocation.langLibInvocation && !invocation.requiredArgs.isEmpty()) {
        // For langlib calls the receiver is the first required argument.
        invocation.expr = invocation.requiredArgs.get(0);
    } else {
        invocation.expr = rewriteExpr(invocation.expr);
    }
    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);
    rewriteExprs(invocation.restArgs);

    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
            invocation.symbol.pkgID, invocation.symbol.owner, env);

    if (invocation.functionPointerInvocation) {
        visitFunctionPointerInvocation(invocation);
        return;
    }
    result = invRef;

    // Parameterized (dependent) return types are resolved via the unifier.
    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }

    if (invocation.expr == null) {
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        // Implicit receiver (e.g. generated init call): materialize a var ref.
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }

    switch (Types.getReferredType(invocation.expr.getBType()).tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!invocation.langLibInvocation) {
                // Method call: prepend the receiver to the argument list and
                // switch to an attached-function invocation node.
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                invocation.symbol, invocation.getBType(),
                                invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }

    // Remember the init invocation for object-constructor expressions with
    // closures; the closure desugar needs it later.
    if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {
        BObjectType initializingObject = (BObjectType) invocation.expr.getBType();
        BLangClassDefinition classDef = initializingObject.classDef;
        if (classDef.hasClosureVars) {
            OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;
            if (oceEnvData.attachedFunctionInvocation == null) {
                oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) result;
            }
        }
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    // Only langlib calls need their non-rest arguments cast to the
    // corresponding declared parameter types.
    if (!iExpr.langLibInvocation) {
        return;
    }

    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    int argCount = requiredArgs.size();
    for (int idx = 0; idx < argCount; idx++) {
        BLangExpression arg = requiredArgs.get(idx);
        requiredArgs.set(idx, addConversionExprIfRequired(arg, params.get(idx).type));
    }
}
/* This method is a workaround and needs improvement.
 * Notes for improvement:
 * 1. Both arguments refer to the same invocation node.
 * 2. Due to the current type-param logic, the type-param flag is put on the original type.
 * 3. For error types carrying the Cloneable type with the type-param flag, this code changes the expression type.
 * 4. Using the error type is problematic because Cloneable is itself a type param (e.g. ExprBodiedFunctionTest);
 *    `never` was added to the CloneableType type param:
 *    @typeParam type
 *    CloneableType Cloneable|never;
 *
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    // For langlib calls, or calls whose return type contains a type param,
    // reset the generated invocation's type to the declared return type and
    // cast the result back to the original expression type.
    var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;
    if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {
        return;
    }

    // The type the surrounding expression expects from this call.
    BType originalInvType = genIExpr.getBType();
    if (!genIExpr.async) {
        genIExpr.setBType(returnTypeOfInvokable);
    }
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    // Insert conversions for arguments bound to stream-typed parameters.
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    if (params.isEmpty()) {
        return;
    }
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    for (int idx = 0; idx < requiredArgs.size(); idx++) {
        BVarSymbol param = params.get(idx);
        if (Types.getReferredType(param.type).tag != TypeTags.STREAM) {
            continue;
        }
        requiredArgs.set(idx, addConversionExprIfRequired(requiredArgs.get(idx), param.type));
    }
}
private BLangLiteral createNilLiteral() {
    // A `()` literal: nil-typed node carrying a null value.
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
public void visit(BLangTypeInit typeInitExpr) {
    // Stream construction and object construction desugar differently.
    if (Types.getReferredType(typeInitExpr.getBType()).tag == TypeTags.STREAM) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
        return;
    }
    result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
}
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    // Desugars `new T(args)` into a statement expression:
    //     T $obj$ = <allocation>;
    //     $temp$ = $obj$.<generated-init>(args);
    //     $result$ = ($temp$ is error) ? $temp$ : $obj$;
    //     => $result$
    // When the initializer returns nil, the error branch is skipped and the
    // block yields $obj$ directly.
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);

    // Allocation: T $obj$ = new;
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);

    // Route the init invocation to the object's generated initializer.
    BLangInvocation typeInitInvocation = typeInitExpr.initInvocation;
    typeInitInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    typeInitInvocation.objectInitMethod = true;

    // Initializer cannot fail (returns nil): invoke it and yield the object.
    if (Types.getReferredType(typeInitInvocation.getBType()).tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitInvocation;
        typeInitInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }

    // Initializer may return an error: capture its result in $temp$.
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitInvocation.getBType(),
            typeInitInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);

    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);

    // if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);

    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);

    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);

    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);

    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    // Desugars `new stream<C, E>(...)` into a call to the lang.internal
    // construct-stream function: construct(typedesc<C>, typedesc<E>[, iterator]).
    BInvokableSymbol constructStreamSym = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;

    BStreamType streamType = (BStreamType) typeInitExpr.getBType();
    BType constraintType = streamType.constraint;
    BType completionType = streamType.completionType;

    // typedesc argument for the stream's constraint type.
    BLangTypedescExpr constraintTd = new BLangTypedescExpr();
    constraintTd.resolvedType = constraintType;
    constraintTd.setBType(new BTypedescType(constraintType, symTable.typeDesc.tsymbol));

    // typedesc argument for the stream's completion type.
    BLangTypedescExpr completionTd = new BLangTypedescExpr();
    completionTd.resolvedType = completionType;
    completionTd.setBType(new BTypedescType(completionType, symTable.typeDesc.tsymbol));

    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTd, completionTd));
    if (!typeInitExpr.argsExpr.isEmpty()) {
        // Optional iterator argument supplied by the user.
        args.add(typeInitExpr.argsExpr.get(0));
    }
    BLangInvocation constructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, constructStreamSym, args, symResolver);
    constructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return constructInvocation;
}
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    // Reuse a symbol with this name if one is already visible in the main
    // space; otherwise mint a fresh variable symbol for the current owner.
    BSymbol varSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    if (varSym == null || varSym == symTable.notFoundSymbol) {
        varSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSym);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(location);
    varDef.var = variable;
    varDef.setBType(variable.getBType());
    return varDef;
}
private BType getObjectType(BType bType) {
    // Unwrap type references, then pick the object member; a union (e.g. the
    // init's `Obj|error`) contributes its first object member, or noType.
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
BLangErrorType getErrorTypeNode() {
    // A synthetic `error` type node positioned at the builtin location.
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorType.pos = symTable.builtinPos;
    errorType.setBType(symTable.errorType);
    return errorType;
}
// Returns a synthetic `error?` type node.
// NOTE(review): unlike getErrorTypeNode(), no position is set here — confirm
// whether leaving `pos` unset is intentional.
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    return errorTypeNode;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * Desugar the conditional expression to an if-else wrapped in a
     * statement expression:
     *
     * T $ternary_result$;
     * if (condition) {
     *    $ternary_result$ = thenExpr;
     * } else {
     *    $ternary_result$ = elseExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);

    // then branch: $ternary_result$ = thenExpr;
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);

    // else branch: $ternary_result$ = elseExpr;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);

    // Assemble the statement expression and rewrite the result.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
    // `wait f1 | f2 | ...` is parsed as a binary-expression tree; flatten it into a list.
    // A single-future wait just becomes a singleton list of the rewritten expression.
    BLangExpression waitedExpr = (BLangExpression) waitExpr.getExpression();
    if (waitedExpr.getKind() != NodeKind.BINARY_EXPR) {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    } else {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    }
    result = waitExpr;
}
/**
 * Flattens the binary-expression tree of a multi-future wait expression into {@code exprs}.
 * The left subtree is visited before the right so the collected futures keep source order.
 *
 * @param binaryExpr the alternate-wait binary expression to flatten
 * @param exprs accumulator that receives the rewritten leaf expressions
 * @return the same {@code exprs} list, for caller convenience
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
    return exprs;
}
/**
 * Visits one operand of an alternate-wait binary tree: recurses into nested
 * binary expressions, and rewrites-then-collects leaf expressions.
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        // Leaf future expression: desugar it and record it in source order.
        exprs.add(rewriteExpr(expr));
    } else {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
    }
}
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    // Each key-value pair carries the future either in its value (`wait {a: f1}`)
    // or, for the shorthand form (`wait {f1}`), in its key.
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    });
    // Re-wrap the pairs as a wait literal and desugar the resulting expression.
    BLangExpression waitLiteral = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs,
            waitExpr.getBType());
    waitLiteral.pos = waitExpr.pos;
    result = rewriteExpr(waitLiteral);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    // A non-nil trapped expression may need widening to the trap expression's own type
    // (typically `T|error`).
    boolean trappedIsNil = Types.getReferredType(trapExpr.expr.getBType()).tag == TypeTags.NIL;
    if (!trappedIsNil) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());
    }
    result = trapExpr;
}
/**
 * Desugars a binary expression. Handles, in order: nullable-operand lifting, range
 * operators, short-circuit logical operators, byte widening for arithmetic/bitwise
 * ops, and finally operand type-cast insertion so both sides agree in type.
 */
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Nullable operands (e.g. `int? + int?`) are lifted into an if-else statement
    // expression that yields nil when either side is nil.
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    // Range operators (`...` / `..<`) become a lang.internal createIntRange(...) call.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);
        rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            // `a ..< b` is modelled as a closed range ending at b - 1.
            rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));
        return;
    }
    // Logical && / || get dedicated short-circuit desugaring.
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    // For arithmetic and bitwise ops, a byte operand may need widening to int when the
    // expected result type is int.
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    // The result node is this expression; the remaining steps only adjust its operands.
    result = binaryExpr;
    int rhsExprTypeTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    int lhsExprTypeTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    // (Ref-)equality between a byte and an int subtype: widen the byte side to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    if (lhsExprTypeTag == rhsExprTypeTag) {
        // Same-type operands need no casts unless the op is shift/arithmetic on a
        // non-value type (handled by the cast helpers below).
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // `string + xml`: wrap the string side as an XML text literal.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        // i.e. a + x or a + x + b, where a and b are strings and x is xml
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }
    // `xml + string`: mirror of the case above.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        // i.e. x + a or x + a + y, where x and y are xml, and a is a string
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }
    // Mixed numeric operands: cast toward decimal first, then float.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    // Remaining mixed-type cases, by operator family.
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
    }
}
/**
 * Lifts a binary expression with nullable operand(s) into a statement expression that
 * checks both operands for nil before applying the operator on the unwrapped values.
 */
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    /*
     * int? x = 3;
     * int? y = 5;
     * int? z = x + y;
     * Above is desugared to
     * int? $result$;
     *
     * int? $lhsExprVar$ = x;
     * int? $rhsExprVar$ = y;
     * if (lhsVar is () or rhsVar is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = $lhsExprVar$ + $rhsExprVar$;
     * }
     * int z = $result$;
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    // The non-nil member of the `T?` union is the type the operation is performed in.
    BType nonNilType = exprBType.getMemberTypes().iterator().next();
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
    boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
    boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
    // For bitwise ops, each operand keeps its own nil-lifted type rather than the
    // result's non-nil type.
    BType rhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.rhsExpr.getBType().isNullable()) {
            rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
        } else {
            rhsType = binaryExpr.rhsExpr.getBType();
        }
    }
    BType lhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.lhsExpr.getBType().isNullable()) {
            lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
        } else {
            lhsType = binaryExpr.lhsExpr.getBType();
        }
    }
    // NOTE(review): only the nullable lhs is pre-rewritten here; the rhs is rewritten
    // later when the generated block is rewritten — presumably intentional, confirm.
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    }
    // $result$ holder for the lifted value.
    BLangSimpleVariableDef tempVarDef = createVarDef("result",
            binaryExpr.getBType(), null, binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Bind both operands to temporaries so each is evaluated exactly once.
    BLangSimpleVariableDef lhsVarDef = createVarDef("$lhsExprVar$", binaryExpr.lhsExpr.getBType(),
            binaryExpr.lhsExpr, binaryExpr.pos);
    BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);
    blockStmt.addStatement(lhsVarDef);
    BLangSimpleVariableDef rhsVarDef = createVarDef("$rhsExprVar$", binaryExpr.rhsExpr.getBType(),
            binaryExpr.rhsExpr, binaryExpr.pos);
    BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);
    blockStmt.addStatement(rhsVarDef);
    // Condition: lhsVar is () || rhsVar is ()
    BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, lhsVarRef, getNillTypeNode());
    typeTestExprOne.setBType(symTable.booleanType);
    BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos, rhsVarRef, getNillTypeNode());
    typeTestExprTwo.setBType(symTable.booleanType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    // if-branch: $result$ = ()
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else-branch: $result$ = <lhsType>lhsVar op <rhsType>rhsVar
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,
            nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
    newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);
    newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);
    bLangAssignmentElse.expr = newBinaryExpr;
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}
/**
 * Returns true when this binary expression has at least one nullable operand and its
 * operator is one of the arithmetic/shift/bitwise operators that support nil-lifting.
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    // Both operand types must be resolved, and at least one must admit nil.
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts operand casts for a mixed-type arithmetic expression.
 * Same-family operands (both int subtypes, both string subtypes, or both xml subtypes)
 * need no casts. When exactly one side is xml, the other side is either wrapped as an
 * xml text literal (if it is a string) or cast to xml. Otherwise both sides are cast
 * to the expression's own type.
 *
 * @param binaryExpr     the arithmetic binary expression being desugared (mutated in place)
 * @param lhsExprTypeTag referred type tag of the left operand
 * @param rhsExprTypeTag referred type tag of the right operand
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        // xml + string-ish: wrap the non-xml side as an xml text literal.
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // BUG FIX: the generated text literal for the LHS operand was previously
            // positioned at binaryExpr.rhsExpr.pos (copy-paste from the mirrored branch
            // above); it must carry the LHS operand's own position.
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    // Fallback: align both operands with the expression's result type.
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Normalises shift-expression operands: an operand that is already an int subtype or a
 * byte is left alone; any other operand is cast to int.
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIsIntLike = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsIntLike = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIsIntLike) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIsIntLike) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
/**
 * Inserts operand casts for relational (comparison) expressions so both sides share a
 * comparable type: int subtypes and bytes are aligned on int, string subtypes on string.
 */
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean lhsIsInt = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean rhsIsInt = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    boolean lhsIsByte = lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsByte = rhsExprTypeTag == TypeTags.BYTE;
    // Matching numeric kinds need no adjustment.
    if ((lhsIsInt && rhsIsInt) || (lhsIsByte && rhsIsByte)) {
        return;
    }
    // Exactly one side is an int subtype: pull the other side up to int.
    if (lhsIsInt != rhsIsInt) {
        if (lhsIsInt) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        } else {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        }
        return;
    }
    // A lone byte operand (the other side not int-like): normalise both sides to int.
    if (lhsIsByte || rhsIsByte) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    // String handling: if only one side is a string subtype, cast the other to string.
    boolean lhsIsString = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean rhsIsString = TypeTags.isStringTypeTag(rhsExprTypeTag);
    if (lhsIsString && !rhsIsString) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
    } else if (rhsIsString && !lhsIsString) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
    }
}
/**
 * Builds an invocation of the lang.internal {@code createIntRange(lhs, rhs)} helper,
 * used to desugar the range operators.
 *
 * @return the invocation node, typed as the internal int-range type
 */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol rangeCtorSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    BLangInvocation rangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(location, rangeCtorSymbol,
            new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
    rangeInvocation.setBType(symTable.intRangeType);
    return rangeInvocation;
}
/**
 * When a binary expression mixing byte operands is expected to produce an int,
 * widens each byte operand to int so the operation is carried out in int space.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    // Only applies when an expected type was inferred for the expression.
    if (binaryExpr.expectedType == null) {
        return;
    }
    int lhsTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    int rhsTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    boolean hasByteOperand = lhsTag == TypeTags.BYTE || rhsTag == TypeTags.BYTE;
    if (!hasByteOperand || binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Tells whether the given binary expression uses one of the shift operators
 * ({@code <<}, {@code >>}, {@code >>>}). For such expressions both operands are
 * subsequently converted to {@code int}.
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where binary expression's expected type is 'int'
 * int i1 = a &gt;&gt; b;
 * int i2 = a &lt;&lt; b;
 * int i3 = a &gt;&gt; i;
 * int i4 = a &lt;&lt; i;
 * int i5 = i &gt;&gt; j;
 * int i6 = i &lt;&lt; j;
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars `lhs ?: rhs` into a statement expression equivalent to:
 *     T _$result$_;
 *     T0 lhsVar = lhs;
 *     if (lhsVar is ()) { _$result$_ = rhs; } else { _$result$_ = <T>lhsVar; }
 * whose value is _$result$_.
 */
public void visit(BLangElvisExpr elvisExpr) {
    Location pos = elvisExpr.pos;
    String resultVarName = "_$result$_";
    BType resultType = elvisExpr.getBType();
    // Holder for the elvis expression's value.
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Bind the lhs to a temp so it is evaluated exactly once before the nil test.
    String lhsResultVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable lhsResultVar =
            ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,
                    new BVarSymbol(0, names.fromString(lhsResultVarName),
                            this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),
                            this.env.scope.owner, elvisExpr.pos, VIRTUAL));
    BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);
    BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);
    // if-branch (lhs is nil): result = rhs.
    BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(nilAssignment);
    // else-branch: result = <resultType>lhsVar.
    BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(lhsResultVarRef, resultType));
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
    elseBody.addStatement(notNilAssignment);
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, lhsResultVarRef, getNillTypeNode()), ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
        add(resultVarDef);
        add(lhsResultVarDef);
        add(ifStmt);
    }});
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // Nullable operand: lift into an if-else statement expression first.
    if (isNullableUnaryExpr(unaryExpr)) {
        BLangStatementExpression liftedExpr = createStmtExprForNilableUnaryExpr(unaryExpr);
        result = rewrite(liftedExpr, env);
        return;
    }
    OperatorKind op = unaryExpr.operator;
    // `~x` is rewritten as an xor against an all-ones literal.
    if (op == OperatorKind.BITWISE_COMPLEMENT) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    // Unary +/- may need the operand cast to the operator's result type.
    if (op == OperatorKind.ADD || op == OperatorKind.SUB) {
        createTypeCastExprForUnaryPlusAndMinus(unaryExpr);
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * For unary +/-, casts a non-int-subtype operand to the unary expression's own type;
 * int-subtype operands are left untouched.
 */
private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {
    BLangExpression operand = unaryExpr.expr;
    if (!TypeTags.isIntegerTypeTag(operand.getBType().tag)) {
        unaryExpr.expr = createTypeCastExpr(operand, unaryExpr.getBType());
    }
}
/**
 * Desugars a bitwise complement (~) unary expression into a bitwise xor binary expression.
 * Example : ~a -&gt; a ^ -1;
 * ~ 11110011 -&gt; 00001100
 * 11110011 ^ 11111111 -&gt; 00001100
 * For byte operands the mask is {@code 0xff} and the result stays byte; otherwise the
 * mask is {@code -1} and the result is int.
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    boolean isByte = TypeTags.BYTE == Types.getReferredType(unaryExpr.getBType()).tag;
    BType operandType = isByte ? symTable.byteType : symTable.intType;
    xorExpr.setBType(operandType);
    xorExpr.rhsExpr = isByte
            ? ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL)
            : ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
    xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
            operandType, operandType);
    result = rewriteExpr(xorExpr);
}
/**
 * Lifts a unary expression with a nullable operand into a statement expression that
 * yields nil when the operand is nil, and applies the operator on the unwrapped value
 * otherwise.
 */
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = +x;
     * }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    // Non-nil member of the `T?` union: the type the operator is applied in.
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    // $result holder, pre-initialised to nil.
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Condition: operand is ().
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
            getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    // if-branch: $result = ().
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else-branch: $result = <op> <nilLiftType>operand.
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
            nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}
/**
 * Returns true when the unary expression's type admits nil and its operator is one of
 * the nil-liftable operators (+, -, ~).
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    switch (unaryExpr.operator) {
        case ADD:
        case SUB:
        case BITWISE_COMPLEMENT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars a type conversion expression. A conversion that carries annotation
 * attachments but no type node is an annotation-only cast, so only its inner
 * expression is desugared; otherwise both the type node and the expression are
 * rewritten in place.
 */
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // Removed: unused local `BType targetType = conversionExpr.targetType;` — the value
    // was never read in this method.
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda at package level exactly once.
    boolean alreadyRegistered = env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction);
    if (!alreadyRegistered) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
/**
 * Desugars an arrow function (`(a, b) => expr`) into a regular lambda function:
 * builds a BLangFunction with the arrow's params and an inferred return type,
 * creates and defines its invokable symbol, then rewrites and registers the
 * function at package level.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Build the function node that will back the lambda.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // Return type is taken from the arrow body expression's type.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    // Body: the arrow expression wrapped in a block with a return statement.
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);

    // Create function symbol and bind it into a fresh function env/scope.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            new Name(funcNode.name.value),
            new Name(funcNode.name.originalValue),
            env.enclPkg.symbol.pkgID,
            bLangArrowFunction.funcType,
            env.enclEnv.enclVarSym, true,
            bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Define each parameter symbol in the function scope while collecting them.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();
    // Infer function type from the collected parameter symbols.
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // At this phase lambda function is semantically correct. Therefore simply env can be assigned.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    result = rewriteExpr(lambdaFunction);
}
/**
 * Attaches the given symbol to the invokable node and gives it a fresh scope, which the
 * invokable env shares.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    Scope funcScope = new Scope(funcSymbol);
    funcSymbol.scope = funcScope;
    invokableEnv.scope = funcScope;
    invokableNode.symbol = funcSymbol;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // XML qualified names need no desugaring; pass the node through unchanged.
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Desugar both the attribute name (a QName expression) and its quoted-string value.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
/**
 * Desugars an XML element literal: collects its namespace-declaration attributes as
 * inline namespaces (scoped package- or locally, by owner), merges in enclosing inline
 * namespaces when inside a query, then rewrites tag names and children.
 */
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);

    // Collect namespace-declaration attributes (xmlns:...) as inline namespaces.
    // Need to send the plain variable symbol.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        BLangXMLNS xmlns;
        // Package-owned declarations get the package XMLNS node; others a local one.
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    // Save and extend the inline-namespace context; inside a query the enclosing
    // element's namespaces are also visible to this literal.
    List<BLangXMLNS> prevInlineNamespaces = this.inlineXMLNamespaces;
    if (isVisitingQuery && this.inlineXMLNamespaces != null) {
        xmlElementLiteral.inlineNamespaces.addAll(this.inlineXMLNamespaces);
    }
    this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    // Restore the previous inline-namespace context.
    this.inlineXMLNamespaces = prevInlineNamespaces;
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    // NOTE(review): the rewritten item is not written back into the list — presumably
    // rewriteExpr mutates items in place here; confirm before relying on this.
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into one string-concatenation expression and desugar it.
    BLangExpression concatenated = constructStringTemplateConcatExpression(xmlTextLiteral.textFragments);
    xmlTextLiteral.concatExpr = rewriteExpr(concatenated);
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Fold the comment's text fragments into one concatenation expression and desugar it.
    BLangExpression concatenated = constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments);
    xmlCommentLiteral.concatExpr = rewriteExpr(concatenated);
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Desugar the PI target, then fold its data fragments into one concatenation.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    BLangExpression concatenatedData = constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments);
    xmlProcInsLiteral.dataConcatExpr = rewriteExpr(concatenatedData);
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Fold the quoted string's text fragments into one concatenation expression.
    BLangExpression concatenated = constructStringTemplateConcatExpression(xmlQuotedString.textFragments);
    xmlQuotedString.concatExpr = rewriteExpr(concatenated);
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is simply the concatenation of its parts.
    BLangExpression concatExpr = constructStringTemplateConcatExpression(stringTemplateLiteral.exprs);
    result = rewriteExpr(concatExpr);
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 *      string name = "Pubudu";
 *      'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 *      type RawTemplate$Impl$0 object {
 *          public string[] strings = ["Hello ", "!"];
 *          public (any|error)[] insertions;
 *
 *          function init((any|error)[] insertions) {
 *              self.insertions = insertions;
 *          }
 *      };
 *
 *      // somewhere in code
 *      'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) Types.getReferredType(rawTemplateLiteral.getBType());
    // Generate the concrete object class (with `strings` pre-filled) for this literal.
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();

    // Build the `insertions` list-constructor argument.
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;

    // Create a new object out of the generated object class, passing insertions to init().
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 *      type $anonType$0 object {
 *          public string[] strings = ["Hello ", "!"];
 *          public (any|error)[] insertions;
 *
 *          function init((any|error)[] insertions) {
 *              self.insertions = insertions;
 *          }
 *      };
 * @param strings    The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos        The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                 Location pos) {
    // TODO: Use the anon model helper to generate the object name?
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));

    // Create a class symbol mirroring the abstract object type's flags and fields.
    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
            env.enclPkg.packageID, null, env.enclPkg.symbol,
            pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;

    // Create a new concrete, class type for the provided abstract object type
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);

    // Create a new object type node and a type def from the concrete class type
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);

    // Create a list constructor expr for the strings field. This gets assigned to the corresponding field in the
    // object since this needs to be initialized in the generated init method.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;

    // Create the init() method, taking `insertions` as its parameter.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);

    // Create the initializer method for initializing default values
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
            tempGeneratedInitFunction.symbol.scope, env);
    SemanticAnalyzer.AnalyzerData data = new SemanticAnalyzer.AnalyzerData(env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, data);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);

    return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env       The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
                    names, Names.USER_DEFINED_INIT_SUFFIX,
                    symTable, classDefn.getBType());
    // Register the init function as the type's attached initializer.
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
            (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);

    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    // For every field without a default value: add an init() parameter and assign it
    // to the field in the method body (`self.field = param;`).
    for (BLangSimpleVariable field : classDefn.fields) {
        if (field.expr != null) {
            continue;
        }
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
                initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);

        // Wire the parameter into the function's scope, symbol, and type.
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);

        // self.field = param;
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
                initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }

    return initFunction;
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
// Clone the sent value so the receiving worker gets its own copy.
workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
// Clone the sent value so the receiving worker gets its own copy.
syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
// No desugaring needed for a worker receive; pass through unchanged.
result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
// Flush targets the distinct workers of the cached send statements.
workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
.stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
result = workerFlushExpr;
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
// `transactional` desugars into a call to the internal transaction module's IS_TRANSACTIONAL function.
BInvokableSymbol isTransactionalSymbol =
(BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
result = ASTBuilderUtil
.createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
Collections.emptyList(), symResolver);
}
@Override
public void visit(BLangCommitExpr commitExpr) {
// Delegate commit desugaring to the transaction desugar, then rewrite the produced expression.
BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangFail failNode) {
// Inside an on-fail region (and not while desugaring a query), route the failure
// into the enclosing on-fail clause; otherwise `fail e;` becomes `return e;`.
if (this.onFailClause != null && !this.isVisitingQuery) {
if (this.onFailClause.bodyContainsFail) {
// The on-fail body itself contains fail statements; needs nested handling.
result = rewriteNestedOnFail(this.onFailClause, failNode);
} else {
result = createOnFailInvocation(onFailClause, failNode);
}
} else {
BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
stmt.desugared = true;
result = stmt;
}
}
@Override
public void visit(BLangLocalVarRef localVarRef) {
// Already in desugared form; pass through unchanged.
result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
// Already in desugared form; pass through unchanged.
result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
// Already in desugared form; pass through unchanged.
result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
// Already in desugared form; pass through unchanged.
result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
// Already in desugared form; pass through unchanged.
result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
// Already in desugared form; pass through unchanged.
result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
// Already in desugared form; pass through unchanged.
result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
// Already in desugared form; pass through unchanged.
result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
// Already in desugared form; pass through unchanged.
result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
// Already in desugared form; pass through unchanged.
result = tableKeyAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
// Already in desugared form; pass through unchanged.
result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
// Already in desugared form; pass through unchanged.
result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
// Already in desugared form; pass through unchanged.
result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
// Element access desugars into an internal xml getElements() lang-lib call,
// passing the expanded (namespace-qualified) name filters as rest arguments.
xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
xmlElementAccess.expr, new ArrayList<>(), filters);
result = rewriteExpr(invocationNode);
}
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    // Namespaces visible at this point, keyed by prefix; the entry for the empty
    // prefix (if present) is the default namespace.
    Map<Name, BXMLNSSymbol> visibleNamespaces = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNsSymbol = visibleNamespaces.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNsUri = (defaultNsSymbol == null) ? null : defaultNsSymbol.namespaceURI;
    ArrayList<BLangExpression> expandedNames = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol prefixSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (prefixSymbol != symTable.notFoundSymbol) {
            // Explicit prefix resolved: qualify the name with that namespace's URI.
            String qualified = createExpandedQName(((BXMLNSSymbol) prefixSymbol).namespaceURI, filter.name);
            expandedNames.add(createStringLiteral(filter.elemNamePos, qualified));
        } else if (defaultNsUri == null || filter.name.equals("*")) {
            // No prefix and either no default namespace or a wildcard: keep the name as-is.
            expandedNames.add(createStringLiteral(filter.elemNamePos, filter.name));
        } else {
            // No prefix: qualify the name with the default namespace.
            expandedNames.add(createStringLiteral(filter.elemNamePos,
                    createExpandedQName(defaultNsUri, filter.name)));
        }
    }
    return expandedNames;
}
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    // Identifier node carrying the lang-lib function name.
    BLangIdentifier funcNameId = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcNameId.setLiteral(false);
    funcNameId.setValue(functionName);
    funcNameId.pos = pos;
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = pos;
    invocation.name = funcNameId;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.expr = invokeOnExpr;
    invocation.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName), env);
    // The receiver expression is always passed as the first required argument.
    ArrayList<BLangExpression> allRequiredArgs = new ArrayList<>();
    allRequiredArgs.add(invokeOnExpr);
    allRequiredArgs.addAll(args);
    invocation.requiredArgs = allRequiredArgs;
    invocation.restArgs = rewriteExprs(restArgs);
    invocation.setBType(((BInvokableType) invocation.symbol.type).getReturnType());
    invocation.langLibInvocation = true;
    return invocation;
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    final BLangInvocation desugared;
    switch (xmlNavigation.navAccessType) {
        case DESCENDANTS:
            desugared = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_SELECT_DESCENDANTS,
                    xmlNavigation.expr, new ArrayList<>(), filters);
            break;
        case CHILDREN:
            // `x/*` takes all children; no filters are passed.
            desugared = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                    xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
            break;
        default:
            // Filtered child access: a missing child index is encoded as -1.
            BLangExpression childIndexExpr = (xmlNavigation.childIndex == null)
                    ? new BLangLiteral(Long.valueOf(-1), symTable.intType)
                    : xmlNavigation.childIndex;
            ArrayList<BLangExpression> args = new ArrayList<>();
            args.add(rewriteExpr(childIndexExpr));
            desugared = createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
            break;
    }
    result = rewriteExpr(desugared);
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
// Only the LHS expression needs rewriting; the node itself is kept.
assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
// Already a desugared function-pointer invocation; pass through unchanged.
result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
// Rewrite the referenced type node; the typedesc wrapper itself is kept.
typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
result = typedescExpr;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
// A rest-args expression reduces to its underlying expression.
result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
// A named argument reduces to its value expression.
bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
// `check` and `checkpanic` share one desugaring path; this is the non-panicking variant.
visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
// `check` and `checkpanic` share one desugaring path; this is the panicking variant.
visitCheckAndCheckPanicExpr(checkedExpr, true);
}
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
// When checking has been proven redundant, keep only the inner expression.
if (checkedExpr.isRedundantChecking) {
result = rewriteExpr(checkedExpr.expr);
return;
}
Location pos = checkedExpr.pos;
// Desugars `check <expr>` (or `checkpanic`) roughly into:
//   T _$result$_; var $tmp = <expr>;
//   if ($tmp is T) { _$result$_ = <T> $tmp; } else { <error return / panic>; }
//   => _$result$_
String resultVarName = "_$result$_";
BType resultType = checkedExpr.getBType();
BLangSimpleVariable resultVar =
ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
new BVarSymbol(0, names.fromString(resultVarName),
this.env.scope.owner.pkgID, resultType,
this.env.scope.owner, pos, VIRTUAL));
BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
resultVarDef.desugared = true;
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
// Temporary holding the evaluated checked expression.
String checkedExprVarName = GEN_VAR_PREFIX.value;
BType checkedExprType = checkedExpr.expr.getBType();
BLangSimpleVariable checkedExprVar =
ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,
checkedExpr.expr, new BVarSymbol(0, names.fromString(checkedExprVarName),
this.env.scope.owner.pkgID, checkedExprType,
this.env.scope.owner, pos, VIRTUAL));
BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(pos, checkedExprVar);
BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);
// Success branch: cast the value to the result type and assign it.
BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
createTypeCastExpr(checkedExprVarRef, resultType));
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
ifBody.addStatement(successAssignment);
// Failure branch: propagate the error to the caller, or panic for `checkpanic`.
BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,
checkedExpr.equivalentErrorTypeList, isCheckPanic);
BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
checkedExprTypeNode.setBType(resultType);
checkedExprTypeNode.typeKind = resultType.getKind();
BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
add(resultVarDef);
add(checkedExprVarDef);
add(ifStmt);
}});
// The whole construct becomes a statement expression yielding _$result$_.
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(resultType);
result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
// A service constructor becomes a plain type-init of the service's class type.
final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {
// Desugar the anonymous class first, then its annotations, then the type-init call.
visit(bLangObjectConstructorExpression.classNode);
bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = rewriteExpr(bLangObjectConstructorExpression.typeInit);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
// Annotation access is modeled as a synthetic binary op:
// <expr> ANNOT_ACCESS "<annotation alias>".
BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
binaryExpr.pos = annotAccessExpr.pos;
binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
binaryExpr.lhsExpr = annotAccessExpr.expr;
binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
annotAccessExpr.annotationSymbol.bvmAlias());
binaryExpr.setBType(annotAccessExpr.getBType());
// Synthesize an operator symbol; ANNOT_ACCESS has no user-visible operator.
binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
binaryExpr.rhsExpr.getBType()),
annotAccessExpr.getBType(), null), null,
symTable.builtinPos, VIRTUAL);
result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    // Value-typed operands are boxed to `any` before the runtime type check.
    BLangExpression testedExpr = typeTestExpr.expr;
    if (types.isValueType(testedExpr.getBType())) {
        testedExpr = addConversionExprIfRequired(testedExpr, symTable.anyType);
    }
    if (!typeTestExpr.isNegation) {
        typeTestExpr.expr = rewriteExpr(testedExpr);
        typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
        result = typeTestExpr;
        return;
    }
    // Negated test: desugar `x !is T` into `!(x is T)`.
    BLangTypeTestExpr positiveTest = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
            typeTestExpr.expr, typeTestExpr.typeNode);
    BLangGroupExpr grouped = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
    grouped.expression = positiveTest;
    grouped.setBType(typeTestExpr.getBType());
    BLangUnaryExpr negated = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, grouped,
            typeTestExpr.getBType(), OperatorKind.NOT, null);
    result = rewriteExpr(negated);
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
// Only the tested expression needs rewriting.
isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
// Rewrite the result expression first, then the backing statement.
bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
// Mark that we are inside a query so `fail` handling is not redirected to on-fail
// (see visit(BLangFail)); save/restore supports nested queries.
boolean prevIsVisitingQuery = this.isVisitingQuery;
this.isVisitingQuery = true;
BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));
result = rewrite(stmtExpr, env);
this.isVisitingQuery = prevIsVisitingQuery;
}
List<BLangStatement> getVisibleXMLNSStmts(SymbolEnv env) {
    // Collect the recorded xmlns declaration statements for every namespace
    // prefix that is visible from the given environment.
    Map<Name, BXMLNSSymbol> visibleNamespaces = symResolver.resolveAllNamespaces(env);
    List<BLangStatement> visibleStmts = new ArrayList<>();
    for (Name prefix : visibleNamespaces.keySet()) {
        BLangStatement stmt = this.stmtsToBePropagatedToQuery.get(prefix);
        if (stmt != null) {
            visibleStmts.add(stmt);
        }
    }
    return visibleStmts;
}
@Override
public void visit(BLangQueryAction queryAction) {
// Same flag handling as for query expressions: suppress on-fail redirection
// while desugaring the query, restoring the previous state afterwards.
boolean prevIsVisitingQuery = this.isVisitingQuery;
this.isVisitingQuery = true;
BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));
result = rewrite(stmtExpr, env);
this.isVisitingQuery = prevIsVisitingQuery;
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
// Only the member expressions need rewriting.
jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
BConstantSymbol constSymbol = constant.symbol;
BType refType = Types.getReferredType(constSymbol.literalType);
// Basic-typed constants (tags up to BOOLEAN, plus nil) are replaced by a literal
// node holding the resolved constant value.
if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {
// A non-nil basic constant must have a resolved value by this stage.
if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||
constSymbol.value.value == null)) {
throw new IllegalStateException();
}
BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
constSymbol.value.value);
constant.expr = rewriteExpr(literal);
} else {
constant.expr = rewriteExpr(constant.expr);
}
constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
// No desugaring needed; pass through unchanged.
result = ignoreExpr;
}
@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
// Rewrite both the conditionally supplied argument and its guard condition.
dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
result = dynamicParamExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
// A constant reference collapses to a literal carrying the constant's value.
result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    // Invoke the iterator function on the collection.
    BLangSimpleVarRef collectionRef = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorCall = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorCall.pos = pos;
    iteratorCall.expr = collectionRef;
    iteratorCall.symbol = iteratorInvokableSymbol;
    iteratorCall.setBType(iteratorInvokableSymbol.retType);
    iteratorCall.argExprs = Lists.of(collectionRef);
    iteratorCall.requiredArgs = iteratorCall.argExprs;
    iteratorCall.langLibInvocation = isIteratorFuncFromLangLib;
    // Bind the obtained iterator to a synthetic `$iterator$` variable.
    BVarSymbol iteratorVarSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVar = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorCall, iteratorVarSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVar);
}
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    // Define `$result$` initialized with the iterator's next() invocation.
    BLangInvocation nextCall = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, "$result$", nillableResultType, nextCall, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVar);
}
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    // Resolve the `next` method attached to the iterator's object type.
    BInvokableSymbol nextFuncSymbol =
            getNextFunc((BObjectType) Types.getReferredType(iteratorSymbol.type)).symbol;
    BLangInvocation nextCall = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextCall.pos = pos;
    nextCall.name = ASTBuilderUtil.createIdentifier(pos, "next");
    nextCall.expr = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    // The iterator itself is also the sole required argument of next().
    nextCall.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextCall.argExprs = nextCall.requiredArgs;
    nextCall.symbol = nextFuncSymbol;
    nextCall.setBType(nextFuncSymbol.retType);
    return nextCall;
}
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    // Linear scan over the attached functions of the iterator object for `next`.
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if ("next".equals(attachedFunc.funcName.value)) {
            return attachedFunc;
        }
    }
    // No `next` method found on the object type.
    return null;
}
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
BVarSymbol resultSymbol) {
// Convenience wrapper: builds `<result>.value` with the given static type.
return getFieldAccessExpression(location, "value", varType, resultSymbol);
}
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    // Build `<resultSymbol>.<fieldName>` and stamp it with the expected static type.
    BLangSimpleVarRef receiverRef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier fieldId = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(receiverRef, fieldId);
    fieldAccess.pos = pos;
    fieldAccess.setBType(varType);
    fieldAccess.originalType = fieldAccess.getBType();
    return fieldAccess;
}
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    // Wrap the arrow function's single expression in `return <expr>;` inside a block body.
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bLangArrowFunction.body.expr.pos;
    returnStmt.setExpression(bLangArrowFunction.body.expr);
    BlockFunctionBodyNode bodyBlock = TreeBuilder.createBlockFunctionBodyNode();
    bodyBlock.addStatement(returnStmt);
    return bodyBlock;
}
protected BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    // Build an invocation of a function resolved from the root scope.
    BLangIdentifier funcNameId = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcNameId.setLiteral(false);
    funcNameId.setValue(functionName);
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.name = funcNameId;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocation.setBType(retType);
    invocation.requiredArgs = args;
    return invocation;
}
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    // Build `<onExpr>.<functionName>(args...)` resolved against the lang library
    // for onExpr's type.
    BLangIdentifier funcNameId = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcNameId.setLiteral(false);
    funcNameId.setValue(functionName);
    funcNameId.pos = pos;
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = pos;
    invocation.name = funcNameId;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.expr = onExpr;
    invocation.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName), env);
    // The receiver is implicitly the first required argument of a lang-lib call.
    ArrayList<BLangExpression> allArgs = new ArrayList<>();
    allArgs.add(onExpr);
    allArgs.addAll(args);
    invocation.requiredArgs = allArgs;
    // Fall back to the resolved symbol's return type when no static type is given.
    invocation.setBType(retType != null ? retType : ((BInvokableSymbol) invocation.symbol).retType);
    invocation.langLibInvocation = true;
    return invocation;
}
/**
 * Creates an invocation of a function from the lang internal module, without a receiver expression.
 *
 * @param functionName name of the internal-module function to invoke
 * @param args         arguments to pass
 * @param retType      static type of the invocation; falls back to the resolved symbol's
 *                     return type when {@code null}
 * @param pos          source position to attach to the generated nodes
 * @return the synthesized invocation node
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
            names.fromString(functionName), env);
    // Copy-construct instead of create-then-addAll; also leaves the caller's list untouched.
    invocationNode.requiredArgs = new ArrayList<>(args);
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
private BLangArrayLiteral createArrayLiteralExprNode() {
    // Fresh, empty array literal node typed as `any[]`.
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.exprs = new ArrayList<>();
    arrayLiteral.setBType(new BArrayType(symTable.anyType));
    return arrayLiteral;
}
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    // Rewrite the callee as a value reference: a field access when the function
    // pointer is reached through an expression, a simple var-ref otherwise.
    final BLangValueExpression calleeRef;
    if (iExpr.expr != null) {
        BLangFieldBasedAccess fieldAccess = new BLangFieldBasedAccess();
        fieldAccess.expr = iExpr.expr;
        fieldAccess.field = iExpr.name;
        calleeRef = fieldAccess;
    } else {
        calleeRef = new BLangSimpleVarRef();
    }
    calleeRef.symbol = iExpr.symbol;
    calleeRef.setBType(iExpr.symbol.type);
    result = new BFunctionPointerInvocation(iExpr, rewriteExpr(calleeRef));
}
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    // Value types and errors are returned as-is; other values get a clone() call.
    if (types.isValueType(expr.getBType()) || expr.getBType().tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    // Value types and errors are returned as-is; other values get cloneReadOnly().
    if (types.isValueType(expr.getBType()) || expr.getBType().tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            expr.getBType(), expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
// Generic desugaring entry point: dispatches `node` through this visitor with
// `env` installed, and returns whatever the visitor left in `this.result`.
if (node == null) {
return null;
}
// Nodes are rewritten at most once.
if (node.desugared) {
return node;
}
SymbolEnv previousEnv = this.env;
this.env = env;
node.accept(this);
BLangNode resultNode = this.result;
this.result = null;
resultNode.desugared = true;
this.env = previousEnv;
return (E) resultNode;
}
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
// Expression variant of rewrite(): additionally unwraps a pending implicit
// conversion so that the conversion expression itself is what gets desugared.
if (node == null) {
return null;
}
if (node.desugared) {
return node;
}
BLangExpression expr = node;
if (node.impConversionExpr != null) {
expr = node.impConversionExpr;
node.impConversionExpr = null;
}
expr.accept(this);
BLangNode resultNode = this.result;
this.result = null;
resultNode.desugared = true;
return (E) resultNode;
}
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    // Statement-typed convenience overload of the generic node rewrite.
    return statement == null ? null : (E) rewrite((BLangNode) statement, env);
}
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    // Rewrite each statement in place and hand the same list back.
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    // Rewrite every node in place; the input list is mutated and returned.
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    // Rewrite every expression in place; the input list is mutated and returned.
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
private BLangLiteral createStringLiteral(Location pos, String value) {
    // Literal node of type string carrying the given constant value.
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
private BLangLiteral createIntLiteral(long value) {
    // Literal node of type int; the primitive value is boxed into the node.
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.setBType(symTable.intType);
    intLiteral.value = value;
    return intLiteral;
}
private BLangLiteral createByteLiteral(Location pos, Byte value) {
    // Bytes are stored as their unsigned int value in the literal node.
    BLangLiteral literal = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    literal.pos = pos;
    return literal;
}
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    // No conversion node is needed when the types already match.
    if (types.isSameType(expr.getBType(), targetType)) {
        return expr;
    }
    BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    castExpr.pos = expr.pos;
    castExpr.expr = expr;
    castExpr.targetType = targetType;
    castExpr.setBType(targetType);
    castExpr.internal = true;
    return castExpr;
}
private BType getElementType(BType bType) {
    // Iteratively strip array dimensions (through type references) until the
    // innermost non-array element type is reached.
    BType current = bType;
    BType referred = Types.getReferredType(current);
    while (referred.tag == TypeTags.ARRAY) {
        current = ((BArrayType) referred).getElementType();
        referred = Types.getReferredType(current);
    }
    return current;
}
/**
 * Appends an implicit nil return to the given invokable's block body when its return type
 * is nillable and the body does not already end with a return statement. Native functions
 * and non-block bodies are left untouched.
 *
 * @param invokableNode the function/method to complete
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    // `isEmpty()` instead of `size() < 1`; extracted for readability.
    boolean bodyEndsWithReturn = !funcBody.stmts.isEmpty()
            && funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() == NodeKind.RETURN;
    if (invokableNode.symbol.type.getReturnType().isNullable() && !bodyEndsWithReturn) {
        Location invPos = invokableNode.pos;
        Location returnStmtPos;
        if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
            // Generated initializers get a null position for the implicit return.
            returnStmtPos = null;
        } else {
            // Anchor the synthetic return at the function's end line, zero-width.
            returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().startLine().offset(),
                    invPos.lineRange().startLine().offset(), 0, 0);
        }
        funcBody.addStatement(ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType));
    }
}
/**
* Reorder the invocation arguments to match the original function signature.
*
* @param iExpr Function invocation expressions to reorder the arguments
*/
private void reorderArguments(BLangInvocation iExpr) {
    // Normalizes the argument lists of an invocation so that required args appear
    // in declared parameter order and all rest args collapse into a single
    // array-valued rest argument. Handles three shapes of rest args:
    //   1) plain rest args only (or none)      -> wrap them in one array literal
    //   2) a single vararg (`...x`)            -> index/copy out of the vararg temp
    //   3) plain rest args + trailing vararg   -> array literal, then push the vararg
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || Types.getReferredType(symbol.type).tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // When the last rest arg is a vararg and some required/defaultable parameters
    // still need values, evaluate the vararg expression once into a temp variable
    // so it can be indexed repeatedly without re-evaluating.
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Align named/positional required args with the parameter order; the vararg
        // temp (if any) supplies values for parameters not given explicitly.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        // Case 1: no vararg. Collapse individual rest args into one array literal
        // matching the rest parameter's type (if a rest parameter exists at all).
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        // Case 2: exactly one rest arg and it is a vararg.
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // The vararg contributed nothing to required args; leave it as-is.
            return;
        }
        // Prepend the vararg temp-var definition block to the first required arg so
        // the temp is initialized before any member access reads it.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }
        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        if (Types.getReferredType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {
            // A record-typed vararg only feeds named parameters; the rest param
            // receives an empty array.
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        // List-typed vararg: copy the members not consumed by required params into a
        // fresh array via a foreach over [startIndex, vararg.length()).
        Location pos = restArgsExpression.pos;
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.getBType(),
                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
        BType refType = Types.getReferredType(varargVarType);
        if (refType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) refType;
            if (arrayType.state == BArrayState.CLOSED &&
                    arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                // Closed array fully consumed by required params: member type falls
                // back to the rest param's element type.
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            valueExpr.setBType(symTable.anyOrErrorType);
        }
        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                List.of(pushExpr),
                restParamType, pos);
        // `push` takes the value as a rest arg, not a required arg.
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);
        BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);
        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }
    // Case 3: individual rest args followed by a trailing vararg. Build an array
    // literal from the leading args, then push the vararg's members onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);
    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
            this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, restParamType, pos);
    // `push` takes the spread vararg as a rest arg, not a required arg.
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    // Rebuilds iExpr.requiredArgs so that positional order matches the parameter
    // list. Each parameter slot is filled, in priority order, from: a positional
    // arg, a named arg, a fresh included-record literal, the vararg temp, or an
    // ignore marker (so the parameter's default applies).
    List<BLangExpression> args = new ArrayList<>();
    Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
    BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = Types.getReferredType(varargRef.getBType());
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional argument supplied for this parameter.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            // Named argument matches this parameter by name; consume it.
            args.add(namedArgs.remove(param.name.value));
        } else if (param.getFlags().contains(Flag.INCLUDED)) {
            // Included-record parameter: start an empty record literal. Leftover
            // named args are distributed into these after the loop.
            BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            BType paramType = param.type;
            recordLiteral.setBType(paramType);
            args.add(recordLiteral);
            incRecordLiterals.add(recordLiteral);
            if (((BRecordType) Types.getReferredType(paramType)).restFieldType != symTable.noType) {
                // Open record: may absorb named args that match no declared field.
                incRecordParamAllowAdditionalFields = recordLiteral;
            }
        } else if (varargRef == null) {
            // No value provided at all: placeholder so the default value kicks in.
            BLangExpression expr = new BLangIgnoreExpr();
            expr.setBType(param.type);
            args.add(expr);
        } else {
            if (Types.getReferredType(varargRef.getBType()).tag == TypeTags.RECORD) {
                if (param.isDefaultable) {
                    // rec.hasKey(name) ? rec[name] : <ignore> — use the default
                    // only when the record does not carry the field.
                    BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
                            List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
                    BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
                    BLangIndexBasedAccess memberAccessExpr =
                            ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
                    BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
                    BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
                            hasKeyInvocation, memberAccessExpr, ignoreExpr);
                    args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
                } else {
                    // Required parameter: plain field access on the record vararg.
                    BLangFieldBasedAccess fieldBasedAccessExpression =
                            ASTBuilderUtil.createFieldAccessExpr(varargRef,
                                    ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
                    fieldBasedAccessExpression.setBType(param.type);
                    args.add(fieldBasedAccessExpression);
                }
            } else {
                // List-typed vararg: take the next member positionally.
                BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
                BType memberAccessExprType = tupleTypedVararg ?
                        ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
                args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
                        varargRef, indexExpr), param.type));
                varargIndex++;
            }
        }
    }
    if (namedArgs.size() > 0) {
        // Remaining named args belong to included-record parameters.
        setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
    }
    iExpr.requiredArgs = args;
}
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                           List<BLangRecordLiteral> incRecordLiterals,
                                           BLangRecordLiteral incRecordParamAllowAdditionalFields) {
    // Distribute leftover named args: each one goes into the first included-record
    // parameter that declares a (non-never) field with that name; otherwise it is
    // treated as an additional field of the open included-record parameter.
    for (Map.Entry<String, BLangExpression> entry : namedArgs.entrySet()) {
        String argName = entry.getKey();
        BLangNamedArgsExpression namedArgExpr = (BLangNamedArgsExpression) entry.getValue();
        BLangRecordLiteral target = null;
        for (BLangRecordLiteral recordLiteral : incRecordLiterals) {
            LinkedHashMap<String, BField> declaredFields =
                    ((BRecordType) Types.getReferredType(recordLiteral.getBType())).fields;
            BField declaredField = declaredFields.get(argName);
            if (declaredField != null && Types.getReferredType(declaredField.type).tag != TypeTags.NEVER) {
                target = recordLiteral;
                break;
            }
        }
        if (target == null) {
            target = incRecordParamAllowAdditionalFields;
        }
        createAndAddRecordFieldForIncRecordLiteral(target, namedArgExpr);
    }
}
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
                                                        BLangNamedArgsExpression expr) {
    // Append a `name: value` key-value field built from the named argument.
    BLangSimpleVarRef keyRef = new BLangSimpleVarRef();
    keyRef.variableName = expr.name;
    recordLiteral.fields.add(ASTBuilderUtil.createBLangRecordKeyValue(keyRef, expr.expr));
}
private BLangBlockStmt getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,
                                              BSymbol invokableSymbol,
                                              List<BType> equivalentErrorTypes,
                                              boolean isCheckPanicExpr) {
    // Builds the failure branch for check/checkpanic: bind the error value to a
    // temp variable, then either `fail` with it (optionally attaching a return
    // when the error can propagate out of the function) or `panic` with it.
    BType enclosingFuncReturnType = Types.getReferredType(((BInvokableType) invokableSymbol.type).retType);
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
            ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
            new LinkedHashSet<>() {{
                add(enclosingFuncReturnType);
            }};
    // `check` may return the error only if every possible error type is assignable
    // to some member of the enclosing function's return type.
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));
    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable errorVar =
            ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,
                    createTypeCastExpr(ref, symTable.errorType),
                    new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
                            this.env.scope.owner.pkgID, symTable.errorType,
                            this.env.scope.owner, location, VIRTUAL));
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);
    BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVar);
    blockStmt.addStatement(errorVarDef);
    BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);
    if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
        // `check` with a handler (on-fail) or a returnable error: fail the scope.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = location;
        failStmt.expr = errorVarRef;
        blockStmt.addStatement(failStmt);
        if (returnOnError && this.shouldReturnErrors) {
            BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));
            errorReturn.desugared = true;
            failStmt.exprStmt = errorReturn;
        }
    } else {
        // `checkpanic`, or the error can be neither handled nor returned: panic.
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = location;
        panicNode.expr = errorVarRef;
        blockStmt.addStatement(panicNode);
    }
    return blockStmt;
}
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    // Wraps `expr` in a type-conversion node when its static type differs from
    // `lhsType`. Returns `expr` unchanged when no conversion is needed, or when a
    // conversion would be redundant for the specific type combinations below.
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.getBType();
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // May attach an implicit cast to `expr` as a side effect; prefer that result.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        BLangExpression impConversionExpr = expr.impConversionExpr;
        expr.impConversionExpr = null;
        return impConversionExpr;
    }
    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }
    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }
    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }
    // Fall back to an explicit, unchecked conversion generated by the desugarer.
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
            TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.setBType(lhsType);
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;
    conversionExpr.internal = true;
    return conversionExpr;
}
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    // Computes the static type implied by a structured binding pattern (tuple,
    // record, or error variable), synthesizing anonymous type symbols and type
    // definitions where needed. Plain variables fall through to their own type.
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            // The rest binding's type is an array; its element type becomes the
            // tuple's rest type.
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }
    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        // Synthesize an anonymous record type with one field per binding entry.
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
                        env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
                        VIRTUAL);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            // Field types are resolved recursively from the nested patterns.
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
                    recordSymbol, bindingPatternVariable.pos, VIRTUAL);
            fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;
        // A rest binding inherits the matched record's rest field type; otherwise
        // the anonymous record stays open with anydata.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BRecordType) recordVariable.restParam.getBType()).restFieldType :
                symTable.anydataType;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                recordVarType,
                bindingPatternVariable.pos);
        recordTypeNode.initFunction =
                rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
        TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);
        return recordVarType;
    }
    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        // Synthesize an anonymous error type whose detail type mirrors the bindings.
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null, errorVariable.pos, VIRTUAL);
        BType detailType;
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            // Only a rest-detail binding: the generic detail map type suffices.
            detailType = symTable.detailType;
        } else {
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
                    errorVariable.pos);
            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            recordTypeNode.initFunction = TypeDefBuilderHelper
                    .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,
                    recordTypeNode, env);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
        errorTypeSymbol.type = errorType;
        TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,
                createErrorTypeNode(errorType), env);
        return errorType;
    }
    return bindingPatternVariable.getBType();
}
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    // Build one simple variable per error-detail entry, synthesizing a symbol
    // when the binding pattern did not already produce one.
    List<BLangSimpleVariable> detailFields = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry entry : errorVariable.detail) {
        BVarSymbol fieldSymbol = entry.valueBindingPattern.symbol;
        if (fieldSymbol == null) {
            fieldSymbol = new BVarSymbol(Flags.PUBLIC, names.fromString(entry.key.value + "$"),
                    this.env.enclPkg.packageID, symTable.pureType, null,
                    entry.valueBindingPattern.pos, VIRTUAL);
        }
        detailFields.add(ASTBuilderUtil.createVariable(
                entry.valueBindingPattern.pos,
                fieldSymbol.name.value,
                entry.valueBindingPattern.getBType(),
                entry.valueBindingPattern.expr,
                fieldSymbol));
    }
    return TypeDefBuilderHelper.createRecordTypeNode(detailFields, detailType, errorVariable.pos);
}
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo, Location pos) {
    // An anonymous record models the error detail; it is sealed only when there
    // is no rest binding to absorb extra detail fields.
    BRecordType detailRecord = createAnonRecordType(pos);
    if (restDetail == null) {
        detailRecord.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry entry : detail) {
        Name fieldName = names.fromIdNode(entry.key);
        BType fieldType = getStructuredBindingPatternType(entry.valueBindingPattern);
        BVarSymbol fieldSymbol = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecord.tsymbol.pkgID, fieldType,
                detailRecord.tsymbol, entry.key.pos, VIRTUAL);
        detailRecord.fields.put(fieldName.value, new BField(fieldName, entry.key.pos, fieldSymbol));
        detailRecord.tsymbol.scope.define(fieldName, fieldSymbol);
    }
    return detailRecord;
}
private BRecordType createAnonRecordType(Location pos) {
    // Fresh anonymous record type symbol with a generated init function defined
    // in its own scope.
    BRecordTypeSymbol recordSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
            env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
    recordSymbol.initializerFunc = createRecordInitFunc();
    recordSymbol.scope = new Scope(recordSymbol);
    recordSymbol.scope.define(
            names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
            recordSymbol.initializerFunc.symbol);
    // Open record: unspecified fields default to anydata.
    BRecordType anonRecordType = new BRecordType(recordSymbol);
    anonRecordType.restFieldType = symTable.anydataType;
    return anonRecordType;
}
private BAttachedFunction createRecordInitFunc() {
    // Synthesize a public, no-arg `init` returning nil for generated record types.
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner,
            false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, initFuncType, symTable.builtinPos);
}
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    // Wrap the semantic error type in a fresh AST error-type node.
    BLangErrorType node = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    node.setBType(errorType);
    return node;
}
private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
                                               BLangExpression expression) {
    // Converts a pattern-like expression into a boolean test against `varRef`:
    // group exprs are unwrapped, binary exprs become OR-ed recursive tests,
    // `_` becomes an `is any` type test, and anything else an equality check.
    BLangBinaryExpr binaryExpr;
    if (NodeKind.GROUP_EXPR == expression.getKind()) {
        return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
    }
    if (NodeKind.BINARY_EXPR == expression.getKind()) {
        // `a | b` style: test(varRef, a) || test(varRef, b)
        binaryExpr = (BLangBinaryExpr) expression;
        BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
        BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver
                        .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
    } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
        // `_` matches anything: emit `varRef is any`.
        BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
        anyType.setBType(symTable.anyType);
        anyType.typeKind = TypeKind.ANY;
        return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
    } else {
        // Constant/expression pattern: emit `varRef == expression`.
        binaryExpr = ASTBuilderUtil
                .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
                expression.getBType());
        if (opSymbol == symTable.notFoundSymbol) {
            // No direct operator for the operand types: fall back to anydata equality.
            opSymbol = symResolver
                    .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
                            binaryExpr, env);
        }
        binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }
    return binaryExpr;
}
// Builds a boolean `expr is-like <type>` test expression at the given position.
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
    return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    // Builds `<variable.name> = <variable.expr>;` from the variable's own
    // name, symbol, type, position and initializer.
    BLangSimpleVarRef targetRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    targetRef.pos = variable.pos;
    targetRef.variableName = variable.name;
    targetRef.symbol = variable.symbol;
    targetRef.setBType(variable.getBType());
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.expr = variable.expr;
    assignment.setVariable(targetRef);
    return assignment;
}
// Convenience overload: updates `self.<variable.name>` with the variable's
// initializer expression, delegating to the full-argument overload.
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol selfSymbol) {
    return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,
            variable.name);
}
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                BLangIdentifier fieldName) {
    // Builds `self.<fieldName> = <expr>;` and rewrites it within the init
    // function's environment before returning.
    BLangSimpleVarRef selfRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
    BLangFieldBasedAccess fieldAccessExpr = ASTBuilderUtil.createFieldAccessExpr(selfRef, fieldName);
    fieldAccessExpr.symbol = fieldSymbol;
    fieldAccessExpr.setBType(fieldType);
    fieldAccessExpr.isStoreOnCreation = true;
    BLangAssignment fieldUpdate = (BLangAssignment) TreeBuilder.createAssignmentNode();
    fieldUpdate.pos = function.pos;
    fieldUpdate.expr = expr;
    fieldUpdate.setVariable(fieldAccessExpr);
    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(fieldUpdate, initFuncEnv);
}
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    // Lvalues and accesses without a receiver never need safe-navigation desugaring.
    if (accessExpr.isLValue || accessExpr.expr == null) {
        return false;
    }
    // Explicitly marked safe navigation on this link.
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Otherwise inherit the answer from a chained field/index access receiver.
    NodeKind receiverKind = accessExpr.expr.getKind();
    boolean chainedAccess = receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR;
    return chainedAccess && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
    // Rewrites a safe-navigation access chain into a statement expression:
    // a temp result variable plus the nested match statements built by
    // handleSafeNavigation, evaluating to the temp result.
    BType originalExprType = accessExpr.getBType();
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar =
            ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
                    new BVarSymbol(0, names.fromString(matchTempResultVarName),
                            this.env.scope.owner.pkgID, accessExpr.getBType(),
                            this.env.scope.owner, accessExpr.pos, VIRTUAL));
    BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
    // The outermost match statement sits at the bottom of the stack built above.
    BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matchStmt));
    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
    stmtExpression.setBType(originalExprType);
    // Reset per-expression desugaring state for the next safe-navigation chain.
    this.matchStmtStack = new Stack<>();
    this.accessExprStack = new Stack<>();
    this.successClause = null;
    this.safeNavigationAssignment = null;
    return stmtExpression;
}
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
    // Recursively desugars a safe-navigation access chain into nested match
    // statements that assign the resulting value (or nil/error) to `tempResultVar`.
    if (accessExpr.expr == null) {
        return;
    }
    // Process the receiver chain first, innermost access outward.
    NodeKind kind = accessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
    }
    if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
        // Plain (non-safe) access link: restore its original type — error-lifted
        // for xml and map<json> accesses — and wire it into any pending
        // safe-navigation assignment.
        BType originalType = Types.getReferredType(accessExpr.originalType);
        if (TypeTags.isXMLTypeTag(originalType.tag) || isMapJson(originalType)) {
            accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
        } else {
            accessExpr.setBType(originalType);
        }
        if (this.safeNavigationAssignment != null) {
            this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
        }
        return;
    }
    /*
     * If the field access is a safe navigation, create a match expression.
     * Then chain the current expression as the success-pattern of the parent
     * match expr, if available.
     * eg:
     * x but { <--- parent match expr
     * error e => e,
     * T t => t.y but { <--- current expr
     * error e => e,
     * R r => r.z
     * }
     * }
     */
    BLangExpression matchExpr = accessExpr.expr;
    BType matchExprType = accessExpr.expr.getBType();
    Location pos = accessExpr.pos;
    BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(matchExpr, pos);
    boolean isAllTypesRecords = false;
    LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
    BType referredType = Types.getReferredType(matchExpr.getBType());
    if (referredType.tag == TypeTags.UNION) {
        memTypes = new LinkedHashSet<>(((BUnionType) referredType).getMemberTypes());
        isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
    }
    if (accessExpr.nilSafeNavigation) {
        // `?.`: a nil receiver short-circuits the chain.
        matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));
        matchStmt.setBType(type);
        memTypes.remove(symTable.nilType);
    }
    if (accessExpr.errorSafeNavigation) {
        // Error-lifted access: an error receiver short-circuits with the error.
        matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));
        matchStmt.setBType(type);
        matchStmt.pos = pos;
        memTypes.remove(symTable.errorType);
    }
    BLangMatchClause successClause = null;
    Name field = getFieldName(accessExpr);
    if (field == Names.EMPTY) {
        // Dynamic index expression: one success clause over the whole receiver type.
        successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
                accessExpr.errorSafeNavigation);
        matchStmt.addMatchClause(successClause);
        pushToMatchStatementStack(matchStmt, successClause, pos);
        return;
    }
    if (isAllTypesRecords) {
        // One success clause per record member that can carry the field (declares
        // it, or is open); a final clause handles everything remaining.
        for (BType memberType : memTypes) {
            BRecordType recordType = (BRecordType) Types.getReferredType(memberType);
            if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
                successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,
                        accessExpr.errorSafeNavigation);
                matchStmt.addMatchClause(successClause);
            }
        }
        matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));
        pushToMatchStatementStack(matchStmt, successClause, pos);
        return;
    }
    successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
            accessExpr.errorSafeNavigation);
    matchStmt.addMatchClause(successClause);
    pushToMatchStatementStack(matchStmt, successClause, pos);
}
private boolean isMapJson(BType originalType) {
    // True only for `map<json>`: a map type whose constraint tag is JSON.
    if (originalType.tag != TypeTags.MAP) {
        return false;
    }
    return ((BMapType) originalType).getConstraint().tag == TypeTags.JSON;
}
private void pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,
                                       Location pos) {
    // Chains nested safe-navigation match statements: the new match statement
    // becomes the body of the previous level's success clause, and its own
    // success clause is remembered for the next level. Read-before-write order
    // of this.successClause is essential here.
    this.matchStmtStack.push(matchStmt);
    if (this.successClause != null) {
        this.successClause.blockStmt = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));
    }
    this.successClause = successClause;
}
private Name getFieldName(BLangAccessExpression accessExpr) {
    // Resolves the statically-known field name of an access expression;
    // Names.EMPTY when the index is not a literal (i.e. dynamic).
    NodeKind accessKind = accessExpr.getKind();
    if (accessKind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        return new Name(((BLangFieldBasedAccess) accessExpr).field.value);
    }
    if (accessKind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangExpression indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
        if (indexExpr.getKind() == NodeKind.LITERAL) {
            return new Name(((BLangLiteral) indexExpr).value.toString());
        }
    }
    return Names.EMPTY;
}
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
    // Every member must be a record (error and nil members are tolerated, since
    // they are peeled off separately by the safe-navigation clauses).
    for (BType memberType : memTypes) {
        int tag = Types.getReferredType(memberType).tag;
        boolean allowed = tag == TypeTags.RECORD || tag == TypeTags.ERROR || tag == TypeTags.NIL;
        if (!allowed) {
            return false;
        }
    }
    return true;
}
/**
 * Builds the match clause for the error case of a safe navigation: captures the matched
 * value and, guarded by an {@code is error} type test, assigns it to the temp result var.
 *
 * @param matchExpr     the expression being matched on
 * @param tempResultVar variable that receives the captured error value
 * @return the generated error match clause
 */
private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
    Location pos = matchExpr.pos;
    String patternVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol patternVarSymbol = new BVarSymbol(0, Names.fromString(patternVarName),
            this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
    BLangCaptureBindingPattern bindingPattern =
            ASTBuilderUtil.createCaptureBindingPattern(patternVarSymbol, patternVarName);
    BLangVarBindingPatternMatchPattern matchPattern =
            ASTBuilderUtil.createVarBindingPatternMatchPattern(bindingPattern, matchExpr);
    // Clause body: tempResultVar = <captured value>
    BLangSimpleVarRef capturedVarRef = ASTBuilderUtil.createVariableRef(pos, patternVarSymbol);
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, capturedVarRef);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignment));
    // Guard: only take this clause when the captured value is an error.
    BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, capturedVarRef, getErrorTypeNode());
    matchGuard.setBType(symTable.booleanType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, matchPattern);
}
/**
 * Builds the match clause for the nil case of a safe navigation: captures the matched value
 * and, guarded by an {@code is ()} type test, assigns it to the temp result var.
 *
 * @param matchExpr     the expression being matched on
 * @param tempResultVar variable that receives the captured nil value
 * @return the generated nil match clause
 */
private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
    Location pos = matchExpr.pos;
    String patternVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol patternVarSymbol = new BVarSymbol(0, Names.fromString(patternVarName),
            this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
    BLangCaptureBindingPattern bindingPattern =
            ASTBuilderUtil.createCaptureBindingPattern(patternVarSymbol, patternVarName);
    BLangVarBindingPatternMatchPattern matchPattern =
            ASTBuilderUtil.createVarBindingPatternMatchPattern(bindingPattern, matchExpr);
    // Clause body: tempResultVar = <captured value>
    BLangSimpleVarRef capturedVarRef = ASTBuilderUtil.createVariableRef(pos, patternVarSymbol);
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, capturedVarRef);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignment));
    // Guard: only take this clause when the captured value is nil.
    BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, capturedVarRef, getNillTypeNode());
    matchGuard.setBType(symTable.booleanType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, matchPattern);
}
/**
 * Builds the catch-all {@code _} match clause that yields nil: when no other clause matched,
 * it assigns {@code ()} to the temp result variable.
 *
 * @param matchExpr     the expression being matched on
 * @param tempResultVar variable that receives the nil value
 * @return the generated wildcard match clause
 */
private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,
                                                       BLangSimpleVariable tempResultVar) {
    Location pos = matchExpr.pos;
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createLiteral(pos, symTable.nilType, Names.NIL_VALUE));
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignment));
    BLangWildCardMatchPattern wildCardPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);
    wildCardPattern.setBType(symTable.anyType);
    // No guard: this clause matches anything that reaches it.
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardPattern);
}
/**
 * Creates the success clause of the safe-navigation match statement: binds the matched value
 * to a fresh pattern variable, performs the original field/index access on it with safe
 * navigation disabled, and assigns the result to the temp result variable.
 *
 * @param type          static type of the matched expression; nil (and error, when
 *                      {@code liftError} is true) is lifted from it below
 * @param matchExpr     the expression being matched on
 * @param accessExpr    the original safe-navigation access expression being desugared
 * @param tempResultVar variable that receives the access result
 * @param liftError     whether the error type should also be lifted from {@code type}
 * @return the generated success match clause
 */
private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,
                                                 BLangAccessExpression accessExpr,
                                                 BLangSimpleVariable tempResultVar, boolean liftError) {
    // Lift nil (and optionally error) so the pattern variable has the "success" type.
    type = types.getSafeType(type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
    Location pos = accessExpr.pos;
    BVarSymbol successPatternSymbol;
    // Use an invokable symbol when the matched type is a function type.
    if (Types.getReferredType(type).tag == TypeTags.INVOKABLE) {
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
    } else {
        successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
    }
    BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
            type, null, successPatternSymbol);
    BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,
            successPatternVar.symbol);
    BLangCaptureBindingPattern captureBindingPattern =
            ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);
    BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
            ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
    // Clone the access expression, but reuse the original index expression / namespace symbol
    // on the clone rather than their copies.
    BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
    if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
    }
    if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
        ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
                ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
    }
    // The clone's receiver is the bound pattern variable (converted to the lifted type);
    // safe navigation has already been handled by the surrounding match, so disable it.
    tempAccessExpr.expr = addConversionExprIfRequired(successPatternVarRef, type);
    tempAccessExpr.errorSafeNavigation = false;
    tempAccessExpr.nilSafeNavigation = false;
    accessExpr.cloneRef = null;
    // XML member access may still produce error/nil; widen the clone's type accordingly.
    if (TypeTags.isXMLTypeTag(Types.getReferredType(tempAccessExpr.expr.getBType()).tag)) {
        tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
                symTable.nilType));
    } else {
        tempAccessExpr.setBType(accessExpr.originalType);
    }
    tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
    // Clause body: tempResultVar = <access on the bound value> (converted if required).
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,
            Lists.of(assignmentStmt));
    // Guard: the bound value must be of the lifted (success) type.
    BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));
    matchGuard.setBType(symTable.booleanType);
    return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Returns a fresh type node representing the nil type {@code ()}.
 */
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.setBType(symTable.nilType);
    nilTypeNode.typeKind = TypeKind.NIL;
    return nilTypeNode;
}
/**
 * Wraps the given semantic type in a value-type AST node.
 *
 * @param type the semantic type to wrap
 * @return a type node carrying the given type and its kind
 */
BLangValueType createTypeNode(BType type) {
    BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    valueTypeNode.setBType(type);
    valueTypeNode.typeKind = type.getKind();
    return valueTypeNode;
}
/**
 * Clones a value expression: variable references are re-created from their symbol, and
 * field/index access expressions are cloned recursively.
 *
 * @param expr the expression to clone; must be a var-ref or an access expression
 * @return the cloned expression
 * @throws IllegalStateException for any other expression kind
 */
private BLangValueExpression cloneExpression(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Recursively clones a field/index access expression chain, stripping the safe-navigation
 * flags and typing each link's receiver with its nil-lifted type.
 *
 * @param originalAccessExpr the access expression to clone
 * @return the cloned access expression
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    // Clone the receiver first; nested access expressions recurse, anything else goes
    // through the generic expression clone.
    NodeKind receiverKind = originalAccessExpr.expr.getKind();
    BLangExpression receiver;
    if (receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        receiver = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        receiver = cloneExpression(originalAccessExpr.expr);
    }
    receiver.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));
    NodeKind accessKind = originalAccessExpr.getKind();
    BLangAccessExpression clone;
    if (accessKind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        clone = ASTBuilderUtil.createFieldAccessExpr(receiver,
                ((BLangFieldBasedAccess) originalAccessExpr).field);
    } else if (accessKind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        clone = ASTBuilderUtil.createIndexAccessExpr(receiver,
                ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
    } else {
        throw new IllegalStateException();
    }
    clone.originalType = originalAccessExpr.originalType;
    clone.pos = originalAccessExpr.pos;
    clone.isLValue = originalAccessExpr.isLValue;
    clone.symbol = originalAccessExpr.symbol;
    // Safe navigation has been desugared away on the clone.
    clone.errorSafeNavigation = false;
    clone.nilSafeNavigation = false;
    clone.setBType(originalAccessExpr.originalType);
    return clone;
}
/**
 * Builds an expression equivalent to {@code expr + 1}, used to exclude the start value
 * of an integer range.
 *
 * @param expr the range-start expression
 * @return a binary ADD expression adding 1 to {@code expr}
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BOperatorSymbol addOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
            symTable.intType, symTable.intType);
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOpSymbol);
}
/**
 * Builds an expression equivalent to {@code expr - 1}, used to exclude the end value
 * of an integer range.
 *
 * @param expr the range-end expression
 * @return a binary SUB expression subtracting 1 from {@code expr}
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BOperatorSymbol subOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
            symTable.intType, symTable.intType);
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOpSymbol);
}
/**
 * Creates a boolean literal node with the given value at the builtin position.
 *
 * @param value the boolean value of the literal
 * @return the literal node
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral booleanLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    booleanLiteral.pos = symTable.builtinPos;
    booleanLiteral.setBType(symTable.booleanType);
    booleanLiteral.value = value;
    return booleanLiteral;
}
/**
 * Returns true if the (nil-lifted) type is a mapping type with a default value:
 * json, map, or record. Type references are unwrapped recursively.
 *
 * @param type the type to check
 * @return true for json/map/record types
 */
private boolean isDefaultableMappingType(BType type) {
    int tag = types.getSafeType(type, true, false).tag;
    if (tag == TypeTags.JSON || tag == TypeTags.MAP || tag == TypeTags.RECORD) {
        return true;
    }
    if (tag == TypeTags.TYPEREFDESC) {
        // Unwrap the type reference and re-check the referred type.
        return isDefaultableMappingType(Types.getReferredType(type));
    }
    return false;
}
/**
 * Creates the generated initializer function for a class definition, registers it on the
 * class's object type symbol, and returns the desugared function.
 *
 * @param classDefinition the class to create the generated init function for
 * @param env             the env used to create and rewrite the function
 * @return the rewritten generated init function
 */
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
    // The generated init shares the user-defined init's return type, if one exists;
    // otherwise it returns nil.
    BType returnType = symTable.nilType;
    if (classDefinition.initFunction != null) {
        returnType = classDefinition.initFunction.getBType().getReturnType();
    }
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(null, classDefinition.symbol,
                    env, names, GENERATED_INIT_SUFFIX,
                    classDefinition.getBType(), returnType);
    // Register the generated initializer on the object type symbol so callers can find it.
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
    typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
            (BInvokableType) initFunction.getBType(), null);
    classDefinition.generatedInitFunction = initFunction;
    initFunction.returnTypeNode.setBType(returnType);
    return rewrite(initFunction, env);
}
/**
 * Desugars a logical binary expression into an if-else so the RHS short-circuits.
 *
 * logical AND:
 *   T $result$; if (lhsExpr) { $result$ = rhsExpr; } else { $result$ = false; }
 *
 * logical OR:
 *   T $result$; if (lhsExpr) { $result$ = true; } else { $result$ = rhsExpr; }
 *
 * The result is a statement expression yielding {@code $result$}.
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
            symTable.builtinPos);
    boolean isLogicalAnd = binaryExpr.opKind == OperatorKind.AND;

    // then-branch: rhs for AND, `true` for OR.
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
            resultVarDef.var.symbol);
    BLangExpression thenResult = isLogicalAnd ? binaryExpr.rhsExpr : getBooleanLiteral(true);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
    thenBody.addStatement(thenAssignment);

    // else-branch: `false` for AND, rhs for OR.
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
            resultVarDef.var.symbol);
    BLangExpression elseResult = isLogicalAnd ? getBooleanLiteral(false) : binaryExpr.rhsExpr;
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
    elseBody.addStatement(elseAssignment);

    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Returns true if the expression is a mapping constructor, object constructor, or object
 * initializer invocation — possibly wrapped in check or type-conversion expressions.
 *
 * @param expression the expression to inspect
 * @return true if the (unwrapped) expression is a constructor/initializer
 */
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.TYPE_INIT_EXPR || kind == NodeKind.RECORD_LITERAL_EXPR
            || kind == NodeKind.OBJECT_CTOR_EXPRESSION) {
        return true;
    }
    if (kind == NodeKind.CHECK_EXPR) {
        // Look through check/checkpanic wrappers.
        return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
    }
    if (kind == NodeKind.TYPE_CONVERSION_EXPR) {
        // Look through cast/conversion wrappers.
        return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
    }
    return false;
}
/**
 * Returns the type of the invokable symbol's rest parameter, or null if it has none.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    if (invokableSymbol == null || invokableSymbol.restParam == null) {
        return null;
    }
    return invokableSymbol.restParam.type;
}
/**
 * Returns the type of the function's rest parameter, or null if it has none.
 */
private BType getRestType(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.getBType();
}
/**
 * Returns the symbol of the function's rest parameter, or null if it has none.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.symbol;
}
/**
 * Returns true if the record field is a key-value field whose key is a computed
 * ({@code [expr]}) key.
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Rewrites a mapping constructor into a struct or map literal: each field is rewritten,
 * non-computed identifier keys are converted to string literals, var-ref shorthand fields
 * become explicit key-value pairs, and spread-operator fields are rewritten in place.
 *
 * @param mappingConstructorExpr the mapping constructor to rewrite
 * @return a {@code BLangStructLiteral} for record types, otherwise a {@code BLangMapLiteral}
 */
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.getBType();
    Location pos = mappingConstructorExpr.pos;
    List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression origKey = key.expr;
            BLangExpression keyExpr;
            if (key.computedKey) {
                // Computed ([expr]) keys are kept as-is and rewritten below.
                keyExpr = origKey;
            } else {
                // An identifier key becomes a string literal of the (unescaped) name;
                // otherwise the key is already a literal.
                keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
                        StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
                        ((BLangLiteral) origKey);
            }
            BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
                    ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
                            rewriteExpr(keyValueField.valueExpr));
            rewrittenField.pos = keyValueField.pos;
            rewrittenField.key.pos = key.pos;
            rewrittenFields.add(rewrittenField);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand {x} field: expand to "x": x.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
                    rewriteExpr(createStringLiteral(pos,
                            StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
                    rewriteExpr(varRefField)));
        } else {
            // Spread operator field: rewrite its expression in place.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            spreadOpField.expr = rewriteExpr(spreadOpField.expr);
            rewrittenFields.add(spreadOpField);
        }
    }
    // The original field list is cleared; the rewritten fields live on the new literal node.
    fields.clear();
    BType refType = Types.getReferredType(type);
    return refType.tag == TypeTags.RECORD ?
            new BLangStructLiteral(pos, type, refType.tsymbol, rewrittenFields) :
            new BLangMapLiteral(pos, type, rewrittenFields);
}
/**
 * Adds an import of the internal transaction module (aliased {@code trx}) to the current
 * package, unless the current package is the internal transaction module itself. Both the
 * AST import declaration and the package symbol's import list are updated.
 */
protected void addTransactionInternalModuleImport() {
    if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
        BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
        List<BLangIdentifier> pkgNameComps = new ArrayList<>();
        pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
        importDcl.pkgNameComps = pkgNameComps;
        importDcl.pos = env.enclPkg.symbol.pos;
        importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
        importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
        // No explicit version: resolved via the pre-bound internal module symbol below.
        importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
        importDcl.symbol = symTable.internalTransactionModuleSymbol;
        env.enclPkg.imports.add(importDcl);
        env.enclPkg.symbol.imports.add(importDcl.symbol);
    }
}
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} |
Create application is only intended to be called before the first instance for a given application name is deployed. The instance id validation must be performed somewhere else. | public Application createApplication(ApplicationId id, Optional<NToken> token) {
if ( ! (id.instance().value().equals("default") || id.instance().value().matches("\\d+")))
throw new UnsupportedOperationException("Only the instance names 'default' and names which are just the PR number are supported at the moment");
try (Lock lock = lock(id)) {
com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId.validate(id.application().value());
Optional<Tenant> tenant = controller.tenants().tenant(new TenantId(id.tenant().value()));
if ( ! tenant.isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': This tenant does not exist");
if (get(id).isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': Application already exists");
if (get(dashToUnderscore(id)).isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': Application " + dashToUnderscore(id) + " already exists");
if (tenant.get().isAthensTenant()) {
if ( ! token.isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': No NToken provided");
ZmsClient zmsClient = zmsClientFactory.createZmsClientWithAuthorizedServiceToken(token.get());
try {
zmsClient.deleteApplication(tenant.get().getAthensDomain().get(),
new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(id.application().value()));
}
catch (ZmsException ignored) { }
zmsClient.addApplication(tenant.get().getAthensDomain().get(),
new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(id.application().value()));
}
LockedApplication application = new LockedApplication(new Application(id), lock);
store(application);
log.info("Created " + application);
return application;
}
} | if ( ! (id.instance().value().equals("default") || id.instance().value().matches("\\d+"))) | public Application createApplication(ApplicationId id, Optional<NToken> token) {
if ( ! (id.instance().isDefault() || id.instance().value().matches("\\d+")))
throw new UnsupportedOperationException("Only the instance names 'default' and names which are just the PR number are supported at the moment");
try (Lock lock = lock(id)) {
if (asList(id.tenant()).stream().noneMatch(application -> application.id().application().equals(id.application())))
com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId.validate(id.application().value());
Optional<Tenant> tenant = controller.tenants().tenant(new TenantId(id.tenant().value()));
if ( ! tenant.isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': This tenant does not exist");
if (get(id).isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': Application already exists");
if (get(dashToUnderscore(id)).isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': Application " + dashToUnderscore(id) + " already exists");
if (id.instance().isDefault() && tenant.get().isAthensTenant()) {
if ( ! token.isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': No NToken provided");
ZmsClient zmsClient = zmsClientFactory.createZmsClientWithAuthorizedServiceToken(token.get());
zmsClient.addApplication(tenant.get().getAthensDomain().get(),
new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(id.application().value()));
}
LockedApplication application = new LockedApplication(new Application(id), lock);
store(application);
log.info("Created " + application);
return application;
}
} | class ApplicationController {
private static final Logger log = Logger.getLogger(ApplicationController.class.getName());
/** The controller owning this */
private final Controller controller;
/** For permanent storage */
private final ControllerDb db;
/** For working memory storage and sharing between controllers */
private final CuratorDb curator;
private final ArtifactRepository artifactRepository;
private final RotationRepository rotationRepository;
private final AthenzClientFactory zmsClientFactory;
private final NameService nameService;
private final ConfigServerClient configserverClient;
private final RoutingGenerator routingGenerator;
private final Clock clock;
private final DeploymentTrigger deploymentTrigger;
ApplicationController(Controller controller, ControllerDb db, CuratorDb curator,
AthenzClientFactory zmsClientFactory, RotationsConfig rotationsConfig,
NameService nameService, ConfigServerClient configserverClient,
ArtifactRepository artifactRepository,
RoutingGenerator routingGenerator, Clock clock) {
this.controller = controller;
this.db = db;
this.curator = curator;
this.zmsClientFactory = zmsClientFactory;
this.nameService = nameService;
this.configserverClient = configserverClient;
this.routingGenerator = routingGenerator;
this.clock = clock;
this.artifactRepository = artifactRepository;
this.rotationRepository = new RotationRepository(rotationsConfig, this, curator);
this.deploymentTrigger = new DeploymentTrigger(controller, curator, clock);
for (Application application : db.listApplications()) {
lockIfPresent(application.id(), this::store);
}
}
/** Returns the application with the given id, or null if it is not present */
public Optional<Application> get(ApplicationId id) {
return db.getApplication(id);
}
/**
* Returns the application with the given id
*
* @throws IllegalArgumentException if it does not exist
*/
public Application require(ApplicationId id) {
return get(id).orElseThrow(() -> new IllegalArgumentException(id + " not found"));
}
/** Returns a snapshot of all applications */
public List<Application> asList() {
return db.listApplications();
}
/** Returns all applications of a tenant */
public List<Application> asList(TenantName tenant) {
return db.listApplications(new TenantId(tenant.value()));
}
/**
* Set the rotations marked as 'global' either 'in' or 'out of' service.
*
* @return The canonical endpoint altered if any
* @throws IOException if rotation status cannot be updated
*/
public List<String> setGlobalRotationStatus(DeploymentId deploymentId, EndpointStatus status) throws IOException {
List<String> rotations = new ArrayList<>();
Optional<String> endpoint = getCanonicalGlobalEndpoint(deploymentId);
if (endpoint.isPresent()) {
configserverClient.setGlobalRotationStatus(deploymentId, endpoint.get(), status);
rotations.add(endpoint.get());
}
return rotations;
}
/**
* Get the endpoint status for the global endpoint of this application
*
* @return Map between the endpoint and the rotation status
* @throws IOException if global rotation status cannot be determined
*/
public Map<String, EndpointStatus> getGlobalRotationStatus(DeploymentId deploymentId) throws IOException {
Map<String, EndpointStatus> result = new HashMap<>();
Optional<String> endpoint = getCanonicalGlobalEndpoint(deploymentId);
if (endpoint.isPresent()) {
EndpointStatus status = configserverClient.getGlobalRotationStatus(deploymentId, endpoint.get());
result.put(endpoint.get(), status);
}
return result;
}
/**
* Global rotations (plural as we can have aliases) map to exactly one service endpoint.
* This method finds that one service endpoint and strips the URI part that
* the routingGenerator is wrapping around the endpoint.
*
* @param deploymentId The deployment to retrieve global service endpoint for
* @return Empty if no global endpoint exist, otherwise the service endpoint ([clustername.]app.tenant.region.env)
*/
Optional<String> getCanonicalGlobalEndpoint(DeploymentId deploymentId) throws IOException {
Map<String, RoutingEndpoint> hostToGlobalEndpoint = new HashMap<>();
Map<String, String> hostToCanonicalEndpoint = new HashMap<>();
for (RoutingEndpoint endpoint : routingGenerator.endpoints(deploymentId)) {
try {
URI uri = new URI(endpoint.getEndpoint());
String serviceEndpoint = uri.getHost();
if (serviceEndpoint == null) {
throw new IOException("Unexpected endpoints returned from the Routing Generator");
}
String canonicalEndpoint = serviceEndpoint.replaceAll(".vespa.yahooapis.com", "");
String hostname = endpoint.getHostname();
if (hostname != null) {
if (endpoint.isGlobal()) {
hostToGlobalEndpoint.put(hostname, endpoint);
} else {
hostToCanonicalEndpoint.put(hostname, canonicalEndpoint);
}
if (hostToGlobalEndpoint.containsKey(hostname) && hostToCanonicalEndpoint.containsKey(hostname)) {
return Optional.of(hostToCanonicalEndpoint.get(hostname));
}
}
} catch (URISyntaxException use) {
throw new IOException(use);
}
}
return Optional.empty();
}
/**
* Creates a new application for an existing tenant.
*
* @throws IllegalArgumentException if the application already exists
*/
/** Deploys an application. If the application does not exist it is created. */
public ActivateResult deployApplication(ApplicationId applicationId, ZoneId zone,
Optional<ApplicationPackage> applicationPackageFromDeployer,
DeployOptions options,
Optional<NToken> token) {
try (Lock lock = lock(applicationId)) {
LockedApplication application = get(applicationId)
.map(app -> new LockedApplication(app, lock))
.orElseGet(() -> new LockedApplication(createApplication(applicationId, token), lock));
Version version;
if (options.deployCurrentVersion) {
version = application.versionIn(zone, controller);
} else if (canDeployDirectlyTo(zone, options)) {
version = options.vespaVersion.map(Version::new).orElse(controller.systemVersion());
} else if (! application.change().isPresent() && ! zone.environment().isManuallyDeployed()) {
return unexpectedDeployment(applicationId, zone, applicationPackageFromDeployer);
} else {
version = application.deployVersionIn(zone, controller);
}
ApplicationVersion applicationVersion;
ApplicationPackage applicationPackage;
Optional<DeploymentJobs.JobType> job = DeploymentJobs.JobType.from(controller.system(), zone);
if (canDownloadReportedApplicationVersion(application) && !canDeployDirectlyTo(zone, options)) {
if (!job.isPresent()) {
throw new IllegalArgumentException("Cannot determine job for zone " + zone);
}
applicationVersion = application.deployApplicationVersion(job.get(), controller,
options.deployCurrentVersion)
.orElseThrow(() -> new IllegalArgumentException("Cannot determine application version for " + applicationId));
if (canDownloadArtifact(applicationVersion)) {
applicationPackage = new ApplicationPackage(
artifactRepository.getApplicationPackage(applicationId, applicationVersion.id())
);
} else {
applicationPackage = applicationPackageFromDeployer.orElseThrow(
() -> new IllegalArgumentException("Application package with version " +
applicationVersion.id() + " cannot be downloaded, and " +
"no package was given by deployer"));
}
} else {
applicationPackage = applicationPackageFromDeployer.orElseThrow(
() -> new IllegalArgumentException("Application package must be given as new application " +
"version is not known for " + applicationId)
);
applicationVersion = toApplicationPackageRevision(applicationPackage, options.screwdriverBuildJob);
}
validate(applicationPackage.deploymentSpec());
if (!options.deployCurrentVersion && !canDownloadReportedApplicationVersion(application)) {
if (application.change().application().isPresent()) {
application = application.withChange(application.change().with(applicationVersion));
}
if (!canDeployDirectlyTo(zone, options) && job.isPresent()) {
JobStatus.JobRun triggering = getOrCreateTriggering(application, version, job.get());
application = application.withJobTriggering(job.get(),
application.change(),
triggering.at(),
version,
applicationVersion,
triggering.reason());
}
}
if (!options.deployCurrentVersion) {
application = application.with(applicationPackage.deploymentSpec());
application = application.with(applicationPackage.validationOverrides());
application = deleteRemovedDeployments(application);
application = deleteUnreferencedDeploymentJobs(application);
store(application);
}
if (!canDeployDirectlyTo(zone, options)) {
if (!application.deploymentJobs().isDeployableTo(zone.environment(), application.change())) {
throw new IllegalArgumentException("Rejecting deployment of " + application + " to " + zone +
" as " + application.change() + " is not tested");
}
Deployment existingDeployment = application.deployments().get(zone);
if (zone.environment().isProduction() && existingDeployment != null &&
existingDeployment.version().isAfter(version)) {
throw new IllegalArgumentException("Rejecting deployment of " + application + " to " + zone +
" as the requested version " + version + " is older than" +
" the current version " + existingDeployment.version());
}
}
application = withRotation(application, zone);
Set<String> rotationNames = new HashSet<>();
Set<String> cnames = new HashSet<>();
application.rotation().ifPresent(applicationRotation -> {
rotationNames.add(applicationRotation.id().asString());
cnames.add(applicationRotation.dnsName());
cnames.add(applicationRotation.secureDnsName());
});
options = withVersion(version, options);
ConfigServerClient.PreparedApplication preparedApplication =
configserverClient.prepare(new DeploymentId(applicationId, zone), options, cnames, rotationNames,
applicationPackage.zippedContent());
preparedApplication.activate();
application = application.withNewDeployment(zone, applicationVersion, version, clock.instant());
store(application);
return new ActivateResult(new RevisionId(applicationPackage.hash()), preparedApplication.prepareResponse(),
applicationPackage.zippedContent().length);
}
}
/** Makes sure the application has a global rotation, if eligible. */
private LockedApplication withRotation(LockedApplication application, ZoneId zone) {
if (zone.environment() == Environment.prod && application.deploymentSpec().globalServiceId().isPresent()) {
try (RotationLock rotationLock = rotationRepository.lock()) {
Rotation rotation = rotationRepository.getRotation(application, rotationLock);
application = application.with(rotation.id());
store(application);
registerRotationInDns(rotation, application.rotation().get().dnsName());
registerRotationInDns(rotation, application.rotation().get().secureDnsName());
}
}
return application;
}
/**
 * Builds a no-op ActivateResult carrying a WARNING log entry, returned when a
 * deployment request arrives while no deployment is currently expected.
 */
private ActivateResult unexpectedDeployment(ApplicationId applicationId, ZoneId zone,
                                            Optional<ApplicationPackage> applicationPackage) {
    Log logEntry = new Log();
    logEntry.level = "WARNING";
    logEntry.time = clock.instant().toEpochMilli();
    logEntry.message = "Ignoring deployment of " + require(applicationId) + " to " + zone +
                       " as a deployment is not currently expected";
    PrepareResponse prepareResponse = new PrepareResponse();
    prepareResponse.log = Collections.singletonList(logEntry);
    prepareResponse.configChangeActions = new ConfigChangeActions(Collections.emptyList(), Collections.emptyList());
    // Revision falls back to "0" when no application package was supplied at all
    return new ActivateResult(new RevisionId(applicationPackage.map(ApplicationPackage::hash)
                                                               .orElse("0")), prepareResponse,
                              applicationPackage.map(a -> a.zippedContent().length).orElse(0));
}
/**
 * Deactivates production deployments in zones no longer listed in the deployment spec.
 * Removal must be explicitly allowed via a deploymentRemoval validation override.
 */
private LockedApplication deleteRemovedDeployments(LockedApplication application) {
    List<Deployment> deploymentsToRemove = application.productionDeployments().values().stream()
            .filter(deployment -> ! application.deploymentSpec().includes(deployment.zone().environment(),
                                                                          Optional.of(deployment.zone().region())))
            .collect(Collectors.toList());
    if (deploymentsToRemove.isEmpty()) return application;
    if ( ! application.validationOverrides().allows(ValidationId.deploymentRemoval, clock.instant()))
        throw new IllegalArgumentException(ValidationId.deploymentRemoval.value() + ": " + application +
                                           " is deployed in " +
                                           deploymentsToRemove.stream()
                                                              .map(deployment -> deployment.zone().region().value())
                                                              .collect(Collectors.joining(", ")) +
                                           ", but does not include " +
                                           (deploymentsToRemove.size() > 1 ? "these zones" : "this zone") +
                                           " in deployment.xml");
    // Deactivate each removed zone; work on a copy since deactivate returns a new instance
    LockedApplication applicationWithRemoval = application;
    for (Deployment deployment : deploymentsToRemove)
        applicationWithRemoval = deactivate(applicationWithRemoval, deployment.zone());
    return applicationWithRemoval;
}
/**
 * Removes job status for production jobs whose zone is no longer part of the
 * deployment spec. Non-production jobs are always kept.
 */
private LockedApplication deleteUnreferencedDeploymentJobs(LockedApplication application) {
    // Collect first, then remove, instead of removing while iterating the key set
    List<DeploymentJobs.JobType> obsoleteJobs = new ArrayList<>();
    for (DeploymentJobs.JobType job : application.deploymentJobs().jobStatus().keySet()) {
        if ( ! job.isProduction()) continue; // only production jobs are tied to spec zones
        Optional<ZoneId> zone = job.zone(controller.system());
        boolean referencedBySpec = zone.isPresent()
                && application.deploymentSpec().includes(zone.get().environment(), zone.map(ZoneId::region));
        if ( ! referencedBySpec)
            obsoleteJobs.add(job);
    }
    for (DeploymentJobs.JobType obsoleteJob : obsoleteJobs)
        application = application.withoutDeploymentJob(obsoleteJob);
    return application;
}
/**
 * Returns the existing triggering of the given type from this application,
 * or an incomplete one created in this method if none is present.
 * This is needed (only) in the case where some external entity triggers a job.
 */
private JobStatus.JobRun getOrCreateTriggering(Application application, Version version, DeploymentJobs.JobType jobType) {
    JobStatus status = application.deploymentJobs().jobStatus().get(jobType);
    if (status != null && status.lastTriggered().isPresent())
        return status.lastTriggered().get();
    // Never triggered (or unknown job): synthesize a placeholder triggering
    return incompleteTriggeringEvent(version);
}
/** Creates a placeholder triggering (run id -1, unknown application version) for externally triggered jobs. */
private JobStatus.JobRun incompleteTriggeringEvent(Version version) {
    return new JobStatus.JobRun(-1, version, ApplicationVersion.unknown, false, "", clock.instant());
}
/** Returns a copy of the given options with the platform version set to the given version. */
private DeployOptions withVersion(Version version, DeployOptions options) {
    return new DeployOptions(options.screwdriverBuildJob,
                             Optional.of(version),
                             options.ignoreValidationErrors,
                             options.deployCurrentVersion);
}
/**
 * Derives an ApplicationVersion from the package hash, attaching source revision
 * information when the build job supplies a complete git revision.
 */
private ApplicationVersion toApplicationPackageRevision(ApplicationPackage applicationPackage,
                                                        Optional<ScrewdriverBuildJob> buildJob) {
    if ( ! buildJob.isPresent())
        return ApplicationVersion.from(applicationPackage.hash());
    GitRevision gitRevision = buildJob.get().gitRevision;
    // Fall back to hash-only when any part of the revision is missing
    if (gitRevision.repository == null || gitRevision.branch == null || gitRevision.commit == null)
        return ApplicationVersion.from(applicationPackage.hash());
    return ApplicationVersion.from(applicationPackage.hash(), new SourceRevision(gitRevision.repository.id(),
                                                                                 gitRevision.branch.id(),
                                                                                 gitRevision.commit.id()));
}
/** Register a DNS name for rotation */
private void registerRotationInDns(Rotation rotation, String dnsName) {
    try {
        Optional<Record> record = nameService.findRecord(Record.Type.CNAME, RecordName.from(dnsName));
        RecordData rotationName = RecordData.fqdn(rotation.name());
        if (record.isPresent()) {
            // Only update the record if it no longer points at this rotation
            if ( ! record.get().data().equals(rotationName)) {
                nameService.updateRecord(record.get().id(), rotationName);
                log.info("Updated mapping for record ID " + record.get().id().asString() + ": '" + dnsName
                         + "' -> '" + rotation.name() + "'");
            }
        } else {
            RecordId id = nameService.createCname(RecordName.from(dnsName), rotationName);
            log.info("Registered mapping with record ID " + id.asString() + ": '" + dnsName + "' -> '"
                     + rotation.name() + "'");
        }
    } catch (RuntimeException e) {
        // Best effort: a DNS registration failure is logged but does not fail the caller
        log.log(Level.WARNING, "Failed to register CNAME", e);
    }
}
/** Returns the endpoints of the deployment, or empty if obtaining them failed */
public Optional<InstanceEndpoints> getDeploymentEndpoints(DeploymentId deploymentId) {
    try {
        List<RoutingEndpoint> endpoints = routingGenerator.endpoints(deploymentId);
        List<URI> endPointUrls = new ArrayList<>();
        for (RoutingEndpoint endpoint : endpoints) {
            try {
                endPointUrls.add(new URI(endpoint.getEndpoint()));
            } catch (URISyntaxException e) {
                // A malformed endpoint indicates a routing generator bug; surface it as unchecked
                throw new RuntimeException("Routing generator returned illegal url's", e);
            }
        }
        return Optional.of(new InstanceEndpoints(endPointUrls));
    }
    catch (RuntimeException e) {
        // Degrade gracefully: callers treat empty as "endpoints unavailable"
        log.log(Level.WARNING, "Failed to get endpoint information for " + deploymentId + ": "
                               + Exceptions.toMessageString(e));
        return Optional.empty();
    }
}
/**
 * Deletes the given application. All known instances of the applications will be deleted,
 * including PR instances.
 *
 * @throws IllegalArgumentException if the application has deployments or the caller is not authorized
 * @throws NotExistsException if no instances of the application exist
 */
public void deleteApplication(ApplicationId applicationId, Optional<NToken> token) {
    // Collect every instance sharing this tenant and application name
    List<ApplicationId> instances = controller.applications().asList(applicationId.tenant())
                                              .stream()
                                              .map(Application::id)
                                              .filter(id -> id.application().equals(applicationId.application()) &&
                                                            id.tenant().equals(applicationId.tenant()))
                                              .collect(Collectors.toList());
    if (instances.isEmpty()) {
        throw new NotExistsException("Could not delete application '" + applicationId + "': Application not found");
    }
    instances.forEach(id -> lockOrThrow(id, application -> {
        if ( ! application.deployments().isEmpty())
            throw new IllegalArgumentException("Could not delete '" + application + "': It has active deployments");
        Tenant tenant = controller.tenants().tenant(new TenantId(id.tenant().value())).get();
        if (tenant.isAthensTenant() && ! token.isPresent())
            throw new IllegalArgumentException("Could not delete '" + application + "': No NToken provided");
        // Only the default instance of an Athens tenant is registered in ZMS, so only it is deleted there
        if (id.instance().isDefault() && tenant.isAthensTenant()) {
            zmsClientFactory.createZmsClientWithAuthorizedServiceToken(token.get())
                            .deleteApplication(tenant.getAthensDomain().get(),
                                               new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(id.application().value()));
        }
        db.deleteApplication(id);
        log.info("Deleted " + application);
    }));
}
/**
 * Replace any previous version of this application by this instance
 *
 * @param application a locked application to store; the LockedApplication type is
 *                    presumably how exclusive access is guaranteed — see lock(ApplicationId)
 */
public void store(LockedApplication application) {
    db.store(application);
}
/**
 * Acquire a locked application to modify and store, if there is an application with the given id.
 * Does nothing when the application does not exist.
 *
 * @param applicationId ID of the application to lock and get.
 * @param action Function which acts on the locked application.
 */
public void lockIfPresent(ApplicationId applicationId, Consumer<LockedApplication> action) {
    try (Lock lock = lock(applicationId)) {
        get(applicationId).map(application -> new LockedApplication(application, lock)).ifPresent(action);
    }
}
/**
 * Acquire a locked application to modify and store, or throw an exception if no application has the given id.
 *
 * @param applicationId ID of the application to lock and require.
 * @param action Function which acts on the locked application.
 * @throws IllegalArgumentException when application does not exist.
 */
public void lockOrThrow(ApplicationId applicationId, Consumer<LockedApplication> action) {
    try (Lock lock = lock(applicationId)) {
        action.accept(new LockedApplication(require(applicationId), lock));
    }
}
/** Forwards a job completion report to the deployment trigger, ignoring reports for unknown applications. */
public void notifyJobCompletion(JobReport report) {
    if ( ! get(report.applicationId()).isPresent()) {
        log.log(Level.WARNING, "Ignoring completion of job of project '" + report.projectId() +
                               "': Unknown application '" + report.applicationId() + "'");
        return;
    }
    deploymentTrigger.triggerFromCompletion(report);
}
/**
 * Tells config server to schedule a restart of all nodes in this deployment
 *
 * @param hostname If non-empty, restart will only be scheduled for this host
 * @throws IllegalArgumentException if the deployment does not exist
 */
public void restart(DeploymentId deploymentId, Optional<Hostname> hostname) {
    try {
        configserverClient.restart(deploymentId, hostname);
    }
    catch (NoInstanceException e) {
        // Chain the original exception so the underlying failure is not lost
        throw new IllegalArgumentException("Could not restart " + deploymentId + ": No such deployment", e);
    }
}
/** Deactivate application in the given zone. No expiry check is applied. */
public void deactivate(Application application, ZoneId zone) {
    deactivate(application, zone, Optional.empty(), false);
}
/** Deactivate a known deployment of the given application, optionally only if it has expired. */
public void deactivate(Application application, Deployment deployment, boolean requireThatDeploymentHasExpired) {
    deactivate(application, deployment.zone(), Optional.of(deployment), requireThatDeploymentHasExpired);
}
/**
 * Deactivates the deployment in the given zone under the application lock,
 * unless expiry is required and the deployment has not yet expired.
 */
private void deactivate(Application application, ZoneId zone, Optional<Deployment> deployment,
                        boolean requireThatDeploymentHasExpired) {
    if (requireThatDeploymentHasExpired && deployment.isPresent()
        && ! DeploymentExpirer.hasExpired(controller.zoneRegistry(), deployment.get(), clock.instant()))
        return;
    lockOrThrow(application.id(), lockedApplication ->
            store(deactivate(lockedApplication, zone)));
}
/**
 * Deactivates a locked application without storing it
 *
 * @return the application with the deployment in the given zone removed
 */
private LockedApplication deactivate(LockedApplication application, ZoneId zone) {
    try {
        configserverClient.deactivate(new DeploymentId(application.id(), zone));
    }
    catch (NoInstanceException ignored) {
        // Already gone on the config server; removing it from our own state is all that remains
    }
    return application.withoutDeploymentIn(zone);
}
/** Returns the deployment trigger owned by this controller. */
public DeploymentTrigger deploymentTrigger() { return deploymentTrigger; }
/**
 * Returns an id equal to the given one, but with dashes in the application name
 * replaced by underscores. Uses the literal String#replace: replaceAll would
 * compile its argument as a regular expression for no benefit here.
 */
private ApplicationId dashToUnderscore(ApplicationId id) {
    return ApplicationId.from(id.tenant().value(),
                              id.application().value().replace('-', '_'),
                              id.instance().value());
}
/** Returns the client used to talk to the config servers. */
public ConfigServerClient configserverClient() { return configserverClient; }
/**
 * Returns a lock which provides exclusive rights to changing this application.
 * Any operation which stores an application need to first acquire this lock, then read, modify
 * and store the application, and finally release (close) the lock.
 */
Lock lock(ApplicationId application) {
    // Acquisition times out after 10 minutes rather than blocking indefinitely
    return curator.lock(application, Duration.ofMinutes(10));
}
/** Returns whether a direct deployment to given zone is allowed */
private static boolean canDeployDirectlyTo(ZoneId zone, DeployOptions options) {
    // Allowed unless the request comes from a screwdriver job carrying an id,
    // and even then for manually deployed environments
    boolean fromScrewdriver = options.screwdriverBuildJob.isPresent()
                              && options.screwdriverBuildJob.get().screwdriverId != null;
    return ! fromScrewdriver || zone.environment().isManuallyDeployed();
}
/** Returns whether artifact for given version number is available in artifact repository */
private static boolean canDownloadArtifact(ApplicationVersion applicationVersion) {
    // Both a build number and a source revision are required to locate the artifact
    return applicationVersion.buildNumber().isPresent() && applicationVersion.source().isPresent();
}
/** Returns whether component has reported a version number that is available in artifact repository */
private static boolean canDownloadReportedApplicationVersion(Application application) {
    return Optional.ofNullable(application.deploymentJobs().jobStatus().get(DeploymentJobs.JobType.component))
                   .flatMap(JobStatus::lastSuccess)
                   .map(JobStatus.JobRun::applicationVersion)
                   .filter(ApplicationController::canDownloadArtifact)
                   .isPresent();
}
/** Verify that each of the production zones listed in the deployment spec exist in this system. */
private void validate(DeploymentSpec deploymentSpec) {
    deploymentSpec.zones().stream()
                  .filter(zone -> zone.environment() == Environment.prod)
                  .forEach(zone -> {
                      if ( ! controller.zoneRegistry().hasZone(ZoneId.from(zone.environment(), zone.region().orElse(null))))
                          throw new IllegalArgumentException("Zone " + zone + " in deployment spec was not found in this system!");
                  });
}
/** Returns the repository managing assignment of global rotations to applications. */
public RotationRepository rotationRepository() {
    return rotationRepository;
}
} | class ApplicationController {
private static final Logger log = Logger.getLogger(ApplicationController.class.getName());
/** The controller owning this */
private final Controller controller;
/** For permanent storage */
private final ControllerDb db;
/** For working memory storage and sharing between controllers */
private final CuratorDb curator;
/** Source of deployable application package artifacts */
private final ArtifactRepository artifactRepository;
/** Manages assignment of global rotations to applications */
private final RotationRepository rotationRepository;
/** Used for ZMS application deletion for Athens tenants */
private final AthenzClientFactory zmsClientFactory;
/** Maintains CNAME records for global rotations */
private final NameService nameService;
private final ConfigServerClient configserverClient;
private final RoutingGenerator routingGenerator;
/** Injected clock, allowing tests to control time */
private final Clock clock;
private final DeploymentTrigger deploymentTrigger;
/** Creates an application controller. Re-stores all persisted applications on construction. */
ApplicationController(Controller controller, ControllerDb db, CuratorDb curator,
                      AthenzClientFactory zmsClientFactory, RotationsConfig rotationsConfig,
                      NameService nameService, ConfigServerClient configserverClient,
                      ArtifactRepository artifactRepository,
                      RoutingGenerator routingGenerator, Clock clock) {
    this.controller = controller;
    this.db = db;
    this.curator = curator;
    this.zmsClientFactory = zmsClientFactory;
    this.nameService = nameService;
    this.configserverClient = configserverClient;
    this.routingGenerator = routingGenerator;
    this.clock = clock;
    this.artifactRepository = artifactRepository;
    this.rotationRepository = new RotationRepository(rotationsConfig, this, curator);
    this.deploymentTrigger = new DeploymentTrigger(controller, curator, clock);
    // Re-write every stored application on startup; presumably to migrate stored
    // data to the current serialization format — TODO confirm
    for (Application application : db.listApplications()) {
        lockIfPresent(application.id(), this::store);
    }
}
/** Returns the application with the given id, or empty if it is not present */
public Optional<Application> get(ApplicationId id) {
    return db.getApplication(id);
}
/**
 * Returns the application with the given id
 *
 * @throws IllegalArgumentException if it does not exist
 */
public Application require(ApplicationId id) {
    return get(id).orElseThrow(() -> new IllegalArgumentException(id + " not found"));
}
/** Returns a snapshot of all applications */
public List<Application> asList() {
    return db.listApplications();
}
/** Returns all applications of a tenant */
public List<Application> asList(TenantName tenant) {
    return db.listApplications(new TenantId(tenant.value()));
}
/**
 * Set the rotations marked as 'global' either 'in' or 'out of' service.
 *
 * @return The canonical endpoint altered if any
 * @throws IOException if rotation status cannot be updated
 */
public List<String> setGlobalRotationStatus(DeploymentId deploymentId, EndpointStatus status) throws IOException {
    List<String> rotations = new ArrayList<>();
    Optional<String> endpoint = getCanonicalGlobalEndpoint(deploymentId);
    if (endpoint.isPresent()) {
        configserverClient.setGlobalRotationStatus(deploymentId, endpoint.get(), status);
        rotations.add(endpoint.get());
    }
    // Empty list when the deployment has no global endpoint
    return rotations;
}
/**
 * Get the endpoint status for the global endpoint of this application
 *
 * @return Map between the endpoint and the rotation status
 * @throws IOException if global rotation status cannot be determined
 */
public Map<String, EndpointStatus> getGlobalRotationStatus(DeploymentId deploymentId) throws IOException {
    Map<String, EndpointStatus> result = new HashMap<>();
    Optional<String> endpoint = getCanonicalGlobalEndpoint(deploymentId);
    if (endpoint.isPresent()) {
        EndpointStatus status = configserverClient.getGlobalRotationStatus(deploymentId, endpoint.get());
        result.put(endpoint.get(), status);
    }
    // Empty map when the deployment has no global endpoint
    return result;
}
/**
 * Global rotations (plural as we can have aliases) map to exactly one service endpoint.
 * This method finds that one service endpoint and strips the URI part that
 * the routingGenerator is wrapping around the endpoint.
 *
 * @param deploymentId The deployment to retrieve global service endpoint for
 * @return Empty if no global endpoint exist, otherwise the service endpoint ([clustername.]app.tenant.region.env)
 * @throws IOException if the routing generator returns an endpoint without a host
 */
Optional<String> getCanonicalGlobalEndpoint(DeploymentId deploymentId) throws IOException {
    Map<String, RoutingEndpoint> hostToGlobalEndpoint = new HashMap<>();
    Map<String, String> hostToCanonicalEndpoint = new HashMap<>();
    for (RoutingEndpoint endpoint : routingGenerator.endpoints(deploymentId)) {
        try {
            URI uri = new URI(endpoint.getEndpoint());
            String serviceEndpoint = uri.getHost();
            if (serviceEndpoint == null) {
                throw new IOException("Unexpected endpoints returned from the Routing Generator");
            }
            // Use the literal String#replace: replaceAll compiles its argument as a regex,
            // where the unescaped dots match any character and could strip unintended substrings
            String canonicalEndpoint = serviceEndpoint.replace(".vespa.yahooapis.com", "");
            String hostname = endpoint.getHostname();
            if (hostname != null) {
                if (endpoint.isGlobal()) {
                    hostToGlobalEndpoint.put(hostname, endpoint);
                } else {
                    hostToCanonicalEndpoint.put(hostname, canonicalEndpoint);
                }
                // A host carrying both a global and a non-global endpoint identifies the service
                if (hostToGlobalEndpoint.containsKey(hostname) && hostToCanonicalEndpoint.containsKey(hostname)) {
                    return Optional.of(hostToCanonicalEndpoint.get(hostname));
                }
            }
        } catch (URISyntaxException use) {
            throw new IOException(use);
        }
    }
    return Optional.empty();
}
/*
 * Note: there is no separate application-creation method here; deployApplication
 * below creates the application implicitly when it does not already exist.
 */
/**
 * Deploys an application. If the application does not exist it is created.
 *
 * @param applicationId the application to deploy
 * @param zone the zone to deploy to
 * @param applicationPackageFromDeployer the application package given by the deployer, if any
 * @param options deployment options
 * @return the result of activating the prepared deployment
 */
public ActivateResult deployApplication(ApplicationId applicationId, ZoneId zone,
                                        Optional<ApplicationPackage> applicationPackageFromDeployer,
                                        DeployOptions options) {
    try (Lock lock = lock(applicationId)) {
        // Create the application on first deployment
        LockedApplication application = get(applicationId)
                .map(app -> new LockedApplication(app, lock))
                .orElseGet(() -> new LockedApplication(createApplication(applicationId, Optional.empty()), lock));
        // Decide which platform version to deploy
        Version version;
        if (options.deployCurrentVersion) {
            version = application.versionIn(zone, controller);
        } else if (canDeployDirectlyTo(zone, options)) {
            version = options.vespaVersion.map(Version::new).orElse(controller.systemVersion());
        } else if (! application.change().isPresent() && ! zone.environment().isManuallyDeployed()) {
            // No change in progress and not a manual deployment: nothing should be deployed now
            return unexpectedDeployment(applicationId, zone, applicationPackageFromDeployer);
        } else {
            version = application.deployVersionIn(zone, controller);
        }
        // Resolve the application package and its version, preferring the artifact repository
        ApplicationVersion applicationVersion;
        ApplicationPackage applicationPackage;
        Optional<DeploymentJobs.JobType> job = DeploymentJobs.JobType.from(controller.system(), zone);
        if (canDownloadReportedApplicationVersion(application) && !canDeployDirectlyTo(zone, options)) {
            if (!job.isPresent()) {
                throw new IllegalArgumentException("Cannot determine job for zone " + zone);
            }
            applicationVersion = application.deployApplicationVersion(job.get(), controller,
                                                                     options.deployCurrentVersion)
                    .orElseThrow(() -> new IllegalArgumentException("Cannot determine application version for " + applicationId));
            if (canDownloadArtifact(applicationVersion)) {
                applicationPackage = new ApplicationPackage(
                        artifactRepository.getApplicationPackage(applicationId, applicationVersion.id())
                );
            } else {
                applicationPackage = applicationPackageFromDeployer.orElseThrow(
                        () -> new IllegalArgumentException("Application package with version " +
                                                           applicationVersion.id() + " cannot be downloaded, and " +
                                                           "no package was given by deployer"));
            }
        } else {
            applicationPackage = applicationPackageFromDeployer.orElseThrow(
                    () -> new IllegalArgumentException("Application package must be given as new application " +
                                                       "version is not known for " + applicationId)
            );
            applicationVersion = toApplicationPackageRevision(applicationPackage, options.screwdriverBuildJob);
        }
        validate(applicationPackage.deploymentSpec());
        // Record the triggering of this job, unless deploying the current version
        if (!options.deployCurrentVersion && !canDownloadReportedApplicationVersion(application)) {
            if (application.change().application().isPresent()) {
                application = application.withChange(application.change().with(applicationVersion));
            }
            if (!canDeployDirectlyTo(zone, options) && job.isPresent()) {
                JobStatus.JobRun triggering = getOrCreateTriggering(application, version, job.get());
                application = application.withJobTriggering(job.get(),
                                                            application.change(),
                                                            triggering.at(),
                                                            version,
                                                            applicationVersion,
                                                            triggering.reason());
            }
        }
        // Apply the new package contents, prune stale deployments/jobs, and persist
        // before attempting the actual deployment
        if (!options.deployCurrentVersion) {
            application = application.with(applicationPackage.deploymentSpec());
            application = application.with(applicationPackage.validationOverrides());
            application = deleteRemovedDeployments(application);
            application = deleteUnreferencedDeploymentJobs(application);
            store(application);
        }
        // Reject deployments the change/version state does not allow
        if (!canDeployDirectlyTo(zone, options)) {
            if (!application.deploymentJobs().isDeployableTo(zone.environment(), application.change())) {
                throw new IllegalArgumentException("Rejecting deployment of " + application + " to " + zone +
                                                   " as " + application.change() + " is not tested");
            }
            Deployment existingDeployment = application.deployments().get(zone);
            if (zone.environment().isProduction() && existingDeployment != null &&
                existingDeployment.version().isAfter(version)) {
                throw new IllegalArgumentException("Rejecting deployment of " + application + " to " + zone +
                                                   " as the requested version " + version + " is older than" +
                                                   " the current version " + existingDeployment.version());
            }
        }
        // Assign a global rotation if eligible, and pass its names to the config server
        application = withRotation(application, zone);
        Set<String> rotationNames = new HashSet<>();
        Set<String> cnames = new HashSet<>();
        application.rotation().ifPresent(applicationRotation -> {
            rotationNames.add(applicationRotation.id().asString());
            cnames.add(applicationRotation.dnsName());
            cnames.add(applicationRotation.secureDnsName());
        });
        // Carry out the deployment: prepare and activate on the config server
        options = withVersion(version, options);
        ConfigServerClient.PreparedApplication preparedApplication =
                configserverClient.prepare(new DeploymentId(applicationId, zone), options, cnames, rotationNames,
                                           applicationPackage.zippedContent());
        preparedApplication.activate();
        application = application.withNewDeployment(zone, applicationVersion, version, clock.instant());
        store(application);
        return new ActivateResult(new RevisionId(applicationPackage.hash()), preparedApplication.prepareResponse(),
                                  applicationPackage.zippedContent().length);
    }
}
/** Makes sure the application has a global rotation, if eligible. */
private LockedApplication withRotation(LockedApplication application, ZoneId zone) {
    // Only production deployments of applications declaring a global service id get a rotation
    if (zone.environment() == Environment.prod && application.deploymentSpec().globalServiceId().isPresent()) {
        try (RotationLock rotationLock = rotationRepository.lock()) {
            Rotation rotation = rotationRepository.getRotation(application, rotationLock);
            application = application.with(rotation.id());
            // Persist the rotation assignment before touching DNS, so a DNS failure does not lose it
            store(application);
            registerRotationInDns(rotation, application.rotation().get().dnsName());
            registerRotationInDns(rotation, application.rotation().get().secureDnsName());
        }
    }
    return application;
}
/**
 * Builds a no-op ActivateResult carrying a WARNING log entry, returned when a
 * deployment request arrives while no deployment is currently expected.
 */
private ActivateResult unexpectedDeployment(ApplicationId applicationId, ZoneId zone,
                                            Optional<ApplicationPackage> applicationPackage) {
    Log logEntry = new Log();
    logEntry.level = "WARNING";
    logEntry.time = clock.instant().toEpochMilli();
    logEntry.message = "Ignoring deployment of " + require(applicationId) + " to " + zone +
                       " as a deployment is not currently expected";
    PrepareResponse prepareResponse = new PrepareResponse();
    prepareResponse.log = Collections.singletonList(logEntry);
    prepareResponse.configChangeActions = new ConfigChangeActions(Collections.emptyList(), Collections.emptyList());
    // Revision falls back to "0" when no application package was supplied at all
    return new ActivateResult(new RevisionId(applicationPackage.map(ApplicationPackage::hash)
                                                               .orElse("0")), prepareResponse,
                              applicationPackage.map(a -> a.zippedContent().length).orElse(0));
}
/**
 * Deactivates production deployments in zones no longer listed in the deployment spec.
 * Removal must be explicitly allowed via a deploymentRemoval validation override.
 */
private LockedApplication deleteRemovedDeployments(LockedApplication application) {
    List<Deployment> deploymentsToRemove = application.productionDeployments().values().stream()
            .filter(deployment -> ! application.deploymentSpec().includes(deployment.zone().environment(),
                                                                          Optional.of(deployment.zone().region())))
            .collect(Collectors.toList());
    if (deploymentsToRemove.isEmpty()) return application;
    if ( ! application.validationOverrides().allows(ValidationId.deploymentRemoval, clock.instant()))
        throw new IllegalArgumentException(ValidationId.deploymentRemoval.value() + ": " + application +
                                           " is deployed in " +
                                           deploymentsToRemove.stream()
                                                              .map(deployment -> deployment.zone().region().value())
                                                              .collect(Collectors.joining(", ")) +
                                           ", but does not include " +
                                           (deploymentsToRemove.size() > 1 ? "these zones" : "this zone") +
                                           " in deployment.xml");
    // Deactivate each removed zone; work on a copy since deactivate returns a new instance
    LockedApplication applicationWithRemoval = application;
    for (Deployment deployment : deploymentsToRemove)
        applicationWithRemoval = deactivate(applicationWithRemoval, deployment.zone());
    return applicationWithRemoval;
}
/**
 * Removes job status for production jobs whose zone is no longer part of the
 * deployment spec. Non-production jobs are always kept.
 */
private LockedApplication deleteUnreferencedDeploymentJobs(LockedApplication application) {
    // Collect first, then remove, instead of removing while iterating the key set
    List<DeploymentJobs.JobType> obsoleteJobs = new ArrayList<>();
    for (DeploymentJobs.JobType job : application.deploymentJobs().jobStatus().keySet()) {
        if ( ! job.isProduction()) continue; // only production jobs are tied to spec zones
        Optional<ZoneId> zone = job.zone(controller.system());
        boolean referencedBySpec = zone.isPresent()
                && application.deploymentSpec().includes(zone.get().environment(), zone.map(ZoneId::region));
        if ( ! referencedBySpec)
            obsoleteJobs.add(job);
    }
    for (DeploymentJobs.JobType obsoleteJob : obsoleteJobs)
        application = application.withoutDeploymentJob(obsoleteJob);
    return application;
}
/**
 * Returns the existing triggering of the given type from this application,
 * or an incomplete one created in this method if none is present.
 * This is needed (only) in the case where some external entity triggers a job.
 */
private JobStatus.JobRun getOrCreateTriggering(Application application, Version version, DeploymentJobs.JobType jobType) {
    JobStatus status = application.deploymentJobs().jobStatus().get(jobType);
    if (status != null && status.lastTriggered().isPresent())
        return status.lastTriggered().get();
    // Never triggered (or unknown job): synthesize a placeholder triggering
    return incompleteTriggeringEvent(version);
}
/** Creates a placeholder triggering (run id -1, unknown application version) for externally triggered jobs. */
private JobStatus.JobRun incompleteTriggeringEvent(Version version) {
    return new JobStatus.JobRun(-1, version, ApplicationVersion.unknown, false, "", clock.instant());
}
/** Returns a copy of the given options with the platform version set to the given version. */
private DeployOptions withVersion(Version version, DeployOptions options) {
    return new DeployOptions(options.screwdriverBuildJob,
                             Optional.of(version),
                             options.ignoreValidationErrors,
                             options.deployCurrentVersion);
}
/**
 * Derives an ApplicationVersion from the package hash, attaching source revision
 * information when the build job supplies a complete git revision.
 */
private ApplicationVersion toApplicationPackageRevision(ApplicationPackage applicationPackage,
                                                        Optional<ScrewdriverBuildJob> buildJob) {
    if ( ! buildJob.isPresent())
        return ApplicationVersion.from(applicationPackage.hash());
    GitRevision gitRevision = buildJob.get().gitRevision;
    // Fall back to hash-only when any part of the revision is missing
    if (gitRevision.repository == null || gitRevision.branch == null || gitRevision.commit == null)
        return ApplicationVersion.from(applicationPackage.hash());
    return ApplicationVersion.from(applicationPackage.hash(), new SourceRevision(gitRevision.repository.id(),
                                                                                 gitRevision.branch.id(),
                                                                                 gitRevision.commit.id()));
}
/** Register a DNS name for rotation */
private void registerRotationInDns(Rotation rotation, String dnsName) {
    try {
        Optional<Record> record = nameService.findRecord(Record.Type.CNAME, RecordName.from(dnsName));
        RecordData rotationName = RecordData.fqdn(rotation.name());
        if (record.isPresent()) {
            // Only update the record if it no longer points at this rotation
            if ( ! record.get().data().equals(rotationName)) {
                nameService.updateRecord(record.get().id(), rotationName);
                log.info("Updated mapping for record ID " + record.get().id().asString() + ": '" + dnsName
                         + "' -> '" + rotation.name() + "'");
            }
        } else {
            RecordId id = nameService.createCname(RecordName.from(dnsName), rotationName);
            log.info("Registered mapping with record ID " + id.asString() + ": '" + dnsName + "' -> '"
                     + rotation.name() + "'");
        }
    } catch (RuntimeException e) {
        // Best effort: a DNS registration failure is logged but does not fail the caller
        log.log(Level.WARNING, "Failed to register CNAME", e);
    }
}
/** Returns the endpoints of the deployment, or empty if obtaining them failed */
public Optional<InstanceEndpoints> getDeploymentEndpoints(DeploymentId deploymentId) {
    try {
        List<RoutingEndpoint> endpoints = routingGenerator.endpoints(deploymentId);
        List<URI> endPointUrls = new ArrayList<>();
        for (RoutingEndpoint endpoint : endpoints) {
            try {
                endPointUrls.add(new URI(endpoint.getEndpoint()));
            } catch (URISyntaxException e) {
                // A malformed endpoint indicates a routing generator bug; surface it as unchecked
                throw new RuntimeException("Routing generator returned illegal url's", e);
            }
        }
        return Optional.of(new InstanceEndpoints(endPointUrls));
    }
    catch (RuntimeException e) {
        // Degrade gracefully: callers treat empty as "endpoints unavailable"
        log.log(Level.WARNING, "Failed to get endpoint information for " + deploymentId + ": "
                               + Exceptions.toMessageString(e));
        return Optional.empty();
    }
}
/**
 * Deletes the given application. All known instances of the applications will be deleted,
 * including PR instances.
 *
 * @throws IllegalArgumentException if the application has deployments or the caller is not authorized
 * @throws NotExistsException if no instances of the application exist
 */
public void deleteApplication(ApplicationId applicationId, Optional<NToken> token) {
    // Collect every instance sharing this tenant and application name
    List<ApplicationId> instances = controller.applications().asList(applicationId.tenant())
                                              .stream()
                                              .map(Application::id)
                                              .filter(id -> id.application().equals(applicationId.application()) &&
                                                            id.tenant().equals(applicationId.tenant()))
                                              .collect(Collectors.toList());
    if (instances.isEmpty()) {
        throw new NotExistsException("Could not delete application '" + applicationId + "': Application not found");
    }
    instances.forEach(id -> lockOrThrow(id, application -> {
        if ( ! application.deployments().isEmpty())
            throw new IllegalArgumentException("Could not delete '" + application + "': It has active deployments");
        Tenant tenant = controller.tenants().tenant(new TenantId(id.tenant().value())).get();
        if (tenant.isAthensTenant() && ! token.isPresent())
            throw new IllegalArgumentException("Could not delete '" + application + "': No NToken provided");
        // Only the default instance of an Athens tenant is registered in ZMS, so only it is deleted there
        if (id.instance().isDefault() && tenant.isAthensTenant()) {
            zmsClientFactory.createZmsClientWithAuthorizedServiceToken(token.get())
                            .deleteApplication(tenant.getAthensDomain().get(),
                                               new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(id.application().value()));
        }
        db.deleteApplication(id);
        log.info("Deleted " + application);
    }));
}
/**
 * Replace any previous version of this application by this instance
 *
 * @param application a locked application to store; the LockedApplication type is
 *                    presumably how exclusive access is guaranteed — see lock(ApplicationId)
 */
public void store(LockedApplication application) {
    db.store(application);
}
/**
 * Acquire a locked application to modify and store, if there is an application with the given id.
 * Does nothing when the application does not exist.
 *
 * @param applicationId ID of the application to lock and get.
 * @param action Function which acts on the locked application.
 */
public void lockIfPresent(ApplicationId applicationId, Consumer<LockedApplication> action) {
    try (Lock lock = lock(applicationId)) {
        get(applicationId).map(application -> new LockedApplication(application, lock)).ifPresent(action);
    }
}
/**
 * Acquire a locked application to modify and store, or throw an exception if no application has the given id.
 *
 * @param applicationId ID of the application to lock and require.
 * @param action Function which acts on the locked application.
 * @throws IllegalArgumentException when application does not exist.
 */
public void lockOrThrow(ApplicationId applicationId, Consumer<LockedApplication> action) {
    try (Lock lock = lock(applicationId)) {
        action.accept(new LockedApplication(require(applicationId), lock));
    }
}
/** Forwards a job completion report to the deployment trigger, ignoring reports for unknown applications. */
public void notifyJobCompletion(JobReport report) {
    if ( ! get(report.applicationId()).isPresent()) {
        log.log(Level.WARNING, "Ignoring completion of job of project '" + report.projectId() +
                               "': Unknown application '" + report.applicationId() + "'");
        return;
    }
    deploymentTrigger.triggerFromCompletion(report);
}
/**
 * Tells config server to schedule a restart of all nodes in this deployment
 *
 * @param hostname If non-empty, restart will only be scheduled for this host
 * @throws IllegalArgumentException if the deployment does not exist
 */
public void restart(DeploymentId deploymentId, Optional<Hostname> hostname) {
    try {
        configserverClient.restart(deploymentId, hostname);
    }
    catch (NoInstanceException e) {
        // Chain the original exception so the underlying failure is not lost
        throw new IllegalArgumentException("Could not restart " + deploymentId + ": No such deployment", e);
    }
}
/** Deactivate application in the given zone. No expiry check is applied. */
public void deactivate(Application application, ZoneId zone) {
    deactivate(application, zone, Optional.empty(), false);
}
/** Deactivates a known deployment of the given application. */
public void deactivate(Application application, Deployment deployment, boolean requireThatDeploymentHasExpired) {
    Optional<Deployment> known = Optional.of(deployment);
    deactivate(application, deployment.zone(), known, requireThatDeploymentHasExpired);
}
/**
 * Deactivates the deployment in the given zone, optionally only if the known
 * deployment has passed its expiry time.
 */
private void deactivate(Application application, ZoneId zone, Optional<Deployment> deployment,
                        boolean requireThatDeploymentHasExpired) {
    if (requireThatDeploymentHasExpired && deployment.isPresent()) {
        boolean expired = DeploymentExpirer.hasExpired(controller.zoneRegistry(), deployment.get(), clock.instant());
        if (!expired)
            return; // Deployment is still within its lifetime; leave it running.
    }
    lockOrThrow(application.id(), lockedApplication ->
            store(deactivate(lockedApplication, zone)));
}
/**
 * Deactivates a locked application without storing it.
 *
 * @return the application with the deployment in the given zone removed
 */
private LockedApplication deactivate(LockedApplication application, ZoneId zone) {
    try {
        configserverClient.deactivate(new DeploymentId(application.id(), zone));
    }
    catch (NoInstanceException ignored) {
        // Nothing to deactivate on the config server; still drop it from our state below.
    }
    return application.withoutDeploymentIn(zone);
}
/** Returns the deployment trigger owned by this controller. */
public DeploymentTrigger deploymentTrigger() {
    return deploymentTrigger;
}
/**
 * Returns an id equal to the given one, but with every '-' in the application
 * name replaced by '_'.
 */
private ApplicationId dashToUnderscore(ApplicationId id) {
    // String.replace does a literal char swap; replaceAll would compile a regex on every call.
    return ApplicationId.from(id.tenant().value(),
                              id.application().value().replace('-', '_'),
                              id.instance().value());
}
/** Returns the config server client used by this controller. */
public ConfigServerClient configserverClient() {
    return configserverClient;
}
/**
 * Returns a lock which provides exclusive rights to changing this application.
 * Any operation which stores an application need to first acquire this lock, then read, modify
 * and store the application, and finally release (close) the lock.
 */
Lock lock(ApplicationId application) {
    Duration timeout = Duration.ofMinutes(10);
    return curator.lock(application, timeout);
}
/** Returns whether a direct deployment to given zone is allowed. */
private static boolean canDeployDirectlyTo(ZoneId zone, DeployOptions options) {
    // No screwdriver job, or a job without an id, is always allowed.
    if (!options.screwdriverBuildJob.isPresent())
        return true;
    if (options.screwdriverBuildJob.get().screwdriverId == null)
        return true;
    // Otherwise only manually deployed environments may be targeted directly.
    return zone.environment().isManuallyDeployed();
}
/** Returns whether artifact for given version number is available in artifact repository. */
private static boolean canDownloadArtifact(ApplicationVersion applicationVersion) {
    boolean hasBuildNumber = applicationVersion.buildNumber().isPresent();
    boolean hasSource = applicationVersion.source().isPresent();
    return hasBuildNumber && hasSource;
}
/** Returns whether component has reported a version number that is available in artifact repository. */
private static boolean canDownloadReportedApplicationVersion(Application application) {
    JobStatus componentStatus = application.deploymentJobs().jobStatus().get(DeploymentJobs.JobType.component);
    return Optional.ofNullable(componentStatus)
                   .flatMap(JobStatus::lastSuccess)
                   .map(JobStatus.JobRun::applicationVersion)
                   .filter(ApplicationController::canDownloadArtifact)
                   .isPresent();
}
/** Verify that each of the production zones listed in the deployment spec exist in this system. */
private void validate(DeploymentSpec deploymentSpec) {
    deploymentSpec.zones().stream()
                  .filter(zone -> zone.environment() == Environment.prod)
                  .filter(zone -> ! controller.zoneRegistry()
                                              .hasZone(ZoneId.from(zone.environment(), zone.region().orElse(null))))
                  .findFirst()
                  .ifPresent(zone -> {
                      throw new IllegalArgumentException("Zone " + zone + " in deployment spec was not found in this system!");
                  });
}
/** Returns the rotation repository owned by this controller. */
public RotationRepository rotationRepository() {
    return rotationRepository;
}
} |
Or even better to get rid of `annotCount` from `startLambdaFunctionDef` because Lambdas can't have annotations anyway. | void startWorker(PackageID pkgID) {
this.startLambdaFunctionDef(pkgID, 0);
BLangFunction lambdaFunction = (BLangFunction) this.invokableNodeStack.peek();
lambdaFunction.addFlag(Flag.WORKER);
this.startBlock();
} | this.startLambdaFunctionDef(pkgID, 0); | void startWorker(PackageID pkgID) {
this.startLambdaFunctionDef(pkgID);
BLangFunction lambdaFunction = (BLangFunction) this.invokableNodeStack.peek();
lambdaFunction.addFlag(Flag.WORKER);
this.startBlock();
} | class BLangPackageBuilder {
private CompilationUnitNode compUnit;
private Stack<BLangNameReference> nameReferenceStack = new Stack<>();
private Stack<TypeNode> typeNodeStack = new Stack<>();
private Stack<BlockNode> blockNodeStack = new Stack<>();
private Stack<BLangVariable> varStack = new Stack<>();
private Stack<List<BLangVariable>> varListStack = new Stack<>();
private Stack<List<BLangRecordVariableKeyValue>> recordVarListStack = new Stack<>();
private Stack<List<BLangRecordVarRefKeyValue>> recordVarRefListStack = new Stack<>();
private Stack<InvokableNode> invokableNodeStack = new Stack<>();
private Stack<ExpressionNode> exprNodeStack = new Stack<>();
private Stack<List<ExpressionNode>> exprNodeListStack = new Stack<>();
private Stack<Set<Whitespace>> commaWsStack = new Stack<>();
private Stack<Set<Whitespace>> invocationWsStack = new Stack<>();
private Stack<BLangRecordLiteral> recordLiteralNodes = new Stack<>();
private Stack<BLangTableLiteral> tableLiteralNodes = new Stack<>();
private Stack<BLangWaitForAllExpr> waitCollectionStack = new Stack<>();
private Stack<BLangTryCatchFinally> tryCatchFinallyNodesStack = new Stack<>();
private Stack<AnnotationNode> annotationStack = new Stack<>();
private Stack<MarkdownDocumentationNode> markdownDocumentationStack = new Stack<>();
private Stack<DeprecatedNode> deprecatedAttachmentStack = new Stack<>();
private Stack<AnnotationAttachmentNode> annotAttachmentStack = new Stack<>();
private Stack<IfNode> ifElseStatementStack = new Stack<>();
private Stack<TransactionNode> transactionNodeStack = new Stack<>();
private Stack<ForkJoinNode> forkJoinNodesStack = new Stack<>();
private Stack<ServiceNode> serviceNodeStack = new Stack<>();
private Stack<XMLAttributeNode> xmlAttributeNodeStack = new Stack<>();
private Stack<AttachPoint> attachPointStack = new Stack<>();
private Stack<OrderByNode> orderByClauseStack = new Stack<>();
private Stack<OrderByVariableNode> orderByVariableStack = new Stack<>();
private Stack<LimitNode> limitClauseStack = new Stack<>();
private Stack<GroupByNode> groupByClauseStack = new Stack<>();
private Stack<HavingNode> havingClauseStack = new Stack<>();
private Stack<WhereNode> whereClauseStack = new Stack<>();
private Stack<SelectExpressionNode> selectExpressionsStack = new Stack<>();
private Stack<List<SelectExpressionNode>> selectExpressionsListStack = new Stack<>();
private Stack<SelectClauseNode> selectClausesStack = new Stack<>();
private Stack<WindowClauseNode> windowClausesStack = new Stack<>();
private Stack<StreamingInput> streamingInputStack = new Stack<>();
private Stack<JoinStreamingInput> joinStreamingInputsStack = new Stack<>();
private Stack<TableQuery> tableQueriesStack = new Stack<>();
private Stack<SetAssignmentNode> setAssignmentStack = new Stack<>();
private Stack<List<SetAssignmentNode>> setAssignmentListStack = new Stack<>();
private Stack<StreamActionNode> streamActionNodeStack = new Stack<>();
private Stack<PatternStreamingEdgeInputNode> patternStreamingEdgeInputStack = new Stack<>();
private Stack<PatternStreamingInputNode> patternStreamingInputStack = new Stack<>();
private Stack<StreamingQueryStatementNode> streamingQueryStatementStack = new Stack<>();
private Stack<ForeverNode> foreverNodeStack = new Stack<>();
private Stack<OutputRateLimitNode> outputRateLimitStack = new Stack<>();
private Stack<WithinClause> withinClauseStack = new Stack<>();
private Stack<PatternClause> patternClauseStack = new Stack<>();
private Set<BLangImportPackage> imports = new HashSet<>();
private List<VariableDefinitionNode> defaultableParamsList = new ArrayList<>();
private Stack<SimpleVariableNode> restParamStack = new Stack<>();
private Deque<BLangMatch> matchStmtStack;
private PatternStreamingInputNode recentStreamingPatternInputNode;
private Stack<Set<Whitespace>> operatorWs = new Stack<>();
private Stack<Set<Whitespace>> objectFieldBlockWs = new Stack<>();
private Stack<Set<Whitespace>> finiteTypeWsStack = new Stack<>();
private BLangAnonymousModelHelper anonymousModelHelper;
private CompilerOptions compilerOptions;
private BLangDiagnosticLog dlog;
private static final String IDENTIFIER_LITERAL_PREFIX = "^\"";
private static final String IDENTIFIER_LITERAL_SUFFIX = "\"";
/**
 * Creates a package builder which accumulates parsed constructs into the
 * given compilation unit, resolving its collaborators from the compiler context.
 */
public BLangPackageBuilder(CompilerContext context, CompilationUnitNode compUnit) {
    this.compUnit = compUnit;
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.compilerOptions = CompilerOptions.getInstance(context);
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
}
/** Records an attach point for the annotation currently being built. */
void addAttachPoint(AttachPoint attachPoint, Set<Whitespace> ws) {
    this.attachPointStack.push(attachPoint);
    this.annotationStack.peek().addWS(ws);
}
/** Pushes a value type node (e.g. int, string) for the given type name. */
void addValueType(DiagnosticPos pos, Set<Whitespace> ws, String typeName) {
    BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
    valueType.pos = pos;
    valueType.addWS(ws);
    // Strip any embedded whitespace before mapping the name to a type kind.
    valueType.typeKind = TreeUtils.stringToTypeKind(typeName.replaceAll("\\s+", ""));
    addType(valueType);
}
// Pops the two most recent type nodes and pushes a union type (lhs | rhs).
// If the rhs is itself a union, the lhs is prepended to its member list so
// nested unions stay flattened in source order.
void addUnionType(DiagnosticPos pos, Set<Whitespace> ws) {
// Pop order matters: the rhs type was pushed last.
BLangType rhsTypeNode = (BLangType) this.typeNodeStack.pop();
BLangType lhsTypeNode = (BLangType) this.typeNodeStack.pop();
BLangUnionTypeNode unionTypeNode;
if (rhsTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
unionTypeNode = (BLangUnionTypeNode) rhsTypeNode;
unionTypeNode.memberTypeNodes.add(0, lhsTypeNode);
unionTypeNode.addWS(ws);
this.typeNodeStack.push(unionTypeNode);
// Note: the reused union node keeps its original pos in this branch.
return;
} else {
unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
unionTypeNode.memberTypeNodes.add(lhsTypeNode);
unionTypeNode.memberTypeNodes.add(rhsTypeNode);
}
unionTypeNode.pos = pos;
unionTypeNode.addWS(ws);
this.typeNodeStack.push(unionTypeNode);
}
// Pops the last {@code members} type nodes and pushes a tuple type containing
// them. Inserting each popped member at index 0 restores source order.
void addTupleType(DiagnosticPos pos, Set<Whitespace> ws, int members) {
BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
for (int i = 0; i < members; i++) {
final BLangType member = (BLangType) this.typeNodeStack.pop();
tupleTypeNode.memberTypeNodes.add(0, member);
}
tupleTypeNode.pos = pos;
tupleTypeNode.addWS(ws);
this.typeNodeStack.push(tupleTypeNode);
}
// Finalizes the record type currently on the type stack. Named records are
// pushed back as-is; anonymous records are hoisted into a generated top-level
// type definition and referenced by a user-defined type node instead.
void addRecordType(DiagnosticPos pos, Set<Whitespace> ws, boolean isFieldAnalyseRequired, boolean isAnonymous,
boolean sealed, boolean hasRestField) {
// The rest-field type (if any) sits on top of the record type node, so pop it first.
BLangType restFieldType = null;
if (hasRestField && !sealed) {
restFieldType = (BLangType) this.typeNodeStack.pop();
}
BLangRecordTypeNode recordTypeNode = populateRecordTypeNode(pos, ws, isAnonymous);
recordTypeNode.isFieldAnalyseRequired = isFieldAnalyseRequired;
recordTypeNode.sealed = sealed;
recordTypeNode.restFieldType = restFieldType;
if (!isAnonymous) {
addType(recordTypeNode);
return;
}
// Anonymous record: synthesize a uniquely-named public type definition for it.
BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
IdentifierNode anonTypeGenName = createIdentifier(genName);
typeDef.setName(anonTypeGenName);
typeDef.flagSet.add(Flag.PUBLIC);
typeDef.typeNode = recordTypeNode;
typeDef.pos = pos;
this.compUnit.addTopLevelNode(typeDef);
// Refer to the hoisted definition where the inline record appeared.
addType(createUserDefinedType(pos, ws, (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name));
}
/**
 * Pops the record type node under construction, stamps its position and
 * whitespace, and moves the pending variable list onto it as fields.
 */
private BLangRecordTypeNode populateRecordTypeNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isAnonymous) {
    BLangRecordTypeNode recordType = (BLangRecordTypeNode) typeNodeStack.pop();
    recordType.pos = pos;
    recordType.addWS(ws);
    recordType.isAnonymous = isAnonymous;
    for (BLangVariable field : this.varListStack.pop()) {
        recordType.addField((SimpleVariableNode) field);
    }
    return recordType;
}
// Adds a record field variable. Annotations are attached inside addSimpleVar.
// A field without an initializer that is not optional is flagged REQUIRED.
void addFieldVariable(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
boolean exprAvailable, int annotCount, boolean isPrivate, boolean isOptional) {
BLangSimpleVariable field = addSimpleVar(pos, ws, identifier, exprAvailable, annotCount);
if (!isPrivate) {
field.flagSet.add(Flag.PUBLIC);
}
if (isOptional) {
field.flagSet.add(Flag.OPTIONAL);
} else if (!exprAvailable) {
field.flagSet.add(Flag.REQUIRED);
}
}
// Adds an object field variable with optional deprecated-doc attachment and
// public/private visibility flags.
// NOTE(review): addSimpleVar already calls attachAnnotations(var, annotCount);
// the second attachAnnotations call below looks redundant (or double-consumes
// the annotation stack) — confirm against attachAnnotations' implementation.
void addFieldVariable(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
boolean exprAvailable, boolean deprecatedDocExit,
int annotCount, boolean isPrivate, boolean isPublic) {
BLangSimpleVariable field = addSimpleVar(pos, ws, identifier, exprAvailable, annotCount);
attachAnnotations(field, annotCount);
if (deprecatedDocExit) {
attachDeprecatedNode(field);
}
if (isPublic) {
field.flagSet.add(Flag.PUBLIC);
} else if (isPrivate) {
field.flagSet.add(Flag.PRIVATE);
}
}
/**
 * Wraps the type node on top of the stack in an array type with the given
 * number of dimensions and per-dimension sizes.
 */
void addArrayType(DiagnosticPos pos, Set<Whitespace> ws, int dimensions, int[] sizes) {
    BLangType elementType = (BLangType) this.typeNodeStack.pop();
    BLangArrayType arrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayType.pos = pos;
    arrayType.addWS(ws);
    arrayType.elemtype = elementType;
    arrayType.dimensions = dimensions;
    arrayType.sizes = sizes;
    addType(arrayType);
}
/** Marks the type node on top of the stack as nullable (suffixed with '?'). */
void markTypeNodeAsNullable(Set<Whitespace> ws) {
    BLangType topType = (BLangType) this.typeNodeStack.peek();
    topType.nullable = true;
    topType.addWS(ws);
}
/** Marks the type node on top of the stack as parenthesized (grouped). */
void markTypeNodeAsGrouped(Set<Whitespace> ws) {
    BLangType topType = (BLangType) this.typeNodeStack.peek();
    topType.grouped = true;
    topType.addWS(ws);
}
/** Pushes a user-defined type built from the most recent name reference. */
void addUserDefineType(Set<Whitespace> ws) {
    BLangNameReference ref = nameReferenceStack.pop();
    BLangUserDefinedType userType = createUserDefinedType(ref.pos, ws,
            (BLangIdentifier) ref.pkgAlias, (BLangIdentifier) ref.name);
    userType.addWS(ref.ws);
    addType(userType);
}
/** Pushes a built-in reference type node (e.g. json, xml) for the given name. */
void addBuiltInReferenceType(DiagnosticPos pos, Set<Whitespace> ws, String typeName) {
    BLangBuiltInRefTypeNode builtInRef = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    builtInRef.pos = pos;
    builtInRef.addWS(ws);
    builtInRef.typeKind = TreeUtils.stringToTypeKind(typeName);
    addType(builtInRef);
}
// Pushes an error type node, popping its details and reason constituent types
// from the type stack when present.
void addErrorType(DiagnosticPos pos, Set<Whitespace> ws, boolean isReasonTypeExists, boolean isDetailsTypeExists) {
BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
errorType.pos = pos;
errorType.addWS(ws);
// Pop order matters: details was parsed (and pushed) after reason.
if (isDetailsTypeExists) {
errorType.detailType = (BLangType) this.typeNodeStack.pop();
}
if (isReasonTypeExists) {
errorType.reasonType = (BLangType) this.typeNodeStack.pop();
}
addType(errorType);
}
// Pushes a constrained type (e.g. map<Foo>) whose constraint is a user-defined
// type taken from the name reference stack.
void addConstraintType(DiagnosticPos pos, Set<Whitespace> ws, String typeName) {
BLangNameReference nameReference = nameReferenceStack.pop();
BLangUserDefinedType constraintType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
constraintType.pos = pos;
constraintType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
constraintType.typeName = (BLangIdentifier) nameReference.name;
constraintType.addWS(nameReference.ws);
// The second-to-last whitespace token belongs to the built-in ref type itself.
Set<Whitespace> refTypeWS = removeNthFromLast(ws, 2);
BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
refType.typeKind = TreeUtils.stringToTypeKind(typeName);
refType.pos = pos;
refType.addWS(refTypeWS);
BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
constrainedType.type = refType;
constrainedType.constraint = constraintType;
constrainedType.pos = pos;
constrainedType.addWS(ws);
addType(constrainedType);
}
// Pushes a constrained type whose constraint is the type node currently on
// top of the type stack (rather than a name reference).
void addConstraintTypeWithTypeName(DiagnosticPos pos, Set<Whitespace> ws, String typeName) {
// The second-to-last whitespace token belongs to the built-in ref type itself.
Set<Whitespace> refTypeWS = removeNthFromLast(ws, 2);
BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
refType.typeKind = TreeUtils.stringToTypeKind(typeName);
refType.pos = pos;
refType.addWS(refTypeWS);
BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
constrainedType.type = refType;
constrainedType.constraint = (BLangType) this.typeNodeStack.pop();
constrainedType.pos = pos;
constrainedType.addWS(ws);
addType(constrainedType);
}
/** Pushes a user-defined endpoint type built from the most recent name reference. */
void addEndpointType(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangNameReference ref = nameReferenceStack.pop();
    BLangUserDefinedType endpointType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    endpointType.pos = pos;
    endpointType.pkgAlias = (BLangIdentifier) ref.pkgAlias;
    endpointType.typeName = (BLangIdentifier) ref.name;
    endpointType.addWS(ref.ws);
    addType(endpointType);
}
// Pushes a function type node. The return type (if declared) is taken from the
// variable stack; absent a declaration, the return type defaults to nil.
// Parameters (if any) are taken from the pending variable list.
void addFunctionType(DiagnosticPos pos, Set<Whitespace> ws, boolean paramsAvail,
boolean retParamsAvail) {
BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) TreeBuilder.createFunctionTypeNode();
functionTypeNode.pos = pos;
functionTypeNode.returnsKeywordExists = true;
if (retParamsAvail) {
functionTypeNode.addWS(this.varStack.peek().getWS());
functionTypeNode.returnTypeNode = this.varStack.pop().getTypeNode();
} else {
// No declared return type: synthesize a nil type node.
BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
nilTypeNode.pos = pos;
nilTypeNode.typeKind = TypeKind.NIL;
functionTypeNode.returnTypeNode = nilTypeNode;
}
if (paramsAvail) {
functionTypeNode.addWS(commaWsStack.pop());
this.varListStack.pop().forEach(v -> functionTypeNode.params.add(v));
}
functionTypeNode.addWS(ws);
addType(functionTypeNode);
}
/** Pushes the given type node onto the type stack. */
private void addType(TypeNode typeNode) {
    typeNodeStack.push(typeNode);
}
/** Pushes a (package-alias, name) reference onto the name reference stack. */
void addNameReference(DiagnosticPos currentPos, Set<Whitespace> ws, String pkgName, String name) {
    IdentifierNode pkgAlias = createIdentifier(pkgName);
    IdentifierNode refName = createIdentifier(name);
    nameReferenceStack.push(new BLangNameReference(currentPos, ws, pkgAlias, refName));
}
/** Opens a fresh variable list scope (e.g. for a parameter list). */
void startVarList() {
    varListStack.push(new ArrayList<>());
}
/**
 * Opens a function definition scope, attaching any pending annotations,
 * markdown documentation and deprecation node to the new function.
 */
void startFunctionDef(int annotCount) {
    FunctionNode function = TreeBuilder.createFunctionNode();
    attachAnnotations(function, annotCount);
    attachMarkdownDocumentations(function);
    attachDeprecatedNode(function);
    this.invokableNodeStack.push(function);
}
/** Opens an object member function definition scope (no attachments consumed). */
void startObjectFunctionDef() {
    this.invokableNodeStack.push(TreeBuilder.createFunctionNode());
}
/** Opens a fresh statement block scope. */
void startBlock() {
    blockNodeStack.push(TreeBuilder.createBlockNode());
}
/**
 * Creates an identifier node for the given text. Quoted identifier literals of
 * the form {@code ^"name"} are unescaped, unwrapped, and flagged as literal.
 *
 * @param value raw identifier text; may be null, yielding an empty node
 * @return the identifier node
 */
private IdentifierNode createIdentifier(String value) {
    IdentifierNode node = TreeBuilder.createIdentifierNode();
    if (value == null) {
        return node;
    }
    if (value.startsWith(IDENTIFIER_LITERAL_PREFIX) && value.endsWith(IDENTIFIER_LITERAL_SUFFIX)) {
        value = StringEscapeUtils.unescapeJava(value);
        // Strip the delimiters by their declared lengths instead of magic numbers,
        // so the constants and this logic cannot drift apart.
        node.setValue(value.substring(IDENTIFIER_LITERAL_PREFIX.length(),
                                      value.length() - IDENTIFIER_LITERAL_SUFFIX.length()));
        node.setLiteral(true);
    } else {
        node.setValue(value);
        node.setLiteral(false);
    }
    return node;
}
// Creates a simple variable node with annotations attached, and registers it
// either on the current variable list (if one is open) or on the variable stack.
BLangSimpleVariable addSimpleVar(DiagnosticPos pos,
Set<Whitespace> ws,
String identifier,
boolean exprAvailable,
int annotCount) {
BLangSimpleVariable var = (BLangSimpleVariable) this.generateBasicVarNode(pos, ws, identifier, exprAvailable);
attachAnnotations(var, annotCount);
var.pos = pos;
// With no open list the variable is a standalone one (e.g. a return param).
if (this.varListStack.empty()) {
this.varStack.push(var);
} else {
this.varListStack.peek().add(var);
}
return var;
}
/**
 * Creates a simple variable for a binding-pattern member and pushes it on the
 * variable stack.
 *
 * @return the created member variable
 */
BLangVariable addBindingPatternMemberVariable(DiagnosticPos pos,
                                              Set<Whitespace> ws,
                                              String identifier) {
    BLangSimpleVariable member = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    member.pos = pos;
    member.addWS(ws);
    member.setName(this.createIdentifier(identifier));
    this.varStack.push(member);
    return member;
}
// Pops the last {@code members} variables and pushes a tuple binding-pattern
// variable containing them; inserting at index 0 restores source order.
void addTupleVariable(DiagnosticPos pos, Set<Whitespace> ws, int members) {
BLangTupleVariable tupleVariable = (BLangTupleVariable) TreeBuilder.createTupleVariableNode();
tupleVariable.pos = pos;
tupleVariable.addWS(ws);
for (int i = 0; i < members; i++) {
final BLangVariable member = this.varStack.pop();
tupleVariable.memberVariables.add(0, member);
}
this.varStack.push(tupleVariable);
}
// Pops the last {@code members} expressions and pushes a tuple variable
// reference over them; inserting at index 0 restores source order.
void addTupleVariableReference(DiagnosticPos pos, Set<Whitespace> ws, int members) {
BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) TreeBuilder.createTupleVariableReferenceNode();
tupleVarRef.pos = pos;
tupleVarRef.addWS(ws);
for (int i = 0; i < members; i++) {
final BLangExpression expr = (BLangExpression) this.exprNodeStack.pop();
tupleVarRef.expressions.add(0, expr);
}
this.exprNodeStack.push(tupleVarRef);
}
/** Opens a fresh record binding-pattern key/value list. */
void startRecordVariableList() {
    this.recordVarListStack.push(new ArrayList<>());
}
/** Opens a fresh record variable-reference key/value list. */
void startRecordVariableReferenceList() {
    this.recordVarRefListStack.push(new ArrayList<>());
}
// Builds a record binding-pattern variable from the pending key/value list and
// pushes it. The rest binding pattern decides whether a rest param is consumed
// from the variable stack or the record is marked closed.
void addRecordVariable(DiagnosticPos pos, Set<Whitespace> ws, RestBindingPatternState restBindingPattern) {
BLangRecordVariable recordVariable = (BLangRecordVariable) TreeBuilder.createRecordVariableNode();
recordVariable.pos = pos;
recordVariable.addWS(ws);
recordVariable.variableList = this.recordVarListStack.pop();
switch (restBindingPattern) {
case OPEN_REST_BINDING_PATTERN:
recordVariable.restParam = this.varStack.pop();
break;
case CLOSED_REST_BINDING_PATTERN:
recordVariable.isClosed = true;
break;
case NO_BINDING_PATTERN:
break;
}
this.varStack.push(recordVariable);
}
// Builds a record variable reference (destructuring assignment LHS) from the
// pending key/value reference list and pushes it on the expression stack.
void addRecordVariableReference(DiagnosticPos pos, Set<Whitespace> ws, RestBindingPatternState restBindingPattern) {
BLangRecordVarRef recordVarRef = (BLangRecordVarRef) TreeBuilder.createRecordVariableReferenceNode();
recordVarRef.pos = pos;
recordVarRef.addWS(ws);
switch (restBindingPattern) {
case OPEN_REST_BINDING_PATTERN:
recordVarRef.restParam = this.exprNodeStack.pop();
break;
case CLOSED_REST_BINDING_PATTERN:
recordVarRef.isClosed = true;
break;
case NO_BINDING_PATTERN:
break;
}
recordVarRef.recordRefFields = this.recordVarRefListStack.pop();
this.exprNodeStack.push(recordVarRef);
}
// Adds one key/value entry to the open record binding-pattern list. When the
// field has no explicit binding pattern, a variable of the same name is
// synthesized (shorthand form {x} == {x: x}).
void addFieldBindingMemberVar(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean bindingPattern) {
BLangRecordVariableKeyValue recordKeyValue = new BLangRecordVariableKeyValue();
recordKeyValue.key = (BLangIdentifier) this.createIdentifier(identifier);
if (!bindingPattern) {
addBindingPatternMemberVariable(pos, ws, identifier);
}
recordKeyValue.valueBindingPattern = this.varStack.pop();
this.recordVarListStack.peek().add(recordKeyValue);
}
// Adds one key/value entry to the open record variable-reference list. When no
// explicit binding pattern exists, a simple variable reference of the same
// name is synthesized (shorthand form).
void addFieldRefBindingMemberVar(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
boolean bindingPattern) {
BLangExpression expression;
if (!bindingPattern) {
addNameReference(pos, ws, null, identifier);
createSimpleVariableReference(pos, ws);
}
expression = (BLangExpression) this.exprNodeStack.pop();
BLangRecordVarRefKeyValue keyValue = new BLangRecordVarRefKeyValue();
keyValue.variableName = (BLangIdentifier) createIdentifier(identifier);
keyValue.variableReference = expression;
this.recordVarRefListStack.peek().add(keyValue);
}
// Creates an untyped variable node (type inferred later), attaches annotations,
// and registers it on the open variable list or the variable stack.
public BLangVariable addVarWithoutType(DiagnosticPos pos,
Set<Whitespace> ws,
String identifier,
boolean exprAvailable,
int annotCount) {
BLangVariable var = (BLangVariable) this.generateBasicVarNodeWithoutType(pos, ws, identifier, exprAvailable);
attachAnnotations(var, annotCount);
var.pos = pos;
// With no open list the variable is a standalone one.
if (this.varListStack.empty()) {
this.varStack.push(var);
} else {
this.varListStack.peek().add(var);
}
return var;
}
/** Records the comma whitespace for the parameter list just closed. */
public void endFormalParameterList(Set<Whitespace> ws) {
    commaWsStack.push(ws);
}
/** Creates an unnamed return-parameter variable and pushes it on the variable stack. */
void addReturnParam(DiagnosticPos pos,
                    Set<Whitespace> ws,
                    int annotCount) {
    BLangSimpleVariable returnParam = (BLangSimpleVariable) this.generateBasicVarNode(pos, ws, null, false);
    attachAnnotations(returnParam, annotCount);
    returnParam.pos = pos;
    this.varStack.push(returnParam);
}
// Finalizes the signature of the invokable currently being built: sets its
// name, return type (defaulting to nil when none was declared), and moves the
// pending required/defaultable/rest parameters onto it.
void endCallableUnitSignature(DiagnosticPos pos,
Set<Whitespace> ws,
String identifier,
DiagnosticPos identifierPos,
boolean paramsAvail,
boolean retParamsAvail,
boolean restParamAvail) {
InvokableNode invNode = this.invokableNodeStack.peek();
BLangIdentifier identifierNode = (BLangIdentifier) this.createIdentifier(identifier);
identifierNode.pos = identifierPos;
invNode.setName(identifierNode);
invNode.addWS(ws);
BLangType returnTypeNode;
if (retParamsAvail) {
// The declared return type rides on a synthetic variable on the var stack.
BLangSimpleVariable varNode = (BLangSimpleVariable) this.varStack.pop();
returnTypeNode = varNode.getTypeNode();
invNode.addWS(varNode.getWS());
varNode.getAnnotationAttachments().forEach(invNode::addReturnTypeAnnotationAttachment);
} else {
// No declared return type: synthesize nil.
BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
nillTypeNode.pos = pos;
nillTypeNode.typeKind = TypeKind.NIL;
returnTypeNode = nillTypeNode;
}
invNode.setReturnTypeNode(returnTypeNode);
if (paramsAvail) {
this.varListStack.pop().forEach(variableNode -> {
invNode.addParameter((SimpleVariableNode) variableNode);
});
this.defaultableParamsList.forEach(variableDef -> {
BLangSimpleVariableDef varDef = (BLangSimpleVariableDef) variableDef;
invNode.addDefaultableParameter(varDef);
});
// Reset rather than clear(): the old list may still be referenced by invNode.
this.defaultableParamsList = new ArrayList<>();
if (restParamAvail) {
invNode.setRestParameter(this.restParamStack.pop());
}
invNode.addWS(this.commaWsStack.pop());
}
}
/**
 * Starts a lambda function definition scope for the given package. Lambdas
 * cannot carry annotations, so this entry point attaches none.
 */
void startLambdaFunctionDef(PackageID pkgID) {
    startLambdaFunctionDef(pkgID, 0);
}

/**
 * Starts a lambda function definition scope, consuming {@code annotCount}
 * pending annotation attachments.
 *
 * @param pkgID package used to generate a unique anonymous function name
 * @param annotCount number of annotation attachments to consume; kept for
 *        backward compatibility — prefer {@link #startLambdaFunctionDef(PackageID)}
 *        since lambdas cannot have annotations
 */
void startLambdaFunctionDef(PackageID pkgID, int annotCount) {
    startFunctionDef(annotCount);
    BLangFunction lambdaFunction = (BLangFunction) this.invokableNodeStack.peek();
    lambdaFunction.setName(createIdentifier(anonymousModelHelper.getNextAnonymousFunctionKey(pkgID)));
    lambdaFunction.addFlag(Flag.LAMBDA);
}
// Closes the lambda function definition scope: finalizes its signature, wraps
// it in a lambda expression node on the expression stack, and ends the
// underlying function definition.
void addLambdaFunctionDef(DiagnosticPos pos,
Set<Whitespace> ws,
boolean paramsAvail,
boolean retParamsAvail,
boolean restParamAvail) {
BLangFunction lambdaFunction = (BLangFunction) this.invokableNodeStack.peek();
lambdaFunction.pos = pos;
endCallableUnitSignature(pos, ws, lambdaFunction.getName().value, pos, paramsAvail, retParamsAvail,
restParamAvail);
BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaExpr.function = lambdaFunction;
lambdaExpr.pos = pos;
addExpressionNode(lambdaExpr);
// bodyExists=true, isLambda=true; all visibility/receiver flags off.
endFunctionDef(pos, null, false, false, false, true, false, true);
}
// Builds an arrow function expression from the pending parameter list and the
// body expression on top of the expression stack.
void addArrowFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, PackageID pkgID) {
BLangArrowFunction arrowFunctionNode = (BLangArrowFunction) TreeBuilder.createArrowFunctionNode();
arrowFunctionNode.pos = pos;
arrowFunctionNode.addWS(ws);
arrowFunctionNode.functionName = createIdentifier(anonymousModelHelper.getNextAnonymousFunctionKey(pkgID));
// Pop order matters: params were listed before the body expression was pushed.
varListStack.pop().forEach(var -> arrowFunctionNode.params.add((BLangSimpleVariable) var));
arrowFunctionNode.expression = (BLangExpression) this.exprNodeStack.pop();
addExpressionNode(arrowFunctionNode);
}
/**
 * Marks the expression on top of the stack as an async invocation ({@code start}),
 * or reports an error when it is not an invocation.
 */
void markLastInvocationAsAsync(DiagnosticPos pos) {
    final ExpressionNode top = this.exprNodeStack.peek();
    if (top.getKind() != NodeKind.INVOCATION) {
        dlog.error(pos, DiagnosticCode.START_REQUIRE_INVOCATION);
        return;
    }
    ((BLangInvocation) top).async = true;
}
// Emits a simple variable definition statement into the current block. The
// declared type is popped from the type stack unless declared with 'var', and
// the initializer is popped from the expression stack when present.
void addSimpleVariableDefStatement(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean isFinal,
boolean isDeclaredWithVar, boolean isExpressionAvailable) {
BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
BLangSimpleVariableDef varDefNode = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
var.pos = pos;
var.addWS(ws);
var.setName(this.createIdentifier(identifier));
if (isFinal) {
markVariableAsFinal(var);
}
if (isDeclaredWithVar) {
var.isDeclaredWithVar = true;
} else {
var.setTypeNode(this.typeNodeStack.pop());
}
if (isExpressionAvailable) {
var.setInitialExpression(this.exprNodeStack.pop());
}
varDefNode.pos = pos;
varDefNode.setVariable(var);
varDefNode.addWS(ws);
addStmtToCurrentBlock(varDefNode);
}
// Emits a tuple destructuring definition statement into the current block.
// The initializer is mandatory here; the declared type is popped only when the
// statement was not declared with 'var'.
void addTupleVariableDefStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean isFinal,
boolean isDeclaredWithVar) {
BLangTupleVariable var = (BLangTupleVariable) this.varStack.pop();
if (isFinal) {
markVariableAsFinal(var);
}
BLangTupleVariableDef varDefNode = (BLangTupleVariableDef) TreeBuilder.createTupleVariableDefinitionNode();
// The last whitespace token is the trailing semicolon's.
Set<Whitespace> wsOfSemiColon = removeNthFromLast(ws, 0);
var.setInitialExpression(this.exprNodeStack.pop());
varDefNode.pos = pos;
varDefNode.setVariable(var);
varDefNode.addWS(wsOfSemiColon);
var.isDeclaredWithVar = isDeclaredWithVar;
if (!isDeclaredWithVar) {
var.setTypeNode(this.typeNodeStack.pop());
}
addStmtToCurrentBlock(varDefNode);
}
// Emits a record destructuring definition statement into the current block,
// mirroring addTupleVariableDefStatement for record binding patterns.
void addRecordVariableDefStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean isFinal,
boolean isDeclaredWithVar) {
BLangRecordVariableDef varDefNode = (BLangRecordVariableDef) TreeBuilder.createRecordVariableDefinitionNode();
BLangRecordVariable var = (BLangRecordVariable) this.varStack.pop();
if (isFinal) {
markVariableAsFinal(var);
}
var.setInitialExpression(this.exprNodeStack.pop());
varDefNode.pos = pos;
varDefNode.setVariable(var);
varDefNode.addWS(ws);
varDefNode.var = var;
var.isDeclaredWithVar = isDeclaredWithVar;
if (!isDeclaredWithVar) {
var.setTypeNode(this.typeNodeStack.pop());
}
addStmtToCurrentBlock(varDefNode);
}
// Builds a type/object initializer expression (e.g. new Foo(args)) together
// with the synthetic invocation of its init function, and pushes it on the
// expression stack.
void addTypeInitExpression(DiagnosticPos pos, Set<Whitespace> ws, String initName, boolean typeAvailable,
boolean exprAvailable) {
BLangTypeInit objectInitNode = (BLangTypeInit) TreeBuilder.createObjectInitNode();
objectInitNode.pos = pos;
objectInitNode.addWS(ws);
if (typeAvailable) {
objectInitNode.userDefinedType = (BLangUserDefinedType) typeNodeStack.pop();
}
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
invocationNode.addWS(ws);
if (exprAvailable) {
List<ExpressionNode> exprNodes = exprNodeListStack.pop();
Set<Whitespace> cws = commaWsStack.pop();
// Each argument is shared by both the init invocation and the init node.
exprNodes.forEach(exprNode -> {
invocationNode.argExprs.add((BLangExpression) exprNode);
objectInitNode.argsExpr.add((BLangExpression) exprNode);
});
invocationNode.addWS(cws);
objectInitNode.addWS(cws);
}
IdentifierNode pkgNameNode = TreeBuilder.createIdentifierNode();
IdentifierNode nameNode = createIdentifier(initName);
BLangNameReference nameReference = new BLangNameReference(pos, ws, pkgNameNode, nameNode);
invocationNode.name = (BLangIdentifier) nameReference.name;
invocationNode.addWS(nameReference.ws);
invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
objectInitNode.objectInitInvocation = invocationNode;
this.addExpressionNode(objectInitNode);
}
/**
 * Marks the given variable, and every variable nested inside a tuple or
 * record binding pattern, as {@code final}.
 *
 * @param variable variable (possibly a structured binding pattern) to mark
 */
private void markVariableAsFinal(BLangVariable variable) {
    variable.flagSet.add(Flag.FINAL);
    switch (variable.getKind()) {
        case TUPLE_VARIABLE:
            // Sequential traversal: the recursion mutates each member's flag set,
            // which must not happen concurrently (the sets are not thread-safe),
            // and parallelism buys nothing for these tiny lists.
            ((BLangTupleVariable) variable).memberVariables
                    .forEach(this::markVariableAsFinal);
            break;
        case RECORD_VARIABLE:
            ((BLangRecordVariable) variable).variableList.stream()
                    .map(BLangRecordVariableKeyValue::getValue)
                    .forEach(this::markVariableAsFinal);
            break;
    }
}
// Builds an error constructor expression (error(reason[, details])) from the
// expression stack and pushes the result.
void addErrorConstructor(DiagnosticPos pos, Set<Whitespace> ws, boolean detailsExprAvailable) {
BLangErrorConstructorExpr errorConstExpr = (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorNode();
errorConstExpr.pos = pos;
errorConstExpr.addWS(ws);
// Pop order matters: the details expression was pushed after the reason.
if (detailsExprAvailable) {
errorConstExpr.detailsExpr = (BLangExpression) exprNodeStack.pop();
}
errorConstExpr.reasonExpr = (BLangExpression) exprNodeStack.pop();
this.addExpressionNode(errorConstExpr);
}
/** Appends the given statement to the innermost open block. */
private void addStmtToCurrentBlock(StatementNode statement) {
    blockNodeStack.peek().addStatement(statement);
}
/** Opens a try-catch-finally scope and the try body's block. */
void startTryCatchFinallyStmt() {
    BLangTryCatchFinally tryNode = (BLangTryCatchFinally) TreeBuilder.createTryCatchFinallyNode();
    this.tryCatchFinallyNodesStack.push(tryNode);
    startBlock();
}
/** Closes the try body's block and attaches it to the enclosing try node. */
void addTryClause(DiagnosticPos pos) {
    BLangBlockStmt tryBody = (BLangBlockStmt) this.blockNodeStack.pop();
    tryBody.pos = pos;
    tryCatchFinallyNodesStack.peek().tryBody = tryBody;
}
/** Opens the block that will become a catch clause body. */
void startCatchClause() {
    startBlock();
}
// Closes a catch clause: builds its parameter from the type on the type stack
// and the given name, takes the clause body from the block stack, and attaches
// the clause to the enclosing try node.
void addCatchClause(DiagnosticPos poc, Set<Whitespace> ws, String paramName) {
BLangSimpleVariable variableNode = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
variableNode.typeNode = (BLangType) this.typeNodeStack.pop();
variableNode.name = (BLangIdentifier) createIdentifier(paramName);
variableNode.pos = variableNode.typeNode.pos;
// The third-from-last whitespace token belongs to the parameter.
variableNode.addWS(removeNthFromLast(ws, 3));
BLangCatch catchNode = (BLangCatch) TreeBuilder.createCatchNode();
catchNode.pos = poc;
catchNode.addWS(ws);
catchNode.body = (BLangBlockStmt) this.blockNodeStack.pop();
catchNode.param = variableNode;
tryCatchFinallyNodesStack.peek().catchBlocks.add(catchNode);
}
/** Opens the block that will become a finally body. */
void startFinallyBlock() {
    startBlock();
}
// Closes the finally block and attaches it to the current try-catch-finally node.
void addFinallyBlock(DiagnosticPos poc, Set<Whitespace> ws) {
    BLangBlockStmt blockNode = (BLangBlockStmt) this.blockNodeStack.pop();
    BLangTryCatchFinally rootTry = tryCatchFinallyNodesStack.peek();
    rootTry.finallyBody = blockNode;
    rootTry.addWS(ws);
    blockNode.pos = poc;
}
// Completes the current try-catch-finally node and appends it to the current block.
void addTryCatchFinallyStmt(DiagnosticPos poc, Set<Whitespace> ws) {
    BLangTryCatchFinally stmtNode = tryCatchFinallyNodesStack.pop();
    stmtNode.pos = poc;
    stmtNode.addWS(ws);
    addStmtToCurrentBlock(stmtNode);
}
// Builds a throw statement from the expression on top of the expression stack
// and appends it to the current block.
void addThrowStmt(DiagnosticPos poc, Set<Whitespace> ws) {
    BLangThrow throwStmt = (BLangThrow) TreeBuilder.createThrowNode();
    throwStmt.pos = poc;
    throwStmt.addWS(ws);
    throwStmt.expr = (BLangExpression) this.exprNodeStack.pop();
    addStmtToCurrentBlock(throwStmt);
}
// Builds a panic statement from the error expression on top of the expression
// stack and appends it to the current block.
void addPanicStmt(DiagnosticPos poc, Set<Whitespace> ws) {
    BLangPanic panicStmt = (BLangPanic) TreeBuilder.createPanicNode();
    panicStmt.pos = poc;
    panicStmt.addWS(ws);
    panicStmt.expr = (BLangExpression) this.exprNodeStack.pop();
    addStmtToCurrentBlock(panicStmt);
}
// Pushes a freshly built expression onto the expression stack for later consumers.
private void addExpressionNode(ExpressionNode expressionNode) {
    this.exprNodeStack.push(expressionNode);
}
// Convenience overload: the literal's original source text defaults to
// String.valueOf(value).
void addLiteralValue(DiagnosticPos pos, Set<Whitespace> ws, int typeTag, Object value) {
    addLiteralValue(pos, ws, typeTag, value, String.valueOf(value));
}
// Builds a literal expression carrying both the parsed value and the original
// source text, and pushes it onto the expression stack.
void addLiteralValue(DiagnosticPos pos, Set<Whitespace> ws, int typeTag, Object value, String originalValue) {
    BLangLiteral litExpr = (BLangLiteral) TreeBuilder.createLiteralExpression();
    litExpr.addWS(ws);
    litExpr.pos = pos;
    litExpr.typeTag = typeTag;
    litExpr.value = value;
    litExpr.originalValue = originalValue;
    addExpressionNode(litExpr);
}
// Builds an array literal expression. When member expressions are present they
// are taken from the expression-list stack (with their comma whitespace);
// otherwise the literal is empty. The result is pushed onto the expression stack.
void addArrayInitExpr(DiagnosticPos pos, Set<Whitespace> ws, boolean argsAvailable) {
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralNode();
    List<ExpressionNode> memberExprs;
    if (argsAvailable) {
        arrayLiteral.addWS(commaWsStack.pop());
        memberExprs = exprNodeListStack.pop();
    } else {
        memberExprs = new ArrayList<>(0);
    }
    // Narrow the members to BLangExpression for the literal node.
    List<BLangExpression> castMembers = new ArrayList<>(memberExprs.size());
    for (ExpressionNode member : memberExprs) {
        castMembers.add((BLangExpression) member);
    }
    arrayLiteral.exprs = castMembers;
    arrayLiteral.pos = pos;
    arrayLiteral.addWS(ws);
    addExpressionNode(arrayLiteral);
}
// Adds one key-value pair to the record literal currently being built.
// Pop order matters: the value expression was pushed after the key expression.
void addKeyValueRecord(Set<Whitespace> ws) {
    BLangRecordKeyValue keyValue = (BLangRecordKeyValue) TreeBuilder.createRecordKeyValue();
    keyValue.addWS(ws);
    keyValue.valueExpr = (BLangExpression) exprNodeStack.pop();
    keyValue.key = new BLangRecordKey((BLangExpression) exprNodeStack.pop());
    recordLiteralNodes.peek().keyValuePairs.add(keyValue);
}
// Completes the current record/map literal and pushes it onto the expression stack.
void addMapStructLiteral(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangRecordLiteral recordTypeLiteralNode = recordLiteralNodes.pop();
    recordTypeLiteralNode.pos = pos;
    recordTypeLiteralNode.addWS(ws);
    addExpressionNode(recordTypeLiteralNode);
}
// Begins a table literal; subsequent column/row callbacks fill the pushed node.
void startTableLiteral() {
    final BLangTableLiteral tableLiteral = (BLangTableLiteral) TreeBuilder.createTableLiteralNode();
    tableLiteralNodes.push(tableLiteral);
}
// Attaches the column-definition whitespace to the table literal being built.
void endTableColumnDefinition(Set<Whitespace> ws) {
    this.tableLiteralNodes.peek().addWS(ws);
}
// Registers one named column on the table literal currently being built.
void addTableColumn(String columnName, DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTableLiteral.BLangTableColumn tableColumn = new BLangTableLiteral.BLangTableColumn(columnName);
    tableColumn.pos = pos;
    tableColumn.addWS(ws);
    this.tableLiteralNodes.peek().columns.add(tableColumn);
}
// Flags the named column of the current table literal as a primary key.
// Unknown column names are silently ignored.
void markPrimaryKeyColumn(String columnName) {
    BLangTableLiteral.BLangTableColumn column = this.tableLiteralNodes.peek().getColumn(columnName);
    if (column == null) {
        return;
    }
    column.flagSet.add(TableColumnFlag.PRIMARYKEY);
}
// Converts one table data row (a list of value expressions) into a record
// literal keyed by the table's column names, and appends it to the table's rows.
// NOTE(review): when the value count does not match the column count the row is
// silently dropped — presumably a parse error was already reported elsewhere;
// confirm against the grammar/error handling.
void endTableDataList(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    List<BLangTableLiteral.BLangTableColumn> keyNames = tableLiteralNodes.peek().columns;
    List<ExpressionNode> recordValues = exprNodeListStack.pop();
    if (keyNames.size() == recordValues.size()) {
        int index = 0;
        for (ExpressionNode expr : recordValues) {
            BLangRecordKeyValue keyValue = (BLangRecordKeyValue) TreeBuilder.createRecordKeyValue();
            keyValue.valueExpr = (BLangExpression) expr;
            // The key is a synthetic variable reference to the i-th column name.
            BLangSimpleVarRef keyExpr = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
            keyExpr.pos = pos;
            IdentifierNode identifierNode = TreeBuilder.createIdentifierNode();
            identifierNode.setValue(keyNames.get(index).columnName);
            keyExpr.variableName = (BLangIdentifier) identifierNode;
            keyValue.key = new BLangRecordKey(keyExpr);
            recordLiteral.keyValuePairs.add(keyValue);
            ++index;
        }
        recordLiteral.addWS(ws);
        recordLiteral.pos = pos;
        if (commaWsStack.size() > 0) {
            recordLiteral.addWS(commaWsStack.pop());
        }
        this.tableLiteralNodes.peek().tableDataRows.add(recordLiteral);
    }
}
// Attaches the data-array whitespace to the table literal being built.
void endTableDataArray(Set<Whitespace> ws) {
    this.tableLiteralNodes.peek().addWS(ws);
}
// Moves the pending expression list into the table literal as its data rows,
// attaching the row and (if present) comma whitespace.
void endTableDataRow(Set<Whitespace> ws) {
    List<ExpressionNode> rowExprs = exprNodeListStack.pop();
    BLangTableLiteral tableLiteral = this.tableLiteralNodes.peek();
    tableLiteral.addWS(ws);
    if (commaWsStack.size() > 0) {
        tableLiteral.addWS(commaWsStack.pop());
    }
    List<BLangExpression> dataRows = new ArrayList<>(rowExprs.size());
    for (ExpressionNode rowExpr : rowExprs) {
        dataRows.add((BLangExpression) rowExpr);
    }
    tableLiteral.tableDataRows = dataRows;
}
// Completes the current table literal and pushes it onto the expression stack.
void addTableLiteral(DiagnosticPos pos, Set<Whitespace> ws) {
    final BLangTableLiteral tableLiteral = tableLiteralNodes.pop();
    tableLiteral.addWS(ws);
    tableLiteral.pos = pos;
    addExpressionNode(tableLiteral);
}
// Begins a record/map literal; key-value callbacks fill the pushed node.
void startMapStructLiteral() {
    BLangRecordLiteral literalNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    recordLiteralNodes.push(literalNode);
}
// Opens a fresh expression list (e.g. for argument lists or array members).
void startExprNodeList() {
    this.exprNodeListStack.push(new ArrayList<>());
}
// Closes the current expression list: records the comma whitespace and drains
// exprCount expressions from the expression stack into the list (in source order).
void endExprNodeList(Set<Whitespace> ws, int exprCount) {
    commaWsStack.push(ws);
    List<ExpressionNode> exprList = exprNodeListStack.peek();
    addExprToExprNodeList(exprList, exprCount);
}
// Pops n expressions off the expression stack and appends them to exprList in
// original source (push) order. Iterative equivalent of the former recursion:
// each popped expression is inserted at the front of a buffer, which reverses
// the pop order back to push order.
private void addExprToExprNodeList(List<ExpressionNode> exprList, int n) {
    List<ExpressionNode> buffer = new ArrayList<>();
    for (int i = 0; i < n; i++) {
        if (exprNodeStack.empty()) {
            throw new IllegalStateException("Expression stack cannot be empty in processing an ExpressionList");
        }
        buffer.add(0, exprNodeStack.pop());
    }
    exprList.addAll(buffer);
}
// Builds a simple variable reference (pkgAlias:name) from the pending name
// reference and pushes it onto the expression stack.
void createSimpleVariableReference(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangNameReference nameReference = nameReferenceStack.pop();
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder
            .createSimpleVariableReferenceNode();
    varRef.pos = pos;
    varRef.addWS(ws);
    varRef.addWS(nameReference.ws);
    varRef.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    varRef.variableName = (BLangIdentifier) nameReference.name;
    this.exprNodeStack.push(varRef);
}
// Builds a free-function invocation (pkgAlias:name(args...)) and pushes it
// onto the expression stack. Arguments, if any, come from the expression-list
// stack; the callee name comes from the name-reference stack.
void createFunctionInvocation(DiagnosticPos pos, Set<Whitespace> ws, boolean argsAvailable) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    invocationNode.addWS(ws);
    if (argsAvailable) {
        List<ExpressionNode> exprNodes = exprNodeListStack.pop();
        exprNodes.forEach(exprNode -> invocationNode.argExprs.add((BLangExpression) exprNode));
        invocationNode.addWS(commaWsStack.pop());
    }
    BLangNameReference nameReference = nameReferenceStack.pop();
    invocationNode.name = (BLangIdentifier) nameReference.name;
    // Whitespace captured when startInvocationNode was called for this invocation.
    invocationNode.addWS(this.invocationWsStack.pop());
    invocationNode.addWS(nameReference.ws);
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    addExpressionNode(invocationNode);
}
// Stashes the invocation's leading whitespace until the invocation is completed.
void startInvocationNode(Set<Whitespace> ws) {
    invocationWsStack.push(ws);
}
// Builds a method-style invocation on the expression currently on top of the
// expression stack (expr.invocation(args...)), optionally safe-navigated
// (expr!.invocation), and pushes the result back.
void createInvocationNode(DiagnosticPos pos, Set<Whitespace> ws, String invocation, boolean argsAvailable,
                          boolean safeNavigate) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    invocationNode.addWS(ws);
    invocationNode.addWS(invocationWsStack.pop());
    invocationNode.safeNavigate = safeNavigate;
    if (argsAvailable) {
        List<ExpressionNode> exprNodes = exprNodeListStack.pop();
        exprNodes.forEach(exprNode -> invocationNode.argExprs.add((BLangExpression) exprNode));
        invocationNode.addWS(commaWsStack.pop());
    }
    // Receiver expression is on top of the stack; no package alias for attached calls.
    invocationNode.expr = (BLangExpression) exprNodeStack.pop();
    invocationNode.name = (BLangIdentifier) createIdentifier(invocation);
    invocationNode.pkgAlias = (BLangIdentifier) createIdentifier(null);
    addExpressionNode(invocationNode);
}
// Marks the invocation on top of the expression stack as an (optionally async)
// action invocation and wires in its endpoint expression (next on the stack).
void createActionInvocationNode(DiagnosticPos pos, Set<Whitespace> ws, boolean async) {
    BLangInvocation invocationExpr = (BLangInvocation) exprNodeStack.pop();
    invocationExpr.actionInvocation = true;
    invocationExpr.pos = pos;
    invocationExpr.addWS(ws);
    invocationExpr.async = async;
    invocationExpr.expr = (BLangExpression) exprNodeStack.pop();
    exprNodeStack.push(invocationExpr);
}
// Builds a field access (expr.field), optionally safe-navigated, over the
// variable reference on top of the expression stack, and pushes the result.
void createFieldBasedAccessNode(DiagnosticPos pos, Set<Whitespace> ws, String fieldName,
                                FieldKind fieldType, boolean safeNavigate) {
    BLangFieldBasedAccess fieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
    fieldBasedAccess.pos = pos;
    fieldBasedAccess.addWS(ws);
    fieldBasedAccess.field = (BLangIdentifier) createIdentifier(fieldName);
    fieldBasedAccess.expr = (BLangVariableReference) exprNodeStack.pop();
    fieldBasedAccess.fieldKind = fieldType;
    fieldBasedAccess.safeNavigate = safeNavigate;
    addExpressionNode(fieldBasedAccess);
}
// Builds an index access (expr[index]) and pushes it. Pop order matters:
// the index expression was pushed after the indexed expression.
void createIndexBasedAccessNode(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangIndexBasedAccess indexBasedAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    indexBasedAccess.pos = pos;
    indexBasedAccess.addWS(ws);
    indexBasedAccess.indexExpr = (BLangExpression) exprNodeStack.pop();
    indexBasedAccess.expr = (BLangVariableReference) exprNodeStack.pop();
    addExpressionNode(indexBasedAccess);
}
// Builds a braced (grouping) or tuple expression from the top
// numberOfExpressions entries of the expression stack, restoring their
// original source order, and pushes the result.
void createBracedOrTupleExpression(DiagnosticPos pos, Set<Whitespace> ws, int numberOfExpressions) {
    final BLangBracedOrTupleExpr expr = (BLangBracedOrTupleExpr) TreeBuilder.createBracedOrTupleExpression();
    expr.pos = pos;
    expr.addWS(ws);
    List<BLangExpression> collected = new ArrayList<>(numberOfExpressions);
    for (int i = 0; i < numberOfExpressions; i++) {
        collected.add((BLangExpression) exprNodeStack.pop());
    }
    // Popping yields reverse source order; flip back before attaching.
    Collections.reverse(collected);
    expr.expressions.addAll(collected);
    addExpressionNode(expr);
}
// Builds a binary expression (lhs op rhs) and pushes it. Pop order matters:
// the right operand was pushed after the left operand.
void createBinaryExpr(DiagnosticPos pos, Set<Whitespace> ws, String operator) {
    BLangBinaryExpr binaryExpressionNode = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpressionNode.pos = pos;
    binaryExpressionNode.addWS(ws);
    binaryExpressionNode.rhsExpr = (BLangExpression) exprNodeStack.pop();
    binaryExpressionNode.lhsExpr = (BLangExpression) exprNodeStack.pop();
    binaryExpressionNode.opKind = OperatorKind.valueFrom(operator);
    addExpressionNode(binaryExpressionNode);
}
// Builds an elvis expression (lhs ?: rhs) and pushes it; rhs is popped first.
void createElvisExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangElvisExpr elvisExpr = (BLangElvisExpr) TreeBuilder.createElvisExpressionNode();
    elvisExpr.pos = pos;
    elvisExpr.addWS(ws);
    elvisExpr.rhsExpr = (BLangExpression) exprNodeStack.pop();
    elvisExpr.lhsExpr = (BLangExpression) exprNodeStack.pop();
    addExpressionNode(elvisExpr);
}
// Builds a type-descriptor access expression from the pending type node and
// pushes it onto the expression stack.
void createTypeAccessExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTypedescExpr typeAccessExpr = (BLangTypedescExpr) TreeBuilder.createTypeAccessNode();
    typeAccessExpr.pos = pos;
    typeAccessExpr.addWS(ws);
    typeAccessExpr.typeNode = (BLangType) typeNodeStack.pop();
    addExpressionNode(typeAccessExpr);
}
// Builds a type conversion expression (<T> expr) from the pending type node
// and the expression on top of the stack, and pushes the result.
void createTypeConversionExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTypeConversionExpr typeConversionNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    typeConversionNode.pos = pos;
    typeConversionNode.addWS(ws);
    typeConversionNode.typeNode = (BLangType) typeNodeStack.pop();
    typeConversionNode.expr = (BLangExpression) exprNodeStack.pop();
    addExpressionNode(typeConversionNode);
}
// Builds a unary expression (op expr) over the top of the expression stack.
void createUnaryExpr(DiagnosticPos pos, Set<Whitespace> ws, String operator) {
    BLangUnaryExpr unaryExpressionNode = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
    unaryExpressionNode.pos = pos;
    unaryExpressionNode.addWS(ws);
    unaryExpressionNode.expr = (BLangExpression) exprNodeStack.pop();
    unaryExpressionNode.operator = OperatorKind.valueFrom(operator);
    addExpressionNode(unaryExpressionNode);
}
// Builds a ternary (cond ? then : else) expression; operands are popped in
// reverse order (else, then, cond). When the condition itself is a ternary,
// the tree is rotated so the new ternary nests inside the else-branch of the
// innermost preceding ternary — restoring the right-associativity the
// stack-based parse produced in the wrong shape.
void createTernaryExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) TreeBuilder.createTernaryExpressionNode();
    ternaryExpr.pos = pos;
    ternaryExpr.addWS(ws);
    ternaryExpr.elseExpr = (BLangExpression) exprNodeStack.pop();
    ternaryExpr.thenExpr = (BLangExpression) exprNodeStack.pop();
    ternaryExpr.expr = (BLangExpression) exprNodeStack.pop();
    if (ternaryExpr.expr.getKind() == NodeKind.TERNARY_EXPR) {
        // Re-parent: walk to the deepest else-chained ternary, take its else
        // branch as our condition, and hang ourselves off that branch.
        BLangTernaryExpr root = (BLangTernaryExpr) ternaryExpr.expr;
        BLangTernaryExpr parent = root;
        while (parent.elseExpr.getKind() == NodeKind.TERNARY_EXPR) {
            parent = (BLangTernaryExpr) parent.elseExpr;
        }
        ternaryExpr.expr = parent.elseExpr;
        parent.elseExpr = ternaryExpr;
        ternaryExpr = root;
    }
    addExpressionNode(ternaryExpr);
}
// Wraps the expression on top of the stack in a `check` expression.
void createCheckedExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangCheckedExpr checkedExpr = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
    checkedExpr.pos = pos;
    checkedExpr.addWS(ws);
    checkedExpr.expr = (BLangExpression) exprNodeStack.pop();
    addExpressionNode(checkedExpr);
}
// Wraps the expression on top of the stack in a `trap` expression.
void createTrapExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.pos = pos;
    trapExpr.addWS(ws);
    trapExpr.expr = (BLangExpression) exprNodeStack.pop();
    addExpressionNode(trapExpr);
}
// Completes a top-level function definition: applies visibility/remote/native
// flags, drops the body for declaration-only functions, attaches an implicit
// `self` receiver when the function is receiver-attached, and registers the
// function on the compilation unit.
void endFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, boolean publicFunc, boolean remoteFunc,
                    boolean nativeFunc, boolean bodyExists, boolean isReceiverAttached, boolean isLambda) {
    BLangFunction function = (BLangFunction) this.invokableNodeStack.pop();
    function.pos = pos;
    function.addWS(ws);
    if (!isLambda) {
        // Lambdas never pushed invocation whitespace, so only pop for named functions.
        function.addWS(invocationWsStack.pop());
    }
    if (publicFunc) {
        function.flagSet.add(Flag.PUBLIC);
    }
    if (remoteFunc) {
        function.flagSet.add(Flag.REMOTE);
    }
    if (nativeFunc) {
        function.flagSet.add(Flag.NATIVE);
    }
    if (!bodyExists) {
        function.body = null;
    }
    if (isReceiverAttached) {
        // Synthesize the `self` receiver from the pending receiver type.
        TypeNode typeNode = this.typeNodeStack.pop();
        BLangSimpleVariable receiver = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        receiver.pos = pos;
        IdentifierNode name = createIdentifier(Names.SELF.getValue());
        receiver.setName(name);
        receiver.setTypeNode(typeNode);
        function.receiver = receiver;
        function.flagSet.add(Flag.ATTACHED);
    }
    if (!function.deprecatedAttachments.isEmpty()) {
        function.flagSet.add(Flag.DEPRECATED);
    }
    this.compUnit.addTopLevelNode(function);
}
// Desugars a worker declaration into (1) a lambda holding the worker body,
// bound to a synthetic `0lambda$<name>`-style variable, and (2) an async
// `.call()` invocation of that lambda bound to the worker's own name.
// Inside a fork/join, the lambda's variable-def is also registered as a worker
// of the enclosing fork node.
void addWorker(DiagnosticPos pos, Set<Whitespace> ws, String workerName, boolean retParamsAvail) {
    endCallableUnitBody(ws);
    addLambdaFunctionDef(pos, ws, false, retParamsAvail, false);
    String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;
    addSimpleVariableDefStatement(pos, ws, workerLambdaName, true, true, true);
    if (!this.forkJoinNodesStack.empty()) {
        // The variable-def just appended to the current block holds the worker lambda.
        List<? extends StatementNode> stmtsAdded = this.blockNodeStack.peek().getStatements();
        BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) stmtsAdded.get(stmtsAdded.size() - 1);
        this.forkJoinNodesStack.peek().addWorkers(lamdaWrkr);
    }
    addNameReference(pos, ws, null, workerLambdaName);
    createSimpleVariableReference(pos, ws);
    startInvocationNode(ws);
    createInvocationNode(pos, ws, BLangBuiltInMethod.CALL.toString(), false, false);
    markLastInvocationAsAsync(pos);
    addSimpleVariableDefStatement(pos, ws, workerName, true, true, true);
}
// Intentionally a no-op: worker whitespace is currently discarded.
// NOTE(review): presumably kept for listener-interface compatibility — confirm.
void attachWorkerWS(Set<Whitespace> ws) {
}
// Begins a fork/join statement; workers declared inside attach to this node.
void startForkJoinStmt() {
    this.forkJoinNodesStack.push(TreeBuilder.createForkJoinNode());
}
// Completes the current fork/join node and appends it to the current block.
void addForkJoinStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangForkJoin forkJoin = (BLangForkJoin) this.forkJoinNodesStack.pop();
    forkJoin.pos = pos;
    forkJoin.addWS(ws);
    this.addStmtToCurrentBlock(forkJoin);
}
// Closes the current block and installs it as the body of the invokable
// (function/resource/worker lambda) currently being built.
void endCallableUnitBody(Set<Whitespace> ws) {
    BlockNode block = this.blockNodeStack.pop();
    InvokableNode invokableNode = this.invokableNodeStack.peek();
    invokableNode.addWS(ws);
    invokableNode.setBody(block);
}
// Builds an import declaration (org/name.comps version as alias) and adds it
// to the compilation unit. When no explicit alias is given, the last name
// component serves as the alias. A re-declared import produces a warning but
// the declaration node is still added to the compilation unit.
void addImportPackageDeclaration(DiagnosticPos pos,
                                 Set<Whitespace> ws,
                                 String orgName,
                                 List<String> nameComps,
                                 String version,
                                 String alias) {
    List<BLangIdentifier> pkgNameComps = new ArrayList<>();
    nameComps.forEach(e -> pkgNameComps.add((BLangIdentifier) this.createIdentifier(e)));
    BLangIdentifier versionNode = (BLangIdentifier) this.createIdentifier(version);
    BLangIdentifier aliasNode = (alias != null && !alias.isEmpty()) ?
            (BLangIdentifier) this.createIdentifier(alias) :
            pkgNameComps.get(pkgNameComps.size() - 1);
    BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
    importDcl.pos = pos;
    importDcl.addWS(ws);
    importDcl.pkgNameComps = pkgNameComps;
    importDcl.version = versionNode;
    importDcl.orgName = (BLangIdentifier) this.createIdentifier(orgName);
    importDcl.alias = aliasNode;
    this.compUnit.addTopLevelNode(importDcl);
    if (this.imports.contains(importDcl)) {
        this.dlog.warning(pos, DiagnosticCode.REDECLARED_IMPORT_MODULE, importDcl.getQualifiedPackageName());
    } else {
        this.imports.add(importDcl);
    }
}
// Builds a simple variable node with no type annotation; the initializer, if
// present, is taken from the expression stack.
private VariableNode generateBasicVarNodeWithoutType(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                                     boolean isExpressionAvailable) {
    BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    var.pos = pos;
    IdentifierNode name = this.createIdentifier(identifier);
    var.setName(name);
    var.addWS(ws);
    if (isExpressionAvailable) {
        var.setInitialExpression(this.exprNodeStack.pop());
    }
    return var;
}
// Builds a constant node: optional explicit type from the type stack, value
// from the expression stack.
private LiteralNode generateConstantNode(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                         boolean isTypeAvailable) {
    BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode();
    constantNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.value = identifier;
    constantNode.setName(name);
    constantNode.addWS(ws);
    if (isTypeAvailable) {
        constantNode.setTypeNode(this.typeNodeStack.pop());
    }
    constantNode.setValue(this.exprNodeStack.pop());
    return constantNode;
}
// Convenience overload: the variable is not declared with `var`.
private VariableNode generateBasicVarNode(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                          boolean isExpressionAvailable) {
    return generateBasicVarNode(pos, ws, identifier, false, isExpressionAvailable);
}
// Builds a simple variable node. `var`-declared variables carry no type node;
// otherwise the declared type is popped from the type stack. The initializer,
// if present, comes from the expression stack.
private VariableNode generateBasicVarNode(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                          boolean isDeclaredWithVar, boolean isExpressionAvailable) {
    BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    var.pos = pos;
    IdentifierNode name = this.createIdentifier(identifier);
    var.setName(name);
    var.addWS(ws);
    if (isDeclaredWithVar) {
        var.isDeclaredWithVar = true;
    } else {
        var.setTypeNode(this.typeNodeStack.pop());
    }
    if (isExpressionAvailable) {
        var.setInitialExpression(this.exprNodeStack.pop());
    }
    return var;
}
// Completes a module-level constant declaration and adds it to the compilation
// unit. When the constant's value is a literal, an anonymous public finite
// type containing just that literal is generated and associated with the
// constant (so the constant can later act as a singleton type).
void addConstant(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean isPublic,
                 boolean isTypeAvailable) {
    BLangConstant constantNode = (BLangConstant) this.generateConstantNode(pos, ws, identifier, isTypeAvailable);
    attachAnnotations(constantNode);
    constantNode.flagSet.add(Flag.CONSTANT);
    if (isPublic) {
        constantNode.flagSet.add(Flag.PUBLIC);
    }
    attachMarkdownDocumentations(constantNode);
    attachDeprecatedNode(constantNode);
    this.compUnit.addTopLevelNode(constantNode);
    if (((BLangExpression) constantNode.value).getKind() == NodeKind.LITERAL) {
        // Clone the literal into a one-member finite type for the constant.
        BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
        literal.setValue(((BLangLiteral) constantNode.value).value);
        literal.typeTag = ((BLangLiteral) constantNode.value).typeTag;
        BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        finiteTypeNode.valueSpace.add(literal);
        BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
        String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
        IdentifierNode anonTypeGenName = createIdentifier(genName);
        typeDef.setName(anonTypeGenName);
        typeDef.flagSet.add(Flag.PUBLIC);
        typeDef.typeNode = finiteTypeNode;
        typeDef.pos = pos;
        constantNode.associatedTypeDefinition = typeDef;
    }
}
// Completes a module-level variable declaration (optionally public/final/
// listener), attaches annotations and documentation, and registers it on the
// compilation unit.
void addGlobalVariable(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean isPublic, boolean isFinal,
                       boolean isDeclaredWithVar, boolean isExpressionAvailable, boolean isListenerVar) {
    BLangVariable var = (BLangVariable) this.generateBasicVarNode(pos, ws, identifier, isDeclaredWithVar,
            isExpressionAvailable);
    if (isPublic) {
        var.flagSet.add(Flag.PUBLIC);
    }
    if (isFinal) {
        var.flagSet.add(Flag.FINAL);
    }
    if (isListenerVar) {
        var.flagSet.add(Flag.LISTENER);
    }
    attachAnnotations(var);
    attachMarkdownDocumentations(var);
    attachDeprecatedNode(var);
    this.compUnit.addTopLevelNode(var);
}
// Begins a record type: pushes the type node and opens a field list.
void startRecordType() {
    BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
    typeNodeStack.push(recordTypeNode);
    startVarList();
}
// Begins an object type: pushes the type node and opens both the field list
// and the field-block whitespace collector.
void startObjectType() {
    BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
    typeNodeStack.push(objectTypeNode);
    startVarList();
    startFieldBlockList();
}
// Completes an object type. Named objects are pushed back onto the type stack;
// anonymous objects get a generated public type definition on the compilation
// unit and a user-defined type reference to it is pushed instead.
void addObjectType(DiagnosticPos pos, Set<Whitespace> ws, boolean isFieldAnalyseRequired, boolean isAnonymous,
                   boolean isAbstract, boolean isClient, boolean isService) {
    BLangObjectTypeNode objectTypeNode = populateObjectTypeNode(pos, ws, isAnonymous);
    objectTypeNode.addWS(this.objectFieldBlockWs.pop());
    objectTypeNode.isFieldAnalyseRequired = isFieldAnalyseRequired;
    if (isAbstract) {
        objectTypeNode.flagSet.add(Flag.ABSTRACT);
    }
    if (isClient) {
        objectTypeNode.flagSet.add(Flag.CLIENT);
    }
    if (isService) {
        objectTypeNode.flagSet.add(Flag.SERVICE);
    }
    if (!isAnonymous) {
        addType(objectTypeNode);
        return;
    }
    // Anonymous object: materialize a named type definition and reference it.
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
    IdentifierNode anonTypeGenName = createIdentifier(genName);
    typeDef.setName(anonTypeGenName);
    typeDef.flagSet.add(Flag.PUBLIC);
    typeDef.typeNode = objectTypeNode;
    typeDef.pos = pos;
    this.compUnit.addTopLevelNode(typeDef);
    addType(createUserDefinedType(pos, ws, (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name));
}
// Pops the in-progress object type node, stamps position/whitespace/anonymity,
// and moves the collected variable list onto it as object fields.
private BLangObjectTypeNode populateObjectTypeNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isAnonymous) {
    BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeNodeStack.pop();
    objectTypeNode.pos = pos;
    objectTypeNode.addWS(ws);
    objectTypeNode.isAnonymous = isAnonymous;
    this.varListStack.pop().forEach(variableNode -> {
        objectTypeNode.addField((SimpleVariableNode) variableNode);
    });
    return objectTypeNode;
}
// Opens a fresh whitespace collector for an object's field block.
void startFieldBlockList() {
    this.objectFieldBlockWs.push(new TreeSet<>());
}
// Stashes the finite type's whitespace for use when the type definition ends.
void endFiniteType(Set<Whitespace> ws) {
    finiteTypeWsStack.push(ws);
}
// Completes a type definition. Collects all pending type nodes into a union
// (flattening nested unions) and any pending value-space expressions into a
// finite type. When both a finite type and other members exist, the finite
// type is hoisted into its own generated public type definition and referenced
// from the union. A single resulting member is unwrapped from the union.
void endTypeDefinition(DiagnosticPos pos, Set<Whitespace> ws, String identifier, DiagnosticPos identifierPos,
                       boolean publicType) {
    BLangTypeDefinition typeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    BLangIdentifier identifierNode = (BLangIdentifier) this.createIdentifier(identifier);
    identifierNode.pos = identifierPos;
    typeDefinition.setName(identifierNode);
    if (publicType) {
        typeDefinition.flagSet.add(Flag.PUBLIC);
    }
    // Drain every pending type node into one flat union.
    BLangUnionTypeNode members = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    while (!typeNodeStack.isEmpty()) {
        BLangType memberType = (BLangType) typeNodeStack.pop();
        if (memberType.getKind() == NodeKind.UNION_TYPE_NODE) {
            members.memberTypeNodes.addAll(((BLangUnionTypeNode) memberType).memberTypeNodes);
            members.addWS(memberType.getWS());
        } else {
            members.memberTypeNodes.add(memberType);
        }
    }
    if (!exprNodeStack.isEmpty()) {
        // Pending expressions form the value space of a finite type.
        BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        finiteTypeNode.addWS(finiteTypeWsStack.pop());
        while (!exprNodeStack.isEmpty()) {
            finiteTypeNode.valueSpace.add((BLangExpression) exprNodeStack.pop());
        }
        // Popping reversed the source order; restore it.
        Collections.reverse(finiteTypeNode.valueSpace);
        if (!members.memberTypeNodes.isEmpty()) {
            // Mixed finite + other members: hoist the finite type into an
            // anonymous type definition and reference it from the union.
            BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
            String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
            IdentifierNode anonTypeGenName = createIdentifier(genName);
            typeDef.setName(anonTypeGenName);
            typeDef.flagSet.add(Flag.PUBLIC);
            typeDef.typeNode = finiteTypeNode;
            typeDef.pos = pos;
            this.compUnit.addTopLevelNode(typeDef);
            members.memberTypeNodes.add(createUserDefinedType(pos, ws,
                    (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name));
        } else {
            members.memberTypeNodes.add(finiteTypeNode);
        }
    }
    if (members.memberTypeNodes.isEmpty()) {
        typeDefinition.typeNode = null;
    } else if (members.memberTypeNodes.size() == 1) {
        // A single member needs no union wrapper.
        BLangType[] memberArray = new BLangType[1];
        members.memberTypeNodes.toArray(memberArray);
        typeDefinition.typeNode = memberArray[0];
    } else {
        typeDefinition.typeNode = members;
    }
    if (finiteTypeWsStack.size() > 0) {
        typeDefinition.addWS(finiteTypeWsStack.pop());
    }
    typeDefinition.pos = pos;
    typeDefinition.addWS(ws);
    Collections.reverse(markdownDocumentationStack);
    attachMarkdownDocumentations(typeDefinition);
    attachDeprecatedNode(typeDefinition);
    attachAnnotations(typeDefinition);
    this.compUnit.addTopLevelNode(typeDefinition);
}
// Completes a function declared inside an object body: applies visibility and
// remote/resource/native flags, marks body-less non-native functions as
// interface functions, attaches annotations/docs, and registers the function
// on the enclosing object type. The object's `__init`-style function is stored
// as the object's initializer rather than a regular member function.
void endObjectAttachedFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, boolean publicFunc, boolean privateFunc,
                                  boolean remoteFunc, boolean resourceFunc, boolean nativeFunc, boolean bodyExists,
                                  boolean markdownDocPresent, boolean deprecatedDocPresent, int annCount) {
    BLangFunction function = (BLangFunction) this.invokableNodeStack.pop();
    function.pos = pos;
    function.addWS(ws);
    function.addWS(this.invocationWsStack.pop());
    function.flagSet.add(Flag.ATTACHED);
    if (publicFunc) {
        function.flagSet.add(Flag.PUBLIC);
    } else if (privateFunc) {
        function.flagSet.add(Flag.PRIVATE);
    }
    if (remoteFunc) {
        function.flagSet.add(Flag.REMOTE);
    }
    if (resourceFunc) {
        function.flagSet.add(Flag.RESOURCE);
    }
    if (nativeFunc) {
        function.flagSet.add(Flag.NATIVE);
    }
    if (!bodyExists) {
        function.body = null;
        if (!nativeFunc) {
            // No body and not native => abstract-object interface function.
            function.flagSet.add(Flag.INTERFACE);
            function.interfaceFunction = true;
        }
    }
    function.attachedFunction = true;
    attachAnnotations(function, annCount);
    if (markdownDocPresent) {
        attachMarkdownDocumentations(function);
    }
    if (deprecatedDocPresent) {
        attachDeprecatedNode(function);
    }
    if (!function.deprecatedAttachments.isEmpty()) {
        function.flagSet.add(Flag.DEPRECATED);
    }
    BLangObjectTypeNode objectNode = (BLangObjectTypeNode) this.typeNodeStack.peek();
    if (Names.OBJECT_INIT_SUFFIX.value.equals(function.name.value)) {
        function.objInitFunction = true;
        objectNode.initFunction = function;
        return;
    }
    objectNode.addFunction(function);
}
// Completes a function defined outside an object body but attached to it
// (ObjectName.func syntax): synthesizes a `self` receiver typed to the object
// and registers the function at top level of the compilation unit.
void endObjectOuterFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, boolean publicFunc, boolean remoteFunc,
                               boolean nativeFunc, boolean bodyExists, String objectName) {
    BLangFunction function = (BLangFunction) this.invokableNodeStack.pop();
    function.pos = pos;
    function.addWS(ws);
    function.addWS(invocationWsStack.pop());
    if (publicFunc) {
        function.flagSet.add(Flag.PUBLIC);
    }
    if (remoteFunc) {
        function.flagSet.add(Flag.REMOTE);
    }
    if (nativeFunc) {
        function.flagSet.add(Flag.NATIVE);
    }
    if (!bodyExists) {
        function.body = null;
    }
    // Receiver: `self` of user-defined type <objectName>.
    TypeNode objectType = createUserDefinedType(pos, ws, (BLangIdentifier) TreeBuilder.createIdentifierNode(),
            (BLangIdentifier) createIdentifier(objectName));
    BLangSimpleVariable receiver = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    receiver.pos = pos;
    IdentifierNode name = createIdentifier(Names.SELF.getValue());
    receiver.setName(name);
    receiver.setTypeNode(objectType);
    function.receiver = receiver;
    function.flagSet.add(Flag.ATTACHED);
    function.attachedOuterFunction = true;
    if (!function.deprecatedAttachments.isEmpty()) {
        function.flagSet.add(Flag.DEPRECATED);
    }
    this.compUnit.addTopLevelNode(function);
}
// Begins an annotation definition, consuming any pending annotations and
// documentation attachments into the new node.
void startAnnotationDef(DiagnosticPos pos) {
    BLangAnnotation annotNode = (BLangAnnotation) TreeBuilder.createAnnotationNode();
    annotNode.pos = pos;
    attachAnnotations(annotNode);
    attachMarkdownDocumentations(annotNode);
    attachDeprecatedNode(annotNode);
    this.annotationStack.add(annotNode);
}
// Completes an annotation definition: names it, applies visibility, drains
// every pending attach point, takes the optional attached type, and registers
// the annotation on the compilation unit.
void endAnnotationDef(Set<Whitespace> ws, String identifier, DiagnosticPos identifierPos, boolean publicAnnotation,
                      boolean isTypeAttached) {
    BLangAnnotation annotationNode = (BLangAnnotation) this.annotationStack.pop();
    annotationNode.addWS(ws);
    BLangIdentifier identifierNode = (BLangIdentifier) this.createIdentifier(identifier);
    identifierNode.pos = identifierPos;
    annotationNode.setName(identifierNode);
    if (publicAnnotation) {
        annotationNode.flagSet.add(Flag.PUBLIC);
    }
    while (!attachPointStack.empty()) {
        annotationNode.attachPoints.add(attachPointStack.pop());
    }
    if (isTypeAttached) {
        annotationNode.typeNode = (BLangType) this.typeNodeStack.pop();
    }
    this.compUnit.addTopLevelNode(annotationNode);
}
// Begins a markdown documentation attachment; line/parameter callbacks fill it.
void startMarkdownDocumentationString(DiagnosticPos currentPos) {
    BLangMarkdownDocumentation markdownDocumentationNode =
            (BLangMarkdownDocumentation) TreeBuilder.createMarkdownDocumentationNode();
    markdownDocumentationNode.pos = currentPos;
    markdownDocumentationStack.push(markdownDocumentationNode);
}
// Attaches the closing whitespace to the documentation node being built.
void endMarkdownDocumentationString(Set<Whitespace> ws) {
    markdownDocumentationStack.peek().addWS(ws);
}
// Attaches one documentation line's whitespace to the node being built.
void endMarkDownDocumentLine(Set<Whitespace> ws) {
    markdownDocumentationStack.peek().addWS(ws);
}
// Appends one line of free-form documentation text to the current node.
void endMarkdownDocumentationText(DiagnosticPos pos, Set<Whitespace> ws, String text) {
    MarkdownDocumentationNode markdownDocumentationNode = markdownDocumentationStack.peek();
    BLangMarkdownDocumentationLine documentationDescription =
            (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
    documentationDescription.text = text;
    documentationDescription.pos = pos;
    documentationDescription.addWS(ws);
    markdownDocumentationNode.addDocumentationLine(documentationDescription);
}
// Attaches one parameter-documentation line's whitespace to the current node.
void endParameterDocumentationLine(Set<Whitespace> ws) {
    markdownDocumentationStack.peek().addWS(ws);
}
// Adds a `# + name - description` parameter entry to the current documentation node.
void endParameterDocumentation(DiagnosticPos pos, Set<Whitespace> ws, String parameterName, String description) {
    MarkdownDocumentationNode markdownDocumentationNode = markdownDocumentationStack.peek();
    BLangMarkdownParameterDocumentation parameterDocumentationNode =
            (BLangMarkdownParameterDocumentation) TreeBuilder.createMarkdownParameterDocumentationNode();
    parameterDocumentationNode.parameterName = (BLangIdentifier) createIdentifier(parameterName);
    parameterDocumentationNode.pos = pos;
    parameterDocumentationNode.addWS(ws);
    parameterDocumentationNode.addParameterDocumentationLine(description);
    markdownDocumentationNode.addParameter(parameterDocumentationNode);
}
// Appends a continuation line to the most recently added parameter's documentation.
void endParameterDocumentationDescription(Set<Whitespace> ws, String description) {
    MarkdownDocumentationNode markdownDocumentationNode = markdownDocumentationStack.peek();
    BLangMarkdownParameterDocumentation parameterDocumentation =
            markdownDocumentationNode.getParameters().getLast();
    parameterDocumentation.addWS(ws);
    parameterDocumentation.addParameterDocumentationLine(description);
}
// Sets the `# + return - description` entry on the current documentation node.
void endReturnParameterDocumentation(DiagnosticPos pos, Set<Whitespace> ws, String description) {
    MarkdownDocumentationNode markdownDocumentationNode = markdownDocumentationStack.peek();
    BLangMarkdownReturnParameterDocumentation returnParameterDocumentation =
            (BLangMarkdownReturnParameterDocumentation) TreeBuilder
                    .createMarkdownReturnParameterDocumentationNode();
    returnParameterDocumentation.pos = pos;
    returnParameterDocumentation.addWS(ws);
    returnParameterDocumentation.addReturnParameterDocumentationLine(description);
    markdownDocumentationNode.setReturnParameter(returnParameterDocumentation);
}
// Appends a continuation description line to the already-attached return-value documentation.
void endReturnParameterDocumentationDescription(Set<Whitespace> ws, String description) {
    MarkdownDocumentationNode markdownDocumentationNode = markdownDocumentationStack.peek();
    BLangMarkdownReturnParameterDocumentation returnParameter = markdownDocumentationNode.getReturnParameter();
    returnParameter.addWS(ws);
    returnParameter.addReturnParameterDocumentationLine(description);
}
// Builds a deprecated-documentation attachment node carrying the raw text and
// pushes it onto the deprecated-attachment stack for a later attach step.
void createDeprecatedNode(DiagnosticPos pos,
                          Set<Whitespace> ws,
                          String content) {
    BLangDeprecatedNode deprecated = (BLangDeprecatedNode) TreeBuilder.createDeprecatedNode();
    deprecated.documentationText = content;
    deprecated.pos = pos;
    deprecated.addWS(ws);
    deprecatedAttachmentStack.push(deprecated);
}
// Begins an annotation attachment: creates the node at the given position and
// pushes it so later callbacks can fill in its name/expression.
void startAnnotationAttachment(DiagnosticPos currentPos) {
    BLangAnnotationAttachment attachment =
            (BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
    attachment.pos = currentPos;
    annotAttachmentStack.push(attachment);
}
// Completes the annotation attachment on top of the stack: sets its (possibly package-qualified)
// name from the popped name reference and, when present, its value expression.
// When `popAnnAttachment` is true the finished attachment is removed from the stack.
void setAnnotationAttachmentName(Set<Whitespace> ws, boolean hasExpr, DiagnosticPos currentPos,
                                 boolean popAnnAttachment) {
    BLangNameReference nameReference = nameReferenceStack.pop();
    BLangAnnotationAttachment bLangAnnotationAttachment = (BLangAnnotationAttachment) annotAttachmentStack.peek();
    bLangAnnotationAttachment.pos = currentPos;
    bLangAnnotationAttachment.addWS(nameReference.ws);
    bLangAnnotationAttachment.addWS(ws);
    bLangAnnotationAttachment.setAnnotationName(nameReference.name);
    bLangAnnotationAttachment.setPackageAlias(nameReference.pkgAlias);
    if (hasExpr) {
        // The annotation value expression was pushed last, so it is on top.
        bLangAnnotationAttachment.setExpression(exprNodeStack.pop());
    }
    if (popAnnAttachment) {
        annotAttachmentStack.pop();
    }
}
// Moves every pending annotation attachment onto the given node and empties the stack.
private void attachAnnotations(AnnotatableNode annotatableNode) {
    for (AnnotationAttachmentNode attachment : annotAttachmentStack) {
        annotatableNode.addAnnotationAttachment(attachment);
    }
    annotAttachmentStack.clear();
}
// Pops the pending markdown documentation (if any) and attaches it to the given node.
private void attachMarkdownDocumentations(DocumentableNode documentableNode) {
    if (!markdownDocumentationStack.empty()) {
        documentableNode.setMarkdownDocumentationAttachment(markdownDocumentationStack.pop());
    }
}
// Pops the pending deprecated attachment (if any) and adds it to the given node.
private void attachDeprecatedNode(DocumentableNode documentableNode) {
    if (!deprecatedAttachmentStack.empty()) {
        documentableNode.addDeprecatedAttachment(deprecatedAttachmentStack.pop());
    }
}
// Attaches up to `count` of the most recently pushed annotation attachments to the node,
// restoring their original (push) order before attaching.
private void attachAnnotations(AnnotatableNode annotatableNode, int count) {
    if (count == 0 || annotAttachmentStack.empty()) {
        return;
    }
    List<AnnotationAttachmentNode> tempAnnotAttachments = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        if (annotAttachmentStack.empty()) {
            // Fewer attachments than requested; take what is available.
            break;
        }
        tempAnnotAttachments.add(annotAttachmentStack.pop());
    }
    // Popping yields reverse order; flip back to source order before attaching.
    Collections.reverse(tempAnnotAttachments);
    tempAnnotAttachments.forEach(annotatableNode::addAnnotationAttachment);
}
// Builds an assignment statement from the two topmost expressions
// (RHS was pushed last, LHS below it) and appends it to the current block.
void addAssignmentStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    ExpressionNode rhs = exprNodeStack.pop();
    ExpressionNode lhs = exprNodeStack.pop();
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = pos;
    assignment.addWS(ws);
    assignment.setExpression(rhs);
    assignment.varRef = ((BLangVariableReference) lhs);
    addStmtToCurrentBlock(assignment);
}
// Builds a tuple-destructuring statement (`(a, b) = expr` / `var (a, b) = expr`) from the two
// topmost expressions (RHS pushed last, tuple var-ref below it) and appends it to the current block.
// Fix: the original called stmt.addWS(ws) twice with the same set; once is sufficient and matches
// the sibling addRecordDestructuringStatement.
void addTupleDestructuringStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean varDeclaration) {
    BLangTupleDestructure stmt = (BLangTupleDestructure) TreeBuilder.createTupleDestructureStatementNode();
    stmt.pos = pos;
    stmt.addWS(ws);
    stmt.setDeclaredWithVar(varDeclaration);
    stmt.expr = (BLangExpression) exprNodeStack.pop();
    stmt.varRef = (BLangTupleVarRef) exprNodeStack.pop();
    addStmtToCurrentBlock(stmt);
}
// Builds a record-destructuring statement from the two topmost expressions
// (RHS pushed last, record var-ref below it) and appends it to the current block.
public void addRecordDestructuringStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean declaredWithVar) {
    BLangRecordDestructure stmt = (BLangRecordDestructure) TreeBuilder.createRecordDestructureStatementNode();
    stmt.pos = pos;
    stmt.addWS(ws);
    stmt.setDeclaredWithVar(declaredWithVar);
    stmt.expr = (BLangExpression) exprNodeStack.pop();
    stmt.varRef = (BLangRecordVarRef) exprNodeStack.pop();
    addStmtToCurrentBlock(stmt);
}
// Opens a new block scope for the body of a foreach statement.
void startForeachStatement() {
    startBlock();
}
// Builds a compound assignment (e.g. `x += e`) from the two topmost expressions
// (RHS pushed last, variable ref below it) and appends it to the current block.
void addCompoundAssignmentStatement(DiagnosticPos pos, Set<Whitespace> ws, String operator) {
    BLangCompoundAssignment assignmentNode =
            (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    assignmentNode.setExpression(exprNodeStack.pop());
    assignmentNode.setVariable((BLangVariableReference) exprNodeStack.pop());
    assignmentNode.pos = pos;
    assignmentNode.addWS(ws);
    // Operator whitespace was stashed earlier by addCompoundOperator.
    assignmentNode.addWS(this.operatorWs.pop());
    assignmentNode.opKind = OperatorKind.valueFrom(operator);
    addStmtToCurrentBlock(assignmentNode);
}
// Stashes the compound operator's whitespace for consumption by addCompoundAssignmentStatement.
void addCompoundOperator(Set<Whitespace> ws) {
    this.operatorWs.push(ws);
}
// Completes a foreach statement: collection expression from the expr stack, loop variables
// from the expr-list stack, and the body block opened by startForeachStatement.
void addForeachStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
    foreach.addWS(ws);
    foreach.pos = pos;
    foreach.setCollection(exprNodeStack.pop());
    foreach.addWS(commaWsStack.pop());
    List<ExpressionNode> lExprList = exprNodeListStack.pop();
    lExprList.forEach(expressionNode -> foreach.addVariable((BLangVariableReference) expressionNode));
    BLangBlockStmt foreachBlock = (BLangBlockStmt) this.blockNodeStack.pop();
    foreachBlock.pos = pos;
    foreach.setBody(foreachBlock);
    addStmtToCurrentBlock(foreach);
}
// Opens a new block scope for the body of a while statement.
void startWhileStmt() {
    startBlock();
}
// Completes a while statement: condition from the expr stack, body from the
// block opened by startWhileStmt; appends the statement to the current block.
void addWhileStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangWhile whileStmt = (BLangWhile) TreeBuilder.createWhileNode();
    whileStmt.setCondition(exprNodeStack.pop());
    whileStmt.pos = pos;
    whileStmt.addWS(ws);
    BLangBlockStmt body = (BLangBlockStmt) this.blockNodeStack.pop();
    body.pos = pos;
    whileStmt.setBody(body);
    addStmtToCurrentBlock(whileStmt);
}
// Opens a new block scope for the body of a lock statement.
void startLockStmt() {
    startBlock();
}
// Completes a lock statement: its body is the block opened by startLockStmt;
// appends the statement to the current block.
void addLockStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangLock lockStmt = (BLangLock) TreeBuilder.createLockNode();
    lockStmt.pos = pos;
    lockStmt.addWS(ws);
    BLangBlockStmt body = (BLangBlockStmt) this.blockNodeStack.pop();
    body.pos = pos;
    lockStmt.setBody(body);
    addStmtToCurrentBlock(lockStmt);
}
// Appends a `continue` statement to the current block.
public void addContinueStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangContinue continueStmt = (BLangContinue) TreeBuilder.createContinueNode();
    continueStmt.pos = pos;
    continueStmt.addWS(ws);
    addStmtToCurrentBlock(continueStmt);
}
// Appends a `break` statement to the current block.
void addBreakStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangBreak breakStmt = (BLangBreak) TreeBuilder.createBreakNode();
    breakStmt.pos = pos;
    breakStmt.addWS(ws);
    addStmtToCurrentBlock(breakStmt);
}
// Appends a `return` statement to the current block. When there is no explicit expression,
// a nil literal is synthesized so the node always has a non-null expr.
void addReturnStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean exprAvailable) {
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.pos = pos;
    retStmt.addWS(ws);
    if (exprAvailable) {
        retStmt.expr = (BLangExpression) this.exprNodeStack.pop();
    } else {
        // Bare `return;` — model it as `return ();`.
        BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        nilLiteral.pos = pos;
        nilLiteral.value = Names.NIL_VALUE;
        nilLiteral.typeTag = TypeTags.NIL;
        retStmt.expr = nilLiteral;
    }
    addStmtToCurrentBlock(retStmt);
}
// Begins a transaction statement: pushes a fresh transaction node and opens its body block.
void startTransactionStmt() {
    transactionNodeStack.push(TreeBuilder.createTransactionNode());
    startBlock();
}
// Closes the transaction body block and sets it on the pending transaction node.
void addTransactionBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    TransactionNode transactionNode = transactionNodeStack.peek();
    BLangBlockStmt transactionBlock = (BLangBlockStmt) this.blockNodeStack.pop();
    transactionBlock.pos = pos;
    transactionNode.addWS(ws);
    transactionNode.setTransactionBody(transactionBlock);
}
// Attaches whitespace of the transaction property-initializer list to the pending transaction.
void endTransactionPropertyInitStatementList(Set<Whitespace> ws) {
    TransactionNode transactionNode = transactionNodeStack.peek();
    transactionNode.addWS(ws);
}
// Opens a new block scope for a transaction's onretry clause.
void startOnretryBlock() {
    startBlock();
}
// Closes the onretry block and sets it on the pending transaction node.
void addOnretryBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    TransactionNode transactionNode = transactionNodeStack.peek();
    BLangBlockStmt onretryBlock = (BLangBlockStmt) this.blockNodeStack.pop();
    onretryBlock.pos = pos;
    transactionNode.addWS(ws);
    transactionNode.setOnRetryBody(onretryBlock);
}
// Finalizes the transaction statement and appends it to the current block. On the first
// transaction seen per compilation, records the TRANSACTION_EXISTS compiler option and
// injects an implicit import of the transaction package.
void endTransactionStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTransaction transaction = (BLangTransaction) transactionNodeStack.pop();
    transaction.pos = pos;
    transaction.addWS(ws);
    addStmtToCurrentBlock(transaction);
    String value = compilerOptions.get(CompilerOptionName.TRANSACTION_EXISTS);
    if (value != null) {
        // Import was already added for an earlier transaction; nothing more to do.
        return;
    }
    compilerOptions.put(CompilerOptionName.TRANSACTION_EXISTS, "true");
    List<String> nameComps = getPackageNameComps(Names.TRANSACTION_PACKAGE.value);
    addImportPackageDeclaration(pos, null, Names.TRANSACTION_ORG.value, nameComps, Names.DEFAULT_VERSION.value,
            Names.DOT.value + nameComps.get(nameComps.size() - 1));
}
// Appends an `abort` statement to the current block.
void addAbortStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangAbort abortStmt = (BLangAbort) TreeBuilder.createAbortNode();
    abortStmt.pos = pos;
    abortStmt.addWS(ws);
    addStmtToCurrentBlock(abortStmt);
}
// Appends a `done` statement to the current block.
void addDoneStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangDone doneStmt = (BLangDone) TreeBuilder.createDoneNode();
    doneStmt.pos = pos;
    doneStmt.addWS(ws);
    addStmtToCurrentBlock(doneStmt);
}
// Appends a `retry` statement to the current block.
void addRetryStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangRetry retryStmt = (BLangRetry) TreeBuilder.createRetryNode();
    retryStmt.pos = pos;
    retryStmt.addWS(ws);
    addStmtToCurrentBlock(retryStmt);
}
// Sets the retry-count expression on the pending transaction from the top of the expr stack.
void addRetryCountExpression(Set<Whitespace> ws) {
    BLangTransaction transaction = (BLangTransaction) transactionNodeStack.peek();
    transaction.addWS(ws);
    transaction.retryCount = (BLangExpression) exprNodeStack.pop();
}
// Sets the on-commit handler expression on the pending transaction from the top of the expr stack.
void addCommittedBlock(Set<Whitespace> ws) {
    BLangTransaction transaction = (BLangTransaction) transactionNodeStack.peek();
    transaction.addWS(ws);
    transaction.onCommitFunction = (BLangExpression) exprNodeStack.pop();
}
// Sets the on-abort handler expression on the pending transaction from the top of the expr stack.
void addAbortedBlock(Set<Whitespace> ws) {
    BLangTransaction transaction = (BLangTransaction) transactionNodeStack.peek();
    transaction.addWS(ws);
    transaction.onAbortFunction = (BLangExpression) exprNodeStack.pop();
}
// Begins an if statement: pushes a fresh if node and opens its `then` block.
void startIfElseNode(DiagnosticPos pos) {
    BLangIf ifNode = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifNode.pos = pos;
    ifElseStatementStack.push(ifNode);
    startBlock();
}
// Completes the `if` part: condition from the expr stack, body from the block stack.
void addIfBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    IfNode ifNode = ifElseStatementStack.peek();
    ((BLangIf) ifNode).pos = pos;
    ifNode.addWS(ws);
    ifNode.setCondition(exprNodeStack.pop());
    ifNode.setBody(blockNodeStack.pop());
}
// Completes an `else if` clause and hangs it off the deepest else-less node of the
// enclosing if chain (else-if clauses nest as else statements).
void addElseIfBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    IfNode elseIfNode = ifElseStatementStack.pop();
    ((BLangIf) elseIfNode).pos = pos;
    elseIfNode.setCondition(exprNodeStack.pop());
    elseIfNode.setBody(blockNodeStack.pop());
    elseIfNode.addWS(ws);
    IfNode parentIfNode = ifElseStatementStack.peek();
    // Walk to the end of the existing else-if chain.
    while (parentIfNode.getElseStatement() != null) {
        parentIfNode = (IfNode) parentIfNode.getElseStatement();
    }
    parentIfNode.setElseStatement(elseIfNode);
}
// Attaches the final `else` block to the deepest else-less node of the if chain.
void addElseBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    IfNode ifNode = ifElseStatementStack.peek();
    // Walk to the end of the existing else-if chain.
    while (ifNode.getElseStatement() != null) {
        ifNode = (IfNode) ifNode.getElseStatement();
    }
    BlockNode elseBlock = blockNodeStack.pop();
    elseBlock.addWS(ws);
    ((BLangBlockStmt) elseBlock).pos = pos;
    ifNode.setElseStatement(elseBlock);
}
// Pops the finished if statement and appends it to the current block.
void endIfElseNode(Set<Whitespace> ws) {
    IfNode ifNode = ifElseStatementStack.pop();
    ifNode.addWS(ws);
    addStmtToCurrentBlock(ifNode);
}
// Begins a match statement; the deque is lazily initialized on first use.
void createMatchNode(DiagnosticPos pos) {
    if (this.matchStmtStack == null) {
        this.matchStmtStack = new ArrayDeque<>();
    }
    BLangMatch matchStmt = (BLangMatch) TreeBuilder.createMatchStatement();
    matchStmt.pos = pos;
    // addFirst makes the deque behave as a stack of nested match statements.
    this.matchStmtStack.addFirst(matchStmt);
}
// Finalizes the innermost match statement with its subject expression and appends it to the block.
void completeMatchNode(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangMatch matchStmt = this.matchStmtStack.removeFirst();
    matchStmt.pos = pos;
    matchStmt.addWS(ws);
    matchStmt.expr = (BLangExpression) this.exprNodeStack.pop();
    addStmtToCurrentBlock(matchStmt);
}
// Opens a new block scope for a match pattern clause body.
void startMatchStmtPattern() {
    startBlock();
}
// Adds a static (literal) binding-pattern clause to the innermost match statement.
void addMatchStmtStaticBindingPattern(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangMatchStaticBindingPatternClause patternClause =
            (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern();
    patternClause.pos = pos;
    patternClause.addWS(ws);
    patternClause.literal = (BLangExpression) this.exprNodeStack.pop();
    patternClause.body = (BLangBlockStmt) blockNodeStack.pop();
    patternClause.body.pos = pos;
    this.matchStmtStack.peekFirst().patternClauses.add(patternClause);
}
// Adds a structured binding-pattern clause (with optional type guard) to the innermost match.
void addMatchStmtStructuredBindingPattern(DiagnosticPos pos, Set<Whitespace> ws, boolean isTypeGuardPresent) {
    BLangMatchStructuredBindingPatternClause patternClause =
            (BLangMatchStructuredBindingPatternClause) TreeBuilder.createMatchStatementStructuredBindingPattern();
    patternClause.pos = pos;
    patternClause.addWS(ws);
    patternClause.bindingPatternVariable = this.varStack.pop();
    patternClause.body = (BLangBlockStmt) blockNodeStack.pop();
    patternClause.body.pos = pos;
    if (isTypeGuardPresent) {
        // The guard expression was pushed after the clause body was parsed.
        patternClause.typeGuardExpr = (BLangExpression) exprNodeStack.pop();
    }
    this.matchStmtStack.peekFirst().patternClauses.add(patternClause);
}
// Builds a worker-send statement. For a channel send (hasKey), the key expression was pushed
// last, so the first pop is actually the key and the real payload sits below it.
void addWorkerSendStmt(DiagnosticPos pos, Set<Whitespace> ws, String workerName, boolean hasKey) {
    BLangWorkerSend workerSendNode = (BLangWorkerSend) TreeBuilder.createWorkerSendNode();
    workerSendNode.setWorkerName(this.createIdentifier(workerName));
    workerSendNode.expr = (BLangExpression) exprNodeStack.pop();
    workerSendNode.pos = pos;
    workerSendNode.addWS(ws);
    if (hasKey) {
        // Reinterpret: first pop was the key; the payload is the next expression.
        workerSendNode.keyExpr = workerSendNode.expr;
        workerSendNode.expr = (BLangExpression) exprNodeStack.pop();
        workerSendNode.isChannel = true;
    }
    addStmtToCurrentBlock(workerSendNode);
}
// Builds a worker-receive expression; for a channel receive (hasKey) the key is popped from the stack.
void addWorkerReceiveExpr(DiagnosticPos pos, Set<Whitespace> ws, String workerName, boolean hasKey) {
    BLangWorkerReceive workerReceiveExpr = (BLangWorkerReceive) TreeBuilder.createWorkerReceiveNode();
    workerReceiveExpr.setWorkerName(this.createIdentifier(workerName));
    workerReceiveExpr.pos = pos;
    workerReceiveExpr.addWS(ws);
    if (hasKey) {
        workerReceiveExpr.keyExpr = (BLangExpression) exprNodeStack.pop();
        workerReceiveExpr.isChannel = true;
    }
    addExpressionNode(workerReceiveExpr);
}
// Builds a worker-flush expression; workerName may be null for an unqualified flush.
void addWorkerFlushExpr(DiagnosticPos pos, Set<Whitespace> ws, String workerName) {
    BLangWorkerFlushExpr workerFlushExpr = TreeBuilder.createWorkerFlushExpressionNode();
    if (workerName != null) {
        workerFlushExpr.workerIdentifier = (BLangIdentifier) createIdentifier(workerName);
    }
    workerFlushExpr.pos = pos;
    workerFlushExpr.addWS(ws);
    addExpressionNode(workerFlushExpr);
}
// Builds a synchronous worker-send expression from the topmost expression and
// pushes it back onto the expression stack.
void addWorkerSendSyncExpr(DiagnosticPos pos, Set<Whitespace> ws, String workerName) {
    BLangWorkerSyncSendExpr syncSend = TreeBuilder.createWorkerSendSyncExprNode();
    syncSend.setWorkerName(this.createIdentifier(workerName));
    syncSend.expr = (BLangExpression) exprNodeStack.pop();
    syncSend.pos = pos;
    syncSend.addWS(ws);
    addExpressionNode(syncSend);
}
// Wraps the topmost expression in an expression statement and appends it to the current block.
void addExpressionStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangExpressionStmt statement = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
    statement.pos = pos;
    statement.addWS(ws);
    statement.expr = (BLangExpression) exprNodeStack.pop();
    addStmtToCurrentBlock(statement);
}
// Begins a service definition: creates the node, attaches pending annotations,
// markdown docs and deprecated attachments, and pushes it for endServiceDef.
void startServiceDef(DiagnosticPos pos) {
    BLangService serviceNode = (BLangService) TreeBuilder.createServiceNode();
    serviceNode.pos = pos;
    attachAnnotations(serviceNode);
    attachMarkdownDocumentations(serviceNode);
    attachDeprecatedNode(serviceNode);
    serviceNodeStack.push(serviceNode);
}
// Completes a service definition. Generates an anonymous name when none is given, creates the
// backing service type definition (popped from the type stack) as a top-level node, and either
// captures the attach expression (named service) or pushes a service-constructor expression
// (anonymous service value).
void endServiceDef(DiagnosticPos pos, Set<Whitespace> ws, String serviceName, DiagnosticPos identifierPos,
                   boolean isAnonServiceValue) {
    BLangService serviceNode = (BLangService) serviceNodeStack.pop();
    serviceNode.pos = pos;
    serviceNode.addWS(ws);
    serviceNode.isAnonymousServiceValue = isAnonServiceValue;
    if (serviceName == null) {
        // Unnamed service — synthesize a unique anonymous type name.
        serviceName = this.anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
        identifierPos = pos;
    }
    BLangIdentifier identifier = (BLangIdentifier) createIdentifier(serviceName);
    identifier.pos = identifierPos;
    serviceNode.setName(identifier);
    // The service's object type becomes its own top-level type definition.
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    typeDef.setName(identifier);
    typeDef.flagSet.add(Flag.SERVICE);
    typeDef.typeNode = (BLangType) this.typeNodeStack.pop();
    typeDef.pos = pos;
    this.compUnit.addTopLevelNode(typeDef);
    serviceNode.serviceTypeDefinition = typeDef;
    serviceNode.serviceUDT = createUserDefinedType(pos, ws, (BLangIdentifier) TreeBuilder.createIdentifierNode(),
            typeDef.name);
    this.compUnit.addTopLevelNode(serviceNode);
    if (!isAnonServiceValue) {
        // Named service: the listener/attach expression is on the expr stack.
        serviceNode.attachExpr = (BLangExpression) this.exprNodeStack.pop();
        return;
    }
    // Anonymous service value: expose it as a service-constructor expression.
    final BLangServiceConstructorExpr serviceConstNode = (BLangServiceConstructorExpr) TreeBuilder
            .createServiceConstructorNode();
    serviceConstNode.serviceNode = serviceNode;
    serviceConstNode.pos = pos;
    serviceConstNode.addWS(ws);
    addExpressionNode(serviceConstNode);
}
// Builds an XML qualified-name expression (prefix:localname) and pushes it onto the expr stack.
void createXMLQName(DiagnosticPos pos, Set<Whitespace> ws, String localname, String prefix) {
    BLangXMLQName qualifiedName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qualifiedName.pos = pos;
    qualifiedName.addWS(ws);
    qualifiedName.localname = (BLangIdentifier) createIdentifier(localname);
    qualifiedName.prefix = (BLangIdentifier) createIdentifier(prefix);
    addExpressionNode(qualifiedName);
}
// Builds an XML attribute from the two topmost expressions (value pushed last, name below it)
// and stashes it until the enclosing element is started.
void createXMLAttribute(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangXMLAttribute xmlAttribute = (BLangXMLAttribute) TreeBuilder.createXMLAttributeNode();
    xmlAttribute.value = (BLangXMLQuotedString) exprNodeStack.pop();
    xmlAttribute.name = (BLangExpression) exprNodeStack.pop();
    xmlAttribute.pos = pos;
    xmlAttribute.addWS(ws);
    xmlAttributeNodeStack.push(xmlAttribute);
}
// Attaches whitespace to the XML literal currently on top of the expression stack.
void attachXmlLiteralWS(Set<Whitespace> ws) {
    this.exprNodeStack.peek().addWS(ws);
}
// Begins an XML element literal: pops the start-tag name, adopts all stashed attributes,
// and pushes the (still open) element onto the expression stack.
void startXMLElement(DiagnosticPos pos, Set<Whitespace> ws, boolean isRoot) {
    BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    BLangExpression startTag = (BLangExpression) exprNodeStack.pop();
    xmlElement.addWS(ws);
    xmlElement.startTagName = startTag;
    xmlElement.pos = pos;
    xmlElement.isRoot = isRoot;
    xmlAttributeNodeStack.forEach(xmlElement::addAttribute);
    xmlAttributeNodeStack.clear();
    addExpressionNode(xmlElement);
}
// Closes the XML element currently under the top of the stack: the end-tag name was pushed
// last, so pop it first and then peek at the element it belongs to.
void endXMLElement(Set<Whitespace> ws) {
    BLangExpression endTag = (BLangExpression) exprNodeStack.pop();
    BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) exprNodeStack.peek();
    xmlElement.addWS(ws);
    xmlElement.endTagName = endTag;
}
// Builds a quoted XML string (attribute value) from interleaved text fragments and
// embedded expressions, and pushes it onto the expression stack.
void createXMLQuotedLiteral(DiagnosticPos pos,
                            Set<Whitespace> ws,
                            Stack<String> precedingTextFragments,
                            String endingText,
                            QuoteType quoteType) {
    List<BLangExpression> templateExprs =
            getExpressionsInTemplate(pos, ws, precedingTextFragments, endingText);
    BLangXMLQuotedString quotedString = (BLangXMLQuotedString) TreeBuilder.createXMLQuotedStringNode();
    quotedString.pos = pos;
    quotedString.quoteType = quoteType;
    quotedString.textFragments = templateExprs;
    addExpressionNode(quotedString);
}
// Pops a finished XML literal and adds it as a child of the element now on top of the stack.
void addChildToXMLElement(Set<Whitespace> ws) {
    XMLLiteralNode child = (XMLLiteralNode) exprNodeStack.pop();
    child.addWS(ws);
    BLangXMLElementLiteral parentXMLExpr = (BLangXMLElementLiteral) exprNodeStack.peek();
    parentXMLExpr.addChild(child);
}
// Builds an XML text literal from interleaved text fragments and embedded expressions
// and pushes it onto the expression stack.
void createXMLTextLiteral(DiagnosticPos pos,
                          Set<Whitespace> ws,
                          Stack<String> precedingTextFragments,
                          String endingText) {
    BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    xmlTextLiteral.textFragments =
            getExpressionsInTemplate(pos, ws, precedingTextFragments, endingText);
    xmlTextLiteral.pos = pos;
    addExpressionNode(xmlTextLiteral);
}
// Adds text fragments (and any embedded expressions) directly as children of the
// XML element currently on top of the expression stack.
void addXMLTextToElement(DiagnosticPos pos,
                         Set<Whitespace> ws,
                         Stack<String> precedingTextFragments,
                         String endingText) {
    List<BLangExpression> templateExprs =
            getExpressionsInTemplate(pos, ws, precedingTextFragments, endingText);
    BLangXMLElementLiteral parentElement = (BLangXMLElementLiteral) exprNodeStack.peek();
    templateExprs.forEach(parentElement::addChild);
}
// Builds an XML comment literal. Note the whitespace is attached to the comment node itself,
// not to the template fragments (hence the null ws passed to getExpressionsInTemplate).
void createXMLCommentLiteral(DiagnosticPos pos,
                             Set<Whitespace> ws,
                             Stack<String> precedingTextFragments,
                             String endingText) {
    BLangXMLCommentLiteral xmlCommentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode();
    xmlCommentLiteral.textFragments =
            getExpressionsInTemplate(pos, null, precedingTextFragments, endingText);
    xmlCommentLiteral.pos = pos;
    xmlCommentLiteral.addWS(ws);
    addExpressionNode(xmlCommentLiteral);
}
// Builds an XML processing-instruction literal. The data fragments are collected first;
// the target name is then pushed as a string literal and immediately popped into `target`.
void createXMLPILiteral(DiagnosticPos pos,
                        Set<Whitespace> ws,
                        String targetQName,
                        Stack<String> precedingTextFragments,
                        String endingText) {
    List<BLangExpression> dataExprs =
            getExpressionsInTemplate(pos, ws, precedingTextFragments, endingText);
    addLiteralValue(pos, ws, TypeTags.STRING, targetQName);
    BLangXMLProcInsLiteral xmlProcInsLiteral =
            (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode();
    xmlProcInsLiteral.pos = pos;
    xmlProcInsLiteral.dataFragments = dataExprs;
    xmlProcInsLiteral.target = (BLangLiteral) exprNodeStack.pop();
    addExpressionNode(xmlProcInsLiteral);
}
// Builds an `xmlns` declaration. The namespace URI is pushed as a string literal (consuming
// the second whitespace token) and popped back. Top-level declarations go straight into the
// compilation unit; local ones are wrapped in an XMLNS statement for the current block.
void addXMLNSDeclaration(DiagnosticPos pos,
                         Set<Whitespace> ws,
                         String namespaceUri,
                         String prefix,
                         boolean isTopLevel) {
    BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    BLangIdentifier prefixIdentifer = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    prefixIdentifer.pos = pos;
    prefixIdentifer.value = prefix;
    addLiteralValue(pos, removeNthFromStart(ws, 1), TypeTags.STRING, namespaceUri);
    xmlns.namespaceURI = (BLangLiteral) exprNodeStack.pop();
    xmlns.prefix = prefixIdentifer;
    xmlns.pos = pos;
    xmlns.addWS(ws);
    if (isTopLevel) {
        this.compUnit.addTopLevelNode(xmlns);
        return;
    }
    BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    xmlnsStmt.xmlnsDecl = xmlns;
    xmlnsStmt.pos = pos;
    addStmtToCurrentBlock(xmlnsStmt);
}
// Builds a string-template literal; whitespace goes on the template node itself,
// not on the individual fragments (hence the null ws passed down).
void createStringTemplateLiteral(DiagnosticPos pos, Set<Whitespace> ws, Stack<String> precedingTextFragments,
                                 String endingText) {
    BLangStringTemplateLiteral stringTemplateLiteral =
            (BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
    stringTemplateLiteral.exprs =
            getExpressionsInTemplate(pos, null, precedingTextFragments, endingText);
    stringTemplateLiteral.addWS(ws);
    stringTemplateLiteral.pos = pos;
    addExpressionNode(stringTemplateLiteral);
}
// Builds an XML attribute-access expression (x@ or x@["name"]). When accessing a single
// attribute, its index expression was pushed last and must be popped before the base ref.
void createXmlAttributesRefExpr(DiagnosticPos pos, Set<Whitespace> ws, boolean singleAttribute) {
    BLangXMLAttributeAccess xmlAttributeAccess =
            (BLangXMLAttributeAccess) TreeBuilder.createXMLAttributeAccessNode();
    xmlAttributeAccess.pos = pos;
    xmlAttributeAccess.addWS(ws);
    if (singleAttribute) {
        xmlAttributeAccess.indexExpr = (BLangExpression) exprNodeStack.pop();
    }
    xmlAttributeAccess.expr = (BLangVariableReference) exprNodeStack.pop();
    addExpressionNode(xmlAttributeAccess);
}
// Builds an integer-range expression. The end expression (pushed last) is popped first
// unless the range has no upper bound; the start expression sits below it.
void addIntRangeExpression(DiagnosticPos pos,
                           Set<Whitespace> ws,
                           boolean includeStart,
                           boolean includeEnd,
                           boolean noUpperBound) {
    BLangIntRangeExpression intRangeExpr = (BLangIntRangeExpression) TreeBuilder.createIntRangeExpression();
    intRangeExpr.pos = pos;
    intRangeExpr.addWS(ws);
    if (!noUpperBound) {
        intRangeExpr.endExpr = (BLangExpression) this.exprNodeStack.pop();
    }
    intRangeExpr.startExpr = (BLangExpression) this.exprNodeStack.pop();
    intRangeExpr.includeStart = includeStart;
    intRangeExpr.includeEnd = includeEnd;
    exprNodeStack.push(intRangeExpr);
}
// Wraps the topmost expression as a named argument (`name = expr`) and pushes it back.
void addNamedArgument(DiagnosticPos pos, Set<Whitespace> ws, String name) {
    BLangNamedArgsExpression namedArgument = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArgument.pos = pos;
    namedArgument.addWS(ws);
    namedArgument.name = (BLangIdentifier) this.createIdentifier(name);
    namedArgument.expr = (BLangExpression) this.exprNodeStack.pop();
    addExpressionNode(namedArgument);
}
// Wraps the topmost expression as a rest argument (`...expr`) and pushes it back.
void addRestArgument(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangRestArgsExpression restArgument = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    restArgument.pos = pos;
    restArgument.addWS(ws);
    restArgument.expr = (BLangExpression) this.exprNodeStack.pop();
    addExpressionNode(restArgument);
}
// Converts the most recently parsed parameter into a defaultable parameter: removes it
// from the plain parameter list, attaches the default-value expression, and records it
// in the defaultable-params list instead.
void addDefaultableParam(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangSimpleVariableDef defaultableParam =
            (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    defaultableParam.pos = pos;
    defaultableParam.addWS(ws);
    List<BLangVariable> params = this.varListStack.peek();
    BLangSimpleVariable var = (BLangSimpleVariable) params.remove(params.size() - 1);
    var.expr = (BLangExpression) this.exprNodeStack.pop();
    defaultableParam.var = var;
    this.defaultableParamsList.add(defaultableParam);
}
// Creates the rest parameter (`...x`): wraps its declared element type in a one-dimensional
// array type and stashes the variable for the enclosing function signature.
void addRestParam(DiagnosticPos pos, Set<Whitespace> ws, String identifier, int annotCount) {
    BLangSimpleVariable restParam = (BLangSimpleVariable) this.generateBasicVarNode(pos, ws, identifier, false);
    attachAnnotations(restParam, annotCount);
    restParam.pos = pos;
    BLangArrayType typeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    typeNode.elemtype = restParam.typeNode;
    typeNode.dimensions = 1;
    restParam.typeNode = typeNode;
    this.restParamStack.push(restParam);
}
// Reassembles a template literal's parts in source order: alternating text fragments and the
// embedded expressions already sitting on the expr stack. Fragments are unescaped for the AST
// value while the original (escaped) text is preserved. Because both the expr stack and the
// fragment stack yield items in reverse, the list is built backwards and reversed at the end.
private List<BLangExpression> getExpressionsInTemplate(DiagnosticPos pos,
                                                       Set<Whitespace> ws,
                                                       Stack<String> precedingTextFragments,
                                                       String endingText) {
    List<BLangExpression> expressions = new ArrayList<>();
    String originalValue = endingText;
    endingText = endingText == null ? "" : StringEscapeUtils.unescapeJava(endingText);
    addLiteralValue(pos, ws, TypeTags.STRING, endingText, originalValue);
    expressions.add((BLangExpression) exprNodeStack.pop());
    while (!precedingTextFragments.empty()) {
        // Each embedded expression precedes the text fragment that follows it in source order.
        expressions.add((BLangExpression) exprNodeStack.pop());
        String textFragment = precedingTextFragments.pop();
        originalValue = textFragment;
        textFragment = textFragment == null ? "" : StringEscapeUtils.unescapeJava(textFragment);
        addLiteralValue(pos, ws, TypeTags.STRING, textFragment, originalValue);
        expressions.add((BLangExpression) exprNodeStack.pop());
    }
    Collections.reverse(expressions);
    return expressions;
}
// Attaches trailing whitespace of the compilation unit.
void endCompilationUnit(Set<Whitespace> ws) {
    compUnit.addWS(ws);
}
// Attaches the parameter-list whitespace to the invokable node being built.
void endCallableParamList(Set<Whitespace> ws) {
    this.invokableNodeStack.peek().addWS(ws);
}
// Stashes the function-type parameter-list whitespace for the node that consumes it later.
void endFuncTypeParamList(Set<Whitespace> ws) {
    this.commaWsStack.push(ws);
}
// Removes the n-th whitespace entry counting from the end of the (sorted) set and
// returns it wrapped in a singleton set; null-safe.
private Set<Whitespace> removeNthFromLast(Set<Whitespace> ws, int n) {
    return ws == null ? null : removeNth(((TreeSet<Whitespace>) ws).descendingIterator(), n);
}
// Removes the n-th whitespace entry counting from the start of the set and
// returns it wrapped in a singleton set; null-safe.
private Set<Whitespace> removeNthFromStart(Set<Whitespace> ws, int n) {
    return ws == null ? null : removeNth(ws.iterator(), n);
}
// Walks the iterator to position n (0-based), removes that element from the underlying
// collection, and returns it in a fresh singleton TreeSet; returns null when the
// iterator has fewer than n + 1 elements.
private Set<Whitespace> removeNth(Iterator<Whitespace> iterator, int n) {
    int index = 0;
    while (iterator.hasNext()) {
        Whitespace current = iterator.next();
        if (index == n) {
            Set<Whitespace> extracted = new TreeSet<>();
            extracted.add(current);
            iterator.remove();
            return extracted;
        }
        index++;
    }
    return null;
}
// Builds a user-defined type reference node (pkgAlias:name) at the given position.
private BLangUserDefinedType createUserDefinedType(DiagnosticPos pos,
                                                   Set<Whitespace> ws,
                                                   BLangIdentifier pkgAlias,
                                                   BLangIdentifier name) {
    BLangUserDefinedType typeRef = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    typeRef.pkgAlias = pkgAlias;
    typeRef.typeName = name;
    typeRef.pos = pos;
    typeRef.addWS(ws);
    return typeRef;
}
// Splits a package path on '.', '\' or '/' into its name components.
private List<String> getPackageNameComps(String sourcePkg) {
    return Arrays.asList(sourcePkg.split("\\.|\\\\|\\/"));
}
// Begins an order-by clause and pushes it for endOrderByClauseNode.
void startOrderByClauseNode(DiagnosticPos pos) {
    OrderByNode orderByNode = TreeBuilder.createOrderByNode();
    ((BLangOrderBy) orderByNode).pos = pos;
    this.orderByClauseStack.push(orderByNode);
}
// Completes the order-by clause, draining the collected order-by variables in source order
// (the stack is reversed first so pops yield original order).
void endOrderByClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    OrderByNode orderByNode = this.orderByClauseStack.peek();
    ((BLangOrderBy) orderByNode).pos = pos;
    orderByNode.addWS(ws);
    Collections.reverse(orderByVariableStack);
    while (!this.orderByVariableStack.empty()) {
        orderByNode.addOrderByVariable(this.orderByVariableStack.pop());
    }
}
// Begins a single order-by variable and pushes it for endOrderByVariableNode.
void startOrderByVariableNode(DiagnosticPos pos) {
    OrderByVariableNode orderByVariableNode = TreeBuilder.createOrderByVariableNode();
    ((BLangOrderByVariable) orderByVariableNode).pos = pos;
    this.orderByVariableStack.push(orderByVariableNode);
}
// Completes an order-by variable with its reference expression and sort direction.
void endOrderByVariableNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isAscending,
                            boolean isDescending) {
    OrderByVariableNode orderByVariableNode = this.orderByVariableStack.peek();
    ((BLangOrderByVariable) orderByVariableNode).pos = pos;
    orderByVariableNode.addWS(ws);
    orderByVariableNode.setVariableReference(this.exprNodeStack.pop());
    orderByVariableNode.setOrderByType(isAscending, isDescending);
}
// Begins a limit clause and pushes it for endLimitClauseNode.
void startLimitClauseNode(DiagnosticPos pos) {
    LimitNode limitNode = TreeBuilder.createLimitNode();
    ((BLangLimit) limitNode).pos = pos;
    this.limitClauseStack.push(limitNode);
}
// Completes the limit clause with its (string-form) limit value.
void endLimitClauseNode(DiagnosticPos pos, Set<Whitespace> ws, String limitValue) {
    LimitNode limitNode = this.limitClauseStack.peek();
    ((BLangLimit) limitNode).pos = pos;
    limitNode.addWS(ws);
    limitNode.setLimitValue(limitValue);
}
// Begins a group-by clause and pushes it for endGroupByClauseNode.
void startGroupByClauseNode(DiagnosticPos pos) {
    GroupByNode groupByNode = TreeBuilder.createGroupByNode();
    ((BLangGroupBy) groupByNode).pos = pos;
    this.groupByClauseStack.push(groupByNode);
}
// Completes the group-by clause with its grouping variable references and comma whitespace.
void endGroupByClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    GroupByNode groupByNode = this.groupByClauseStack.peek();
    ((BLangGroupBy) groupByNode).pos = pos;
    groupByNode.addWS(ws);
    groupByNode.addWS(commaWsStack.pop());
    this.exprNodeListStack.pop().forEach(groupByNode::addVariableReference);
}
// Begins a having clause and pushes it for endHavingClauseNode.
void startHavingClauseNode(DiagnosticPos pos) {
    HavingNode havingNode = TreeBuilder.createHavingNode();
    ((BLangHaving) havingNode).pos = pos;
    this.havingClauseStack.push(havingNode);
}
// Completes the having clause with its condition expression.
void endHavingClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    HavingNode havingNode = this.havingClauseStack.peek();
    ((BLangHaving) havingNode).pos = pos;
    havingNode.addWS(ws);
    havingNode.setExpression(this.exprNodeStack.pop());
}
// Begins a single select expression and pushes it for endSelectExpressionNode.
void startSelectExpressionNode(DiagnosticPos pos) {
    SelectExpressionNode selectExpr = TreeBuilder.createSelectExpressionNode();
    ((BLangSelectExpression) selectExpr).pos = pos;
    this.selectExpressionsStack.push(selectExpr);
}
// Completes a select expression with its value and optional `as` alias identifier.
void endSelectExpressionNode(String identifier, DiagnosticPos pos, Set<Whitespace> ws) {
    SelectExpressionNode selectExpression = this.selectExpressionsStack.peek();
    selectExpression.setExpression(exprNodeStack.pop());
    ((BLangSelectExpression) selectExpression).pos = pos;
    selectExpression.addWS(ws);
    selectExpression.setIdentifier(identifier);
}
// Begins collecting select expressions for a select clause.
void startSelectExpressionList() {
    this.selectExpressionsListStack.push(new ArrayList<>());
}
// Moves the last `selectExprCount` select expressions into the current list, in source order.
void endSelectExpressionList(Set<Whitespace> ws, int selectExprCount) {
    commaWsStack.push(ws);
    List<SelectExpressionNode> selectExprList = this.selectExpressionsListStack.peek();
    addSelectExprToSelectExprNodeList(selectExprList, selectExprCount);
}
// Recursively drains n select expressions from the stack into the list. Because each frame
// pops before recursing but appends after, the list ends up in original source order.
private void addSelectExprToSelectExprNodeList(List<SelectExpressionNode> selectExprList, int n) {
    if (this.selectExpressionsStack.empty()) {
        throw new IllegalStateException("Select expression stack cannot be empty in processing a SelectClause");
    }
    SelectExpressionNode expr = this.selectExpressionsStack.pop();
    if (n > 1) {
        addSelectExprToSelectExprNodeList(selectExprList, n - 1);
    }
    selectExprList.add(expr);
}
// Begins a where clause and pushes it for endWhereClauseNode.
void startWhereClauseNode(DiagnosticPos pos) {
    WhereNode whereNode = TreeBuilder.createWhereNode();
    ((BLangWhere) whereNode).pos = pos;
    this.whereClauseStack.push(whereNode);
}
// Completes the where clause with its condition expression.
void endWhereClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    WhereNode whereNode = this.whereClauseStack.peek();
    ((BLangWhere) whereNode).pos = pos;
    whereNode.addWS(ws);
    whereNode.setExpression(exprNodeStack.pop());
}
// Begins a select clause and pushes it for endSelectClauseNode.
void startSelectClauseNode(DiagnosticPos pos) {
    SelectClauseNode selectClauseNode = TreeBuilder.createSelectClauseNode();
    ((BLangSelectClause) selectClauseNode).pos = pos;
    this.selectClausesStack.push(selectClauseNode);
}
// Completes the select clause: either marks `select *` or adopts the collected select
// expressions, and attaches optional group-by and having sub-clauses.
void endSelectClauseNode(boolean isSelectAll, boolean isGroupByAvailable, boolean isHavingAvailable,
                         DiagnosticPos pos, Set<Whitespace> ws) {
    SelectClauseNode selectClauseNode = this.selectClausesStack.peek();
    ((BLangSelectClause) selectClauseNode).pos = pos;
    selectClauseNode.addWS(ws);
    if (!isSelectAll) {
        selectClauseNode.addWS(commaWsStack.pop());
        selectClauseNode.setSelectExpressions(this.selectExpressionsListStack.pop());
    } else {
        selectClauseNode.setSelectAll(true);
    }
    if (isGroupByAvailable) {
        selectClauseNode.setGroupBy(this.groupByClauseStack.pop());
    }
    if (isHavingAvailable) {
        selectClauseNode.setHaving(this.havingClauseStack.pop());
    }
}
/** Begins a window clause: pushes a new WindowClauseNode carrying the given position. */
void startWindowClauseNode(DiagnosticPos pos) {
        BLangWindow window = (BLangWindow) TreeBuilder.createWindowClauseNode();
        window.pos = pos;
        windowClausesStack.push(window);
}
/**
 * Completes the window clause: attaches its function invocation (topmost
 * expression), hands any remaining expressions to the current streaming input
 * as pre-function invocations, and records whether the window appeared after
 * a where clause (used by endStreamingInputNode to place where conditions).
 */
void endWindowsClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
        WindowClauseNode windowClauseNode = this.windowClausesStack.peek();
        ((BLangWindow) windowClauseNode).pos = pos;
        windowClauseNode.addWS(ws);
        windowClauseNode.setFunctionInvocation(this.exprNodeStack.pop());
        // Anything still on the expression stack (beyond the stream reference)
        // was invoked before the window.
        if (this.exprNodeStack.size() > 1) {
            List<ExpressionNode> exprList = new ArrayList<>();
            addExprToExprNodeList(exprList, this.exprNodeStack.size() - 1);
            StreamingInput streamingInput = this.streamingInputStack.peek();
            streamingInput.setPreFunctionInvocations(exprList);
        }
        if (!this.whereClauseStack.empty()) {
            this.streamingInputStack.peek().setWindowTraversedAfterWhere(true);
        } else {
            this.streamingInputStack.peek().setWindowTraversedAfterWhere(false);
        }
}
/** Begins a streaming input: pushes a new StreamingInput node carrying the given position. */
void startStreamingInputNode(DiagnosticPos pos) {
        BLangStreamingInput input = (BLangStreamingInput) TreeBuilder.createStreamingInputNode();
        input.pos = pos;
        streamingInputStack.push(input);
}
/**
 * Completes the streaming input: resolves which parsed where clause(s) belong
 * before vs. after the window, attaches post-function invocations, the window
 * clause, the stream reference, and the alias.
 */
void endStreamingInputNode(String alias, DiagnosticPos pos,
                           Set<Whitespace> ws) {
        BLangStreamingInput streamingInput = (BLangStreamingInput) this.streamingInputStack.peek();
        streamingInput.pos = pos;
        streamingInput.addWS(ws);
        // Two where clauses: the later-pushed one follows the window, the
        // earlier one precedes it. With a single clause, the window/where
        // ordering recorded by endWindowsClauseNode disambiguates.
        if (this.whereClauseStack.size() == 2) {
            streamingInput.setAfterStreamingCondition(this.whereClauseStack.pop());
            streamingInput.setBeforeStreamingCondition(this.whereClauseStack.pop());
        } else if (this.whereClauseStack.size() == 1) {
            if (streamingInput.isWindowTraversedAfterWhere()) {
                streamingInput.setBeforeStreamingCondition(this.whereClauseStack.pop());
            } else {
                streamingInput.setAfterStreamingCondition(this.whereClauseStack.pop());
            }
        }
        // Expressions beyond the stream reference are post-window invocations.
        if (this.exprNodeStack.size() > 1) {
            List<ExpressionNode> exprList = new ArrayList<>();
            addExprToExprNodeList(exprList, this.exprNodeStack.size() - 1);
            streamingInput.setPostFunctionInvocations(exprList);
        }
        if (!this.windowClausesStack.empty()) {
            streamingInput.setWindowClause(this.windowClausesStack.pop());
        }
        streamingInput.setStreamReference(this.exprNodeStack.pop());
        streamingInput.setAlias(alias);
}
/** Begins the join side of a query: pushes a new JoinStreamingInput node with the given position. */
void startJoinStreamingInputNode(DiagnosticPos pos) {
        BLangJoinStreamingInput join = (BLangJoinStreamingInput) TreeBuilder.createJoinStreamingInputNode();
        join.pos = pos;
        joinStreamingInputsStack.push(join);
}
/**
 * Completes the join side of a query: attaches the joined streaming input,
 * the optional ON condition expression, the unidirectional flags, and the
 * join type keyword.
 */
void endJoinStreamingInputNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isUnidirectionalBeforeJoin,
                               boolean isUnidirectionalAfterJoin, String joinType) {
        JoinStreamingInput joinStreamingInput = this.joinStreamingInputsStack.peek();
        ((BLangJoinStreamingInput) joinStreamingInput).pos = pos;
        joinStreamingInput.addWS(ws);
        joinStreamingInput.setStreamingInput(this.streamingInputStack.pop());
        // The ON condition is optional; use empty() for consistency with the
        // stack checks elsewhere in this builder.
        if (!this.exprNodeStack.empty()) {
            joinStreamingInput.setOnExpression(this.exprNodeStack.pop());
        }
        joinStreamingInput.setUnidirectionalBeforeJoin(isUnidirectionalBeforeJoin);
        joinStreamingInput.setUnidirectionalAfterJoin(isUnidirectionalAfterJoin);
        joinStreamingInput.setJoinType(joinType);
}
/** Attaches the whitespace of the join-type keyword(s) to the current join input. */
void endJoinType(Set<Whitespace> ws) {
        JoinStreamingInput joinStreamingInput = this.joinStreamingInputsStack.peek();
        joinStreamingInput.addWS(ws);
}
/** Begins a table query: pushes a new TableQuery node carrying the given position. */
void startTableQueryNode(DiagnosticPos pos) {
        BLangTableQuery query = (BLangTableQuery) TreeBuilder.createTableQueryNode();
        query.pos = pos;
        tableQueriesStack.push(query);
}
/**
 * Completes the table query on top of the stack: attaches the mandatory
 * streaming input plus whichever of join / select / order-by / limit clauses
 * were present in the source.
 */
void endTableQueryNode(boolean isJoinClauseAvailable, boolean isSelectClauseAvailable,
                       boolean isOrderByClauseAvailable, boolean isLimitClauseAvailable, DiagnosticPos pos,
                       Set<Whitespace> ws) {
        BLangTableQuery tableQuery = (BLangTableQuery) this.tableQueriesStack.peek();
        tableQuery.pos = pos;
        tableQuery.addWS(ws);
        tableQuery.setStreamingInput(this.streamingInputStack.pop());
        if (isJoinClauseAvailable) {
            tableQuery.setJoinStreamingInput(this.joinStreamingInputsStack.pop());
        }
        if (isSelectClauseAvailable) {
            tableQuery.setSelectClause(this.selectClausesStack.pop());
        }
        if (isOrderByClauseAvailable) {
            tableQuery.setOrderByClause(this.orderByClauseStack.pop());
        }
        if (isLimitClauseAvailable) {
            tableQuery.setLimitClause(this.limitClauseStack.pop());
        }
}
/** Wraps the completed table query in an expression node and publishes it on the expression stack. */
void addTableQueryExpression(DiagnosticPos pos, Set<Whitespace> ws) {
        BLangTableQueryExpression queryExpr = (BLangTableQueryExpression) TreeBuilder.createTableQueryExpression();
        queryExpr.pos = pos;
        queryExpr.addWS(ws);
        queryExpr.setTableQuery(this.tableQueriesStack.pop());
        exprNodeStack.push(queryExpr);
}
/** Begins a set assignment: pushes a new SetAssignment node with position and whitespace. */
void startSetAssignmentClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
        BLangSetAssignment assignment = (BLangSetAssignment) TreeBuilder.createSetAssignmentNode();
        assignment.pos = pos;
        assignment.addWS(ws);
        setAssignmentStack.push(assignment);
}
/**
 * Completes the set assignment on top of the stack. Pop order matters: the
 * value expression was pushed last, the variable reference before it.
 */
void endSetAssignmentClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
        if (this.exprNodeStack.empty()) {
            throw new IllegalStateException("Expression stack cannot be empty in processing a Set Assignment Clause");
        }
        SetAssignmentNode setAssignmentNode = this.setAssignmentStack.peek();
        ((BLangSetAssignment) setAssignmentNode).pos = pos;
        setAssignmentNode.addWS(ws);
        setAssignmentNode.setExpression(exprNodeStack.pop());
        setAssignmentNode.setVariableReference(exprNodeStack.pop());
}
/** Opens a fresh, empty collector for the assignments of a set clause. */
void startSetClauseNode() {
        setAssignmentListStack.push(new ArrayList<>());
}
/**
 * Closes the set clause by draining {@code selectExprCount} assignments into
 * the current assignment list.
 * NOTE(review): {@code ws} is currently unused here — confirm intentional.
 */
void endSetClauseNode(Set<Whitespace> ws, int selectExprCount) {
        List<SetAssignmentNode> setAssignmentNodeList = this.setAssignmentListStack.peek();
        addSetAssignmentToSelectAssignmentNodeList(setAssignmentNodeList, selectExprCount);
}
/**
 * Pops {@code n} set assignments and appends them to the list; the pop-then-
 * recurse-then-add pattern restores original source order (deepest entry first).
 */
private void addSetAssignmentToSelectAssignmentNodeList(List<SetAssignmentNode> setAssignmentNodeList, int n) {
        if (this.setAssignmentStack.empty()) {
            throw new IllegalStateException("Set expression stack cannot be empty in processing a SelectClause");
        }
        SetAssignmentNode expr = this.setAssignmentStack.pop();
        if (n > 1) {
            addSetAssignmentToSelectAssignmentNodeList(setAssignmentNodeList, n - 1);
        }
        setAssignmentNodeList.add(expr);
}
/**
 * Begins a stream action. Also opens a lambda function definition and a block,
 * because the action body is later packaged into a lambda by endStreamActionNode.
 */
void startStreamActionNode(DiagnosticPos pos, PackageID packageID) {
        StreamActionNode streamActionNode = TreeBuilder.createStreamActionNode();
        ((BLangStreamAction) streamActionNode).pos = pos;
        this.streamActionNodeStack.push(streamActionNode);
        this.startLambdaFunctionDef(packageID, 0);
        this.startBlock();
}
/**
 * Completes the stream action: closes the lambda body opened by
 * startStreamActionNode, gives the lambda its single parameter (the variable
 * on top of the var stack), finalizes the lambda definition, and installs it
 * as the action's invokable body. Statement order here is load-bearing.
 */
void endStreamActionNode(DiagnosticPos pos, Set<Whitespace> ws) {
        endCallableUnitBody(ws);
        StreamActionNode streamActionNode = this.streamActionNodeStack.peek();
        ((BLangStreamAction) streamActionNode).pos = pos;
        streamActionNode.addWS(ws);
        this.varListStack.push(new ArrayList<>());
        this.varListStack.peek().add(this.varStack.pop());
        this.commaWsStack.push(ws);
        this.addLambdaFunctionDef(pos, ws, true, false, false);
        streamActionNode.setInvokableBody((BLangLambdaFunction) this.exprNodeStack.pop());
}
/** Begins a pattern streaming edge input: pushes a new node carrying the given position. */
void startPatternStreamingEdgeInputNode(DiagnosticPos pos) {
        BLangPatternStreamingEdgeInput edgeInput =
                (BLangPatternStreamingEdgeInput) TreeBuilder.createPatternStreamingEdgeInputNode();
        edgeInput.pos = pos;
        patternStreamingEdgeInputStack.push(edgeInput);
}
/**
 * Completes the pattern streaming edge input: with two expressions on the
 * stack the top one is the filter expression and the next the stream
 * reference; with one it is just the stream reference. Also attaches an
 * optional where clause and the alias.
 */
void endPatternStreamingEdgeInputNode(DiagnosticPos pos, Set<Whitespace> ws, String alias) {
        PatternStreamingEdgeInputNode patternStreamingEdgeInputNode = this.patternStreamingEdgeInputStack.peek();
        ((BLangPatternStreamingEdgeInput) patternStreamingEdgeInputNode).pos = pos;
        patternStreamingEdgeInputNode.addWS(ws);
        if (exprNodeStack.size() == 2) {
            patternStreamingEdgeInputNode.setExpression(exprNodeStack.pop());
            patternStreamingEdgeInputNode.setStreamReference(exprNodeStack.pop());
        } else if (exprNodeStack.size() == 1) {
            patternStreamingEdgeInputNode.setStreamReference(exprNodeStack.pop());
        }
        if (!whereClauseStack.empty()) {
            patternStreamingEdgeInputNode.setWhereClause(whereClauseStack.pop());
        }
        patternStreamingEdgeInputNode.setAliasIdentifier(alias);
}
/** Begins a pattern streaming input: pushes a new node carrying the given position. */
void startPatternStreamingInputNode(DiagnosticPos pos) {
        BLangPatternStreamingInput patternInput =
                (BLangPatternStreamingInput) TreeBuilder.createPatternStreamingInputNode();
        patternInput.pos = pos;
        patternStreamingInputStack.push(patternInput);
}
/**
 * Completes a pattern streaming input, dispatching on which pattern form was
 * parsed (followed-by, parenthesized, and/or/not combinations, not-for with a
 * time window, or comma-separated sequence). Each processX helper consumes
 * edge inputs and/or the previously completed pattern
 * ({@code recentStreamingPatternInputNode}) and records this node as the new
 * most-recent pattern. The plain (no-flag) case consumes a single edge input.
 */
void endPatternStreamingInputNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isFollowedBy,
                                  boolean enclosedInParenthesis, boolean andWithNotAvailable,
                                  boolean forWithNotAvailable, boolean onlyAndAvailable,
                                  boolean onlyOrAvailable, boolean commaSeparated,
                                  String timeDurationValue, String timeScale) {
        if (!this.patternStreamingInputStack.empty()) {
            PatternStreamingInputNode patternStreamingInputNode = this.patternStreamingInputStack.pop();
            ((BLangPatternStreamingInput) patternStreamingInputNode).pos = pos;
            patternStreamingInputNode.addWS(ws);
            if (isFollowedBy) {
                processFollowedByPattern(patternStreamingInputNode);
            }
            if (enclosedInParenthesis) {
                processEnclosedPattern(patternStreamingInputNode);
            }
            if (andWithNotAvailable) {
                processNegationPattern(patternStreamingInputNode);
            }
            if (onlyAndAvailable) {
                processPatternWithAndCondition(patternStreamingInputNode);
            }
            if (onlyOrAvailable) {
                processPatternWithOrCondition(patternStreamingInputNode);
            }
            if (forWithNotAvailable) {
                processNegationPatternWithTimeDuration(patternStreamingInputNode, timeDurationValue, timeScale);
            }
            if (commaSeparated) {
                processCommaSeparatedSequence(patternStreamingInputNode);
            }
            // Simple pattern: a single edge input, no combinators.
            if (!(isFollowedBy || enclosedInParenthesis || forWithNotAvailable ||
                    onlyAndAvailable || onlyOrAvailable || andWithNotAvailable || commaSeparated)) {
                patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
                this.recentStreamingPatternInputNode = patternStreamingInputNode;
            }
        }
        // Re-seed the stack with the completed pattern so an enclosing
        // pattern (or the pattern clause) can pick it up.
        if (this.patternStreamingInputStack.empty()) {
            this.patternStreamingInputStack.push(this.recentStreamingPatternInputNode);
            this.recentStreamingPatternInputNode = null;
        }
}
/**
 * Marks the pattern as a comma-separated sequence: attaches the latest edge
 * input plus the previously completed pattern as its nested input.
 */
private void processCommaSeparatedSequence(PatternStreamingInputNode patternStreamingInputNode) {
        patternStreamingInputNode.setCommaSeparated(true);
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        patternStreamingInputNode.setPatternStreamingInput(this.recentStreamingPatternInputNode);
        this.recentStreamingPatternInputNode = patternStreamingInputNode;
}
/**
 * Marks the pattern as a "not ... for &lt;duration&gt;" form: attaches the
 * latest edge input and the time window value/scale.
 */
private void processNegationPatternWithTimeDuration(PatternStreamingInputNode patternStreamingInputNode,
                                                    String timeDurationValue, String timeScale) {
        patternStreamingInputNode.setForWithNot(true);
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        patternStreamingInputNode.setTimeDurationValue(timeDurationValue);
        patternStreamingInputNode.setTimeScale(timeScale);
        this.recentStreamingPatternInputNode = patternStreamingInputNode;
}
/** Marks the pattern as an OR of the two most recent edge inputs. */
private void processPatternWithOrCondition(PatternStreamingInputNode patternStreamingInputNode) {
        patternStreamingInputNode.setOrOnly(true);
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        this.recentStreamingPatternInputNode = patternStreamingInputNode;
}
/** Marks the pattern as an AND of the two most recent edge inputs. */
private void processPatternWithAndCondition(PatternStreamingInputNode patternStreamingInputNode) {
        patternStreamingInputNode.setAndOnly(true);
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        this.recentStreamingPatternInputNode = patternStreamingInputNode;
}
/** Marks the pattern as an AND-with-NOT of the two most recent edge inputs. */
private void processNegationPattern(PatternStreamingInputNode patternStreamingInputNode) {
        patternStreamingInputNode.setAndWithNot(true);
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        this.recentStreamingPatternInputNode = patternStreamingInputNode;
}
/** Marks the pattern as parenthesized, nesting the previously completed pattern inside it. */
private void processEnclosedPattern(PatternStreamingInputNode patternStreamingInputNode) {
        patternStreamingInputNode.setEnclosedInParenthesis(true);
        patternStreamingInputNode.setPatternStreamingInput(this.recentStreamingPatternInputNode);
        this.recentStreamingPatternInputNode = patternStreamingInputNode;
}
/**
 * Marks the pattern as a followed-by chain: the latest edge input is followed
 * by the previously completed pattern.
 */
private void processFollowedByPattern(PatternStreamingInputNode patternStreamingInputNode) {
        patternStreamingInputNode.setFollowedBy(true);
        patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
        patternStreamingInputNode.setPatternStreamingInput(this.recentStreamingPatternInputNode);
        this.recentStreamingPatternInputNode = patternStreamingInputNode;
}
/** Begins a streaming query statement: pushes a new node carrying the given position. */
void startStreamingQueryStatementNode(DiagnosticPos pos) {
        BLangStreamingQueryStatement statement =
                (BLangStreamingQueryStatement) TreeBuilder.createStreamingQueryStatementNode();
        statement.pos = pos;
        streamingQueryStatementStack.push(statement);
}
/**
 * Completes the streaming query statement: attaches either a streaming input
 * (with optional join) or a pattern clause, a select clause (defaulting to
 * {@code select *} when none was parsed), and any order-by, output-rate-limit,
 * and the mandatory stream action.
 */
void endStreamingQueryStatementNode(DiagnosticPos pos, Set<Whitespace> ws) {
        StreamingQueryStatementNode streamingQueryStatementNode = this.streamingQueryStatementStack.peek();
        ((BLangStreamingQueryStatement) streamingQueryStatementNode).pos = pos;
        streamingQueryStatementNode.addWS(ws);
        if (!streamingInputStack.empty()) {
            streamingQueryStatementNode.setStreamingInput(streamingInputStack.pop());
            if (!joinStreamingInputsStack.empty()) {
                streamingQueryStatementNode.setJoinStreamingInput(joinStreamingInputsStack.pop());
            }
        } else if (!patternClauseStack.empty()) {
            streamingQueryStatementNode.setPatternClause(patternClauseStack.pop());
        }
        if (!selectClausesStack.empty()) {
            streamingQueryStatementNode.setSelectClause(selectClausesStack.pop());
        } else {
            // No explicit select clause: treat as "select *".
            SelectClauseNode selectClauseNode = new BLangSelectClause();
            selectClauseNode.setSelectAll(true);
            streamingQueryStatementNode.setSelectClause(selectClauseNode);
        }
        if (!orderByClauseStack.empty()) {
            streamingQueryStatementNode.setOrderByClause(orderByClauseStack.pop());
        }
        if (!outputRateLimitStack.empty()) {
            streamingQueryStatementNode.setOutputRateLimitNode(outputRateLimitStack.pop());
        }
        streamingQueryStatementNode.setStreamingAction(streamActionNodeStack.pop());
}
/** Begins an output-rate-limit clause: pushes a new node carrying the given position. */
void startOutputRateLimitNode(DiagnosticPos pos) {
        BLangOutputRateLimit rateLimit = (BLangOutputRateLimit) TreeBuilder.createOutputRateLimitNode();
        rateLimit.pos = pos;
        outputRateLimitStack.push(rateLimit);
}
/** Completes the output-rate-limit clause with its kind (first/last/all), snapshot flag, time scale and rate value. */
void endOutputRateLimitNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isSnapshotOutputRateLimit,
                            boolean isFirst, boolean isLast, boolean isAll, String timeScale,
                            String rateLimitValue) {
        BLangOutputRateLimit rateLimit = (BLangOutputRateLimit) this.outputRateLimitStack.peek();
        rateLimit.pos = pos;
        rateLimit.addWS(ws);
        rateLimit.setSnapshot(isSnapshotOutputRateLimit);
        rateLimit.setOutputRateType(isFirst, isLast, isAll);
        rateLimit.setTimeScale(timeScale);
        rateLimit.setRateLimitValue(rateLimitValue);
}
/** Begins a within clause: pushes a new WithinClause node carrying the given position. */
void startWithinClause(DiagnosticPos pos) {
        BLangWithinClause within = (BLangWithinClause) TreeBuilder.createWithinClause();
        within.pos = pos;
        withinClauseStack.push(within);
}
/** Completes the within clause with its time duration value and scale. */
void endWithinClause(DiagnosticPos pos, Set<Whitespace> ws, String timeDurationValue, String timeScale) {
        BLangWithinClause within = (BLangWithinClause) this.withinClauseStack.peek();
        within.pos = pos;
        within.addWS(ws);
        within.setTimeDurationValue(timeDurationValue);
        within.setTimeScale(timeScale);
}
/** Begins a pattern clause: pushes a new PatternClause node carrying the given position. */
void startPatternClause(DiagnosticPos pos) {
        BLangPatternClause pattern = (BLangPatternClause) TreeBuilder.createPatternClause();
        pattern.pos = pos;
        patternClauseStack.push(pattern);
}
/**
 * Completes the pattern clause: records the for-all-events flag, attaches the
 * completed pattern streaming input and, if present, the within clause.
 */
void endPatternClause(boolean isForEvents, boolean isWithinClauseAvailable, DiagnosticPos pos,
                      Set<Whitespace> ws) {
        PatternClause patternClause = this.patternClauseStack.peek();
        ((BLangPatternClause) patternClause).pos = pos;
        patternClause.addWS(ws);
        patternClause.setForAllEvents(isForEvents);
        patternClause.setPatternStreamingInputNode(this.patternStreamingInputStack.pop());
        if (isWithinClauseAvailable) {
            patternClause.setWithinClause(this.withinClauseStack.pop());
        }
}
/** Begins a forever statement: pushes a new ForeverNode carrying the given position. */
void startForeverNode(DiagnosticPos pos) {
        BLangForever forever = (BLangForever) TreeBuilder.createForeverNode();
        forever.pos = pos;
        foreverNodeStack.push(forever);
}
/**
 * Completes the forever statement: collects its parameters, drains all
 * streaming query statements (the stack is reversed first so popping attaches
 * them in source order), registers the statement in the current block, and —
 * when the native (non-Siddhi) runtime is enabled — injects an import of the
 * streams module.
 */
void endForeverNode(DiagnosticPos pos, Set<Whitespace> ws) {
        ForeverNode foreverNode = this.foreverNodeStack.pop();
        ((BLangForever) foreverNode).pos = pos;
        foreverNode.addWS(ws);
        if (!this.varListStack.empty()) {
            this.varListStack.pop().forEach(param -> foreverNode.addParameter((SimpleVariableNode) param));
        }
        Collections.reverse(streamingQueryStatementStack);
        while (!streamingQueryStatementStack.empty()) {
            foreverNode.addStreamingQueryStatement(streamingQueryStatementStack.pop());
        }
        addStmtToCurrentBlock(foreverNode);
        if (!foreverNode.isSiddhiRuntimeEnabled()) {
            List<String> nameComps = getPackageNameComps(Names.STREAMS_MODULE.value);
            addImportPackageDeclaration(pos, null, Names.STREAMS_ORG.value, nameComps, null,
                    nameComps.get(nameComps.size() - 1));
        }
}
/**
 * Wraps the just-parsed function into a lambda for scope support: the
 * function becomes a public lambda renamed with a generated prefix, its body
 * is dropped when {@code bodyExists} is false, and it is given a nil return
 * type and no receiver.
 *
 * @param bodyExists whether the parsed function carried a body
 * @param name       suffix for the generated function name
 * @return the lambda wrapping the adjusted function
 */
BLangLambdaFunction getScopesFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, boolean bodyExists, String name) {
        BLangFunction function = (BLangFunction) this.invokableNodeStack.pop();
        function.pos = pos;
        function.addWS(ws);
        function.flagSet.add(Flag.PUBLIC);
        function.flagSet.add(Flag.LAMBDA);
        if (!bodyExists) {
            function.body = null;
        }
        BLangIdentifier nameId = new BLangIdentifier();
        nameId.setValue(Names.GEN_VAR_PREFIX + name);
        function.name = nameId;
        BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        typeNode.pos = pos;
        typeNode.typeKind = TypeKind.NIL;
        function.returnTypeNode = typeNode;
        function.receiver = null;
        BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
        lambda.function = function;
        return lambda;
}
/**
 * Attaches the most recent type node as a type reference of the structure
 * type node beneath it on the stack (record/object type inclusion).
 */
public void addTypeReference(DiagnosticPos currentPos, Set<Whitespace> ws) {
        TypeNode typeRef = typeNodeStack.pop();
        typeRef.addWS(ws);
        BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeNodeStack.peek();
        structureTypeNode.addTypeReference(typeRef);
}
/**
 * Builds a type-test expression ({@code expr is Type}) from the most recent
 * expression and type nodes, and publishes it on the expression stack.
 */
public void createTypeTestExpression(DiagnosticPos pos, Set<Whitespace> ws) {
        BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
        typeTestExpr.expr = (BLangExpression) this.exprNodeStack.pop();
        typeTestExpr.typeNode = (BLangType) this.typeNodeStack.pop();
        typeTestExpr.pos = pos;
        typeTestExpr.addWS(ws);
        addExpressionNode(typeTestExpr);
}
/** Builds a single-expression wait expression from the most recent expression. */
void handleWait(DiagnosticPos currentPos, Set<Whitespace> ws) {
        BLangWaitExpr waitExpr = TreeBuilder.createWaitExpressionNode();
        BLangExpression waitedOn = (BLangExpression) this.exprNodeStack.pop();
        waitExpr.exprList = Collections.singletonList(waitedOn);
        waitExpr.pos = currentPos;
        waitExpr.addWS(ws);
        addExpressionNode(waitExpr);
}
/** Begins a wait-for-all expression: pushes a fresh collector for its key-value pairs. */
void startWaitForAll() {
        this.waitCollectionStack.push(TreeBuilder.createWaitForAllExpressionNode());
}
/** Completes the wait-for-all expression and publishes it on the expression stack. */
void handleWaitForAll(DiagnosticPos pos, Set<Whitespace> ws) {
        BLangWaitForAllExpr waitForAll = waitCollectionStack.pop();
        waitForAll.pos = pos;
        waitForAll.addWS(ws);
        addExpressionNode(waitForAll);
}
/**
 * Adds one key-value entry to the wait-for-all expression being built. When
 * an explicit value expression was parsed it becomes {@code key: expr};
 * otherwise the shorthand form turns the key itself into a variable reference.
 */
void addKeyValueToWaitForAll(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean containsExpr) {
        BLangWaitForAllExpr.BLangWaitKeyValue keyValue = TreeBuilder.createWaitKeyValueNode();
        keyValue.addWS(ws);
        keyValue.pos = pos;
        BLangIdentifier key = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        key.setLiteral(false);
        key.setValue(identifier);
        keyValue.key = key;
        if (containsExpr) {
            keyValue.valueExpr = (BLangExpression) exprNodeStack.pop();
        } else {
            // Shorthand: reuse the key as a simple variable reference.
            BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
            varRef.pos = pos;
            varRef.variableName = key;
            varRef.addWS(ws);
            varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
            keyValue.keyExpr = varRef;
        }
        waitCollectionStack.peek().keyValuePairs.add(keyValue);
}
} | class BLangPackageBuilder {
// Target compilation unit that completed top-level constructs are added to.
private CompilationUnitNode compUnit;
// --- Work-in-progress stacks. Each start* method pushes a node and the
// --- matching end*/add* method completes and pops it; pop order between
// --- stacks encodes the grammar's nesting.
private Stack<BLangNameReference> nameReferenceStack = new Stack<>();
private Stack<TypeNode> typeNodeStack = new Stack<>();
private Stack<BlockNode> blockNodeStack = new Stack<>();
private Stack<BLangVariable> varStack = new Stack<>();
private Stack<List<BLangVariable>> varListStack = new Stack<>();
private Stack<List<BLangRecordVariableKeyValue>> recordVarListStack = new Stack<>();
private Stack<List<BLangRecordVarRefKeyValue>> recordVarRefListStack = new Stack<>();
private Stack<InvokableNode> invokableNodeStack = new Stack<>();
private Stack<ExpressionNode> exprNodeStack = new Stack<>();
private Stack<List<ExpressionNode>> exprNodeListStack = new Stack<>();
// Whitespace captured around comma-separated lists and invocations.
private Stack<Set<Whitespace>> commaWsStack = new Stack<>();
private Stack<Set<Whitespace>> invocationWsStack = new Stack<>();
private Stack<BLangRecordLiteral> recordLiteralNodes = new Stack<>();
private Stack<BLangTableLiteral> tableLiteralNodes = new Stack<>();
private Stack<BLangWaitForAllExpr> waitCollectionStack = new Stack<>();
private Stack<BLangTryCatchFinally> tryCatchFinallyNodesStack = new Stack<>();
private Stack<AnnotationNode> annotationStack = new Stack<>();
private Stack<MarkdownDocumentationNode> markdownDocumentationStack = new Stack<>();
private Stack<DeprecatedNode> deprecatedAttachmentStack = new Stack<>();
private Stack<AnnotationAttachmentNode> annotAttachmentStack = new Stack<>();
private Stack<IfNode> ifElseStatementStack = new Stack<>();
private Stack<TransactionNode> transactionNodeStack = new Stack<>();
private Stack<ForkJoinNode> forkJoinNodesStack = new Stack<>();
private Stack<ServiceNode> serviceNodeStack = new Stack<>();
private Stack<XMLAttributeNode> xmlAttributeNodeStack = new Stack<>();
private Stack<AttachPoint> attachPointStack = new Stack<>();
// --- Streaming / table query construction state.
private Stack<OrderByNode> orderByClauseStack = new Stack<>();
private Stack<OrderByVariableNode> orderByVariableStack = new Stack<>();
private Stack<LimitNode> limitClauseStack = new Stack<>();
private Stack<GroupByNode> groupByClauseStack = new Stack<>();
private Stack<HavingNode> havingClauseStack = new Stack<>();
private Stack<WhereNode> whereClauseStack = new Stack<>();
private Stack<SelectExpressionNode> selectExpressionsStack = new Stack<>();
private Stack<List<SelectExpressionNode>> selectExpressionsListStack = new Stack<>();
private Stack<SelectClauseNode> selectClausesStack = new Stack<>();
private Stack<WindowClauseNode> windowClausesStack = new Stack<>();
private Stack<StreamingInput> streamingInputStack = new Stack<>();
private Stack<JoinStreamingInput> joinStreamingInputsStack = new Stack<>();
private Stack<TableQuery> tableQueriesStack = new Stack<>();
private Stack<SetAssignmentNode> setAssignmentStack = new Stack<>();
private Stack<List<SetAssignmentNode>> setAssignmentListStack = new Stack<>();
private Stack<StreamActionNode> streamActionNodeStack = new Stack<>();
private Stack<PatternStreamingEdgeInputNode> patternStreamingEdgeInputStack = new Stack<>();
private Stack<PatternStreamingInputNode> patternStreamingInputStack = new Stack<>();
private Stack<StreamingQueryStatementNode> streamingQueryStatementStack = new Stack<>();
private Stack<ForeverNode> foreverNodeStack = new Stack<>();
private Stack<OutputRateLimitNode> outputRateLimitStack = new Stack<>();
private Stack<WithinClause> withinClauseStack = new Stack<>();
private Stack<PatternClause> patternClauseStack = new Stack<>();
private Set<BLangImportPackage> imports = new HashSet<>();
private List<VariableDefinitionNode> defaultableParamsList = new ArrayList<>();
private Stack<SimpleVariableNode> restParamStack = new Stack<>();
private Deque<BLangMatch> matchStmtStack;
// Most recently completed pattern input, threaded between the processX helpers.
private PatternStreamingInputNode recentStreamingPatternInputNode;
private Stack<Set<Whitespace>> operatorWs = new Stack<>();
private Stack<Set<Whitespace>> objectFieldBlockWs = new Stack<>();
private Stack<Set<Whitespace>> finiteTypeWsStack = new Stack<>();
private BLangAnonymousModelHelper anonymousModelHelper;
private CompilerOptions compilerOptions;
private BLangDiagnosticLog dlog;
// Delimiters of quoted identifier literals in source.
private static final String IDENTIFIER_LITERAL_PREFIX = "^\"";
private static final String IDENTIFIER_LITERAL_SUFFIX = "\"";
/**
 * Creates a builder that accumulates parsed constructs into the given
 * compilation unit, resolving shared helpers from the compiler context.
 */
public BLangPackageBuilder(CompilerContext context, CompilationUnitNode compUnit) {
        this.dlog = BLangDiagnosticLog.getInstance(context);
        this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
        this.compilerOptions = CompilerOptions.getInstance(context);
        this.compUnit = compUnit;
}
/** Records an attach point for the annotation currently being built. */
void addAttachPoint(AttachPoint attachPoint, Set<Whitespace> ws) {
        attachPointStack.push(attachPoint);
        this.annotationStack.peek().addWS(ws);
}
/** Pushes a built-in value type node (e.g. int, string) resolved from its textual name. */
void addValueType(DiagnosticPos pos, Set<Whitespace> ws, String typeName) {
        BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
        valueType.pos = pos;
        valueType.addWS(ws);
        // Strip interior whitespace before mapping the name to a TypeKind.
        valueType.typeKind = TreeUtils.stringToTypeKind(typeName.replaceAll("\\s+", ""));
        addType(valueType);
}
/**
 * Pops the two most recent type nodes and pushes their union. When the RHS is
 * itself a union, the LHS is folded into it (prepended) so chained unions stay
 * flat instead of nesting.
 */
void addUnionType(DiagnosticPos pos, Set<Whitespace> ws) {
        BLangType rhsTypeNode = (BLangType) this.typeNodeStack.pop();
        BLangType lhsTypeNode = (BLangType) this.typeNodeStack.pop();
        if (rhsTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
            // Flatten: reuse the existing union node (its pos is kept as-is).
            BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) rhsTypeNode;
            unionTypeNode.memberTypeNodes.add(0, lhsTypeNode);
            unionTypeNode.addWS(ws);
            this.typeNodeStack.push(unionTypeNode);
            return;
        }
        BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
        unionTypeNode.memberTypeNodes.add(lhsTypeNode);
        unionTypeNode.memberTypeNodes.add(rhsTypeNode);
        unionTypeNode.pos = pos;
        unionTypeNode.addWS(ws);
        this.typeNodeStack.push(unionTypeNode);
}
/** Pops {@code members} type nodes and pushes a tuple type preserving their source order. */
void addTupleType(DiagnosticPos pos, Set<Whitespace> ws, int members) {
        BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
        int remaining = members;
        while (remaining-- > 0) {
            // Members were pushed left-to-right, so prepend while popping.
            tupleTypeNode.memberTypeNodes.add(0, (BLangType) this.typeNodeStack.pop());
        }
        tupleTypeNode.pos = pos;
        tupleTypeNode.addWS(ws);
        this.typeNodeStack.push(tupleTypeNode);
}
/**
 * Completes a record type. A named record is pushed as a type node directly;
 * an anonymous record is registered as a generated public type definition on
 * the compilation unit, and a user-defined type reference to that definition
 * is pushed instead. An open record's rest field type, if present, is on top
 * of the type stack.
 */
void addRecordType(DiagnosticPos pos, Set<Whitespace> ws, boolean isFieldAnalyseRequired, boolean isAnonymous,
                   boolean sealed, boolean hasRestField) {
        // If there is an explicitly defined rest field, take it.
        BLangType restFieldType = null;
        if (hasRestField && !sealed) {
            restFieldType = (BLangType) this.typeNodeStack.pop();
        }
        BLangRecordTypeNode recordTypeNode = populateRecordTypeNode(pos, ws, isAnonymous);
        recordTypeNode.isFieldAnalyseRequired = isFieldAnalyseRequired;
        recordTypeNode.sealed = sealed;
        recordTypeNode.restFieldType = restFieldType;
        if (!isAnonymous) {
            addType(recordTypeNode);
            return;
        }
        // Anonymous record: synthesize a public type definition with a
        // generated name and push a reference to it.
        BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
        String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
        IdentifierNode anonTypeGenName = createIdentifier(genName);
        typeDef.setName(anonTypeGenName);
        typeDef.flagSet.add(Flag.PUBLIC);
        typeDef.typeNode = recordTypeNode;
        typeDef.pos = pos;
        this.compUnit.addTopLevelNode(typeDef);
        addType(createUserDefinedType(pos, ws, (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name));
}
/** Pops the record type node, stamps position/whitespace/anonymity, and attaches its parsed fields. */
private BLangRecordTypeNode populateRecordTypeNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isAnonymous) {
        BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeNodeStack.pop();
        recordTypeNode.pos = pos;
        recordTypeNode.addWS(ws);
        recordTypeNode.isAnonymous = isAnonymous;
        for (BLangVariable field : this.varListStack.pop()) {
            recordTypeNode.addField((SimpleVariableNode) field);
        }
        return recordTypeNode;
}
/**
 * Adds a record field: public unless {@code isPrivate}; marked OPTIONAL when
 * declared with {@code ?}, otherwise REQUIRED when it carries no default
 * expression.
 */
void addFieldVariable(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                      boolean exprAvailable, int annotCount, boolean isPrivate, boolean isOptional) {
        BLangSimpleVariable field = addSimpleVar(pos, ws, identifier, exprAvailable, annotCount);
        if (!isPrivate) {
            field.flagSet.add(Flag.PUBLIC);
        }
        if (isOptional) {
            field.flagSet.add(Flag.OPTIONAL);
        } else if (!exprAvailable) {
            field.flagSet.add(Flag.REQUIRED);
        }
}
/**
 * Adds an object field with annotations and optional deprecated attachment;
 * visibility defaults to package-level when neither public nor private.
 */
void addFieldVariable(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                      boolean exprAvailable, boolean deprecatedDocExit,
                      int annotCount, boolean isPrivate, boolean isPublic) {
        BLangSimpleVariable field = addSimpleVar(pos, ws, identifier, exprAvailable, annotCount);
        attachAnnotations(field, annotCount);
        if (deprecatedDocExit) {
            attachDeprecatedNode(field);
        }
        if (isPublic) {
            field.flagSet.add(Flag.PUBLIC);
        } else if (isPrivate) {
            field.flagSet.add(Flag.PRIVATE);
        }
}
/** Wraps the most recent type node in an array type with the given dimensions and sizes. */
void addArrayType(DiagnosticPos pos, Set<Whitespace> ws, int dimensions, int[] sizes) {
        BLangArrayType arrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
        arrayType.pos = pos;
        arrayType.addWS(ws);
        arrayType.elemtype = (BLangType) this.typeNodeStack.pop();
        arrayType.dimensions = dimensions;
        arrayType.sizes = sizes;
        addType(arrayType);
}
/** Marks the type node on top of the stack as nullable ({@code T?}). */
void markTypeNodeAsNullable(Set<Whitespace> ws) {
        BLangType top = (BLangType) this.typeNodeStack.peek();
        top.addWS(ws);
        top.nullable = true;
}
/** Marks the type node on top of the stack as parenthesized ({@code (T)}). */
void markTypeNodeAsGrouped(Set<Whitespace> ws) {
        BLangType top = (BLangType) this.typeNodeStack.peek();
        top.addWS(ws);
        top.grouped = true;
}
/** Pushes a user-defined type node built from the most recent (pkgAlias, name) reference. */
void addUserDefineType(Set<Whitespace> ws) {
        BLangNameReference nameReference = nameReferenceStack.pop();
        BLangUserDefinedType userDefinedType = createUserDefinedType(nameReference.pos, ws,
                (BLangIdentifier) nameReference.pkgAlias, (BLangIdentifier) nameReference.name);
        userDefinedType.addWS(nameReference.ws);
        addType(userDefinedType);
}
/** Pushes a built-in reference type node resolved from its textual name. */
void addBuiltInReferenceType(DiagnosticPos pos, Set<Whitespace> ws, String typeName) {
        BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
        refType.pos = pos;
        refType.addWS(ws);
        refType.typeKind = TreeUtils.stringToTypeKind(typeName);
        addType(refType);
}
/**
 * Pushes an error type node. Pop order matters: the detail type was pushed
 * last (parsed after the reason type), so it is popped first.
 */
void addErrorType(DiagnosticPos pos, Set<Whitespace> ws, boolean isReasonTypeExists, boolean isDetailsTypeExists) {
        BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
        errorType.pos = pos;
        errorType.addWS(ws);
        if (isDetailsTypeExists) {
            errorType.detailType = (BLangType) this.typeNodeStack.pop();
        }
        if (isReasonTypeExists) {
            errorType.reasonType = (BLangType) this.typeNodeStack.pop();
        }
        addType(errorType);
}
/**
 * Pushes a constrained built-in reference type (e.g. {@code map<Foo>}) whose
 * constraint is a user-defined type taken from the name-reference stack.
 */
void addConstraintType(DiagnosticPos pos, Set<Whitespace> ws, String typeName) {
        BLangNameReference nameReference = nameReferenceStack.pop();
        BLangUserDefinedType constraintType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        constraintType.pos = pos;
        constraintType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        constraintType.typeName = (BLangIdentifier) nameReference.name;
        constraintType.addWS(nameReference.ws);
        // The base type keyword's whitespace sits 2 entries from the end.
        Set<Whitespace> refTypeWS = removeNthFromLast(ws, 2);
        BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
        refType.typeKind = TreeUtils.stringToTypeKind(typeName);
        refType.pos = pos;
        refType.addWS(refTypeWS);
        BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
        constrainedType.type = refType;
        constrainedType.constraint = constraintType;
        constrainedType.pos = pos;
        constrainedType.addWS(ws);
        addType(constrainedType);
}
/**
 * Pushes a constrained built-in reference type whose constraint is the most
 * recently parsed type node (e.g. {@code stream<int>}).
 */
void addConstraintTypeWithTypeName(DiagnosticPos pos, Set<Whitespace> ws, String typeName) {
        // The base type keyword's whitespace sits 2 entries from the end.
        Set<Whitespace> refTypeWS = removeNthFromLast(ws, 2);
        BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
        refType.typeKind = TreeUtils.stringToTypeKind(typeName);
        refType.pos = pos;
        refType.addWS(refTypeWS);
        BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
        constrainedType.type = refType;
        constrainedType.constraint = (BLangType) this.typeNodeStack.pop();
        constrainedType.pos = pos;
        constrainedType.addWS(ws);
        addType(constrainedType);
}
/**
 * Pushes a user-defined type node built from the name reference on top of
 * the name-reference stack.
 */
void addEndpointType(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangNameReference ref = nameReferenceStack.pop();
    BLangUserDefinedType endpointType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    endpointType.pos = pos;
    endpointType.typeName = (BLangIdentifier) ref.name;
    endpointType.pkgAlias = (BLangIdentifier) ref.pkgAlias;
    endpointType.addWS(ref.ws);
    addType(endpointType);
}
/**
 * Builds a function type node. The return type (if present) comes from the
 * variable stack; the parameters (if present) from the variable-list stack.
 * When no return type exists, nil is used.
 */
void addFunctionType(DiagnosticPos pos, Set<Whitespace> ws, boolean paramsAvail,
                     boolean retParamsAvail) {
    BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) TreeBuilder.createFunctionTypeNode();
    functionTypeNode.pos = pos;
    functionTypeNode.returnsKeywordExists = true;
    if (retParamsAvail) {
        functionTypeNode.addWS(this.varStack.peek().getWS());
        functionTypeNode.returnTypeNode = this.varStack.pop().getTypeNode();
    } else {
        // No explicit return type: default to nil.
        BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        nilTypeNode.pos = pos;
        nilTypeNode.typeKind = TypeKind.NIL;
        functionTypeNode.returnTypeNode = nilTypeNode;
    }
    if (paramsAvail) {
        functionTypeNode.addWS(commaWsStack.pop());
        this.varListStack.pop().forEach(v -> functionTypeNode.params.add(v));
    }
    functionTypeNode.addWS(ws);
    addType(functionTypeNode);
}
/** Pushes the given type node onto the type-node stack. */
private void addType(TypeNode typeNode) {
    typeNodeStack.push(typeNode);
}
void addNameReference(DiagnosticPos currentPos, Set<Whitespace> ws, String pkgName, String name) {
IdentifierNode pkgNameNode = createIdentifier(pkgName);
IdentifierNode nameNode = createIdentifier(name);
nameReferenceStack.push(new BLangNameReference(currentPos, ws, pkgNameNode, nameNode));
}
/** Opens a fresh variable-list scope for collecting parameters/variables. */
void startVarList() {
    varListStack.push(new ArrayList<>());
}
/** Begins a function definition, attaching pending annotations, docs, and deprecation. */
void startFunctionDef(int annotCount) {
    FunctionNode fn = TreeBuilder.createFunctionNode();
    attachAnnotations(fn, annotCount);
    attachMarkdownDocumentations(fn);
    attachDeprecatedNode(fn);
    invokableNodeStack.push(fn);
}
/** Begins an object-attached function definition; no annotations are attached here. */
void startObjectFunctionDef() {
    invokableNodeStack.push(TreeBuilder.createFunctionNode());
}
/** Opens a new statement-block scope. */
void startBlock() {
    blockNodeStack.push(TreeBuilder.createBlockNode());
}
/**
 * Creates an identifier node for the given textual value. Quoted identifier
 * literals (delimited by IDENTIFIER_LITERAL_PREFIX / IDENTIFIER_LITERAL_SUFFIX)
 * are Java-unescaped, stripped of their delimiters, and flagged as literal.
 * A null value yields an empty identifier node.
 */
private IdentifierNode createIdentifier(String value) {
    IdentifierNode node = TreeBuilder.createIdentifierNode();
    if (value == null) {
        return node;
    }
    if (value.startsWith(IDENTIFIER_LITERAL_PREFIX) && value.endsWith(IDENTIFIER_LITERAL_SUFFIX)) {
        value = StringEscapeUtils.unescapeJava(value);
        // Strips the delimiters: assumes a 2-char prefix and 1-char suffix — TODO confirm.
        node.setValue(value.substring(2, value.length() - 1));
        node.setLiteral(true);
    } else {
        node.setValue(value);
        node.setLiteral(false);
    }
    return node;
}
/**
 * Creates a simple (typed) variable and records it either on the variable
 * stack or in the innermost variable list, depending on parsing context.
 *
 * @return the created variable node
 */
BLangSimpleVariable addSimpleVar(DiagnosticPos pos,
                                 Set<Whitespace> ws,
                                 String identifier,
                                 boolean exprAvailable,
                                 int annotCount) {
    BLangSimpleVariable var = (BLangSimpleVariable) this.generateBasicVarNode(pos, ws, identifier, exprAvailable);
    attachAnnotations(var, annotCount);
    var.pos = pos;
    if (this.varListStack.empty()) {
        // Not inside a parameter/variable list: stand-alone variable.
        this.varStack.push(var);
    } else {
        this.varListStack.peek().add(var);
    }
    return var;
}
/**
 * Creates a simple variable for a member of a binding pattern (tuple/record
 * destructuring) and pushes it on the variable stack.
 *
 * @return the created member variable
 */
BLangVariable addBindingPatternMemberVariable(DiagnosticPos pos,
                                              Set<Whitespace> ws,
                                              String identifier) {
    BLangSimpleVariable memberVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    memberVar.pos = pos;
    IdentifierNode name = this.createIdentifier(identifier);
    memberVar.setName(name);
    memberVar.addWS(ws);
    this.varStack.push(memberVar);
    return memberVar;
}
/**
 * Collapses the top {@code members} entries of the variable stack into a
 * tuple variable and pushes it back.
 */
void addTupleVariable(DiagnosticPos pos, Set<Whitespace> ws, int members) {
    BLangTupleVariable tupleVariable = (BLangTupleVariable) TreeBuilder.createTupleVariableNode();
    tupleVariable.pos = pos;
    tupleVariable.addWS(ws);
    for (int i = 0; i < members; i++) {
        final BLangVariable member = this.varStack.pop();
        // Insert at index 0 to undo the stack's reversal of source order.
        tupleVariable.memberVariables.add(0, member);
    }
    this.varStack.push(tupleVariable);
}
/**
 * Collapses the top {@code members} entries of the expression stack into a
 * tuple variable reference and pushes it back.
 */
void addTupleVariableReference(DiagnosticPos pos, Set<Whitespace> ws, int members) {
    BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) TreeBuilder.createTupleVariableReferenceNode();
    tupleVarRef.pos = pos;
    tupleVarRef.addWS(ws);
    for (int i = 0; i < members; i++) {
        final BLangExpression expr = (BLangExpression) this.exprNodeStack.pop();
        // Insert at index 0 to undo the stack's reversal of source order.
        tupleVarRef.expressions.add(0, expr);
    }
    this.exprNodeStack.push(tupleVarRef);
}
/** Opens a fresh list for collecting record binding-pattern entries. */
void startRecordVariableList() {
    this.recordVarListStack.push(new ArrayList<>());
}
/** Opens a fresh list for collecting record reference binding-pattern entries. */
void startRecordVariableReferenceList() {
    this.recordVarRefListStack.push(new ArrayList<>());
}
/**
 * Builds a record binding-pattern variable from the collected key/value list
 * and the given rest-binding state, then pushes it on the variable stack.
 */
void addRecordVariable(DiagnosticPos pos, Set<Whitespace> ws, RestBindingPatternState restBindingPattern) {
    BLangRecordVariable recordVariable = (BLangRecordVariable) TreeBuilder.createRecordVariableNode();
    recordVariable.pos = pos;
    recordVariable.addWS(ws);
    recordVariable.variableList = this.recordVarListStack.pop();
    switch (restBindingPattern) {
        case OPEN_REST_BINDING_PATTERN:
            // `...rest` — the rest variable is on the variable stack.
            recordVariable.restParam = this.varStack.pop();
            break;
        case CLOSED_REST_BINDING_PATTERN:
            // `!...` — no additional fields are permitted.
            recordVariable.isClosed = true;
            break;
        case NO_BINDING_PATTERN:
            break;
    }
    this.varStack.push(recordVariable);
}
/**
 * Builds a record variable reference (destructuring assignment LHS) from the
 * collected field-reference list and rest-binding state, then pushes it on
 * the expression stack.
 */
void addRecordVariableReference(DiagnosticPos pos, Set<Whitespace> ws, RestBindingPatternState restBindingPattern) {
    BLangRecordVarRef recordVarRef = (BLangRecordVarRef) TreeBuilder.createRecordVariableReferenceNode();
    recordVarRef.pos = pos;
    recordVarRef.addWS(ws);
    switch (restBindingPattern) {
        case OPEN_REST_BINDING_PATTERN:
            // `...rest` — the rest expression is on the expression stack.
            recordVarRef.restParam = this.exprNodeStack.pop();
            break;
        case CLOSED_REST_BINDING_PATTERN:
            recordVarRef.isClosed = true;
            break;
        case NO_BINDING_PATTERN:
            break;
    }
    recordVarRef.recordRefFields = this.recordVarRefListStack.pop();
    this.exprNodeStack.push(recordVarRef);
}
/**
 * Records one key/value entry of a record binding pattern. When no explicit
 * binding pattern was given ({@code {a}} shorthand), a member variable named
 * after the key is synthesized first.
 */
void addFieldBindingMemberVar(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean bindingPattern) {
    BLangRecordVariableKeyValue recordKeyValue = new BLangRecordVariableKeyValue();
    recordKeyValue.key = (BLangIdentifier) this.createIdentifier(identifier);
    if (!bindingPattern) {
        // Shorthand field: synthesize the value variable onto the stack.
        addBindingPatternMemberVariable(pos, ws, identifier);
    }
    recordKeyValue.valueBindingPattern = this.varStack.pop();
    this.recordVarListStack.peek().add(recordKeyValue);
}
/**
 * Records one key/value entry of a record reference binding pattern. When no
 * explicit binding pattern was given, a simple variable reference named after
 * the key is synthesized first.
 */
void addFieldRefBindingMemberVar(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                 boolean bindingPattern) {
    BLangExpression expression;
    if (!bindingPattern) {
        // Shorthand field: synthesize `identifier` as a var-ref onto the stack.
        addNameReference(pos, ws, null, identifier);
        createSimpleVariableReference(pos, ws);
    }
    expression = (BLangExpression) this.exprNodeStack.pop();
    BLangRecordVarRefKeyValue keyValue = new BLangRecordVarRefKeyValue();
    keyValue.variableName = (BLangIdentifier) createIdentifier(identifier);
    keyValue.variableReference = expression;
    this.recordVarRefListStack.peek().add(keyValue);
}
/**
 * Creates a variable without an explicit type node and records it either on
 * the variable stack or in the innermost variable list.
 *
 * @return the created variable node
 */
public BLangVariable addVarWithoutType(DiagnosticPos pos,
                                       Set<Whitespace> ws,
                                       String identifier,
                                       boolean exprAvailable,
                                       int annotCount) {
    BLangVariable var = (BLangVariable) this.generateBasicVarNodeWithoutType(pos, ws, identifier, exprAvailable);
    attachAnnotations(var, annotCount);
    var.pos = pos;
    if (this.varListStack.empty()) {
        // Not inside a parameter/variable list: stand-alone variable.
        this.varStack.push(var);
    } else {
        this.varListStack.peek().add(var);
    }
    return var;
}
/** Records the comma whitespace collected while parsing a formal parameter list. */
public void endFormalParameterList(Set<Whitespace> ws) {
    commaWsStack.push(ws);
}
/**
 * Creates an anonymous (unnamed) variable representing a return parameter and
 * pushes it on the variable stack for endCallableUnitSignature to consume.
 */
void addReturnParam(DiagnosticPos pos,
                    Set<Whitespace> ws,
                    int annotCount) {
    BLangSimpleVariable var = (BLangSimpleVariable) this.generateBasicVarNode(pos, ws, null, false);
    attachAnnotations(var, annotCount);
    var.pos = pos;
    this.varStack.push(var);
}
/**
 * Completes the signature of the invokable currently on top of the stack:
 * sets its name, return type (nil when absent), required/defaultable/rest
 * parameters, and associated whitespace.
 */
void endCallableUnitSignature(DiagnosticPos pos,
                              Set<Whitespace> ws,
                              String identifier,
                              DiagnosticPos identifierPos,
                              boolean paramsAvail,
                              boolean retParamsAvail,
                              boolean restParamAvail) {
    InvokableNode invNode = this.invokableNodeStack.peek();
    BLangIdentifier identifierNode = (BLangIdentifier) this.createIdentifier(identifier);
    identifierNode.pos = identifierPos;
    invNode.setName(identifierNode);
    invNode.addWS(ws);
    BLangType returnTypeNode;
    if (retParamsAvail) {
        // The return "parameter" was pushed by addReturnParam; reuse its type
        // node and move its annotations onto the invokable's return type.
        BLangSimpleVariable varNode = (BLangSimpleVariable) this.varStack.pop();
        returnTypeNode = varNode.getTypeNode();
        invNode.addWS(varNode.getWS());
        varNode.getAnnotationAttachments().forEach(invNode::addReturnTypeAnnotationAttachment);
    } else {
        // No declared return type: default to nil.
        BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        nillTypeNode.pos = pos;
        nillTypeNode.typeKind = TypeKind.NIL;
        returnTypeNode = nillTypeNode;
    }
    invNode.setReturnTypeNode(returnTypeNode);
    if (paramsAvail) {
        this.varListStack.pop().forEach(variableNode -> {
            invNode.addParameter((SimpleVariableNode) variableNode);
        });
        // Defaultable parameters were accumulated separately while parsing.
        this.defaultableParamsList.forEach(variableDef -> {
            BLangSimpleVariableDef varDef = (BLangSimpleVariableDef) variableDef;
            invNode.addDefaultableParameter(varDef);
        });
        this.defaultableParamsList = new ArrayList<>();
        if (restParamAvail) {
            invNode.setRestParameter(this.restParamStack.pop());
        }
        invNode.addWS(this.commaWsStack.pop());
    }
}
/**
 * Begins a lambda function definition: starts a normal function definition,
 * gives it a generated anonymous name, and flags it as a lambda.
 */
void startLambdaFunctionDef(PackageID pkgID) {
    startFunctionDef(0);
    BLangFunction lambdaFunction = (BLangFunction) this.invokableNodeStack.peek();
    lambdaFunction.setName(createIdentifier(anonymousModelHelper.getNextAnonymousFunctionKey(pkgID)));
    lambdaFunction.addFlag(Flag.LAMBDA);
}
/**
 * Completes a lambda function: finalizes its signature, wraps it in a
 * BLangLambdaFunction expression, and ends the function definition
 * (registering the function as a top-level node).
 */
void addLambdaFunctionDef(DiagnosticPos pos,
                          Set<Whitespace> ws,
                          boolean paramsAvail,
                          boolean retParamsAvail,
                          boolean restParamAvail) {
    BLangFunction lambdaFunction = (BLangFunction) this.invokableNodeStack.peek();
    lambdaFunction.pos = pos;
    endCallableUnitSignature(pos, ws, lambdaFunction.getName().value, pos, paramsAvail, retParamsAvail,
            restParamAvail);
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = lambdaFunction;
    lambdaExpr.pos = pos;
    addExpressionNode(lambdaExpr);
    // Pops the invokable and registers the function (isLambda = true).
    endFunctionDef(pos, null, false, false, false, true, false, true);
}
/**
 * Builds an arrow function expression from the collected parameter list and
 * the body expression on top of the expression stack.
 */
void addArrowFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, PackageID pkgID) {
    BLangArrowFunction arrowFunctionNode = (BLangArrowFunction) TreeBuilder.createArrowFunctionNode();
    arrowFunctionNode.pos = pos;
    arrowFunctionNode.addWS(ws);
    arrowFunctionNode.functionName = createIdentifier(anonymousModelHelper.getNextAnonymousFunctionKey(pkgID));
    varListStack.pop().forEach(var -> arrowFunctionNode.params.add((BLangSimpleVariable) var));
    arrowFunctionNode.expression = (BLangExpression) this.exprNodeStack.pop();
    addExpressionNode(arrowFunctionNode);
}
/**
 * Marks the expression on top of the stack as an async invocation (`start`).
 * Reports an error when that expression is not an invocation.
 */
void markLastInvocationAsAsync(DiagnosticPos pos) {
    ExpressionNode top = this.exprNodeStack.peek();
    if (top.getKind() != NodeKind.INVOCATION) {
        dlog.error(pos, DiagnosticCode.START_REQUIRE_INVOCATION);
        return;
    }
    ((BLangInvocation) top).async = true;
}
/**
 * Builds a simple variable-definition statement and adds it to the current
 * block. The type node is popped only for explicitly-typed declarations;
 * the initializer only when present.
 */
void addSimpleVariableDefStatement(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean isFinal,
                                   boolean isDeclaredWithVar, boolean isExpressionAvailable) {
    BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    BLangSimpleVariableDef varDefNode = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    var.pos = pos;
    var.addWS(ws);
    var.setName(this.createIdentifier(identifier));
    if (isFinal) {
        markVariableAsFinal(var);
    }
    if (isDeclaredWithVar) {
        var.isDeclaredWithVar = true;
    } else {
        var.setTypeNode(this.typeNodeStack.pop());
    }
    if (isExpressionAvailable) {
        var.setInitialExpression(this.exprNodeStack.pop());
    }
    varDefNode.pos = pos;
    varDefNode.setVariable(var);
    varDefNode.addWS(ws);
    addStmtToCurrentBlock(varDefNode);
}
/**
 * Builds a tuple variable-definition statement from the tuple variable on the
 * variable stack and adds it to the current block. Note the pop ordering:
 * the initializer expression is popped before the (optional) type node.
 */
void addTupleVariableDefStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean isFinal,
                                  boolean isDeclaredWithVar) {
    BLangTupleVariable var = (BLangTupleVariable) this.varStack.pop();
    if (isFinal) {
        markVariableAsFinal(var);
    }
    BLangTupleVariableDef varDefNode = (BLangTupleVariableDef) TreeBuilder.createTupleVariableDefinitionNode();
    // The last whitespace entry is the trailing semicolon's.
    Set<Whitespace> wsOfSemiColon = removeNthFromLast(ws, 0);
    var.setInitialExpression(this.exprNodeStack.pop());
    varDefNode.pos = pos;
    varDefNode.setVariable(var);
    varDefNode.addWS(wsOfSemiColon);
    var.isDeclaredWithVar = isDeclaredWithVar;
    if (!isDeclaredWithVar) {
        var.setTypeNode(this.typeNodeStack.pop());
    }
    addStmtToCurrentBlock(varDefNode);
}
/**
 * Builds a record variable-definition statement from the record variable on
 * the variable stack and adds it to the current block.
 */
void addRecordVariableDefStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean isFinal,
                                   boolean isDeclaredWithVar) {
    BLangRecordVariableDef varDefNode = (BLangRecordVariableDef) TreeBuilder.createRecordVariableDefinitionNode();
    BLangRecordVariable var = (BLangRecordVariable) this.varStack.pop();
    if (isFinal) {
        markVariableAsFinal(var);
    }
    var.setInitialExpression(this.exprNodeStack.pop());
    varDefNode.pos = pos;
    varDefNode.setVariable(var);
    varDefNode.addWS(ws);
    varDefNode.var = var;
    var.isDeclaredWithVar = isDeclaredWithVar;
    if (!isDeclaredWithVar) {
        var.setTypeNode(this.typeNodeStack.pop());
    }
    addStmtToCurrentBlock(varDefNode);
}
/**
 * Builds a type/object initialization expression (`new T(args)` or `new (args)`).
 * The constructor call is modeled as an invocation of {@code initName}; its
 * arguments are mirrored onto both the invocation and the init node.
 */
void addTypeInitExpression(DiagnosticPos pos, Set<Whitespace> ws, String initName, boolean typeAvailable,
                           boolean exprAvailable) {
    BLangTypeInit objectInitNode = (BLangTypeInit) TreeBuilder.createObjectInitNode();
    objectInitNode.pos = pos;
    objectInitNode.addWS(ws);
    if (typeAvailable) {
        // Explicit `new T(...)` — the type name was pushed on the type stack.
        objectInitNode.userDefinedType = (BLangUserDefinedType) typeNodeStack.pop();
    }
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    invocationNode.addWS(ws);
    if (exprAvailable) {
        List<ExpressionNode> exprNodes = exprNodeListStack.pop();
        Set<Whitespace> cws = commaWsStack.pop();
        exprNodes.forEach(exprNode -> {
            // Same argument list feeds both the init node and the invocation.
            invocationNode.argExprs.add((BLangExpression) exprNode);
            objectInitNode.argsExpr.add((BLangExpression) exprNode);
        });
        invocationNode.addWS(cws);
        objectInitNode.addWS(cws);
    }
    IdentifierNode pkgNameNode = TreeBuilder.createIdentifierNode();
    IdentifierNode nameNode = createIdentifier(initName);
    BLangNameReference nameReference = new BLangNameReference(pos, ws, pkgNameNode, nameNode);
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.addWS(nameReference.ws);
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    objectInitNode.objectInitInvocation = invocationNode;
    this.addExpressionNode(objectInitNode);
}
/**
 * Marks the given variable as {@code final}; for tuple and record binding
 * patterns the flag is propagated recursively to every member variable.
 */
private void markVariableAsFinal(BLangVariable variable) {
    variable.flagSet.add(Flag.FINAL);
    switch (variable.getKind()) {
        case TUPLE_VARIABLE:
            // Sequential iteration: member lists are tiny, and parallel streams
            // were pure overhead here (side-effecting forEach on a parallel
            // stream is an anti-pattern and gives no ordering/perf benefit).
            ((BLangTupleVariable) variable).memberVariables
                    .forEach(this::markVariableAsFinal);
            break;
        case RECORD_VARIABLE:
            ((BLangRecordVariable) variable).variableList.stream()
                    .map(BLangRecordVariableKeyValue::getValue)
                    .forEach(this::markVariableAsFinal);
            break;
        default:
            // Simple variables carry no nested binding patterns.
            break;
    }
}
/**
 * Builds an `error(reason, details?)` constructor expression. The details
 * expression (if any) was pushed after the reason, so it is popped first.
 */
void addErrorConstructor(DiagnosticPos pos, Set<Whitespace> ws, boolean detailsExprAvailable) {
    BLangErrorConstructorExpr errorConstExpr = (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorNode();
    errorConstExpr.pos = pos;
    errorConstExpr.addWS(ws);
    if (detailsExprAvailable) {
        // Details was pushed last; must be popped before the reason.
        errorConstExpr.detailsExpr = (BLangExpression) exprNodeStack.pop();
    }
    errorConstExpr.reasonExpr = (BLangExpression) exprNodeStack.pop();
    this.addExpressionNode(errorConstExpr);
}
/** Appends the statement to the block currently being built. */
private void addStmtToCurrentBlock(StatementNode statement) {
    blockNodeStack.peek().addStatement(statement);
}
/** Begins a try/catch/finally statement and opens the try block's scope. */
void startTryCatchFinallyStmt() {
    tryCatchFinallyNodesStack.push((BLangTryCatchFinally) TreeBuilder.createTryCatchFinallyNode());
    startBlock();
}
/** Closes the current block and installs it as the try body. */
void addTryClause(DiagnosticPos pos) {
    BLangBlockStmt body = (BLangBlockStmt) this.blockNodeStack.pop();
    body.pos = pos;
    this.tryCatchFinallyNodesStack.peek().tryBody = body;
}
/** Opens the block scope for a catch clause body. */
void startCatchClause() {
    startBlock();
}
/**
 * Completes a catch clause: builds the catch parameter from the type node on
 * the stack, attaches the popped block as the body, and registers the clause
 * on the enclosing try statement.
 */
void addCatchClause(DiagnosticPos poc, Set<Whitespace> ws, String paramName) {
    BLangSimpleVariable variableNode = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variableNode.typeNode = (BLangType) this.typeNodeStack.pop();
    variableNode.name = (BLangIdentifier) createIdentifier(paramName);
    variableNode.pos = variableNode.typeNode.pos;
    // The parameter's whitespace sits three entries from the end of ws.
    variableNode.addWS(removeNthFromLast(ws, 3));
    BLangCatch catchNode = (BLangCatch) TreeBuilder.createCatchNode();
    catchNode.pos = poc;
    catchNode.addWS(ws);
    catchNode.body = (BLangBlockStmt) this.blockNodeStack.pop();
    catchNode.param = variableNode;
    tryCatchFinallyNodesStack.peek().catchBlocks.add(catchNode);
}
/** Opens the block scope for a finally clause body. */
void startFinallyBlock() {
    startBlock();
}
/** Pops the finally block and attaches it to the enclosing try statement. */
void addFinallyBlock(DiagnosticPos poc, Set<Whitespace> ws) {
    BLangBlockStmt finallyBody = (BLangBlockStmt) this.blockNodeStack.pop();
    finallyBody.pos = poc;
    BLangTryCatchFinally enclosingTry = tryCatchFinallyNodesStack.peek();
    enclosingTry.finallyBody = finallyBody;
    enclosingTry.addWS(ws);
}
/** Finalizes the try/catch/finally statement and adds it to the current block. */
void addTryCatchFinallyStmt(DiagnosticPos poc, Set<Whitespace> ws) {
    BLangTryCatchFinally tryCatch = tryCatchFinallyNodesStack.pop();
    tryCatch.pos = poc;
    tryCatch.addWS(ws);
    addStmtToCurrentBlock(tryCatch);
}
/** Builds a throw statement from the expression on top of the stack. */
void addThrowStmt(DiagnosticPos poc, Set<Whitespace> ws) {
    BLangThrow throwNode = (BLangThrow) TreeBuilder.createThrowNode();
    throwNode.pos = poc;
    throwNode.addWS(ws);
    throwNode.expr = (BLangExpression) this.exprNodeStack.pop();
    addStmtToCurrentBlock(throwNode);
}
/** Builds a panic statement from the error expression on top of the stack. */
void addPanicStmt(DiagnosticPos poc, Set<Whitespace> ws) {
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = poc;
    panicNode.addWS(ws);
    panicNode.expr = (BLangExpression) this.exprNodeStack.pop();
    addStmtToCurrentBlock(panicNode);
}
/** Pushes the given expression onto the expression stack. */
private void addExpressionNode(ExpressionNode expressionNode) {
    exprNodeStack.push(expressionNode);
}
/** Convenience overload: original text defaults to {@code String.valueOf(value)}. */
void addLiteralValue(DiagnosticPos pos, Set<Whitespace> ws, int typeTag, Object value) {
    this.addLiteralValue(pos, ws, typeTag, value, String.valueOf(value));
}
/**
 * Builds a literal expression node, preserving both the parsed value and the
 * original source text, and pushes it on the expression stack.
 */
void addLiteralValue(DiagnosticPos pos, Set<Whitespace> ws, int typeTag, Object value, String originalValue) {
    BLangLiteral litExpr = (BLangLiteral) TreeBuilder.createLiteralExpression();
    litExpr.addWS(ws);
    litExpr.pos = pos;
    litExpr.typeTag = typeTag;
    litExpr.value = value;
    // Kept for exact round-tripping of the source text.
    litExpr.originalValue = originalValue;
    addExpressionNode(litExpr);
}
/**
 * Builds an array literal expression from the collected element expressions
 * (empty when no arguments were present) and pushes it.
 */
void addArrayInitExpr(DiagnosticPos pos, Set<Whitespace> ws, boolean argsAvailable) {
    List<ExpressionNode> argExprList;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralNode();
    if (argsAvailable) {
        arrayLiteral.addWS(commaWsStack.pop());
        argExprList = exprNodeListStack.pop();
    } else {
        argExprList = new ArrayList<>(0);
    }
    arrayLiteral.exprs = argExprList.stream().map(expr -> (BLangExpression) expr).collect(Collectors.toList());
    arrayLiteral.pos = pos;
    arrayLiteral.addWS(ws);
    addExpressionNode(arrayLiteral);
}
/**
 * Records one key/value pair of the record literal being built. The value was
 * pushed after the key, so it is popped first.
 */
void addKeyValueRecord(Set<Whitespace> ws) {
    BLangRecordKeyValue keyValue = (BLangRecordKeyValue) TreeBuilder.createRecordKeyValue();
    keyValue.addWS(ws);
    // Value expression was pushed last; pop it before the key.
    keyValue.valueExpr = (BLangExpression) exprNodeStack.pop();
    keyValue.key = new BLangRecordKey((BLangExpression) exprNodeStack.pop());
    recordLiteralNodes.peek().keyValuePairs.add(keyValue);
}
/** Completes the record literal on top of the stack and pushes it as an expression. */
void addMapStructLiteral(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangRecordLiteral literal = recordLiteralNodes.pop();
    literal.addWS(ws);
    literal.pos = pos;
    addExpressionNode(literal);
}
/** Begins building a table literal. */
void startTableLiteral() {
    tableLiteralNodes.push((BLangTableLiteral) TreeBuilder.createTableLiteralNode());
}
/** Attaches the whitespace collected for the table's column-definition section. */
void endTableColumnDefinition(Set<Whitespace> ws) {
    this.tableLiteralNodes.peek().addWS(ws);
}
/** Adds a named column to the table literal currently being built. */
void addTableColumn(String columnName, DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTableLiteral.BLangTableColumn column = new BLangTableLiteral.BLangTableColumn(columnName);
    column.pos = pos;
    column.addWS(ws);
    tableLiteralNodes.peek().columns.add(column);
}
/** Flags the named column of the current table literal as a primary key, if present. */
void markPrimaryKeyColumn(String columnName) {
    BLangTableLiteral.BLangTableColumn column = this.tableLiteralNodes.peek().getColumn(columnName);
    if (column != null) {
        column.flagSet.add(TableColumnFlag.PRIMARYKEY);
    }
}
/**
 * Zips the current table's column names with the popped value expressions
 * into a record literal and appends it as one data row.
 * NOTE(review): when the value count does not match the column count the row
 * is silently dropped — presumably a parse error was already reported; verify.
 */
void endTableDataList(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    List<BLangTableLiteral.BLangTableColumn> keyNames = tableLiteralNodes.peek().columns;
    List<ExpressionNode> recordValues = exprNodeListStack.pop();
    if (keyNames.size() == recordValues.size()) {
        int index = 0;
        for (ExpressionNode expr : recordValues) {
            BLangRecordKeyValue keyValue = (BLangRecordKeyValue) TreeBuilder.createRecordKeyValue();
            keyValue.valueExpr = (BLangExpression) expr;
            // Synthesize a var-ref key from the column name at this position.
            BLangSimpleVarRef keyExpr = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
            keyExpr.pos = pos;
            IdentifierNode identifierNode = TreeBuilder.createIdentifierNode();
            identifierNode.setValue(keyNames.get(index).columnName);
            keyExpr.variableName = (BLangIdentifier) identifierNode;
            keyValue.key = new BLangRecordKey(keyExpr);
            recordLiteral.keyValuePairs.add(keyValue);
            ++index;
        }
        recordLiteral.addWS(ws);
        recordLiteral.pos = pos;
        if (commaWsStack.size() > 0) {
            recordLiteral.addWS(commaWsStack.pop());
        }
        this.tableLiteralNodes.peek().tableDataRows.add(recordLiteral);
    }
}
/** Attaches the whitespace collected for the table's data-array section. */
void endTableDataArray(Set<Whitespace> ws) {
    this.tableLiteralNodes.peek().addWS(ws);
}
/**
 * Replaces the current table literal's data rows with the popped expression
 * list (each expression being one row).
 */
void endTableDataRow(Set<Whitespace> ws) {
    List<ExpressionNode> argExprList = exprNodeListStack.pop();
    BLangTableLiteral tableLiteral = this.tableLiteralNodes.peek();
    tableLiteral.addWS(ws);
    if (commaWsStack.size() > 0) {
        tableLiteral.addWS(commaWsStack.pop());
    }
    tableLiteral.tableDataRows = argExprList.stream().map(expr -> (BLangExpression) expr)
            .collect(Collectors.toList());
}
/** Completes the table literal on top of the stack and pushes it as an expression. */
void addTableLiteral(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTableLiteral literal = tableLiteralNodes.pop();
    literal.pos = pos;
    literal.addWS(ws);
    addExpressionNode(literal);
}
/** Begins building a map/record literal. */
void startMapStructLiteral() {
    recordLiteralNodes.push((BLangRecordLiteral) TreeBuilder.createRecordLiteralNode());
}
/** Opens a fresh expression-list scope. */
void startExprNodeList() {
    exprNodeListStack.push(new ArrayList<>());
}
/**
 * Moves the top {@code exprCount} expressions from the expression stack into
 * the current expression list (restoring source order) and records the
 * comma whitespace.
 */
void endExprNodeList(Set<Whitespace> ws, int exprCount) {
    commaWsStack.push(ws);
    List<ExpressionNode> exprList = exprNodeListStack.peek();
    addExprToExprNodeList(exprList, exprCount);
}
/**
 * Recursively pops {@code n} expressions and appends them to {@code exprList}
 * in original source order: each expression is popped before the recursive
 * call but appended after it, which reverses the stack's LIFO order.
 */
private void addExprToExprNodeList(List<ExpressionNode> exprList, int n) {
    if (exprNodeStack.empty()) {
        throw new IllegalStateException("Expression stack cannot be empty in processing an ExpressionList");
    }
    ExpressionNode expr = exprNodeStack.pop();
    if (n > 1) {
        addExprToExprNodeList(exprList, n - 1);
    }
    // Appended post-recursion, so the first-popped (last-parsed) lands last.
    exprList.add(expr);
}
/** Builds a simple variable reference from the name reference on top of the stack. */
void createSimpleVariableReference(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangNameReference ref = nameReferenceStack.pop();
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = pos;
    varRef.addWS(ws);
    varRef.addWS(ref.ws);
    varRef.pkgAlias = (BLangIdentifier) ref.pkgAlias;
    varRef.variableName = (BLangIdentifier) ref.name;
    this.exprNodeStack.push(varRef);
}
/**
 * Builds a free function invocation (`pkg:foo(args)`) from the name reference
 * and (optionally) the collected argument list, then pushes it.
 */
void createFunctionInvocation(DiagnosticPos pos, Set<Whitespace> ws, boolean argsAvailable) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    invocationNode.addWS(ws);
    if (argsAvailable) {
        List<ExpressionNode> exprNodes = exprNodeListStack.pop();
        exprNodes.forEach(exprNode -> invocationNode.argExprs.add((BLangExpression) exprNode));
        invocationNode.addWS(commaWsStack.pop());
    }
    BLangNameReference nameReference = nameReferenceStack.pop();
    invocationNode.name = (BLangIdentifier) nameReference.name;
    // Whitespace recorded when the invocation started (startInvocationNode).
    invocationNode.addWS(this.invocationWsStack.pop());
    invocationNode.addWS(nameReference.ws);
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    addExpressionNode(invocationNode);
}
/** Records the whitespace seen at the start of an invocation. */
void startInvocationNode(Set<Whitespace> ws) {
    this.invocationWsStack.push(ws);
}
/**
 * Builds a method-style invocation (`expr.foo(args)` or `expr!.foo(args)`):
 * the receiver expression comes from the expression stack, the arguments
 * (optionally) from the expression-list stack.
 */
void createInvocationNode(DiagnosticPos pos, Set<Whitespace> ws, String invocation, boolean argsAvailable,
                          boolean safeNavigate) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    invocationNode.addWS(ws);
    invocationNode.addWS(invocationWsStack.pop());
    invocationNode.safeNavigate = safeNavigate;
    if (argsAvailable) {
        List<ExpressionNode> exprNodes = exprNodeListStack.pop();
        exprNodes.forEach(exprNode -> invocationNode.argExprs.add((BLangExpression) exprNode));
        invocationNode.addWS(commaWsStack.pop());
    }
    invocationNode.expr = (BLangExpression) exprNodeStack.pop();
    invocationNode.name = (BLangIdentifier) createIdentifier(invocation);
    // Attached invocations carry no package alias.
    invocationNode.pkgAlias = (BLangIdentifier) createIdentifier(null);
    addExpressionNode(invocationNode);
}
/**
 * Converts the invocation on top of the expression stack into an (optionally
 * async) action invocation on the next expression down (the endpoint/client).
 */
void createActionInvocationNode(DiagnosticPos pos, Set<Whitespace> ws, boolean async) {
    // The invocation itself was pushed last; the receiver sits beneath it.
    BLangInvocation invocationExpr = (BLangInvocation) exprNodeStack.pop();
    invocationExpr.actionInvocation = true;
    invocationExpr.pos = pos;
    invocationExpr.addWS(ws);
    invocationExpr.async = async;
    invocationExpr.expr = (BLangExpression) exprNodeStack.pop();
    exprNodeStack.push(invocationExpr);
}
/** Builds a field access (`expr.field` / `expr!.field`) over the expression on top of the stack. */
void createFieldBasedAccessNode(DiagnosticPos pos, Set<Whitespace> ws, String fieldName,
                                FieldKind fieldType, boolean safeNavigate) {
    BLangFieldBasedAccess access = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
    access.pos = pos;
    access.addWS(ws);
    access.expr = (BLangVariableReference) exprNodeStack.pop();
    access.field = (BLangIdentifier) createIdentifier(fieldName);
    access.fieldKind = fieldType;
    access.safeNavigate = safeNavigate;
    addExpressionNode(access);
}
/**
 * Builds an index access (`expr[index]`). The index expression was pushed
 * after the container, so it is popped first.
 */
void createIndexBasedAccessNode(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangIndexBasedAccess indexBasedAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    indexBasedAccess.pos = pos;
    indexBasedAccess.addWS(ws);
    // Index was pushed last; pop it before the container expression.
    indexBasedAccess.indexExpr = (BLangExpression) exprNodeStack.pop();
    indexBasedAccess.expr = (BLangVariableReference) exprNodeStack.pop();
    addExpressionNode(indexBasedAccess);
}
/**
 * Collapses the top {@code numberOfExpressions} expressions into a braced /
 * tuple expression (disambiguated later) and pushes it.
 */
void createBracedOrTupleExpression(DiagnosticPos pos, Set<Whitespace> ws, int numberOfExpressions) {
    final BLangBracedOrTupleExpr expr = (BLangBracedOrTupleExpr) TreeBuilder.createBracedOrTupleExpression();
    expr.pos = pos;
    expr.addWS(ws);
    for (int i = 0; i < numberOfExpressions; i++) {
        // Insert at index 0 to undo the stack's reversal of source order.
        expr.expressions.add(0, (BLangExpression) exprNodeStack.pop());
    }
    addExpressionNode(expr);
}
/**
 * Builds a binary expression from the two expressions on top of the stack.
 * The RHS was pushed last, so it is popped first.
 */
void createBinaryExpr(DiagnosticPos pos, Set<Whitespace> ws, String operator) {
    BLangBinaryExpr binaryExpressionNode = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpressionNode.pos = pos;
    binaryExpressionNode.addWS(ws);
    // RHS first (LIFO), then LHS.
    binaryExpressionNode.rhsExpr = (BLangExpression) exprNodeStack.pop();
    binaryExpressionNode.lhsExpr = (BLangExpression) exprNodeStack.pop();
    binaryExpressionNode.opKind = OperatorKind.valueFrom(operator);
    addExpressionNode(binaryExpressionNode);
}
/**
 * Builds an elvis expression (`lhs ?: rhs`) from the two expressions on top
 * of the stack; the RHS was pushed last and is popped first.
 */
void createElvisExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangElvisExpr elvisExpr = (BLangElvisExpr) TreeBuilder.createElvisExpressionNode();
    elvisExpr.pos = pos;
    elvisExpr.addWS(ws);
    elvisExpr.rhsExpr = (BLangExpression) exprNodeStack.pop();
    elvisExpr.lhsExpr = (BLangExpression) exprNodeStack.pop();
    addExpressionNode(elvisExpr);
}
/** Builds a typedesc access expression from the type node on top of the type stack. */
void createTypeAccessExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTypedescExpr typedescExpr = (BLangTypedescExpr) TreeBuilder.createTypeAccessNode();
    typedescExpr.pos = pos;
    typedescExpr.addWS(ws);
    typedescExpr.typeNode = (BLangType) typeNodeStack.pop();
    addExpressionNode(typedescExpr);
}
/**
 * Builds a type conversion expression: the target type comes from the type
 * stack, the converted value from the expression stack.
 */
void createTypeConversionExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTypeConversionExpr conversion = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversion.pos = pos;
    conversion.addWS(ws);
    conversion.expr = (BLangExpression) exprNodeStack.pop();
    conversion.typeNode = (BLangType) typeNodeStack.pop();
    addExpressionNode(conversion);
}
/** Builds a unary expression over the expression on top of the stack. */
void createUnaryExpr(DiagnosticPos pos, Set<Whitespace> ws, String operator) {
    BLangUnaryExpr unaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
    unaryExpr.pos = pos;
    unaryExpr.addWS(ws);
    unaryExpr.operator = OperatorKind.valueFrom(operator);
    unaryExpr.expr = (BLangExpression) exprNodeStack.pop();
    addExpressionNode(unaryExpr);
}
/**
 * Builds a ternary conditional (`cond ? then : else`) from the three
 * expressions on top of the stack (popped in reverse push order). When the
 * condition itself is a ternary, the tree is rotated so nested conditionals
 * associate correctly: this ternary is grafted in as the innermost else
 * branch of the condition chain, and the original condition becomes the root.
 */
void createTernaryExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) TreeBuilder.createTernaryExpressionNode();
    ternaryExpr.pos = pos;
    ternaryExpr.addWS(ws);
    // Pop order is the reverse of push order: else, then, condition.
    ternaryExpr.elseExpr = (BLangExpression) exprNodeStack.pop();
    ternaryExpr.thenExpr = (BLangExpression) exprNodeStack.pop();
    ternaryExpr.expr = (BLangExpression) exprNodeStack.pop();
    if (ternaryExpr.expr.getKind() == NodeKind.TERNARY_EXPR) {
        // Re-associate: walk to the deepest else of the condition chain …
        BLangTernaryExpr root = (BLangTernaryExpr) ternaryExpr.expr;
        BLangTernaryExpr parent = root;
        while (parent.elseExpr.getKind() == NodeKind.TERNARY_EXPR) {
            parent = (BLangTernaryExpr) parent.elseExpr;
        }
        // … make that else this ternary's condition, hang this ternary there,
        // and promote the original chain head to be the result.
        ternaryExpr.expr = parent.elseExpr;
        parent.elseExpr = ternaryExpr;
        ternaryExpr = root;
    }
    addExpressionNode(ternaryExpr);
}
/** Wraps the expression on top of the stack in a `check` expression. */
void createCheckedExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangCheckedExpr checked = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
    checked.pos = pos;
    checked.addWS(ws);
    checked.expr = (BLangExpression) exprNodeStack.pop();
    addExpressionNode(checked);
}
/** Wraps the expression on top of the stack in a `trap` expression. */
void createTrapExpr(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTrapExpr trapped = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapped.pos = pos;
    trapped.addWS(ws);
    trapped.expr = (BLangExpression) exprNodeStack.pop();
    addExpressionNode(trapped);
}
/**
 * Finalizes the function on top of the invokable stack: applies visibility /
 * remote / native flags, attaches the receiver for object-attached functions,
 * and registers the function as a top-level node of the compilation unit.
 */
void endFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, boolean publicFunc, boolean remoteFunc,
                    boolean nativeFunc, boolean bodyExists, boolean isReceiverAttached, boolean isLambda) {
    BLangFunction function = (BLangFunction) this.invokableNodeStack.pop();
    function.pos = pos;
    function.addWS(ws);
    if (!isLambda) {
        // Lambdas never pushed invocation whitespace.
        function.addWS(invocationWsStack.pop());
    }
    if (publicFunc) {
        function.flagSet.add(Flag.PUBLIC);
    }
    if (remoteFunc) {
        function.flagSet.add(Flag.REMOTE);
    }
    if (nativeFunc) {
        function.flagSet.add(Flag.NATIVE);
    }
    if (!bodyExists) {
        // Declaration only (e.g. extern/abstract): drop the empty body.
        function.body = null;
    }
    if (isReceiverAttached) {
        // Object-attached function: synthesize the `self` receiver.
        TypeNode typeNode = this.typeNodeStack.pop();
        BLangSimpleVariable receiver = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        receiver.pos = pos;
        IdentifierNode name = createIdentifier(Names.SELF.getValue());
        receiver.setName(name);
        receiver.setTypeNode(typeNode);
        function.receiver = receiver;
        function.flagSet.add(Flag.ATTACHED);
    }
    if (!function.deprecatedAttachments.isEmpty()) {
        function.flagSet.add(Flag.DEPRECATED);
    }
    this.compUnit.addTopLevelNode(function);
}
/**
 * Desugars a worker declaration into: a lambda holding the worker body, a
 * final variable definition for that lambda, an async `.call()` invocation of
 * it, and a final variable holding the started worker. When inside a
 * fork/join, the lambda definition is also registered on the fork node.
 */
void addWorker(DiagnosticPos pos, Set<Whitespace> ws, String workerName, boolean retParamsAvail) {
    // The worker body block becomes the body of an anonymous lambda.
    endCallableUnitBody(ws);
    ((BLangFunction) this.invokableNodeStack.peek()).defaultWorkerName.value = workerName;
    addLambdaFunctionDef(pos, ws, false, retParamsAvail, false);
    String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;
    addSimpleVariableDefStatement(pos, ws, workerLambdaName, true, true, true);
    if (!this.forkJoinNodesStack.empty()) {
        // The lambda var-def just added is the last statement of the block.
        List<? extends StatementNode> stmtsAdded = this.blockNodeStack.peek().getStatements();
        BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) stmtsAdded.get(stmtsAdded.size() - 1);
        this.forkJoinNodesStack.peek().addWorkers(lamdaWrkr);
    }
    // Synthesize: `final var workerName = start workerLambdaName.call();`
    addNameReference(pos, ws, null, workerLambdaName);
    createSimpleVariableReference(pos, ws);
    startInvocationNode(ws);
    createInvocationNode(pos, ws, BLangBuiltInMethod.CALL.toString(), false, false);
    markLastInvocationAsAsync(pos);
    addSimpleVariableDefStatement(pos, ws, workerName, true, true, true);
}
// Intentionally a no-op: worker whitespace is not attached anywhere at present.
void attachWorkerWS(Set<Whitespace> ws) {
}
/** Begins a fork/join statement. */
void startForkJoinStmt() {
    forkJoinNodesStack.push(TreeBuilder.createForkJoinNode());
}
/** Finalizes the fork/join statement and adds it to the current block. */
void addForkJoinStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangForkJoin forkJoin = (BLangForkJoin) forkJoinNodesStack.pop();
    forkJoin.pos = pos;
    forkJoin.addWS(ws);
    addStmtToCurrentBlock(forkJoin);
}
/** Pops the current block and installs it as the body of the innermost invokable. */
void endCallableUnitBody(Set<Whitespace> ws) {
    InvokableNode invokable = this.invokableNodeStack.peek();
    invokable.addWS(ws);
    invokable.setBody(this.blockNodeStack.pop());
}
/**
 * Adds an import declaration to the compilation unit.
 * When no explicit alias is given, the last name component is used as the alias.
 * Duplicate imports produce a REDECLARED_IMPORT_MODULE warning instead of a second entry.
 */
void addImportPackageDeclaration(DiagnosticPos pos,
                                 Set<Whitespace> ws,
                                 String orgName,
                                 List<String> nameComps,
                                 String version,
                                 String alias) {
    List<BLangIdentifier> pkgNameComps = new ArrayList<>();
    nameComps.forEach(e -> pkgNameComps.add((BLangIdentifier) this.createIdentifier(e)));
    BLangIdentifier versionNode = (BLangIdentifier) this.createIdentifier(version);
    // Fall back to the final package-name component when no alias was supplied.
    BLangIdentifier aliasNode = (alias != null && !alias.isEmpty()) ?
            (BLangIdentifier) this.createIdentifier(alias) :
            pkgNameComps.get(pkgNameComps.size() - 1);
    BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
    importDcl.pos = pos;
    importDcl.addWS(ws);
    importDcl.pkgNameComps = pkgNameComps;
    importDcl.version = versionNode;
    importDcl.orgName = (BLangIdentifier) this.createIdentifier(orgName);
    importDcl.alias = aliasNode;
    // The declaration is always added to the unit; only the tracking list is deduplicated.
    this.compUnit.addTopLevelNode(importDcl);
    if (this.imports.contains(importDcl)) {
        this.dlog.warning(pos, DiagnosticCode.REDECLARED_IMPORT_MODULE, importDcl.getQualifiedPackageName());
    } else {
        this.imports.add(importDcl);
    }
}
/**
 * Builds a simple variable node with no declared type, attaching the top
 * expression from the stack as its initializer when one is available.
 */
private VariableNode generateBasicVarNodeWithoutType(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                                     boolean isExpressionAvailable) {
    BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variable.pos = pos;
    variable.addWS(ws);
    variable.setName(this.createIdentifier(identifier));
    if (isExpressionAvailable) {
        variable.setInitialExpression(this.exprNodeStack.pop());
    }
    return variable;
}
/**
 * Builds a constant node named {@code identifier}. Pops the declared type (if present)
 * from the type stack, then the constant's value from the expression stack — the pop
 * order matters when both stacks are in use.
 */
private LiteralNode generateConstantNode(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                         boolean isTypeAvailable) {
    BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode();
    constantNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.value = identifier;
    constantNode.setName(name);
    constantNode.addWS(ws);
    if (isTypeAvailable) {
        constantNode.setTypeNode(this.typeNodeStack.pop());
    }
    constantNode.setValue(this.exprNodeStack.pop());
    return constantNode;
}
/** Convenience overload: builds a typed variable node that is not declared with {@code var}. */
private VariableNode generateBasicVarNode(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                          boolean isExpressionAvailable) {
    return this.generateBasicVarNode(pos, ws, identifier, false, isExpressionAvailable);
}
/**
 * Builds a simple variable node. Unless declared with {@code var}, its type is popped
 * from the type stack; the initializer (if any) is then popped from the expression stack.
 */
private VariableNode generateBasicVarNode(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                          boolean isDeclaredWithVar, boolean isExpressionAvailable) {
    BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    var.pos = pos;
    IdentifierNode name = this.createIdentifier(identifier);
    var.setName(name);
    var.addWS(ws);
    if (isDeclaredWithVar) {
        var.isDeclaredWithVar = true;
    } else {
        var.setTypeNode(this.typeNodeStack.pop());
    }
    if (isExpressionAvailable) {
        var.setInitialExpression(this.exprNodeStack.pop());
    }
    return var;
}
/**
 * Adds a top-level constant. When the constant's value is a literal, a synthetic
 * singleton finite type (containing just that literal) is created and associated
 * with the constant; note the type definition itself is only attached to the
 * constant node here, not added to the compilation unit.
 */
void addConstant(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean isPublic,
                 boolean isTypeAvailable) {
    BLangConstant constantNode = (BLangConstant) this.generateConstantNode(pos, ws, identifier, isTypeAvailable);
    attachAnnotations(constantNode);
    constantNode.flagSet.add(Flag.CONSTANT);
    if (isPublic) {
        constantNode.flagSet.add(Flag.PUBLIC);
    }
    attachMarkdownDocumentations(constantNode);
    attachDeprecatedNode(constantNode);
    this.compUnit.addTopLevelNode(constantNode);
    if (((BLangExpression) constantNode.value).getKind() == NodeKind.LITERAL) {
        // Clone the literal so the finite type owns an independent node.
        BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
        literal.setValue(((BLangLiteral) constantNode.value).value);
        literal.typeTag = ((BLangLiteral) constantNode.value).typeTag;
        BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        finiteTypeNode.valueSpace.add(literal);
        BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
        String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
        IdentifierNode anonTypeGenName = createIdentifier(genName);
        typeDef.setName(anonTypeGenName);
        typeDef.flagSet.add(Flag.PUBLIC);
        typeDef.typeNode = finiteTypeNode;
        typeDef.pos = pos;
        constantNode.associatedTypeDefinition = typeDef;
    }
}
/**
 * Adds a module-level variable to the compilation unit, applying the visibility,
 * finality and listener flags and attaching any pending annotations and docs.
 */
void addGlobalVariable(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean isPublic, boolean isFinal,
                       boolean isDeclaredWithVar, boolean isExpressionAvailable, boolean isListenerVar) {
    BLangVariable var = (BLangVariable) this.generateBasicVarNode(pos, ws, identifier, isDeclaredWithVar,
            isExpressionAvailable);
    if (isPublic) {
        var.flagSet.add(Flag.PUBLIC);
    }
    if (isFinal) {
        var.flagSet.add(Flag.FINAL);
    }
    if (isListenerVar) {
        var.flagSet.add(Flag.LISTENER);
    }
    attachAnnotations(var);
    attachMarkdownDocumentations(var);
    attachDeprecatedNode(var);
    this.compUnit.addTopLevelNode(var);
}
/** Opens a record type scope: pushes a fresh record type node and starts a field list. */
void startRecordType() {
    RecordTypeNode record = TreeBuilder.createRecordTypeNode();
    this.typeNodeStack.push((BLangRecordTypeNode) record);
    this.startVarList();
}
/** Opens an object type scope: pushes a fresh object type node, a field list, and a field-block WS list. */
void startObjectType() {
    BLangObjectTypeNode objectType = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
    this.typeNodeStack.push(objectType);
    this.startVarList();
    this.startFieldBlockList();
}
/**
 * Finishes an object type. Named objects are pushed back as a plain type; anonymous
 * objects are wrapped in a generated public type definition and referenced through a
 * synthetic user-defined type.
 */
void addObjectType(DiagnosticPos pos, Set<Whitespace> ws, boolean isFieldAnalyseRequired, boolean isAnonymous,
                   boolean isAbstract, boolean isClient, boolean isService) {
    BLangObjectTypeNode objectTypeNode = populateObjectTypeNode(pos, ws, isAnonymous);
    objectTypeNode.addWS(this.objectFieldBlockWs.pop());
    objectTypeNode.isFieldAnalyseRequired = isFieldAnalyseRequired;
    if (isAbstract) {
        objectTypeNode.flagSet.add(Flag.ABSTRACT);
    }
    if (isClient) {
        objectTypeNode.flagSet.add(Flag.CLIENT);
    }
    if (isService) {
        objectTypeNode.flagSet.add(Flag.SERVICE);
    }
    if (!isAnonymous) {
        addType(objectTypeNode);
        return;
    }
    // Anonymous object: synthesize a named type definition so the object can be referenced.
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
    IdentifierNode anonTypeGenName = createIdentifier(genName);
    typeDef.setName(anonTypeGenName);
    typeDef.flagSet.add(Flag.PUBLIC);
    typeDef.typeNode = objectTypeNode;
    typeDef.pos = pos;
    this.compUnit.addTopLevelNode(typeDef);
    addType(createUserDefinedType(pos, ws, (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name));
}
/** Pops the in-progress object type node and moves the collected variable list onto it as fields. */
private BLangObjectTypeNode populateObjectTypeNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isAnonymous) {
    BLangObjectTypeNode objectType = (BLangObjectTypeNode) typeNodeStack.pop();
    objectType.isAnonymous = isAnonymous;
    objectType.pos = pos;
    objectType.addWS(ws);
    this.varListStack.pop().forEach(field -> objectType.addField((SimpleVariableNode) field));
    return objectType;
}
// Begins collecting whitespace for an object's field block.
void startFieldBlockList() {
    this.objectFieldBlockWs.push(new TreeSet<>());
}
// Stashes the finite type's whitespace; consumed later by endTypeDefinition.
void endFiniteType(Set<Whitespace> ws) {
    finiteTypeWsStack.push(ws);
}
/**
 * Builds a type definition from everything accumulated on the type and expression
 * stacks: union members are flattened, leftover expressions form a finite type, and
 * the result collapses to a single type when only one member remains.
 */
void endTypeDefinition(DiagnosticPos pos, Set<Whitespace> ws, String identifier, DiagnosticPos identifierPos,
                       boolean publicType) {
    BLangTypeDefinition typeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    BLangIdentifier identifierNode = (BLangIdentifier) this.createIdentifier(identifier);
    identifierNode.pos = identifierPos;
    typeDefinition.setName(identifierNode);
    if (publicType) {
        typeDefinition.flagSet.add(Flag.PUBLIC);
    }
    // Drain the type stack, flattening any nested unions into a single member list.
    BLangUnionTypeNode members = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    while (!typeNodeStack.isEmpty()) {
        BLangType memberType = (BLangType) typeNodeStack.pop();
        if (memberType.getKind() == NodeKind.UNION_TYPE_NODE) {
            members.memberTypeNodes.addAll(((BLangUnionTypeNode) memberType).memberTypeNodes);
            members.addWS(memberType.getWS());
        } else {
            members.memberTypeNodes.add(memberType);
        }
    }
    // Remaining expressions are literal values of a finite type.
    if (!exprNodeStack.isEmpty()) {
        BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        finiteTypeNode.addWS(finiteTypeWsStack.pop());
        while (!exprNodeStack.isEmpty()) {
            finiteTypeNode.valueSpace.add((BLangExpression) exprNodeStack.pop());
        }
        // Popping reversed the source order; restore it.
        Collections.reverse(finiteTypeNode.valueSpace);
        if (!members.memberTypeNodes.isEmpty()) {
            // Mixed union: wrap the finite type in a generated type definition and
            // reference it as one of the union members.
            BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
            String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
            IdentifierNode anonTypeGenName = createIdentifier(genName);
            typeDef.setName(anonTypeGenName);
            typeDef.flagSet.add(Flag.PUBLIC);
            typeDef.typeNode = finiteTypeNode;
            typeDef.pos = pos;
            this.compUnit.addTopLevelNode(typeDef);
            members.memberTypeNodes.add(createUserDefinedType(pos, ws,
                    (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name));
        } else {
            members.memberTypeNodes.add(finiteTypeNode);
        }
    }
    // Collapse: zero members -> null type, one member -> that type, many -> the union.
    if (members.memberTypeNodes.isEmpty()) {
        typeDefinition.typeNode = null;
    } else if (members.memberTypeNodes.size() == 1) {
        BLangType[] memberArray = new BLangType[1];
        members.memberTypeNodes.toArray(memberArray);
        typeDefinition.typeNode = memberArray[0];
    } else {
        typeDefinition.typeNode = members;
    }
    if (finiteTypeWsStack.size() > 0) {
        typeDefinition.addWS(finiteTypeWsStack.pop());
    }
    typeDefinition.pos = pos;
    typeDefinition.addWS(ws);
    Collections.reverse(markdownDocumentationStack);
    attachMarkdownDocumentations(typeDefinition);
    attachDeprecatedNode(typeDefinition);
    attachAnnotations(typeDefinition);
    this.compUnit.addTopLevelNode(typeDefinition);
}
/**
 * Finishes a function defined inside an object body. Bodiless non-native functions
 * become interface functions; the object's {@code __init} is stored separately as
 * the init function rather than in the regular function list.
 */
void endObjectAttachedFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, boolean publicFunc, boolean privateFunc,
                                  boolean remoteFunc, boolean resourceFunc, boolean nativeFunc, boolean bodyExists,
                                  boolean markdownDocPresent, boolean deprecatedDocPresent, int annCount) {
    BLangFunction function = (BLangFunction) this.invokableNodeStack.pop();
    function.pos = pos;
    function.addWS(ws);
    function.addWS(this.invocationWsStack.pop());
    function.flagSet.add(Flag.ATTACHED);
    if (publicFunc) {
        function.flagSet.add(Flag.PUBLIC);
    } else if (privateFunc) {
        function.flagSet.add(Flag.PRIVATE);
    }
    if (remoteFunc) {
        function.flagSet.add(Flag.REMOTE);
    }
    if (resourceFunc) {
        function.flagSet.add(Flag.RESOURCE);
    }
    if (nativeFunc) {
        function.flagSet.add(Flag.NATIVE);
    }
    if (!bodyExists) {
        function.body = null;
        // A bodiless, non-native attached function is an interface declaration.
        if (!nativeFunc) {
            function.flagSet.add(Flag.INTERFACE);
            function.interfaceFunction = true;
        }
    }
    function.attachedFunction = true;
    attachAnnotations(function, annCount);
    if (markdownDocPresent) {
        attachMarkdownDocumentations(function);
    }
    if (deprecatedDocPresent) {
        attachDeprecatedNode(function);
    }
    if (!function.deprecatedAttachments.isEmpty()) {
        function.flagSet.add(Flag.DEPRECATED);
    }
    BLangObjectTypeNode objectNode = (BLangObjectTypeNode) this.typeNodeStack.peek();
    if (Names.OBJECT_INIT_SUFFIX.value.equals(function.name.value)) {
        function.objInitFunction = true;
        objectNode.initFunction = function;
        return;
    }
    objectNode.addFunction(function);
}
/**
 * Finishes a function defined outside an object body ({@code function ObjName.foo(...)}).
 * Synthesizes a {@code self} receiver typed as the object and adds the function to the
 * compilation unit as an attached outer function.
 */
void endObjectOuterFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, boolean publicFunc, boolean remoteFunc,
                               boolean nativeFunc, boolean bodyExists, String objectName) {
    BLangFunction function = (BLangFunction) this.invokableNodeStack.pop();
    function.pos = pos;
    function.addWS(ws);
    function.addWS(invocationWsStack.pop());
    if (publicFunc) {
        function.flagSet.add(Flag.PUBLIC);
    }
    if (remoteFunc) {
        function.flagSet.add(Flag.REMOTE);
    }
    if (nativeFunc) {
        function.flagSet.add(Flag.NATIVE);
    }
    if (!bodyExists) {
        function.body = null;
    }
    // Synthetic receiver: `self` of the named object's type.
    TypeNode objectType = createUserDefinedType(pos, ws, (BLangIdentifier) TreeBuilder.createIdentifierNode(),
            (BLangIdentifier) createIdentifier(objectName));
    BLangSimpleVariable receiver = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    receiver.pos = pos;
    IdentifierNode name = createIdentifier(Names.SELF.getValue());
    receiver.setName(name);
    receiver.setTypeNode(objectType);
    function.receiver = receiver;
    function.flagSet.add(Flag.ATTACHED);
    function.attachedOuterFunction = true;
    if (!function.deprecatedAttachments.isEmpty()) {
        function.flagSet.add(Flag.DEPRECATED);
    }
    this.compUnit.addTopLevelNode(function);
}
/** Begins an annotation definition, attaching any pending annotations, docs and deprecations. */
void startAnnotationDef(DiagnosticPos pos) {
    BLangAnnotation annotation = (BLangAnnotation) TreeBuilder.createAnnotationNode();
    annotation.pos = pos;
    attachAnnotations(annotation);
    attachMarkdownDocumentations(annotation);
    attachDeprecatedNode(annotation);
    this.annotationStack.add(annotation);
}
/**
 * Finishes an annotation definition: names it, applies visibility, drains all pending
 * attach points, pops the attached type (if any), and adds it to the compilation unit.
 */
void endAnnotationDef(Set<Whitespace> ws, String identifier, DiagnosticPos identifierPos, boolean publicAnnotation,
                      boolean isTypeAttached) {
    BLangAnnotation annotationNode = (BLangAnnotation) this.annotationStack.pop();
    annotationNode.addWS(ws);
    BLangIdentifier identifierNode = (BLangIdentifier) this.createIdentifier(identifier);
    identifierNode.pos = identifierPos;
    annotationNode.setName(identifierNode);
    if (publicAnnotation) {
        annotationNode.flagSet.add(Flag.PUBLIC);
    }
    // Consume every accumulated attach point for this annotation.
    while (!attachPointStack.empty()) {
        annotationNode.attachPoints.add(attachPointStack.pop());
    }
    if (isTypeAttached) {
        annotationNode.typeNode = (BLangType) this.typeNodeStack.pop();
    }
    this.compUnit.addTopLevelNode(annotationNode);
}
/** Pushes a fresh markdown documentation node positioned at {@code currentPos}. */
void startMarkdownDocumentationString(DiagnosticPos currentPos) {
    BLangMarkdownDocumentation docNode =
            (BLangMarkdownDocumentation) TreeBuilder.createMarkdownDocumentationNode();
    docNode.pos = currentPos;
    markdownDocumentationStack.push(docNode);
}
/** Attaches trailing whitespace to the current markdown documentation node. */
void endMarkdownDocumentationString(Set<Whitespace> ws) {
    markdownDocumentationStack.peek().addWS(ws);
}
/** Attaches a documentation line's whitespace to the current markdown documentation node. */
void endMarkDownDocumentLine(Set<Whitespace> ws) {
    markdownDocumentationStack.peek().addWS(ws);
}
/** Appends one line of markdown description text to the current documentation node. */
void endMarkdownDocumentationText(DiagnosticPos pos, Set<Whitespace> ws, String text) {
    BLangMarkdownDocumentationLine line =
            (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
    line.pos = pos;
    line.addWS(ws);
    line.text = text;
    markdownDocumentationStack.peek().addDocumentationLine(line);
}
/** Attaches a parameter-documentation line's whitespace to the current markdown node. */
void endParameterDocumentationLine(Set<Whitespace> ws) {
    markdownDocumentationStack.peek().addWS(ws);
}
/** Records documentation for a single parameter on the current markdown node. */
void endParameterDocumentation(DiagnosticPos pos, Set<Whitespace> ws, String parameterName, String description) {
    BLangMarkdownParameterDocumentation paramDoc =
            (BLangMarkdownParameterDocumentation) TreeBuilder.createMarkdownParameterDocumentationNode();
    paramDoc.pos = pos;
    paramDoc.addWS(ws);
    paramDoc.parameterName = (BLangIdentifier) createIdentifier(parameterName);
    paramDoc.addParameterDocumentationLine(description);
    markdownDocumentationStack.peek().addParameter(paramDoc);
}
/** Appends a continuation line to the most recently added parameter documentation. */
void endParameterDocumentationDescription(Set<Whitespace> ws, String description) {
    BLangMarkdownParameterDocumentation lastParam =
            markdownDocumentationStack.peek().getParameters().getLast();
    lastParam.addWS(ws);
    lastParam.addParameterDocumentationLine(description);
}
/** Sets the return-value documentation on the current markdown node. */
void endReturnParameterDocumentation(DiagnosticPos pos, Set<Whitespace> ws, String description) {
    BLangMarkdownReturnParameterDocumentation returnDoc =
            (BLangMarkdownReturnParameterDocumentation) TreeBuilder
                    .createMarkdownReturnParameterDocumentationNode();
    returnDoc.pos = pos;
    returnDoc.addWS(ws);
    returnDoc.addReturnParameterDocumentationLine(description);
    markdownDocumentationStack.peek().setReturnParameter(returnDoc);
}
/** Appends a continuation line to the current return-value documentation. */
void endReturnParameterDocumentationDescription(Set<Whitespace> ws, String description) {
    BLangMarkdownReturnParameterDocumentation returnDoc =
            markdownDocumentationStack.peek().getReturnParameter();
    returnDoc.addWS(ws);
    returnDoc.addReturnParameterDocumentationLine(description);
}
/** Creates a deprecated-documentation node and stages it for later attachment. */
void createDeprecatedNode(DiagnosticPos pos, Set<Whitespace> ws, String content) {
    BLangDeprecatedNode deprecated = (BLangDeprecatedNode) TreeBuilder.createDeprecatedNode();
    deprecated.pos = pos;
    deprecated.addWS(ws);
    deprecated.documentationText = content;
    deprecatedAttachmentStack.push(deprecated);
}
/** Begins an annotation attachment, staging it on the attachment stack. */
void startAnnotationAttachment(DiagnosticPos currentPos) {
    BLangAnnotationAttachment attachment =
            (BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
    attachment.pos = currentPos;
    annotAttachmentStack.push(attachment);
}
/**
 * Names the annotation attachment on top of the stack using the latest parsed name
 * reference, optionally consuming its value expression, and optionally popping the
 * attachment off the stack when it is complete.
 */
void setAnnotationAttachmentName(Set<Whitespace> ws, boolean hasExpr, DiagnosticPos currentPos,
                                 boolean popAnnAttachment) {
    BLangNameReference nameReference = nameReferenceStack.pop();
    BLangAnnotationAttachment bLangAnnotationAttachment = (BLangAnnotationAttachment) annotAttachmentStack.peek();
    bLangAnnotationAttachment.pos = currentPos;
    bLangAnnotationAttachment.addWS(nameReference.ws);
    bLangAnnotationAttachment.addWS(ws);
    bLangAnnotationAttachment.setAnnotationName(nameReference.name);
    bLangAnnotationAttachment.setPackageAlias(nameReference.pkgAlias);
    if (hasExpr) {
        bLangAnnotationAttachment.setExpression(exprNodeStack.pop());
    }
    if (popAnnAttachment) {
        annotAttachmentStack.pop();
    }
}
/** Transfers every pending annotation attachment onto the given node, emptying the stack. */
private void attachAnnotations(AnnotatableNode annotatableNode) {
    for (AnnotationAttachmentNode attachment : annotAttachmentStack) {
        annotatableNode.addAnnotationAttachment(attachment);
    }
    annotAttachmentStack.clear();
}
/** Attaches pending markdown documentation to the node, if any was collected. */
private void attachMarkdownDocumentations(DocumentableNode documentableNode) {
    if (markdownDocumentationStack.empty()) {
        return;
    }
    documentableNode.setMarkdownDocumentationAttachment(markdownDocumentationStack.pop());
}
/** Attaches a pending deprecated-documentation node to the node, if one was staged. */
private void attachDeprecatedNode(DocumentableNode documentableNode) {
    if (deprecatedAttachmentStack.empty()) {
        return;
    }
    documentableNode.addDeprecatedAttachment(deprecatedAttachmentStack.pop());
}
/**
 * Attaches at most {@code count} pending annotation attachments to the node, restoring
 * their original (source) order — popping reverses it, so the list is re-reversed.
 */
private void attachAnnotations(AnnotatableNode annotatableNode, int count) {
    if (count == 0 || annotAttachmentStack.empty()) {
        return;
    }
    List<AnnotationAttachmentNode> tempAnnotAttachments = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        if (annotAttachmentStack.empty()) {
            break;
        }
        tempAnnotAttachments.add(annotAttachmentStack.pop());
    }
    // Popping yields newest-first; reverse to attach in declaration order.
    Collections.reverse(tempAnnotAttachments);
    tempAnnotAttachments.forEach(annotatableNode::addAnnotationAttachment);
}
/**
 * Builds an assignment statement from the top two stack expressions.
 * Pop order matters: RHS expression first, then the LHS variable reference.
 */
void addAssignmentStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    ExpressionNode rExprNode = exprNodeStack.pop();
    ExpressionNode lExprNode = exprNodeStack.pop();
    BLangAssignment assignmentNode = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignmentNode.setExpression(rExprNode);
    assignmentNode.pos = pos;
    assignmentNode.addWS(ws);
    assignmentNode.varRef = ((BLangVariableReference) lExprNode);
    addStmtToCurrentBlock(assignmentNode);
}
/**
 * Builds a tuple destructuring statement from the top two stack expressions and adds
 * it to the current block. Pop order matters: the RHS expression first, then the
 * tuple variable reference.
 *
 * Fix: the original called {@code stmt.addWS(ws)} twice with the same set; the second
 * call was redundant (whitespace sets accumulate, so re-adding the same set is a no-op)
 * and has been removed for consistency with {@link #addRecordDestructuringStatement}.
 */
void addTupleDestructuringStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean varDeclaration) {
    BLangTupleDestructure stmt = (BLangTupleDestructure) TreeBuilder.createTupleDestructureStatementNode();
    stmt.pos = pos;
    stmt.addWS(ws);
    stmt.setDeclaredWithVar(varDeclaration);
    stmt.expr = (BLangExpression) exprNodeStack.pop();
    stmt.varRef = (BLangTupleVarRef) exprNodeStack.pop();
    addStmtToCurrentBlock(stmt);
}
/**
 * Builds a record destructuring statement from the top two stack expressions.
 * Pop order matters: RHS expression first, then the record variable reference.
 */
public void addRecordDestructuringStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean declaredWithVar) {
    BLangRecordDestructure stmt = (BLangRecordDestructure) TreeBuilder.createRecordDestructureStatementNode();
    stmt.pos = pos;
    stmt.addWS(ws);
    stmt.setDeclaredWithVar(declaredWithVar);
    stmt.expr = (BLangExpression) exprNodeStack.pop();
    stmt.varRef = (BLangRecordVarRef) exprNodeStack.pop();
    addStmtToCurrentBlock(stmt);
}
// Opens the block that will become the foreach body.
void startForeachStatement() {
    startBlock();
}
/**
 * Builds a compound assignment (e.g. {@code x += y}). Pop order matters: the RHS
 * expression first, then the target variable reference; the operator's whitespace
 * was stashed earlier by {@link #addCompoundOperator}.
 */
void addCompoundAssignmentStatement(DiagnosticPos pos, Set<Whitespace> ws, String operator) {
    BLangCompoundAssignment assignmentNode =
            (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    assignmentNode.setExpression(exprNodeStack.pop());
    assignmentNode.setVariable((BLangVariableReference) exprNodeStack.pop());
    assignmentNode.pos = pos;
    assignmentNode.addWS(ws);
    assignmentNode.addWS(this.operatorWs.pop());
    assignmentNode.opKind = OperatorKind.valueFrom(operator);
    addStmtToCurrentBlock(assignmentNode);
}
// Stashes the compound-assignment operator's whitespace; consumed by
// addCompoundAssignmentStatement.
void addCompoundOperator(Set<Whitespace> ws) {
    this.operatorWs.push(ws);
}
/**
 * Completes a foreach statement: the collection comes from the expression stack,
 * the loop variables from the expression-list stack, and the body from the block stack.
 */
void addForeachStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
    foreach.addWS(ws);
    foreach.pos = pos;
    foreach.setCollection(exprNodeStack.pop());
    foreach.addWS(commaWsStack.pop());
    List<ExpressionNode> lExprList = exprNodeListStack.pop();
    lExprList.forEach(expressionNode -> foreach.addVariable((BLangVariableReference) expressionNode));
    BLangBlockStmt foreachBlock = (BLangBlockStmt) this.blockNodeStack.pop();
    foreachBlock.pos = pos;
    foreach.setBody(foreachBlock);
    addStmtToCurrentBlock(foreach);
}
// Opens the block that will become the while-loop body.
void startWhileStmt() {
    startBlock();
}
/** Completes a while statement: condition from the expression stack, body from the block stack. */
void addWhileStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangWhile whileStmt = (BLangWhile) TreeBuilder.createWhileNode();
    whileStmt.setCondition(exprNodeStack.pop());
    whileStmt.pos = pos;
    whileStmt.addWS(ws);
    BLangBlockStmt body = (BLangBlockStmt) this.blockNodeStack.pop();
    body.pos = pos;
    whileStmt.setBody(body);
    addStmtToCurrentBlock(whileStmt);
}
// Opens the block that will become the lock-statement body.
void startLockStmt() {
    startBlock();
}
/** Completes a lock statement using the current block as its body. */
void addLockStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangLock lockStmt = (BLangLock) TreeBuilder.createLockNode();
    lockStmt.pos = pos;
    lockStmt.addWS(ws);
    BLangBlockStmt body = (BLangBlockStmt) this.blockNodeStack.pop();
    body.pos = pos;
    lockStmt.setBody(body);
    addStmtToCurrentBlock(lockStmt);
}
/** Emits a continue statement into the current block. */
public void addContinueStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangContinue continueStmt = (BLangContinue) TreeBuilder.createContinueNode();
    continueStmt.pos = pos;
    continueStmt.addWS(ws);
    addStmtToCurrentBlock(continueStmt);
}
/** Emits a break statement into the current block. */
void addBreakStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangBreak breakStmt = (BLangBreak) TreeBuilder.createBreakNode();
    breakStmt.pos = pos;
    breakStmt.addWS(ws);
    addStmtToCurrentBlock(breakStmt);
}
/**
 * Emits a return statement. A bare {@code return} is normalized to return the nil
 * literal, so every return node always carries an expression.
 */
void addReturnStatement(DiagnosticPos pos, Set<Whitespace> ws, boolean exprAvailable) {
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.pos = pos;
    retStmt.addWS(ws);
    if (exprAvailable) {
        retStmt.expr = (BLangExpression) this.exprNodeStack.pop();
    } else {
        // Synthesize `return ()` for an expressionless return.
        BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        nilLiteral.pos = pos;
        nilLiteral.value = Names.NIL_VALUE;
        nilLiteral.typeTag = TypeTags.NIL;
        retStmt.expr = nilLiteral;
    }
    addStmtToCurrentBlock(retStmt);
}
/** Opens a transaction statement scope and the block for its body. */
void startTransactionStmt() {
    TransactionNode transaction = TreeBuilder.createTransactionNode();
    this.transactionNodeStack.push(transaction);
    this.startBlock();
}
/** Installs the current block as the body of the transaction on top of the stack. */
void addTransactionBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    TransactionNode transaction = transactionNodeStack.peek();
    transaction.addWS(ws);
    BLangBlockStmt body = (BLangBlockStmt) this.blockNodeStack.pop();
    body.pos = pos;
    transaction.setTransactionBody(body);
}
/** Attaches property-list whitespace to the current transaction node. */
void endTransactionPropertyInitStatementList(Set<Whitespace> ws) {
    transactionNodeStack.peek().addWS(ws);
}
// Opens the block that will become the onretry body.
void startOnretryBlock() {
    startBlock();
}
/** Installs the current block as the onretry body of the transaction on top of the stack. */
void addOnretryBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    TransactionNode transaction = transactionNodeStack.peek();
    BLangBlockStmt onRetryBody = (BLangBlockStmt) this.blockNodeStack.pop();
    onRetryBody.pos = pos;
    transaction.addWS(ws);
    transaction.setOnRetryBody(onRetryBody);
}
/**
 * Finishes the transaction statement and, on the first transaction seen per
 * compilation, records the TRANSACTION_EXISTS compiler option and auto-imports
 * the transaction package under a dotted alias.
 */
void endTransactionStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangTransaction transaction = (BLangTransaction) transactionNodeStack.pop();
    transaction.pos = pos;
    transaction.addWS(ws);
    addStmtToCurrentBlock(transaction);
    // Only inject the import once per compilation.
    String value = compilerOptions.get(CompilerOptionName.TRANSACTION_EXISTS);
    if (value != null) {
        return;
    }
    compilerOptions.put(CompilerOptionName.TRANSACTION_EXISTS, "true");
    List<String> nameComps = getPackageNameComps(Names.TRANSACTION_PACKAGE.value);
    addImportPackageDeclaration(pos, null, Names.TRANSACTION_ORG.value, nameComps, Names.DEFAULT_VERSION.value,
            Names.DOT.value + nameComps.get(nameComps.size() - 1));
}
/** Emits an abort statement into the current block. */
void addAbortStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangAbort abortStmt = (BLangAbort) TreeBuilder.createAbortNode();
    abortStmt.pos = pos;
    abortStmt.addWS(ws);
    addStmtToCurrentBlock(abortStmt);
}
/** Emits a retry statement into the current block. */
void addRetryStatement(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangRetry retryStmt = (BLangRetry) TreeBuilder.createRetryNode();
    retryStmt.pos = pos;
    retryStmt.addWS(ws);
    addStmtToCurrentBlock(retryStmt);
}
/** Sets the retry-count expression on the current transaction from the expression stack. */
void addRetryCountExpression(Set<Whitespace> ws) {
    BLangTransaction currentTxn = (BLangTransaction) transactionNodeStack.peek();
    currentTxn.addWS(ws);
    currentTxn.retryCount = (BLangExpression) exprNodeStack.pop();
}
/** Sets the on-commit handler expression on the current transaction. */
void addCommittedBlock(Set<Whitespace> ws) {
    BLangTransaction currentTxn = (BLangTransaction) transactionNodeStack.peek();
    currentTxn.addWS(ws);
    currentTxn.onCommitFunction = (BLangExpression) exprNodeStack.pop();
}
/** Sets the on-abort handler expression on the current transaction. */
void addAbortedBlock(Set<Whitespace> ws) {
    BLangTransaction currentTxn = (BLangTransaction) transactionNodeStack.peek();
    currentTxn.addWS(ws);
    currentTxn.onAbortFunction = (BLangExpression) exprNodeStack.pop();
}
/** Begins an if statement: pushes the node and opens its then-block. */
void startIfElseNode(DiagnosticPos pos) {
    BLangIf ifStmt = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifStmt.pos = pos;
    this.ifElseStatementStack.push(ifStmt);
    this.startBlock();
}
/** Completes the then-branch of the current if statement: condition then body. */
void addIfBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangIf ifStmt = (BLangIf) ifElseStatementStack.peek();
    ifStmt.pos = pos;
    ifStmt.addWS(ws);
    ifStmt.setCondition(exprNodeStack.pop());
    ifStmt.setBody(blockNodeStack.pop());
}
/**
 * Completes an else-if branch and hangs it at the end of the parent if's else chain.
 */
void addElseIfBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    IfNode elseIfNode = ifElseStatementStack.pop();
    ((BLangIf) elseIfNode).pos = pos;
    elseIfNode.setCondition(exprNodeStack.pop());
    elseIfNode.setBody(blockNodeStack.pop());
    elseIfNode.addWS(ws);
    // Walk to the deepest else in the chain and append there.
    IfNode parentIfNode = ifElseStatementStack.peek();
    while (parentIfNode.getElseStatement() != null) {
        parentIfNode = (IfNode) parentIfNode.getElseStatement();
    }
    parentIfNode.setElseStatement(elseIfNode);
}
/**
 * Attaches the final else block to the deepest node of the current if's else chain.
 */
void addElseBlock(DiagnosticPos pos, Set<Whitespace> ws) {
    IfNode ifNode = ifElseStatementStack.peek();
    // Walk to the deepest else-if before attaching the terminal else block.
    while (ifNode.getElseStatement() != null) {
        ifNode = (IfNode) ifNode.getElseStatement();
    }
    BlockNode elseBlock = blockNodeStack.pop();
    elseBlock.addWS(ws);
    ((BLangBlockStmt) elseBlock).pos = pos;
    ifNode.setElseStatement(elseBlock);
}
/** Pops the finished if statement and emits it into the current block. */
void endIfElseNode(Set<Whitespace> ws) {
    IfNode ifStmt = ifElseStatementStack.pop();
    ifStmt.addWS(ws);
    addStmtToCurrentBlock(ifStmt);
}
/**
 * Begins a match statement; the backing deque is created lazily on first use.
 */
void createMatchNode(DiagnosticPos pos) {
    if (this.matchStmtStack == null) {
        this.matchStmtStack = new ArrayDeque<>();
    }
    BLangMatch matchStmt = (BLangMatch) TreeBuilder.createMatchStatement();
    matchStmt.pos = pos;
    this.matchStmtStack.addFirst(matchStmt);
}
/** Finalizes the current match statement with its subject expression and emits it. */
void completeMatchNode(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangMatch match = this.matchStmtStack.removeFirst();
    match.pos = pos;
    match.addWS(ws);
    match.expr = (BLangExpression) this.exprNodeStack.pop();
    addStmtToCurrentBlock(match);
}
// Opens the block that will become a match-pattern clause body.
void startMatchStmtPattern() {
    startBlock();
}
/**
 * Adds a static (literal) binding-pattern clause to the current match statement:
 * the literal comes from the expression stack, the clause body from the block stack.
 */
void addMatchStmtStaticBindingPattern(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangMatchStaticBindingPatternClause patternClause =
            (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern();
    patternClause.pos = pos;
    patternClause.addWS(ws);
    patternClause.literal = (BLangExpression) this.exprNodeStack.pop();
    patternClause.body = (BLangBlockStmt) blockNodeStack.pop();
    patternClause.body.pos = pos;
    this.matchStmtStack.peekFirst().patternClauses.add(patternClause);
}
/**
 * Adds a structured binding-pattern clause to the current match statement: the
 * binding variable comes from the variable stack, the body from the block stack,
 * and an optional type-guard expression from the expression stack.
 */
void addMatchStmtStructuredBindingPattern(DiagnosticPos pos, Set<Whitespace> ws, boolean isTypeGuardPresent) {
    BLangMatchStructuredBindingPatternClause patternClause =
            (BLangMatchStructuredBindingPatternClause) TreeBuilder.createMatchStatementStructuredBindingPattern();
    patternClause.pos = pos;
    patternClause.addWS(ws);
    patternClause.bindingPatternVariable = this.varStack.pop();
    patternClause.body = (BLangBlockStmt) blockNodeStack.pop();
    patternClause.body.pos = pos;
    if (isTypeGuardPresent) {
        patternClause.typeGuardExpr = (BLangExpression) exprNodeStack.pop();
    }
    this.matchStmtStack.peekFirst().patternClauses.add(patternClause);
}
/**
 * Builds a worker send statement. Without a key, the top expression is the payload.
 * With a key (channel send), the first pop is actually the key and the payload is the
 * next expression on the stack, so the two are swapped and the node is flagged as a
 * channel operation.
 */
void addWorkerSendStmt(DiagnosticPos pos, Set<Whitespace> ws, String workerName, boolean hasKey) {
    BLangWorkerSend workerSendNode = (BLangWorkerSend) TreeBuilder.createWorkerSendNode();
    workerSendNode.setWorkerName(this.createIdentifier(workerName));
    workerSendNode.expr = (BLangExpression) exprNodeStack.pop();
    workerSendNode.pos = pos;
    workerSendNode.addWS(ws);
    if (hasKey) {
        // The first pop was the key; the real payload is underneath it.
        workerSendNode.keyExpr = workerSendNode.expr;
        workerSendNode.expr = (BLangExpression) exprNodeStack.pop();
        workerSendNode.isChannel = true;
    }
    addStmtToCurrentBlock(workerSendNode);
}
/**
 * Builds a worker receive expression. When a key is present the receive is a channel
 * operation and the key expression is popped from the stack.
 */
void addWorkerReceiveExpr(DiagnosticPos pos, Set<Whitespace> ws, String workerName, boolean hasKey) {
    BLangWorkerReceive workerReceiveExpr = (BLangWorkerReceive) TreeBuilder.createWorkerReceiveNode();
    workerReceiveExpr.setWorkerName(this.createIdentifier(workerName));
    workerReceiveExpr.pos = pos;
    workerReceiveExpr.addWS(ws);
    if (hasKey) {
        workerReceiveExpr.keyExpr = (BLangExpression) exprNodeStack.pop();
        workerReceiveExpr.isChannel = true;
    }
    addExpressionNode(workerReceiveExpr);
}
/** Builds a worker flush expression, optionally scoped to a single named worker. */
void addWorkerFlushExpr(DiagnosticPos pos, Set<Whitespace> ws, String workerName) {
    BLangWorkerFlushExpr flushExpr = TreeBuilder.createWorkerFlushExpressionNode();
    flushExpr.pos = pos;
    flushExpr.addWS(ws);
    if (workerName != null) {
        flushExpr.workerIdentifier = (BLangIdentifier) createIdentifier(workerName);
    }
    addExpressionNode(flushExpr);
}
/** Builds a synchronous worker send expression from the top stack expression. */
void addWorkerSendSyncExpr(DiagnosticPos pos, Set<Whitespace> ws, String workerName) {
    BLangWorkerSyncSendExpr syncSend = TreeBuilder.createWorkerSendSyncExprNode();
    syncSend.setWorkerName(this.createIdentifier(workerName));
    syncSend.expr = (BLangExpression) exprNodeStack.pop();
    syncSend.addWS(ws);
    syncSend.pos = pos;
    addExpressionNode(syncSend);
}
/** Wraps the top stack expression in an expression statement and emits it. */
void addExpressionStmt(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangExpressionStmt statement = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
    statement.pos = pos;
    statement.addWS(ws);
    statement.expr = (BLangExpression) exprNodeStack.pop();
    addStmtToCurrentBlock(statement);
}
/** Begins a service definition, attaching pending annotations, docs and deprecations. */
void startServiceDef(DiagnosticPos pos) {
    BLangService service = (BLangService) TreeBuilder.createServiceNode();
    service.pos = pos;
    attachAnnotations(service);
    attachMarkdownDocumentations(service);
    attachDeprecatedNode(service);
    serviceNodeStack.push(service);
}
/**
 * Finishes a service definition. Anonymous services get a generated name; the
 * service's object type is wrapped in a type definition. Named services pop their
 * attach expression from the stack, while anonymous services push a service
 * constructor expression instead.
 */
void endServiceDef(DiagnosticPos pos, Set<Whitespace> ws, String serviceName, DiagnosticPos identifierPos,
                   boolean isAnonServiceValue) {
    BLangService serviceNode = (BLangService) serviceNodeStack.pop();
    serviceNode.pos = pos;
    serviceNode.addWS(ws);
    serviceNode.isAnonymousServiceValue = isAnonServiceValue;
    if (serviceName == null) {
        // Anonymous service: synthesize a unique name anchored at the service position.
        serviceName = this.anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
        identifierPos = pos;
    }
    BLangIdentifier identifier = (BLangIdentifier) createIdentifier(serviceName);
    identifier.pos = identifierPos;
    serviceNode.setName(identifier);
    // Wrap the parsed service body (an object type) in a SERVICE-flagged type definition.
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    typeDef.setName(identifier);
    typeDef.flagSet.add(Flag.SERVICE);
    typeDef.typeNode = (BLangType) this.typeNodeStack.pop();
    typeDef.pos = pos;
    this.compUnit.addTopLevelNode(typeDef);
    serviceNode.serviceTypeDefinition = typeDef;
    serviceNode.serviceUDT = createUserDefinedType(pos, ws, (BLangIdentifier) TreeBuilder.createIdentifierNode(),
            typeDef.name);
    this.compUnit.addTopLevelNode(serviceNode);
    if (!isAnonServiceValue) {
        serviceNode.attachExpr = (BLangExpression) this.exprNodeStack.pop();
        return;
    }
    // Anonymous service value: the whole definition evaluates to a constructor expression.
    final BLangServiceConstructorExpr serviceConstNode = (BLangServiceConstructorExpr) TreeBuilder
            .createServiceConstructorNode();
    serviceConstNode.serviceNode = serviceNode;
    serviceConstNode.pos = pos;
    serviceConstNode.addWS(ws);
    addExpressionNode(serviceConstNode);
}
/**
 * Builds an XML qualified-name expression ({@code prefix:localname}) and
 * pushes it onto the expression stack.
 */
void createXMLQName(DiagnosticPos pos, Set<Whitespace> ws, String localname, String prefix) {
    BLangXMLQName qname = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qname.localname = (BLangIdentifier) createIdentifier(localname);
    qname.prefix = (BLangIdentifier) createIdentifier(prefix);
    qname.pos = pos;
    qname.addWS(ws);
    addExpressionNode(qname);
}

/**
 * Builds an XML attribute from the two expressions on top of the expression
 * stack (value pushed last, name below it) and stages it on the attribute
 * stack for the element currently being built.
 */
void createXMLAttribute(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangXMLAttribute xmlAttribute = (BLangXMLAttribute) TreeBuilder.createXMLAttributeNode();
    // Pop order matters: the quoted value was pushed after the name.
    xmlAttribute.value = (BLangXMLQuotedString) exprNodeStack.pop();
    xmlAttribute.name = (BLangExpression) exprNodeStack.pop();
    xmlAttribute.pos = pos;
    xmlAttribute.addWS(ws);
    xmlAttributeNodeStack.push(xmlAttribute);
}

/** Attaches whitespace to the XML literal currently on top of the expression stack. */
void attachXmlLiteralWS(Set<Whitespace> ws) {
    this.exprNodeStack.peek().addWS(ws);
}

/**
 * Opens an XML element literal: consumes the start-tag name expression,
 * adopts all staged attributes, and pushes the element onto the expression
 * stack so children can be attached before {@link #endXMLElement}.
 */
void startXMLElement(DiagnosticPos pos, Set<Whitespace> ws, boolean isRoot) {
    BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    BLangExpression startTag = (BLangExpression) exprNodeStack.pop();
    xmlElement.addWS(ws);
    xmlElement.startTagName = startTag;
    xmlElement.pos = pos;
    xmlElement.isRoot = isRoot;
    // All attributes staged since the tag opened belong to this element.
    xmlAttributeNodeStack.forEach(xmlElement::addAttribute);
    xmlAttributeNodeStack.clear();
    addExpressionNode(xmlElement);
}

/**
 * Closes the XML element on top of the expression stack with the end-tag name
 * expression pushed above it.
 */
void endXMLElement(Set<Whitespace> ws) {
    BLangExpression endTag = (BLangExpression) exprNodeStack.pop();
    BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) exprNodeStack.peek();
    xmlElement.addWS(ws);
    xmlElement.endTagName = endTag;
}
/**
 * Builds a quoted XML string (attribute value) from its interleaved text
 * fragments and embedded expressions, and pushes it onto the expression stack.
 */
void createXMLQuotedLiteral(DiagnosticPos pos,
                            Set<Whitespace> ws,
                            Stack<String> precedingTextFragments,
                            String endingText,
                            QuoteType quoteType) {
    List<BLangExpression> templateExprs =
            getExpressionsInTemplate(pos, ws, precedingTextFragments, endingText);
    BLangXMLQuotedString quotedString = (BLangXMLQuotedString) TreeBuilder.createXMLQuotedStringNode();
    quotedString.pos = pos;
    quotedString.quoteType = quoteType;
    quotedString.textFragments = templateExprs;
    addExpressionNode(quotedString);
}

/**
 * Pops the most recently built XML literal and attaches it as a child of the
 * XML element now on top of the expression stack.
 */
void addChildToXMLElement(Set<Whitespace> ws) {
    XMLLiteralNode child = (XMLLiteralNode) exprNodeStack.pop();
    child.addWS(ws);
    BLangXMLElementLiteral parentXMLExpr = (BLangXMLElementLiteral) exprNodeStack.peek();
    parentXMLExpr.addChild(child);
}

/**
 * Builds a standalone XML text literal from its template fragments and pushes
 * it onto the expression stack.
 */
void createXMLTextLiteral(DiagnosticPos pos,
                          Set<Whitespace> ws,
                          Stack<String> precedingTextFragments,
                          String endingText) {
    BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    xmlTextLiteral.textFragments =
            getExpressionsInTemplate(pos, ws, precedingTextFragments, endingText);
    xmlTextLiteral.pos = pos;
    addExpressionNode(xmlTextLiteral);
}

/**
 * Adds interleaved text/expression fragments directly as children of the XML
 * element on top of the expression stack (the element itself stays on the stack).
 */
void addXMLTextToElement(DiagnosticPos pos,
                         Set<Whitespace> ws,
                         Stack<String> precedingTextFragments,
                         String endingText) {
    List<BLangExpression> templateExprs =
            getExpressionsInTemplate(pos, ws, precedingTextFragments, endingText);
    BLangXMLElementLiteral parentElement = (BLangXMLElementLiteral) exprNodeStack.peek();
    templateExprs.forEach(parentElement::addChild);
}

/**
 * Builds an XML comment literal from its template fragments and pushes it
 * onto the expression stack. Whitespace is attached to the comment node itself,
 * not to the individual fragments (hence the null ws passed down).
 */
void createXMLCommentLiteral(DiagnosticPos pos,
                             Set<Whitespace> ws,
                             Stack<String> precedingTextFragments,
                             String endingText) {
    BLangXMLCommentLiteral xmlCommentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode();
    xmlCommentLiteral.textFragments =
            getExpressionsInTemplate(pos, null, precedingTextFragments, endingText);
    xmlCommentLiteral.pos = pos;
    xmlCommentLiteral.addWS(ws);
    addExpressionNode(xmlCommentLiteral);
}

/**
 * Builds an XML processing-instruction literal ({@code <?target data?>}).
 * The target name is first pushed as a string literal, then popped back into
 * the PI node after the data fragments are collected.
 */
void createXMLPILiteral(DiagnosticPos pos,
                        Set<Whitespace> ws,
                        String targetQName,
                        Stack<String> precedingTextFragments,
                        String endingText) {
    List<BLangExpression> dataExprs =
            getExpressionsInTemplate(pos, ws, precedingTextFragments, endingText);
    addLiteralValue(pos, ws, TypeTags.STRING, targetQName);
    BLangXMLProcInsLiteral xmlProcInsLiteral =
            (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode();
    xmlProcInsLiteral.pos = pos;
    xmlProcInsLiteral.dataFragments = dataExprs;
    xmlProcInsLiteral.target = (BLangLiteral) exprNodeStack.pop();
    addExpressionNode(xmlProcInsLiteral);
}
/**
 * Builds an {@code xmlns} declaration. At top level it is added directly to
 * the compilation unit; inside a function body it is wrapped in an xmlns
 * declaration statement and appended to the current block.
 */
void addXMLNSDeclaration(DiagnosticPos pos,
                         Set<Whitespace> ws,
                         String namespaceUri,
                         String prefix,
                         boolean isTopLevel) {
    BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    BLangIdentifier prefixIdentifer = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    prefixIdentifer.pos = pos;
    prefixIdentifer.value = prefix;
    // The second whitespace token belongs to the URI literal; detach it from
    // the declaration's whitespace set and attach it to the literal instead.
    addLiteralValue(pos, removeNthFromStart(ws, 1), TypeTags.STRING, namespaceUri);
    xmlns.namespaceURI = (BLangLiteral) exprNodeStack.pop();
    xmlns.prefix = prefixIdentifer;
    xmlns.pos = pos;
    xmlns.addWS(ws);
    if (isTopLevel) {
        this.compUnit.addTopLevelNode(xmlns);
        return;
    }
    BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    xmlnsStmt.xmlnsDecl = xmlns;
    xmlnsStmt.pos = pos;
    addStmtToCurrentBlock(xmlnsStmt);
}
/**
 * Builds a string template literal ({@code string `...${expr}...`}) from its
 * interleaved fragments and pushes it onto the expression stack. Whitespace is
 * attached to the template node as a whole, not to individual fragments.
 */
void createStringTemplateLiteral(DiagnosticPos pos, Set<Whitespace> ws, Stack<String> precedingTextFragments,
                                 String endingText) {
    BLangStringTemplateLiteral stringTemplateLiteral =
            (BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
    stringTemplateLiteral.exprs =
            getExpressionsInTemplate(pos, null, precedingTextFragments, endingText);
    stringTemplateLiteral.addWS(ws);
    stringTemplateLiteral.pos = pos;
    addExpressionNode(stringTemplateLiteral);
}
/**
 * Builds an XML attribute access expression ({@code x@} or {@code x@[index]}).
 * When a single attribute is referenced, its index expression is popped first
 * (it was pushed after the target variable reference).
 */
void createXmlAttributesRefExpr(DiagnosticPos pos, Set<Whitespace> ws, boolean singleAttribute) {
    BLangXMLAttributeAccess xmlAttributeAccess =
            (BLangXMLAttributeAccess) TreeBuilder.createXMLAttributeAccessNode();
    xmlAttributeAccess.pos = pos;
    xmlAttributeAccess.addWS(ws);
    if (singleAttribute) {
        xmlAttributeAccess.indexExpr = (BLangExpression) exprNodeStack.pop();
    }
    xmlAttributeAccess.expr = (BLangVariableReference) exprNodeStack.pop();
    addExpressionNode(xmlAttributeAccess);
}
/**
 * Builds an integer range expression (e.g. {@code [a..b]}) and pushes it onto
 * the expression stack. The end expression, if present, was pushed after the
 * start expression, so it is popped first.
 */
void addIntRangeExpression(DiagnosticPos pos,
                           Set<Whitespace> ws,
                           boolean includeStart,
                           boolean includeEnd,
                           boolean noUpperBound) {
    BLangIntRangeExpression intRangeExpr = (BLangIntRangeExpression) TreeBuilder.createIntRangeExpression();
    intRangeExpr.pos = pos;
    intRangeExpr.addWS(ws);
    if (!noUpperBound) {
        intRangeExpr.endExpr = (BLangExpression) this.exprNodeStack.pop();
    }
    intRangeExpr.startExpr = (BLangExpression) this.exprNodeStack.pop();
    intRangeExpr.includeStart = includeStart;
    intRangeExpr.includeEnd = includeEnd;
    exprNodeStack.push(intRangeExpr);
}
/**
 * Wraps the expression on top of the stack as a named invocation argument
 * ({@code name = expr}) and pushes the wrapper back.
 */
void addNamedArgument(DiagnosticPos pos, Set<Whitespace> ws, String name) {
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.pos = pos;
    namedArg.addWS(ws);
    namedArg.name = (BLangIdentifier) this.createIdentifier(name);
    namedArg.expr = (BLangExpression) this.exprNodeStack.pop();
    addExpressionNode(namedArg);
}

/**
 * Wraps the expression on top of the stack as a rest argument
 * ({@code ...expr}) and pushes the wrapper back.
 */
void addRestArgument(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangRestArgsExpression varArgs = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    varArgs.pos = pos;
    varArgs.addWS(ws);
    varArgs.expr = (BLangExpression) this.exprNodeStack.pop();
    addExpressionNode(varArgs);
}
/**
 * Converts the most recently added parameter of the current parameter list
 * into a defaultable parameter, pairing it with the default-value expression
 * on top of the expression stack.
 */
void addDefaultableParam(DiagnosticPos pos, Set<Whitespace> ws) {
    BLangSimpleVariableDef defaultableParam =
            (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    defaultableParam.pos = pos;
    defaultableParam.addWS(ws);
    List<BLangVariable> params = this.varListStack.peek();
    // The parameter was already appended to the list; reclaim it to attach its default.
    BLangSimpleVariable var = (BLangSimpleVariable) params.remove(params.size() - 1);
    var.expr = (BLangExpression) this.exprNodeStack.pop();
    defaultableParam.var = var;
    this.defaultableParamsList.add(defaultableParam);
}

/**
 * Builds a rest parameter ({@code T... name}): wraps the declared element type
 * in a one-dimensional array type and stages the parameter on the rest-param stack.
 */
void addRestParam(DiagnosticPos pos, Set<Whitespace> ws, String identifier, int annotCount) {
    BLangSimpleVariable restParam = (BLangSimpleVariable) this.generateBasicVarNode(pos, ws, identifier, false);
    attachAnnotations(restParam, annotCount);
    restParam.pos = pos;
    BLangArrayType typeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    typeNode.elemtype = restParam.typeNode;
    typeNode.dimensions = 1;
    restParam.typeNode = typeNode;
    this.restParamStack.push(restParam);
}
/**
 * Assembles the ordered fragment list of a template literal.
 * <p>
 * The parser leaves the embedded expressions on the expression stack (most
 * recent on top) and the text fragments that precede each of them in
 * {@code precedingTextFragments}; {@code endingText} is the trailing text.
 * Fragments are collected newest-first by alternating a pop from the
 * expression stack with a pop from the text stack, then the whole list is
 * reversed into source order. Text is unescaped for the literal value while
 * the raw text is kept as the literal's original value.
 */
private List<BLangExpression> getExpressionsInTemplate(DiagnosticPos pos,
                                                       Set<Whitespace> ws,
                                                       Stack<String> precedingTextFragments,
                                                       String endingText) {
    List<BLangExpression> expressions = new ArrayList<>();
    String originalValue = endingText;
    endingText = endingText == null ? "" : StringEscapeUtils.unescapeJava(endingText);
    addLiteralValue(pos, ws, TypeTags.STRING, endingText, originalValue);
    expressions.add((BLangExpression) exprNodeStack.pop());
    while (!precedingTextFragments.empty()) {
        expressions.add((BLangExpression) exprNodeStack.pop());
        String textFragment = precedingTextFragments.pop();
        originalValue = textFragment;
        textFragment = textFragment == null ? "" : StringEscapeUtils.unescapeJava(textFragment);
        addLiteralValue(pos, ws, TypeTags.STRING, textFragment, originalValue);
        expressions.add((BLangExpression) exprNodeStack.pop());
    }
    // Collected newest-first above; restore source order.
    Collections.reverse(expressions);
    return expressions;
}
/** Attaches trailing whitespace to the compilation unit. */
void endCompilationUnit(Set<Whitespace> ws) {
    compUnit.addWS(ws);
}

/** Attaches the parameter-list whitespace to the invokable being built. */
void endCallableParamList(Set<Whitespace> ws) {
    this.invokableNodeStack.peek().addWS(ws);
}

/** Stages function-type parameter-list whitespace for a later consumer. */
void endFuncTypeParamList(Set<Whitespace> ws) {
    this.commaWsStack.push(ws);
}
/**
 * Extracts the n-th whitespace entry counting from the END of {@code ws},
 * removing it from the set. Returns the extracted entry wrapped in a fresh
 * single-element set, or {@code null} when {@code ws} is null or too small.
 */
private Set<Whitespace> removeNthFromLast(Set<Whitespace> ws, int n) {
    return ws == null ? null : removeNth(((TreeSet<Whitespace>) ws).descendingIterator(), n);
}

/**
 * Extracts the n-th whitespace entry counting from the START of {@code ws},
 * removing it from the set. Returns the extracted entry wrapped in a fresh
 * single-element set, or {@code null} when {@code ws} is null or too small.
 */
private Set<Whitespace> removeNthFromStart(Set<Whitespace> ws, int n) {
    return ws == null ? null : removeNth(ws.iterator(), n);
}

/**
 * Walks {@code iterator} to position {@code n} (0-based), removes that entry
 * from the underlying collection and returns it in a new single-element set;
 * returns {@code null} when the iterator has fewer than {@code n + 1} entries.
 */
private Set<Whitespace> removeNth(Iterator<Whitespace> iterator, int n) {
    int index = 0;
    while (iterator.hasNext()) {
        Whitespace candidate = iterator.next();
        if (index == n) {
            // Detach from the source set before handing it back on its own.
            iterator.remove();
            Set<Whitespace> extracted = new TreeSet<>();
            extracted.add(candidate);
            return extracted;
        }
        index++;
    }
    return null;
}
/**
 * Builds a user-defined type reference node ({@code pkgAlias:name}) at the
 * given position with the given whitespace attached.
 */
private BLangUserDefinedType createUserDefinedType(DiagnosticPos pos,
                                                   Set<Whitespace> ws,
                                                   BLangIdentifier pkgAlias,
                                                   BLangIdentifier name) {
    BLangUserDefinedType typeNode = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    typeNode.pkgAlias = pkgAlias;
    typeNode.typeName = name;
    typeNode.pos = pos;
    typeNode.addWS(ws);
    return typeNode;
}
/**
 * Splits a package path into its name components, accepting '.', '\' or '/'
 * as separators.
 */
private List<String> getPackageNameComps(String sourcePkg) {
    // Character class equivalent of the alternation "\\.|\\\\|\\/".
    return Arrays.asList(sourcePkg.split("[.\\\\/]"));
}
/** Opens an order-by clause and pushes it for later completion. */
void startOrderByClauseNode(DiagnosticPos pos) {
    OrderByNode orderByNode = TreeBuilder.createOrderByNode();
    ((BLangOrderBy) orderByNode).pos = pos;
    this.orderByClauseStack.push(orderByNode);
}

/**
 * Completes the order-by clause on top of its stack, draining all staged
 * order-by variables into it in source order (the stack is reversed first).
 */
void endOrderByClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    OrderByNode orderByNode = this.orderByClauseStack.peek();
    ((BLangOrderBy) orderByNode).pos = pos;
    orderByNode.addWS(ws);
    Collections.reverse(orderByVariableStack);
    while (!this.orderByVariableStack.empty()) {
        orderByNode.addOrderByVariable(this.orderByVariableStack.pop());
    }
}

/** Opens an order-by variable and pushes it for later completion. */
void startOrderByVariableNode(DiagnosticPos pos) {
    OrderByVariableNode orderByVariableNode = TreeBuilder.createOrderByVariableNode();
    ((BLangOrderByVariable) orderByVariableNode).pos = pos;
    this.orderByVariableStack.push(orderByVariableNode);
}

/**
 * Completes the order-by variable on top of its stack with the variable
 * reference from the expression stack and its sort direction.
 */
void endOrderByVariableNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isAscending,
                            boolean isDescending) {
    OrderByVariableNode orderByVariableNode = this.orderByVariableStack.peek();
    ((BLangOrderByVariable) orderByVariableNode).pos = pos;
    orderByVariableNode.addWS(ws);
    orderByVariableNode.setVariableReference(this.exprNodeStack.pop());
    orderByVariableNode.setOrderByType(isAscending, isDescending);
}

/** Opens a limit clause and pushes it for later completion. */
void startLimitClauseNode(DiagnosticPos pos) {
    LimitNode limitNode = TreeBuilder.createLimitNode();
    ((BLangLimit) limitNode).pos = pos;
    this.limitClauseStack.push(limitNode);
}

/** Completes the limit clause on top of its stack with the parsed limit value. */
void endLimitClauseNode(DiagnosticPos pos, Set<Whitespace> ws, String limitValue) {
    LimitNode limitNode = this.limitClauseStack.peek();
    ((BLangLimit) limitNode).pos = pos;
    limitNode.addWS(ws);
    limitNode.setLimitValue(limitValue);
}
/** Opens a group-by clause and pushes it for later completion. */
void startGroupByClauseNode(DiagnosticPos pos) {
    GroupByNode groupByNode = TreeBuilder.createGroupByNode();
    ((BLangGroupBy) groupByNode).pos = pos;
    this.groupByClauseStack.push(groupByNode);
}

/**
 * Completes the group-by clause on top of its stack, adopting the staged
 * comma whitespace and the variable-reference list collected by the parser.
 */
void endGroupByClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    GroupByNode groupByNode = this.groupByClauseStack.peek();
    ((BLangGroupBy) groupByNode).pos = pos;
    groupByNode.addWS(ws);
    groupByNode.addWS(commaWsStack.pop());
    this.exprNodeListStack.pop().forEach(groupByNode::addVariableReference);
}

/** Opens a having clause and pushes it for later completion. */
void startHavingClauseNode(DiagnosticPos pos) {
    HavingNode havingNode = TreeBuilder.createHavingNode();
    ((BLangHaving) havingNode).pos = pos;
    this.havingClauseStack.push(havingNode);
}

/** Completes the having clause with the condition expression on top of the stack. */
void endHavingClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    HavingNode havingNode = this.havingClauseStack.peek();
    ((BLangHaving) havingNode).pos = pos;
    havingNode.addWS(ws);
    havingNode.setExpression(this.exprNodeStack.pop());
}
/** Opens a select expression and pushes it for later completion. */
void startSelectExpressionNode(DiagnosticPos pos) {
    SelectExpressionNode selectExpr = TreeBuilder.createSelectExpressionNode();
    ((BLangSelectExpression) selectExpr).pos = pos;
    this.selectExpressionsStack.push(selectExpr);
}

/**
 * Completes the select expression on top of its stack with the expression
 * from the expression stack and its optional {@code as identifier} alias.
 */
void endSelectExpressionNode(String identifier, DiagnosticPos pos, Set<Whitespace> ws) {
    SelectExpressionNode selectExpression = this.selectExpressionsStack.peek();
    selectExpression.setExpression(exprNodeStack.pop());
    ((BLangSelectExpression) selectExpression).pos = pos;
    selectExpression.addWS(ws);
    selectExpression.setIdentifier(identifier);
}

/** Opens a fresh select-expression list for the clause being parsed. */
void startSelectExpressionList() {
    this.selectExpressionsListStack.push(new ArrayList<>());
}

/**
 * Finishes the current select-expression list: stages the comma whitespace
 * and moves the given number of staged select expressions into the list.
 */
void endSelectExpressionList(Set<Whitespace> ws, int selectExprCount) {
    commaWsStack.push(ws);
    List<SelectExpressionNode> selectExprList = this.selectExpressionsListStack.peek();
    addSelectExprToSelectExprNodeList(selectExprList, selectExprCount);
}

/**
 * Recursively pops {@code n} select expressions off the stack and appends
 * them to {@code selectExprList} in original source order (the recursion
 * unwinds deepest-first, which reverses the stack's LIFO order).
 */
private void addSelectExprToSelectExprNodeList(List<SelectExpressionNode> selectExprList, int n) {
    if (this.selectExpressionsStack.empty()) {
        throw new IllegalStateException("Select expression stack cannot be empty in processing a SelectClause");
    }
    SelectExpressionNode expr = this.selectExpressionsStack.pop();
    if (n > 1) {
        addSelectExprToSelectExprNodeList(selectExprList, n - 1);
    }
    selectExprList.add(expr);
}
/** Opens a where clause and pushes it for later completion. */
void startWhereClauseNode(DiagnosticPos pos) {
    WhereNode whereNode = TreeBuilder.createWhereNode();
    ((BLangWhere) whereNode).pos = pos;
    this.whereClauseStack.push(whereNode);
}

/** Completes the where clause with the condition expression on top of the stack. */
void endWhereClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    WhereNode whereNode = this.whereClauseStack.peek();
    ((BLangWhere) whereNode).pos = pos;
    whereNode.addWS(ws);
    whereNode.setExpression(exprNodeStack.pop());
}

/** Opens a select clause and pushes it for later completion. */
void startSelectClauseNode(DiagnosticPos pos) {
    SelectClauseNode selectClauseNode = TreeBuilder.createSelectClauseNode();
    ((BLangSelectClause) selectClauseNode).pos = pos;
    this.selectClausesStack.push(selectClauseNode);
}

/**
 * Completes the select clause on top of its stack. For an explicit expression
 * list the staged comma whitespace and expression list are adopted; for
 * {@code select *} only the flag is set. Optional group-by and having clauses
 * are attached from their respective stacks.
 */
void endSelectClauseNode(boolean isSelectAll, boolean isGroupByAvailable, boolean isHavingAvailable,
                         DiagnosticPos pos, Set<Whitespace> ws) {
    SelectClauseNode selectClauseNode = this.selectClausesStack.peek();
    ((BLangSelectClause) selectClauseNode).pos = pos;
    selectClauseNode.addWS(ws);
    if (!isSelectAll) {
        selectClauseNode.addWS(commaWsStack.pop());
        selectClauseNode.setSelectExpressions(this.selectExpressionsListStack.pop());
    } else {
        selectClauseNode.setSelectAll(true);
    }
    if (isGroupByAvailable) {
        selectClauseNode.setGroupBy(this.groupByClauseStack.pop());
    }
    if (isHavingAvailable) {
        selectClauseNode.setHaving(this.havingClauseStack.pop());
    }
}
/** Opens a window clause and pushes it for later completion. */
void startWindowClauseNode(DiagnosticPos pos) {
    WindowClauseNode windowClauseNode = TreeBuilder.createWindowClauseNode();
    ((BLangWindow) windowClauseNode).pos = pos;
    this.windowClausesStack.push(windowClauseNode);
}

/**
 * Completes the window clause: the window function invocation is the top of
 * the expression stack; any expressions remaining beneath it are treated as
 * pre-window function invocations of the enclosing streaming input. Also
 * records whether a where clause was seen before this window, which
 * {@code endStreamingInputNode} uses to classify that clause.
 */
void endWindowsClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    WindowClauseNode windowClauseNode = this.windowClausesStack.peek();
    ((BLangWindow) windowClauseNode).pos = pos;
    windowClauseNode.addWS(ws);
    windowClauseNode.setFunctionInvocation(this.exprNodeStack.pop());
    if (this.exprNodeStack.size() > 1) {
        // Everything left except the stream reference is a pre-window invocation.
        List<ExpressionNode> exprList = new ArrayList<>();
        addExprToExprNodeList(exprList, this.exprNodeStack.size() - 1);
        StreamingInput streamingInput = this.streamingInputStack.peek();
        streamingInput.setPreFunctionInvocations(exprList);
    }
    if (!this.whereClauseStack.empty()) {
        this.streamingInputStack.peek().setWindowTraversedAfterWhere(true);
    } else {
        this.streamingInputStack.peek().setWindowTraversedAfterWhere(false);
    }
}
/** Opens a streaming input and pushes it for later completion. */
void startStreamingInputNode(DiagnosticPos pos) {
    StreamingInput streamingInput = TreeBuilder.createStreamingInputNode();
    ((BLangStreamingInput) streamingInput).pos = pos;
    this.streamingInputStack.push(streamingInput);
}

/**
 * Completes the streaming input on top of its stack.
 * <p>
 * Up to two where clauses may be staged: with two, the one pushed last is the
 * after-streaming condition. With exactly one, the window-traversed-after-where
 * flag (set by {@code endWindowsClauseNode}) decides whether it is the before-
 * or after-streaming condition. Remaining stacked expressions (beyond the
 * stream reference) become post-function invocations, an optional window
 * clause is attached, and finally the stream reference itself is popped.
 */
void endStreamingInputNode(String alias, DiagnosticPos pos,
                           Set<Whitespace> ws) {
    BLangStreamingInput streamingInput = (BLangStreamingInput) this.streamingInputStack.peek();
    streamingInput.pos = pos;
    streamingInput.addWS(ws);
    if (this.whereClauseStack.size() == 2) {
        streamingInput.setAfterStreamingCondition(this.whereClauseStack.pop());
        streamingInput.setBeforeStreamingCondition(this.whereClauseStack.pop());
    } else if (this.whereClauseStack.size() == 1) {
        if (streamingInput.isWindowTraversedAfterWhere()) {
            streamingInput.setBeforeStreamingCondition(this.whereClauseStack.pop());
        } else {
            streamingInput.setAfterStreamingCondition(this.whereClauseStack.pop());
        }
    }
    if (this.exprNodeStack.size() > 1) {
        List<ExpressionNode> exprList = new ArrayList<>();
        addExprToExprNodeList(exprList, this.exprNodeStack.size() - 1);
        streamingInput.setPostFunctionInvocations(exprList);
    }
    if (!this.windowClausesStack.empty()) {
        streamingInput.setWindowClause(this.windowClausesStack.pop());
    }
    streamingInput.setStreamReference(this.exprNodeStack.pop());
    streamingInput.setAlias(alias);
}
/** Opens a join streaming input and pushes it for later completion. */
void startJoinStreamingInputNode(DiagnosticPos pos) {
    JoinStreamingInput joinStreamingInput = TreeBuilder.createJoinStreamingInputNode();
    ((BLangJoinStreamingInput) joinStreamingInput).pos = pos;
    this.joinStreamingInputsStack.push(joinStreamingInput);
}

/**
 * Completes the join streaming input: adopts the joined streaming input, the
 * optional {@code on} condition (if an expression remains stacked), the
 * unidirectional flags and the join type keyword.
 */
void endJoinStreamingInputNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isUnidirectionalBeforeJoin,
                               boolean isUnidirectionalAfterJoin, String joinType) {
    JoinStreamingInput joinStreamingInput = this.joinStreamingInputsStack.peek();
    ((BLangJoinStreamingInput) joinStreamingInput).pos = pos;
    joinStreamingInput.addWS(ws);
    joinStreamingInput.setStreamingInput(this.streamingInputStack.pop());
    if (this.exprNodeStack.size() > 0) {
        joinStreamingInput.setOnExpression(this.exprNodeStack.pop());
    }
    joinStreamingInput.setUnidirectionalBeforeJoin(isUnidirectionalBeforeJoin);
    joinStreamingInput.setUnidirectionalAfterJoin(isUnidirectionalAfterJoin);
    joinStreamingInput.setJoinType(joinType);
}

/** Attaches the join-type keyword's whitespace to the current join input. */
void endJoinType(Set<Whitespace> ws) {
    JoinStreamingInput joinStreamingInput = this.joinStreamingInputsStack.peek();
    joinStreamingInput.addWS(ws);
}
/** Opens a table query and pushes it for later completion. */
void startTableQueryNode(DiagnosticPos pos) {
    TableQuery tableQuery = TreeBuilder.createTableQueryNode();
    ((BLangTableQuery) tableQuery).pos = pos;
    this.tableQueriesStack.push(tableQuery);
}

/**
 * Completes the table query on top of its stack, attaching its mandatory
 * streaming input and whichever optional clauses (join, select, order-by,
 * limit) the parser reported as present.
 */
void endTableQueryNode(boolean isJoinClauseAvailable, boolean isSelectClauseAvailable,
                       boolean isOrderByClauseAvailable, boolean isLimitClauseAvailable, DiagnosticPos pos,
                       Set<Whitespace> ws) {
    BLangTableQuery tableQuery = (BLangTableQuery) this.tableQueriesStack.peek();
    tableQuery.pos = pos;
    tableQuery.addWS(ws);
    tableQuery.setStreamingInput(this.streamingInputStack.pop());
    if (isJoinClauseAvailable) {
        tableQuery.setJoinStreamingInput(this.joinStreamingInputsStack.pop());
    }
    if (isSelectClauseAvailable) {
        tableQuery.setSelectClause(this.selectClausesStack.pop());
    }
    if (isOrderByClauseAvailable) {
        tableQuery.setOrderByClause(this.orderByClauseStack.pop());
    }
    if (isLimitClauseAvailable) {
        tableQuery.setLimitClause(this.limitClauseStack.pop());
    }
}

/**
 * Wraps the completed table query in a table-query expression and pushes it
 * onto the expression stack.
 */
void addTableQueryExpression(DiagnosticPos pos, Set<Whitespace> ws) {
    TableQueryExpression tableQueryExpression = TreeBuilder.createTableQueryExpression();
    ((BLangTableQueryExpression) tableQueryExpression).pos = pos;
    tableQueryExpression.addWS(ws);
    tableQueryExpression.setTableQuery(tableQueriesStack.pop());
    this.exprNodeStack.push(tableQueryExpression);
}
/** Opens a set-assignment ({@code lhs = rhs}) and pushes it for later completion. */
void startSetAssignmentClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    SetAssignmentNode setAssignmentNode = TreeBuilder.createSetAssignmentNode();
    ((BLangSetAssignment) setAssignmentNode).pos = pos;
    setAssignmentNode.addWS(ws);
    this.setAssignmentStack.push(setAssignmentNode);
}

/**
 * Completes the set-assignment on top of its stack. The value expression was
 * pushed after the variable reference, so it is popped first.
 */
void endSetAssignmentClauseNode(DiagnosticPos pos, Set<Whitespace> ws) {
    if (this.exprNodeStack.empty()) {
        throw new IllegalStateException("Expression stack cannot be empty in processing a Set Assignment Clause");
    }
    SetAssignmentNode setAssignmentNode = this.setAssignmentStack.peek();
    ((BLangSetAssignment) setAssignmentNode).pos = pos;
    setAssignmentNode.addWS(ws);
    setAssignmentNode.setExpression(exprNodeStack.pop());
    setAssignmentNode.setVariableReference(exprNodeStack.pop());
}

/** Opens a fresh set-assignment list for the clause being parsed. */
void startSetClauseNode() {
    this.setAssignmentListStack.push(new ArrayList<>());
}

/**
 * Moves the given number of staged set-assignments into the current list.
 * NOTE(review): {@code ws} is accepted but never attached anywhere — compare
 * {@code endSelectExpressionList}, which pushes it to commaWsStack. Possible
 * whitespace loss; confirm against the clause's consumers before changing.
 */
void endSetClauseNode(Set<Whitespace> ws, int selectExprCount) {
    List<SetAssignmentNode> setAssignmentNodeList = this.setAssignmentListStack.peek();
    addSetAssignmentToSelectAssignmentNodeList(setAssignmentNodeList, selectExprCount);
}

/**
 * Recursively pops {@code n} set-assignments off the stack and appends them
 * to {@code setAssignmentNodeList} in original source order (the recursion
 * unwinds deepest-first, reversing the stack's LIFO order).
 */
private void addSetAssignmentToSelectAssignmentNodeList(List<SetAssignmentNode> setAssignmentNodeList, int n) {
    if (this.setAssignmentStack.empty()) {
        throw new IllegalStateException("Set expression stack cannot be empty in processing a SelectClause");
    }
    SetAssignmentNode expr = this.setAssignmentStack.pop();
    if (n > 1) {
        addSetAssignmentToSelectAssignmentNodeList(setAssignmentNodeList, n - 1);
    }
    setAssignmentNodeList.add(expr);
}
/**
 * Opens a stream action. Its body is modeled as a lambda, so a lambda
 * function definition and a statement block are started alongside it.
 */
void startStreamActionNode(DiagnosticPos pos, PackageID packageID) {
    StreamActionNode streamActionNode = TreeBuilder.createStreamActionNode();
    ((BLangStreamAction) streamActionNode).pos = pos;
    this.streamActionNodeStack.push(streamActionNode);
    this.startLambdaFunctionDef(packageID);
    this.startBlock();
}

/**
 * Completes the stream action: closes the lambda's body, wires the single
 * staged variable as the lambda's parameter, finishes the lambda definition
 * and installs it as the action's invokable body.
 */
void endStreamActionNode(DiagnosticPos pos, Set<Whitespace> ws) {
    endCallableUnitBody(ws);
    StreamActionNode streamActionNode = this.streamActionNodeStack.peek();
    ((BLangStreamAction) streamActionNode).pos = pos;
    streamActionNode.addWS(ws);
    // The lambda takes exactly one parameter: the variable staged on varStack.
    this.varListStack.push(new ArrayList<>());
    this.varListStack.peek().add(this.varStack.pop());
    this.commaWsStack.push(ws);
    this.addLambdaFunctionDef(pos, ws, true, false, false);
    streamActionNode.setInvokableBody((BLangLambdaFunction) this.exprNodeStack.pop());
}
/** Opens a pattern streaming edge input and pushes it for later completion. */
void startPatternStreamingEdgeInputNode(DiagnosticPos pos) {
    PatternStreamingEdgeInputNode patternStreamingEdgeInputNode = TreeBuilder.createPatternStreamingEdgeInputNode();
    ((BLangPatternStreamingEdgeInput) patternStreamingEdgeInputNode).pos = pos;
    this.patternStreamingEdgeInputStack.push(patternStreamingEdgeInputNode);
}

/**
 * Completes the pattern streaming edge input on top of its stack. With two
 * stacked expressions the top one is the filter expression and the one below
 * is the stream reference; with one, only the stream reference is present.
 * An optional where clause and the alias are attached last.
 */
void endPatternStreamingEdgeInputNode(DiagnosticPos pos, Set<Whitespace> ws, String alias) {
    PatternStreamingEdgeInputNode patternStreamingEdgeInputNode = this.patternStreamingEdgeInputStack.peek();
    ((BLangPatternStreamingEdgeInput) patternStreamingEdgeInputNode).pos = pos;
    patternStreamingEdgeInputNode.addWS(ws);
    if (exprNodeStack.size() == 2) {
        patternStreamingEdgeInputNode.setExpression(exprNodeStack.pop());
        patternStreamingEdgeInputNode.setStreamReference(exprNodeStack.pop());
    } else if (exprNodeStack.size() == 1) {
        patternStreamingEdgeInputNode.setStreamReference(exprNodeStack.pop());
    }
    if (!whereClauseStack.empty()) {
        patternStreamingEdgeInputNode.setWhereClause(whereClauseStack.pop());
    }
    patternStreamingEdgeInputNode.setAliasIdentifier(alias);
}
/** Opens a pattern streaming input and pushes it for later completion. */
void startPatternStreamingInputNode(DiagnosticPos pos) {
    PatternStreamingInputNode patternStreamingInputNode = TreeBuilder.createPatternStreamingInputNode();
    ((BLangPatternStreamingInput) patternStreamingInputNode).pos = pos;
    this.patternStreamingInputStack.push(patternStreamingInputNode);
}

/**
 * Completes a pattern streaming input, dispatching on the parser's flags to
 * the matching {@code process*} helper (followed-by, parenthesised, negation,
 * and/or conditions, timed negation, comma-separated sequence). When no flag
 * is set this is a plain single-edge pattern: it adopts one edge input and
 * becomes the "most recent" pattern for enclosing combinators to pick up.
 * Finally, if this emptied the stack, the most recent pattern is pushed back
 * so the enclosing clause can pop the completed composite.
 */
void endPatternStreamingInputNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isFollowedBy,
                                  boolean enclosedInParenthesis, boolean andWithNotAvailable,
                                  boolean forWithNotAvailable, boolean onlyAndAvailable,
                                  boolean onlyOrAvailable, boolean commaSeparated,
                                  String timeDurationValue, String timeScale) {
    if (!this.patternStreamingInputStack.empty()) {
        PatternStreamingInputNode patternStreamingInputNode = this.patternStreamingInputStack.pop();
        ((BLangPatternStreamingInput) patternStreamingInputNode).pos = pos;
        patternStreamingInputNode.addWS(ws);
        if (isFollowedBy) {
            processFollowedByPattern(patternStreamingInputNode);
        }
        if (enclosedInParenthesis) {
            processEnclosedPattern(patternStreamingInputNode);
        }
        if (andWithNotAvailable) {
            processNegationPattern(patternStreamingInputNode);
        }
        if (onlyAndAvailable) {
            processPatternWithAndCondition(patternStreamingInputNode);
        }
        if (onlyOrAvailable) {
            processPatternWithOrCondition(patternStreamingInputNode);
        }
        if (forWithNotAvailable) {
            processNegationPatternWithTimeDuration(patternStreamingInputNode, timeDurationValue, timeScale);
        }
        if (commaSeparated) {
            processCommaSeparatedSequence(patternStreamingInputNode);
        }
        if (!(isFollowedBy || enclosedInParenthesis || forWithNotAvailable ||
                onlyAndAvailable || onlyOrAvailable || andWithNotAvailable || commaSeparated)) {
            // Base case: a single pattern edge with no combinator.
            patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
            this.recentStreamingPatternInputNode = patternStreamingInputNode;
        }
    }
    if (this.patternStreamingInputStack.empty()) {
        this.patternStreamingInputStack.push(this.recentStreamingPatternInputNode);
        this.recentStreamingPatternInputNode = null;
    }
}
/**
 * Combines the pattern just completed (recentStreamingPatternInputNode) with
 * the next edge input as a comma-separated sequence, and makes the combined
 * node the new most-recent pattern.
 */
private void processCommaSeparatedSequence(PatternStreamingInputNode patternStreamingInputNode) {
    patternStreamingInputNode.setCommaSeparated(true);
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    patternStreamingInputNode.setPatternStreamingInput(this.recentStreamingPatternInputNode);
    this.recentStreamingPatternInputNode = patternStreamingInputNode;
}

/**
 * Builds a timed negation pattern ({@code not ... for <duration>}) from one
 * edge input plus the time-duration value and scale.
 */
private void processNegationPatternWithTimeDuration(PatternStreamingInputNode patternStreamingInputNode,
                                                    String timeDurationValue, String timeScale) {
    patternStreamingInputNode.setForWithNot(true);
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    patternStreamingInputNode.setTimeDurationValue(timeDurationValue);
    patternStreamingInputNode.setTimeScale(timeScale);
    this.recentStreamingPatternInputNode = patternStreamingInputNode;
}

/** Builds an {@code a || b} pattern from the two staged edge inputs. */
private void processPatternWithOrCondition(PatternStreamingInputNode patternStreamingInputNode) {
    patternStreamingInputNode.setOrOnly(true);
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    this.recentStreamingPatternInputNode = patternStreamingInputNode;
}

/** Builds an {@code a && b} pattern from the two staged edge inputs. */
private void processPatternWithAndCondition(PatternStreamingInputNode patternStreamingInputNode) {
    patternStreamingInputNode.setAndOnly(true);
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    this.recentStreamingPatternInputNode = patternStreamingInputNode;
}

/** Builds a {@code not a && b} pattern from the two staged edge inputs. */
private void processNegationPattern(PatternStreamingInputNode patternStreamingInputNode) {
    patternStreamingInputNode.setAndWithNot(true);
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    this.recentStreamingPatternInputNode = patternStreamingInputNode;
}

/**
 * Wraps the most recent pattern in parentheses and makes the wrapper the new
 * most-recent pattern.
 */
private void processEnclosedPattern(PatternStreamingInputNode patternStreamingInputNode) {
    patternStreamingInputNode.setEnclosedInParenthesis(true);
    patternStreamingInputNode.setPatternStreamingInput(this.recentStreamingPatternInputNode);
    this.recentStreamingPatternInputNode = patternStreamingInputNode;
}

/**
 * Builds a followed-by pattern: one edge input followed by the most recent
 * pattern; the combined node becomes the new most-recent pattern.
 */
private void processFollowedByPattern(PatternStreamingInputNode patternStreamingInputNode) {
    patternStreamingInputNode.setFollowedBy(true);
    patternStreamingInputNode.addPatternStreamingEdgeInput(this.patternStreamingEdgeInputStack.pop());
    patternStreamingInputNode.setPatternStreamingInput(this.recentStreamingPatternInputNode);
    this.recentStreamingPatternInputNode = patternStreamingInputNode;
}
/** Opens a streaming query statement and pushes it for later completion. */
void startStreamingQueryStatementNode(DiagnosticPos pos) {
    StreamingQueryStatementNode streamingQueryStatementNode = TreeBuilder.createStreamingQueryStatementNode();
    ((BLangStreamingQueryStatement) streamingQueryStatementNode).pos = pos;
    this.streamingQueryStatementStack.push(streamingQueryStatementNode);
}

/**
 * Completes the streaming query statement on top of its stack. The input is
 * either a streaming input (optionally joined) or a pattern clause. A missing
 * select clause defaults to {@code select *}. Optional order-by and
 * output-rate-limit clauses are attached, and the mandatory stream action last.
 */
void endStreamingQueryStatementNode(DiagnosticPos pos, Set<Whitespace> ws) {
    StreamingQueryStatementNode streamingQueryStatementNode = this.streamingQueryStatementStack.peek();
    ((BLangStreamingQueryStatement) streamingQueryStatementNode).pos = pos;
    streamingQueryStatementNode.addWS(ws);
    if (!streamingInputStack.empty()) {
        streamingQueryStatementNode.setStreamingInput(streamingInputStack.pop());
        if (!joinStreamingInputsStack.empty()) {
            streamingQueryStatementNode.setJoinStreamingInput(joinStreamingInputsStack.pop());
        }
    } else if (!patternClauseStack.empty()) {
        streamingQueryStatementNode.setPatternClause(patternClauseStack.pop());
    }
    if (!selectClausesStack.empty()) {
        streamingQueryStatementNode.setSelectClause(selectClausesStack.pop());
    } else {
        // No explicit select clause: behave as "select *".
        SelectClauseNode selectClauseNode = new BLangSelectClause();
        selectClauseNode.setSelectAll(true);
        streamingQueryStatementNode.setSelectClause(selectClauseNode);
    }
    if (!orderByClauseStack.empty()) {
        streamingQueryStatementNode.setOrderByClause(orderByClauseStack.pop());
    }
    if (!outputRateLimitStack.empty()) {
        streamingQueryStatementNode.setOutputRateLimitNode(outputRateLimitStack.pop());
    }
    streamingQueryStatementNode.setStreamingAction(streamActionNodeStack.pop());
}
/** Opens an output-rate-limit clause and pushes it for later completion. */
void startOutputRateLimitNode(DiagnosticPos pos) {
    OutputRateLimitNode outputRateLimit = TreeBuilder.createOutputRateLimitNode();
    ((BLangOutputRateLimit) outputRateLimit).pos = pos;
    this.outputRateLimitStack.push(outputRateLimit);
}

/**
 * Completes the output-rate-limit clause with its snapshot flag, output rate
 * type (first/last/all), time scale and the rate-limit value.
 */
void endOutputRateLimitNode(DiagnosticPos pos, Set<Whitespace> ws, boolean isSnapshotOutputRateLimit,
                            boolean isFirst, boolean isLast, boolean isAll, String timeScale,
                            String rateLimitValue) {
    OutputRateLimitNode outputRateLimit = this.outputRateLimitStack.peek();
    ((BLangOutputRateLimit) outputRateLimit).pos = pos;
    outputRateLimit.addWS(ws);
    outputRateLimit.setSnapshot(isSnapshotOutputRateLimit);
    outputRateLimit.setOutputRateType(isFirst, isLast, isAll);
    outputRateLimit.setTimeScale(timeScale);
    outputRateLimit.setRateLimitValue(rateLimitValue);
}
/** Opens a within clause and pushes it for later completion. */
void startWithinClause(DiagnosticPos pos) {
    WithinClause withinClause = TreeBuilder.createWithinClause();
    ((BLangWithinClause) withinClause).pos = pos;
    this.withinClauseStack.push(withinClause);
}

/** Completes the within clause with its time-duration value and scale. */
void endWithinClause(DiagnosticPos pos, Set<Whitespace> ws, String timeDurationValue, String timeScale) {
    WithinClause withinClause = this.withinClauseStack.peek();
    ((BLangWithinClause) withinClause).pos = pos;
    withinClause.addWS(ws);
    withinClause.setTimeDurationValue(timeDurationValue);
    withinClause.setTimeScale(timeScale);
}

/** Opens a pattern clause and pushes it for later completion. */
void startPatternClause(DiagnosticPos pos) {
    PatternClause patternClause = TreeBuilder.createPatternClause();
    ((BLangPatternClause) patternClause).pos = pos;
    this.patternClauseStack.push(patternClause);
}

/**
 * Completes the pattern clause: records whether it matches every event,
 * adopts the completed pattern streaming input, and the optional within clause.
 */
void endPatternClause(boolean isForEvents, boolean isWithinClauseAvailable, DiagnosticPos pos,
                      Set<Whitespace> ws) {
    PatternClause patternClause = this.patternClauseStack.peek();
    ((BLangPatternClause) patternClause).pos = pos;
    patternClause.addWS(ws);
    patternClause.setForAllEvents(isForEvents);
    patternClause.setPatternStreamingInputNode(this.patternStreamingInputStack.pop());
    if (isWithinClauseAvailable) {
        patternClause.setWithinClause(this.withinClauseStack.pop());
    }
}
/** Opens a forever block and pushes it for later completion. */
void startForeverNode(DiagnosticPos pos) {
    ForeverNode foreverNode = TreeBuilder.createForeverNode();
    ((BLangForever) foreverNode).pos = pos;
    this.foreverNodeStack.push(foreverNode);
}

/**
 * Completes the forever block: adopts any staged parameters and all streaming
 * query statements (reversed back into source order), appends the block to the
 * current statement list, and — when the non-Siddhi runtime is used — injects
 * an implicit import of the streams module the generated code depends on.
 */
void endForeverNode(DiagnosticPos pos, Set<Whitespace> ws) {
    ForeverNode foreverNode = this.foreverNodeStack.pop();
    ((BLangForever) foreverNode).pos = pos;
    foreverNode.addWS(ws);
    if (!this.varListStack.empty()) {
        this.varListStack.pop().forEach(param -> foreverNode.addParameter((SimpleVariableNode) param));
    }
    Collections.reverse(streamingQueryStatementStack);
    while (!streamingQueryStatementStack.empty()) {
        foreverNode.addStreamingQueryStatement(streamingQueryStatementStack.pop());
    }
    addStmtToCurrentBlock(foreverNode);
    if (!foreverNode.isSiddhiRuntimeEnabled()) {
        List<String> nameComps = getPackageNameComps(Names.STREAMS_MODULE.value);
        addImportPackageDeclaration(pos, null, Names.STREAMS_ORG.value, nameComps, null,
                nameComps.get(nameComps.size() - 1));
    }
}
BLangLambdaFunction getScopesFunctionDef(DiagnosticPos pos, Set<Whitespace> ws, boolean bodyExists, String name) {
BLangFunction function = (BLangFunction) this.invokableNodeStack.pop();
function.pos = pos;
function.addWS(ws);
function.flagSet.add(Flag.PUBLIC);
function.flagSet.add(Flag.LAMBDA);
if (!bodyExists) {
function.body = null;
}
BLangIdentifier nameId = new BLangIdentifier();
nameId.setValue(Names.GEN_VAR_PREFIX + name);
function.name = nameId;
BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
typeNode.pos = pos;
typeNode.typeKind = TypeKind.NIL;
function.returnTypeNode = typeNode;
function.receiver = null;
BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambda.function = function;
return lambda;
}
public void addTypeReference(DiagnosticPos currentPos, Set<Whitespace> ws) {
TypeNode typeRef = typeNodeStack.pop();
typeRef.addWS(ws);
BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeNodeStack.peek();
structureTypeNode.addTypeReference(typeRef);
}
public void createTypeTestExpression(DiagnosticPos pos, Set<Whitespace> ws) {
BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
typeTestExpr.expr = (BLangExpression) this.exprNodeStack.pop();
typeTestExpr.typeNode = (BLangType) this.typeNodeStack.pop();
typeTestExpr.pos = pos;
typeTestExpr.addWS(ws);
addExpressionNode(typeTestExpr);
}
void handleWait(DiagnosticPos currentPos, Set<Whitespace> ws) {
BLangWaitExpr waitExpr = TreeBuilder.createWaitExpressionNode();
waitExpr.exprList = Collections.singletonList((BLangExpression) this.exprNodeStack.pop());
waitExpr.pos = currentPos;
waitExpr.addWS(ws);
addExpressionNode(waitExpr);
}
void startWaitForAll() {
BLangWaitForAllExpr bLangWaitForAll = TreeBuilder.createWaitForAllExpressionNode();
waitCollectionStack.push(bLangWaitForAll);
}
void handleWaitForAll(DiagnosticPos pos, Set<Whitespace> ws) {
BLangWaitForAllExpr waitForAllExpr = waitCollectionStack.pop();
waitForAllExpr.pos = pos;
waitForAllExpr.addWS(ws);
addExpressionNode(waitForAllExpr);
}
void addKeyValueToWaitForAll(DiagnosticPos pos, Set<Whitespace> ws, String identifier, boolean containsExpr) {
BLangWaitForAllExpr.BLangWaitKeyValue keyValue = TreeBuilder.createWaitKeyValueNode();
keyValue.addWS(ws);
keyValue.pos = pos;
BLangIdentifier key = (BLangIdentifier) TreeBuilder.createIdentifierNode();
key.setLiteral(false);
key.setValue(identifier);
keyValue.key = key;
if (containsExpr) {
keyValue.valueExpr = (BLangExpression) exprNodeStack.pop();
} else {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
varRef.pos = pos;
varRef.variableName = key;
varRef.addWS(ws);
varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
keyValue.keyExpr = varRef;
}
waitCollectionStack.peek().keyValuePairs.add(keyValue);
}
} |
Why should we add `synchronized`? `flush()` is already `synchronized`, right? | public void open(int taskNumber, int numTasks) throws IOException {
try {
establishConnection();
if (keyFields == null || keyFields.length == 0) {
String insertSQL = dialect.getInsertIntoStatement(tableName, fieldNames);
jdbcWriter = new AppendOnlyWriter(insertSQL, fieldTypes);
} else {
jdbcWriter = UpsertWriter.create(
dialect, tableName, fieldNames, fieldTypes, keyFields,
getRuntimeContext().getExecutionConfig().isObjectReuseEnabled());
}
jdbcWriter.open(connection);
} catch (SQLException sqe) {
throw new IllegalArgumentException("open() failed.", sqe);
} catch (ClassNotFoundException cnfe) {
throw new IllegalArgumentException("JDBC driver class not found.", cnfe);
}
if (flushIntervalMills != 0) {
this.scheduler = Executors.newScheduledThreadPool(
1, new ExecutorThreadFactory("jdbc-upsert-output-format"));
this.scheduledFuture = this.scheduler.scheduleWithFixedDelay(() -> {
synchronized (JDBCUpsertOutputFormat.this) {
if (closed) {
return;
}
try {
flush();
} catch (Exception e) {
flushException = e;
}
}
}, flushIntervalMills, flushIntervalMills, TimeUnit.MILLISECONDS);
}
} | synchronized (JDBCUpsertOutputFormat.this) { | public void open(int taskNumber, int numTasks) throws IOException {
try {
establishConnection();
if (keyFields == null || keyFields.length == 0) {
String insertSQL = dialect.getInsertIntoStatement(tableName, fieldNames);
jdbcWriter = new AppendOnlyWriter(insertSQL, fieldTypes);
} else {
jdbcWriter = UpsertWriter.create(
dialect, tableName, fieldNames, fieldTypes, keyFields,
getRuntimeContext().getExecutionConfig().isObjectReuseEnabled());
}
jdbcWriter.open(connection);
} catch (SQLException sqe) {
throw new IllegalArgumentException("open() failed.", sqe);
} catch (ClassNotFoundException cnfe) {
throw new IllegalArgumentException("JDBC driver class not found.", cnfe);
}
if (flushIntervalMills != 0) {
this.scheduler = Executors.newScheduledThreadPool(
1, new ExecutorThreadFactory("jdbc-upsert-output-format"));
this.scheduledFuture = this.scheduler.scheduleWithFixedDelay(() -> {
synchronized (JDBCUpsertOutputFormat.this) {
if (closed) {
return;
}
try {
flush();
} catch (Exception e) {
flushException = e;
}
}
}, flushIntervalMills, flushIntervalMills, TimeUnit.MILLISECONDS);
}
} | class JDBCUpsertOutputFormat extends AbstractJDBCOutputFormat<Tuple2<Boolean, Row>> {
private static final long serialVersionUID = 1L;
private static final Logger LOG = LoggerFactory.getLogger(JDBCUpsertOutputFormat.class);
static final int DEFAULT_MAX_RETRY_TIMES = 3;
private final String tableName;
private final JDBCDialect dialect;
private final String[] fieldNames;
private final String[] keyFields;
private final int[] fieldTypes;
private final int flushMaxSize;
private final long flushIntervalMills;
private final int maxRetryTimes;
private transient JDBCWriter jdbcWriter;
private transient int batchCount = 0;
private transient volatile boolean closed = false;
private transient ScheduledExecutorService scheduler;
private transient ScheduledFuture scheduledFuture;
private transient volatile Exception flushException;
public JDBCUpsertOutputFormat(
JDBCOptions options,
String[] fieldNames,
String[] keyFields,
int[] fieldTypes,
int flushMaxSize,
long flushIntervalMills,
int maxRetryTimes) {
super(options.getUsername(), options.getPassword(), options.getDriverName(), options.getDbURL());
this.tableName = options.getTableName();
this.dialect = options.getDialect();
this.fieldNames = fieldNames;
this.keyFields = keyFields;
this.fieldTypes = fieldTypes;
this.flushMaxSize = flushMaxSize;
this.flushIntervalMills = flushIntervalMills;
this.maxRetryTimes = maxRetryTimes;
}
/**
* Connects to the target database and initializes the prepared statement.
*
* @param taskNumber The number of the parallel instance.
* @throws IOException Thrown, if the output could not be opened due to an
* I/O problem.
*/
@Override
private void checkFlushException() {
if (flushException != null) {
throw new RuntimeException("Writing records to JDBC failed.", flushException);
}
}
@Override
public synchronized void writeRecord(Tuple2<Boolean, Row> tuple2) throws IOException {
checkFlushException();
try {
jdbcWriter.addRecord(tuple2);
batchCount++;
if (batchCount >= flushMaxSize) {
flush();
}
} catch (Exception e) {
throw new RuntimeException("Writing records to JDBC failed.", e);
}
}
public synchronized void flush() throws Exception {
checkFlushException();
for (int i = 1; i <= maxRetryTimes; i++) {
try {
jdbcWriter.executeBatch();
batchCount = 0;
break;
} catch (SQLException e) {
LOG.error("JDBC executeBatch error, retry times = {}", i, e);
if (i >= maxRetryTimes) {
throw e;
}
Thread.sleep(1000 * i);
}
}
}
/**
* Executes prepared statement and closes all resources of this instance.
*
* @throws IOException Thrown, if the input could not be closed properly.
*/
@Override
public synchronized void close() throws IOException {
if (closed) {
return;
}
closed = true;
checkFlushException();
if (this.scheduledFuture != null) {
scheduledFuture.cancel(false);
this.scheduler.shutdown();
}
if (batchCount > 0) {
try {
flush();
} catch (Exception e) {
throw new RuntimeException("Writing records to JDBC failed.", e);
}
}
try {
jdbcWriter.close();
} catch (SQLException e) {
LOG.warn("Close JDBC writer failed.", e);
}
closeDbConnection();
}
public static Builder builder() {
return new Builder();
}
/**
* Builder for a {@link JDBCUpsertOutputFormat}.
*/
public static class Builder {
private JDBCOptions options;
private String[] fieldNames;
private String[] keyFields;
private int[] fieldTypes;
private int flushMaxSize = DEFAULT_FLUSH_MAX_SIZE;
private long flushIntervalMills = DEFAULT_FLUSH_INTERVAL_MILLS;
private int maxRetryTimes = DEFAULT_MAX_RETRY_TIMES;
/**
* required, jdbc options.
*/
public Builder setOptions(JDBCOptions options) {
this.options = options;
return this;
}
/**
* required, field names of this jdbc sink.
*/
public Builder setFieldNames(String[] fieldNames) {
this.fieldNames = fieldNames;
return this;
}
/**
* required, upsert unique keys.
*/
public Builder setKeyFields(String[] keyFields) {
this.keyFields = keyFields;
return this;
}
/**
* required, field types of this jdbc sink.
*/
public Builder setFieldTypes(int[] fieldTypes) {
this.fieldTypes = fieldTypes;
return this;
}
/**
* optional, flush max size (includes all append, upsert and delete records),
* over this number of records, will flush data.
*/
public Builder setFlushMaxSize(int flushMaxSize) {
this.flushMaxSize = flushMaxSize;
return this;
}
/**
* optional, flush interval mills, over this time, asynchronous threads will flush data.
*/
public Builder setFlushIntervalMills(long flushIntervalMills) {
this.flushIntervalMills = flushIntervalMills;
return this;
}
/**
* optional, max retry times for jdbc connector.
*/
public Builder setMaxRetryTimes(int maxRetryTimes) {
this.maxRetryTimes = maxRetryTimes;
return this;
}
/**
* Finalizes the configuration and checks validity.
*
* @return Configured JDBCUpsertOutputFormat
*/
public JDBCUpsertOutputFormat build() {
checkNotNull(options, "No options supplied.");
checkNotNull(fieldNames, "No fieldNames supplied.");
return new JDBCUpsertOutputFormat(
options, fieldNames, keyFields, fieldTypes, flushMaxSize, flushIntervalMills, maxRetryTimes);
}
}
} | class JDBCUpsertOutputFormat extends AbstractJDBCOutputFormat<Tuple2<Boolean, Row>> {
private static final long serialVersionUID = 1L;
private static final Logger LOG = LoggerFactory.getLogger(JDBCUpsertOutputFormat.class);
static final int DEFAULT_MAX_RETRY_TIMES = 3;
private final String tableName;
private final JDBCDialect dialect;
private final String[] fieldNames;
private final String[] keyFields;
private final int[] fieldTypes;
private final int flushMaxSize;
private final long flushIntervalMills;
private final int maxRetryTimes;
private transient JDBCWriter jdbcWriter;
private transient int batchCount = 0;
private transient volatile boolean closed = false;
private transient ScheduledExecutorService scheduler;
private transient ScheduledFuture scheduledFuture;
private transient volatile Exception flushException;
public JDBCUpsertOutputFormat(
JDBCOptions options,
String[] fieldNames,
String[] keyFields,
int[] fieldTypes,
int flushMaxSize,
long flushIntervalMills,
int maxRetryTimes) {
super(options.getUsername(), options.getPassword(), options.getDriverName(), options.getDbURL());
this.tableName = options.getTableName();
this.dialect = options.getDialect();
this.fieldNames = fieldNames;
this.keyFields = keyFields;
this.fieldTypes = fieldTypes;
this.flushMaxSize = flushMaxSize;
this.flushIntervalMills = flushIntervalMills;
this.maxRetryTimes = maxRetryTimes;
}
/**
* Connects to the target database and initializes the prepared statement.
*
* @param taskNumber The number of the parallel instance.
* @throws IOException Thrown, if the output could not be opened due to an
* I/O problem.
*/
@Override
private void checkFlushException() {
if (flushException != null) {
throw new RuntimeException("Writing records to JDBC failed.", flushException);
}
}
@Override
public synchronized void writeRecord(Tuple2<Boolean, Row> tuple2) throws IOException {
checkFlushException();
try {
jdbcWriter.addRecord(tuple2);
batchCount++;
if (batchCount >= flushMaxSize) {
flush();
}
} catch (Exception e) {
throw new RuntimeException("Writing records to JDBC failed.", e);
}
}
public synchronized void flush() throws Exception {
checkFlushException();
for (int i = 1; i <= maxRetryTimes; i++) {
try {
jdbcWriter.executeBatch();
batchCount = 0;
break;
} catch (SQLException e) {
LOG.error("JDBC executeBatch error, retry times = {}", i, e);
if (i >= maxRetryTimes) {
throw e;
}
Thread.sleep(1000 * i);
}
}
}
/**
* Executes prepared statement and closes all resources of this instance.
*
* @throws IOException Thrown, if the input could not be closed properly.
*/
@Override
public synchronized void close() throws IOException {
if (closed) {
return;
}
closed = true;
checkFlushException();
if (this.scheduledFuture != null) {
scheduledFuture.cancel(false);
this.scheduler.shutdown();
}
if (batchCount > 0) {
try {
flush();
} catch (Exception e) {
throw new RuntimeException("Writing records to JDBC failed.", e);
}
}
try {
jdbcWriter.close();
} catch (SQLException e) {
LOG.warn("Close JDBC writer failed.", e);
}
closeDbConnection();
}
public static Builder builder() {
return new Builder();
}
/**
* Builder for a {@link JDBCUpsertOutputFormat}.
*/
public static class Builder {
private JDBCOptions options;
private String[] fieldNames;
private String[] keyFields;
private int[] fieldTypes;
private int flushMaxSize = DEFAULT_FLUSH_MAX_SIZE;
private long flushIntervalMills = DEFAULT_FLUSH_INTERVAL_MILLS;
private int maxRetryTimes = DEFAULT_MAX_RETRY_TIMES;
/**
* required, jdbc options.
*/
public Builder setOptions(JDBCOptions options) {
this.options = options;
return this;
}
/**
* required, field names of this jdbc sink.
*/
public Builder setFieldNames(String[] fieldNames) {
this.fieldNames = fieldNames;
return this;
}
/**
* required, upsert unique keys.
*/
public Builder setKeyFields(String[] keyFields) {
this.keyFields = keyFields;
return this;
}
/**
* required, field types of this jdbc sink.
*/
public Builder setFieldTypes(int[] fieldTypes) {
this.fieldTypes = fieldTypes;
return this;
}
/**
* optional, flush max size (includes all append, upsert and delete records),
* over this number of records, will flush data.
*/
public Builder setFlushMaxSize(int flushMaxSize) {
this.flushMaxSize = flushMaxSize;
return this;
}
/**
* optional, flush interval mills, over this time, asynchronous threads will flush data.
*/
public Builder setFlushIntervalMills(long flushIntervalMills) {
this.flushIntervalMills = flushIntervalMills;
return this;
}
/**
* optional, max retry times for jdbc connector.
*/
public Builder setMaxRetryTimes(int maxRetryTimes) {
this.maxRetryTimes = maxRetryTimes;
return this;
}
/**
* Finalizes the configuration and checks validity.
*
* @return Configured JDBCUpsertOutputFormat
*/
public JDBCUpsertOutputFormat build() {
checkNotNull(options, "No options supplied.");
checkNotNull(fieldNames, "No fieldNames supplied.");
return new JDBCUpsertOutputFormat(
options, fieldNames, keyFields, fieldTypes, flushMaxSize, flushIntervalMills, maxRetryTimes);
}
}
} |
Yes, this new property only affects whether the column would be assigned a unique id or not. Alter table job will try to use the light schema change optimization (like add new value column) if the columns of the tables are assigned unique id. | private void createOlapTable(Database db, CreateTableStmt stmt) throws UserException {
String tableName = stmt.getTableName();
LOG.debug("begin create olap table: {}", tableName);
List<Column> baseSchema = stmt.getColumns();
validateColumns(baseSchema);
PartitionDesc partitionDesc = stmt.getPartitionDesc();
PartitionInfo partitionInfo = null;
Map<String, Long> partitionNameToId = Maps.newHashMap();
if (partitionDesc != null) {
PartitionDesc partDesc = partitionDesc;
for (SinglePartitionDesc desc : partDesc.getSinglePartitionDescs()) {
long partitionId = Env.getCurrentEnv().getNextId();
partitionNameToId.put(desc.getPartitionName(), partitionId);
}
partitionInfo = partitionDesc.toPartitionInfo(baseSchema, partitionNameToId, false);
} else {
if (DynamicPartitionUtil.checkDynamicPartitionPropertiesExist(stmt.getProperties())) {
throw new DdlException("Only support dynamic partition properties on range partition table");
}
long partitionId = Env.getCurrentEnv().getNextId();
partitionNameToId.put(tableName, partitionId);
partitionInfo = new SinglePartitionInfo();
}
KeysDesc keysDesc = stmt.getKeysDesc();
Preconditions.checkNotNull(keysDesc);
KeysType keysType = keysDesc.getKeysType();
DistributionDesc distributionDesc = stmt.getDistributionDesc();
Preconditions.checkNotNull(distributionDesc);
DistributionInfo defaultDistributionInfo = distributionDesc.toDistributionInfo(baseSchema);
short shortKeyColumnCount = Env.calcShortKeyColumnCount(baseSchema, stmt.getProperties());
LOG.debug("create table[{}] short key column count: {}", tableName, shortKeyColumnCount);
TableIndexes indexes = new TableIndexes(stmt.getIndexes());
long tableId = Env.getCurrentEnv().getNextId();
OlapTable olapTable = new OlapTable(tableId, tableName, baseSchema, keysType, partitionInfo,
defaultDistributionInfo, indexes);
olapTable.setComment(stmt.getComment());
long baseIndexId = Env.getCurrentEnv().getNextId();
olapTable.setBaseIndexId(baseIndexId);
Map<String, String> properties = stmt.getProperties();
Boolean useLightSchemaChange = true;
try {
useLightSchemaChange = PropertyAnalyzer.analyzeUseLightSchemaChange(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setUseLightSchemaChange(useLightSchemaChange);
if (useLightSchemaChange) {
LOG.info("table: {} uses light schema change", olapTable.getName());
for (Column column : baseSchema) {
column.setUniqueId(olapTable.incAndGetMaxColUniqueId());
LOG.debug("table: {}, newColumn: {}, uniqueId: {}", olapTable.getName(), column.getName(),
column.getUniqueId());
}
} else {
LOG.info("table: {} doesn't use light schema change", olapTable.getName());
}
TStorageFormat storageFormat = TStorageFormat.V2;
try {
storageFormat = PropertyAnalyzer.analyzeStorageFormat(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setStorageFormat(storageFormat);
TCompressionType compressionType = TCompressionType.LZ4;
try {
compressionType = PropertyAnalyzer.analyzeCompressionType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setCompressionType(compressionType);
DataSortInfo dataSortInfo = PropertyAnalyzer.analyzeDataSortInfo(properties, keysType,
keysDesc.keysColumnSize(), storageFormat);
olapTable.setDataSortInfo(dataSortInfo);
boolean enableUniqueKeyMergeOnWrite = false;
try {
enableUniqueKeyMergeOnWrite = PropertyAnalyzer.analyzeUniqueKeyMergeOnWrite(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setEnableUniqueKeyMergeOnWrite(enableUniqueKeyMergeOnWrite);
Set<String> bfColumns = null;
double bfFpp = 0;
try {
bfColumns = PropertyAnalyzer.analyzeBloomFilterColumns(properties, baseSchema, keysType);
if (bfColumns != null && bfColumns.isEmpty()) {
bfColumns = null;
}
bfFpp = PropertyAnalyzer.analyzeBloomFilterFpp(properties);
if (bfColumns != null && bfFpp == 0) {
bfFpp = FeConstants.default_bloom_filter_fpp;
} else if (bfColumns == null) {
bfFpp = 0;
}
olapTable.setBloomFilterInfo(bfColumns, bfFpp);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
ReplicaAllocation replicaAlloc = PropertyAnalyzer.analyzeReplicaAllocation(properties, "");
if (replicaAlloc.isNotSet()) {
replicaAlloc = ReplicaAllocation.DEFAULT_ALLOCATION;
}
olapTable.setReplicationAllocation(replicaAlloc);
boolean isInMemory = PropertyAnalyzer.analyzeBooleanProp(properties, PropertyAnalyzer.PROPERTIES_INMEMORY,
false);
olapTable.setIsInMemory(isInMemory);
String remoteStoragePolicy = PropertyAnalyzer.analyzeRemoteStoragePolicy(properties);
olapTable.setRemoteStoragePolicy(remoteStoragePolicy);
String storagePolicy = PropertyAnalyzer.analyzeStoragePolicy(properties);
Env.getCurrentEnv().getPolicyMgr().checkStoragePolicyExist(storagePolicy);
olapTable.setStoragePolicy(storagePolicy);
TTabletType tabletType;
try {
tabletType = PropertyAnalyzer.analyzeTabletType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {
long partitionId = partitionNameToId.get(tableName);
DataProperty dataProperty = null;
try {
dataProperty = PropertyAnalyzer.analyzeDataProperty(stmt.getProperties(),
DataProperty.DEFAULT_DATA_PROPERTY);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(dataProperty);
partitionInfo.setDataProperty(partitionId, dataProperty);
partitionInfo.setReplicaAllocation(partitionId, replicaAlloc);
partitionInfo.setIsInMemory(partitionId, isInMemory);
partitionInfo.setTabletType(partitionId, tabletType);
}
try {
String colocateGroup = PropertyAnalyzer.analyzeColocate(properties);
if (colocateGroup != null) {
if (defaultDistributionInfo.getType() == DistributionInfoType.RANDOM) {
throw new AnalysisException("Random distribution for colocate table is unsupported");
}
String fullGroupName = db.getId() + "_" + colocateGroup;
ColocateGroupSchema groupSchema = Env.getCurrentColocateIndex().getGroupSchema(fullGroupName);
if (groupSchema != null) {
groupSchema.checkColocateSchema(olapTable);
}
Env.getCurrentColocateIndex()
.addTableToGroup(db.getId(), olapTable, colocateGroup, null /* generate group id inside */);
olapTable.setColocateGroup(colocateGroup);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
TStorageType baseIndexStorageType = null;
try {
baseIndexStorageType = PropertyAnalyzer.analyzeStorageType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(baseIndexStorageType);
int schemaVersion = 0;
try {
schemaVersion = PropertyAnalyzer.analyzeSchemaVersion(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
int schemaHash = Util.generateSchemaHash();
olapTable.setIndexMeta(baseIndexId, tableName, baseSchema, schemaVersion, schemaHash, shortKeyColumnCount,
baseIndexStorageType, keysType);
for (AlterClause alterClause : stmt.getRollupAlterClauseList()) {
AddRollupClause addRollupClause = (AddRollupClause) alterClause;
Long baseRollupIndex = olapTable.getIndexIdByName(tableName);
TStorageType rollupIndexStorageType = null;
try {
rollupIndexStorageType = PropertyAnalyzer.analyzeStorageType(addRollupClause.getProperties());
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(rollupIndexStorageType);
List<Column> rollupColumns = Env.getCurrentEnv().getMaterializedViewHandler()
.checkAndPrepareMaterializedView(addRollupClause, olapTable, baseRollupIndex, false);
short rollupShortKeyColumnCount = Env.calcShortKeyColumnCount(rollupColumns, alterClause.getProperties());
int rollupSchemaHash = Util.generateSchemaHash();
long rollupIndexId = Env.getCurrentEnv().getNextId();
olapTable.setIndexMeta(rollupIndexId, addRollupClause.getRollupName(), rollupColumns, schemaVersion,
rollupSchemaHash, rollupShortKeyColumnCount, rollupIndexStorageType, keysType);
}
Type sequenceColType = null;
try {
sequenceColType = PropertyAnalyzer.analyzeSequenceType(properties, olapTable.getKeysType());
if (sequenceColType != null) {
olapTable.setSequenceInfo(sequenceColType);
}
} catch (Exception e) {
throw new DdlException(e.getMessage());
}
Long versionInfo = null;
try {
versionInfo = PropertyAnalyzer.analyzeVersionInfo(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(versionInfo);
Set<Long> tabletIdSet = new HashSet<>();
try {
if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {
DistributionInfo partitionDistributionInfo = distributionDesc.toDistributionInfo(baseSchema);
String partitionName = tableName;
long partitionId = partitionNameToId.get(partitionName);
long indexNum = olapTable.getIndexIdToMeta().size();
long bucketNum = partitionDistributionInfo.getBucketNum();
long replicaNum = partitionInfo.getReplicaAllocation(partitionId).getTotalReplicaNum();
long totalReplicaNum = indexNum * bucketNum * replicaNum;
if (totalReplicaNum >= db.getReplicaQuotaLeftWithLock()) {
throw new DdlException(
"Database " + db.getFullName() + " create unpartitioned table " + tableName + " increasing "
+ totalReplicaNum + " of replica exceeds quota[" + db.getReplicaQuota() + "]");
}
Partition partition = createPartitionWithIndices(db.getClusterName(), db.getId(), olapTable.getId(),
olapTable.getBaseIndexId(), partitionId, partitionName, olapTable.getIndexIdToMeta(),
partitionDistributionInfo, partitionInfo.getDataProperty(partitionId).getStorageMedium(),
partitionInfo.getReplicaAllocation(partitionId), versionInfo, bfColumns, bfFpp, tabletIdSet,
olapTable.getCopiedIndexes(), isInMemory, storageFormat, tabletType, compressionType,
olapTable.getDataSortInfo(), olapTable.getEnableUniqueKeyMergeOnWrite(), storagePolicy);
olapTable.addPartition(partition);
} else if (partitionInfo.getType() == PartitionType.RANGE
|| partitionInfo.getType() == PartitionType.LIST) {
try {
PropertyAnalyzer.analyzeDataProperty(stmt.getProperties(), DataProperty.DEFAULT_DATA_PROPERTY);
if (partitionInfo.getType() == PartitionType.RANGE) {
DynamicPartitionUtil.checkAndSetDynamicPartitionProperty(olapTable, properties, db);
} else if (partitionInfo.getType() == PartitionType.LIST) {
if (DynamicPartitionUtil.checkDynamicPartitionPropertiesExist(properties)) {
throw new DdlException(
"Only support dynamic partition properties on range partition table");
}
}
if (storagePolicy.equals("") && properties != null && !properties.isEmpty()) {
throw new DdlException("Unknown properties: " + properties);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
long totalReplicaNum = 0;
for (Map.Entry<String, Long> entry : partitionNameToId.entrySet()) {
long indexNum = olapTable.getIndexIdToMeta().size();
long bucketNum = defaultDistributionInfo.getBucketNum();
long replicaNum = partitionInfo.getReplicaAllocation(entry.getValue()).getTotalReplicaNum();
totalReplicaNum += indexNum * bucketNum * replicaNum;
}
if (totalReplicaNum >= db.getReplicaQuotaLeftWithLock()) {
throw new DdlException(
"Database " + db.getFullName() + " create table " + tableName + " increasing "
+ totalReplicaNum + " of replica exceeds quota[" + db.getReplicaQuota() + "]");
}
for (Map.Entry<String, Long> entry : partitionNameToId.entrySet()) {
DataProperty dataProperty = partitionInfo.getDataProperty(entry.getValue());
DistributionInfo partitionDistributionInfo = distributionDesc.toDistributionInfo(baseSchema);
String partionStoragePolicy = partitionInfo.getStoragePolicy(entry.getValue());
if (!partionStoragePolicy.equals("")) {
storagePolicy = partionStoragePolicy;
}
Env.getCurrentEnv().getPolicyMgr().checkStoragePolicyExist(storagePolicy);
Partition partition = createPartitionWithIndices(db.getClusterName(), db.getId(), olapTable.getId(),
olapTable.getBaseIndexId(), entry.getValue(), entry.getKey(), olapTable.getIndexIdToMeta(),
partitionDistributionInfo, dataProperty.getStorageMedium(),
partitionInfo.getReplicaAllocation(entry.getValue()), versionInfo, bfColumns, bfFpp,
tabletIdSet, olapTable.getCopiedIndexes(), isInMemory, storageFormat,
partitionInfo.getTabletType(entry.getValue()), compressionType,
olapTable.getDataSortInfo(), olapTable.getEnableUniqueKeyMergeOnWrite(), storagePolicy);
olapTable.addPartition(partition);
}
} else {
throw new DdlException("Unsupported partition method: " + partitionInfo.getType().name());
}
Pair<Boolean, Boolean> result = db.createTableWithLock(olapTable, false, stmt.isSetIfNotExists());
if (!result.first) {
ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
}
if (result.second) {
if (Env.getCurrentColocateIndex().isColocateTable(tableId)) {
Env.getCurrentColocateIndex().removeTable(tableId);
}
for (Long tabletId : tabletIdSet) {
Env.getCurrentInvertedIndex().deleteTablet(tabletId);
}
LOG.info("duplicate create table[{};{}], skip next steps", tableName, tableId);
} else {
if (Env.getCurrentColocateIndex().isColocateTable(tableId)) {
GroupId groupId = Env.getCurrentColocateIndex().getGroup(tableId);
Map<Tag, List<List<Long>>> backendsPerBucketSeq = Env.getCurrentColocateIndex()
.getBackendsPerBucketSeq(groupId);
ColocatePersistInfo info = ColocatePersistInfo.createForAddTable(groupId, tableId,
backendsPerBucketSeq);
Env.getCurrentEnv().getEditLog().logColocateAddTable(info);
}
LOG.info("successfully create table[{};{}]", tableName, tableId);
DynamicPartitionUtil.registerOrRemoveDynamicPartitionTable(db.getId(), olapTable, false);
Env.getCurrentEnv().getDynamicPartitionScheduler()
.createOrUpdateRuntimeInfo(tableId, DynamicPartitionScheduler.LAST_UPDATE_TIME,
TimeUtils.getCurrentFormatTime());
}
} catch (DdlException e) {
for (Long tabletId : tabletIdSet) {
Env.getCurrentInvertedIndex().deleteTablet(tabletId);
}
if (Env.getCurrentColocateIndex().isColocateTable(tableId)) {
Env.getCurrentColocateIndex().removeTable(tableId);
}
throw e;
}
} | } else { | private void createOlapTable(Database db, CreateTableStmt stmt) throws UserException {
String tableName = stmt.getTableName();
LOG.debug("begin create olap table: {}", tableName);
List<Column> baseSchema = stmt.getColumns();
validateColumns(baseSchema);
ReplicaAllocation replicaAlloc = PropertyAnalyzer.analyzeReplicaAllocation(stmt.getProperties(), "");
if (replicaAlloc.isNotSet()) {
replicaAlloc = ReplicaAllocation.DEFAULT_ALLOCATION;
}
long bufferSize = IdGeneratorUtil.getBufferSize(stmt, replicaAlloc);
IdGeneratorBuffer idGeneratorBuffer = Env.getCurrentEnv().getIdGeneratorBuffer(bufferSize);
PartitionDesc partitionDesc = stmt.getPartitionDesc();
PartitionInfo partitionInfo = null;
Map<String, Long> partitionNameToId = Maps.newHashMap();
if (partitionDesc != null) {
PartitionDesc partDesc = partitionDesc;
for (SinglePartitionDesc desc : partDesc.getSinglePartitionDescs()) {
long partitionId = idGeneratorBuffer.getNextId();
partitionNameToId.put(desc.getPartitionName(), partitionId);
}
partitionInfo = partitionDesc.toPartitionInfo(baseSchema, partitionNameToId, false);
} else {
if (DynamicPartitionUtil.checkDynamicPartitionPropertiesExist(stmt.getProperties())) {
throw new DdlException("Only support dynamic partition properties on range partition table");
}
long partitionId = idGeneratorBuffer.getNextId();
partitionNameToId.put(tableName, partitionId);
partitionInfo = new SinglePartitionInfo();
}
KeysDesc keysDesc = stmt.getKeysDesc();
Preconditions.checkNotNull(keysDesc);
KeysType keysType = keysDesc.getKeysType();
DistributionDesc distributionDesc = stmt.getDistributionDesc();
Preconditions.checkNotNull(distributionDesc);
DistributionInfo defaultDistributionInfo = distributionDesc.toDistributionInfo(baseSchema);
short shortKeyColumnCount = Env.calcShortKeyColumnCount(baseSchema, stmt.getProperties());
LOG.debug("create table[{}] short key column count: {}", tableName, shortKeyColumnCount);
TableIndexes indexes = new TableIndexes(stmt.getIndexes());
long tableId = idGeneratorBuffer.getNextId();
OlapTable olapTable = new OlapTable(tableId, tableName, baseSchema, keysType, partitionInfo,
defaultDistributionInfo, indexes);
olapTable.setComment(stmt.getComment());
long baseIndexId = idGeneratorBuffer.getNextId();
olapTable.setBaseIndexId(baseIndexId);
Map<String, String> properties = stmt.getProperties();
Boolean useLightSchemaChange = false;
try {
useLightSchemaChange = PropertyAnalyzer.analyzeUseLightSchemaChange(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setUseLightSchemaChange(useLightSchemaChange);
if (useLightSchemaChange) {
for (Column column : baseSchema) {
column.setUniqueId(olapTable.incAndGetMaxColUniqueId());
LOG.debug("table: {}, newColumn: {}, uniqueId: {}", olapTable.getName(), column.getName(),
column.getUniqueId());
}
} else {
LOG.debug("table: {} doesn't use light schema change", olapTable.getName());
}
TStorageFormat storageFormat = TStorageFormat.V2;
try {
storageFormat = PropertyAnalyzer.analyzeStorageFormat(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setStorageFormat(storageFormat);
TCompressionType compressionType = TCompressionType.LZ4;
try {
compressionType = PropertyAnalyzer.analyzeCompressionType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setCompressionType(compressionType);
DataSortInfo dataSortInfo = PropertyAnalyzer.analyzeDataSortInfo(properties, keysType,
keysDesc.keysColumnSize(), storageFormat);
olapTable.setDataSortInfo(dataSortInfo);
boolean enableUniqueKeyMergeOnWrite = false;
try {
enableUniqueKeyMergeOnWrite = PropertyAnalyzer.analyzeUniqueKeyMergeOnWrite(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setEnableUniqueKeyMergeOnWrite(enableUniqueKeyMergeOnWrite);
Set<String> bfColumns = null;
double bfFpp = 0;
try {
bfColumns = PropertyAnalyzer.analyzeBloomFilterColumns(properties, baseSchema, keysType);
if (bfColumns != null && bfColumns.isEmpty()) {
bfColumns = null;
}
bfFpp = PropertyAnalyzer.analyzeBloomFilterFpp(properties);
if (bfColumns != null && bfFpp == 0) {
bfFpp = FeConstants.default_bloom_filter_fpp;
} else if (bfColumns == null) {
bfFpp = 0;
}
olapTable.setBloomFilterInfo(bfColumns, bfFpp);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
olapTable.setReplicationAllocation(replicaAlloc);
boolean isInMemory = PropertyAnalyzer.analyzeBooleanProp(properties, PropertyAnalyzer.PROPERTIES_INMEMORY,
false);
olapTable.setIsInMemory(isInMemory);
String remoteStoragePolicy = PropertyAnalyzer.analyzeRemoteStoragePolicy(properties);
olapTable.setRemoteStoragePolicy(remoteStoragePolicy);
String storagePolicy = PropertyAnalyzer.analyzeStoragePolicy(properties);
Env.getCurrentEnv().getPolicyMgr().checkStoragePolicyExist(storagePolicy);
olapTable.setStoragePolicy(storagePolicy);
TTabletType tabletType;
try {
tabletType = PropertyAnalyzer.analyzeTabletType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {
long partitionId = partitionNameToId.get(tableName);
DataProperty dataProperty = null;
try {
dataProperty = PropertyAnalyzer.analyzeDataProperty(stmt.getProperties(),
DataProperty.DEFAULT_DATA_PROPERTY);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(dataProperty);
partitionInfo.setDataProperty(partitionId, dataProperty);
partitionInfo.setReplicaAllocation(partitionId, replicaAlloc);
partitionInfo.setIsInMemory(partitionId, isInMemory);
partitionInfo.setTabletType(partitionId, tabletType);
}
try {
String colocateGroup = PropertyAnalyzer.analyzeColocate(properties);
if (colocateGroup != null) {
if (defaultDistributionInfo.getType() == DistributionInfoType.RANDOM) {
throw new AnalysisException("Random distribution for colocate table is unsupported");
}
String fullGroupName = db.getId() + "_" + colocateGroup;
ColocateGroupSchema groupSchema = Env.getCurrentColocateIndex().getGroupSchema(fullGroupName);
if (groupSchema != null) {
groupSchema.checkColocateSchema(olapTable);
}
Env.getCurrentColocateIndex()
.addTableToGroup(db.getId(), olapTable, colocateGroup, null /* generate group id inside */);
olapTable.setColocateGroup(colocateGroup);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
TStorageType baseIndexStorageType = null;
try {
baseIndexStorageType = PropertyAnalyzer.analyzeStorageType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(baseIndexStorageType);
int schemaVersion = 0;
try {
schemaVersion = PropertyAnalyzer.analyzeSchemaVersion(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
int schemaHash = Util.generateSchemaHash();
olapTable.setIndexMeta(baseIndexId, tableName, baseSchema, schemaVersion, schemaHash, shortKeyColumnCount,
baseIndexStorageType, keysType);
for (AlterClause alterClause : stmt.getRollupAlterClauseList()) {
AddRollupClause addRollupClause = (AddRollupClause) alterClause;
Long baseRollupIndex = olapTable.getIndexIdByName(tableName);
TStorageType rollupIndexStorageType = null;
try {
rollupIndexStorageType = PropertyAnalyzer.analyzeStorageType(addRollupClause.getProperties());
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(rollupIndexStorageType);
List<Column> rollupColumns = Env.getCurrentEnv().getMaterializedViewHandler()
.checkAndPrepareMaterializedView(addRollupClause, olapTable, baseRollupIndex, false);
short rollupShortKeyColumnCount = Env.calcShortKeyColumnCount(rollupColumns, alterClause.getProperties());
int rollupSchemaHash = Util.generateSchemaHash();
long rollupIndexId = idGeneratorBuffer.getNextId();
olapTable.setIndexMeta(rollupIndexId, addRollupClause.getRollupName(), rollupColumns, schemaVersion,
rollupSchemaHash, rollupShortKeyColumnCount, rollupIndexStorageType, keysType);
}
Type sequenceColType = null;
try {
sequenceColType = PropertyAnalyzer.analyzeSequenceType(properties, olapTable.getKeysType());
if (sequenceColType != null) {
if (olapTable.getEnableUniqueKeyMergeOnWrite()) {
throw new AnalysisException("Unique key table with MoW(merge on write) not support "
+ "sequence column for now");
}
olapTable.setSequenceInfo(sequenceColType);
}
} catch (Exception e) {
throw new DdlException(e.getMessage());
}
Long versionInfo = null;
try {
versionInfo = PropertyAnalyzer.analyzeVersionInfo(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(versionInfo);
Set<Long> tabletIdSet = new HashSet<>();
try {
if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {
DistributionInfo partitionDistributionInfo = distributionDesc.toDistributionInfo(baseSchema);
String partitionName = tableName;
long partitionId = partitionNameToId.get(partitionName);
long indexNum = olapTable.getIndexIdToMeta().size();
long bucketNum = partitionDistributionInfo.getBucketNum();
long replicaNum = partitionInfo.getReplicaAllocation(partitionId).getTotalReplicaNum();
long totalReplicaNum = indexNum * bucketNum * replicaNum;
if (totalReplicaNum >= db.getReplicaQuotaLeftWithLock()) {
throw new DdlException(
"Database " + db.getFullName() + " create unpartitioned table " + tableName + " increasing "
+ totalReplicaNum + " of replica exceeds quota[" + db.getReplicaQuota() + "]");
}
Partition partition = createPartitionWithIndices(db.getClusterName(), db.getId(), olapTable.getId(),
olapTable.getBaseIndexId(), partitionId, partitionName, olapTable.getIndexIdToMeta(),
partitionDistributionInfo, partitionInfo.getDataProperty(partitionId).getStorageMedium(),
partitionInfo.getReplicaAllocation(partitionId), versionInfo, bfColumns, bfFpp, tabletIdSet,
olapTable.getCopiedIndexes(), isInMemory, storageFormat, tabletType, compressionType,
olapTable.getDataSortInfo(), olapTable.getEnableUniqueKeyMergeOnWrite(), storagePolicy,
idGeneratorBuffer);
olapTable.addPartition(partition);
} else if (partitionInfo.getType() == PartitionType.RANGE
|| partitionInfo.getType() == PartitionType.LIST) {
try {
PropertyAnalyzer.analyzeDataProperty(stmt.getProperties(), DataProperty.DEFAULT_DATA_PROPERTY);
if (partitionInfo.getType() == PartitionType.RANGE) {
DynamicPartitionUtil.checkAndSetDynamicPartitionProperty(olapTable, properties, db);
} else if (partitionInfo.getType() == PartitionType.LIST) {
if (DynamicPartitionUtil.checkDynamicPartitionPropertiesExist(properties)) {
throw new DdlException(
"Only support dynamic partition properties on range partition table");
}
}
if (storagePolicy.equals("") && properties != null && !properties.isEmpty()) {
throw new DdlException("Unknown properties: " + properties);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
long totalReplicaNum = 0;
for (Map.Entry<String, Long> entry : partitionNameToId.entrySet()) {
long indexNum = olapTable.getIndexIdToMeta().size();
long bucketNum = defaultDistributionInfo.getBucketNum();
long replicaNum = partitionInfo.getReplicaAllocation(entry.getValue()).getTotalReplicaNum();
totalReplicaNum += indexNum * bucketNum * replicaNum;
}
if (totalReplicaNum >= db.getReplicaQuotaLeftWithLock()) {
throw new DdlException(
"Database " + db.getFullName() + " create table " + tableName + " increasing "
+ totalReplicaNum + " of replica exceeds quota[" + db.getReplicaQuota() + "]");
}
for (Map.Entry<String, Long> entry : partitionNameToId.entrySet()) {
DataProperty dataProperty = partitionInfo.getDataProperty(entry.getValue());
DistributionInfo partitionDistributionInfo = distributionDesc.toDistributionInfo(baseSchema);
String partionStoragePolicy = partitionInfo.getStoragePolicy(entry.getValue());
if (!partionStoragePolicy.equals("")) {
storagePolicy = partionStoragePolicy;
}
Env.getCurrentEnv().getPolicyMgr().checkStoragePolicyExist(storagePolicy);
Partition partition = createPartitionWithIndices(db.getClusterName(), db.getId(), olapTable.getId(),
olapTable.getBaseIndexId(), entry.getValue(), entry.getKey(), olapTable.getIndexIdToMeta(),
partitionDistributionInfo, dataProperty.getStorageMedium(),
partitionInfo.getReplicaAllocation(entry.getValue()), versionInfo, bfColumns, bfFpp,
tabletIdSet, olapTable.getCopiedIndexes(), isInMemory, storageFormat,
partitionInfo.getTabletType(entry.getValue()), compressionType,
olapTable.getDataSortInfo(), olapTable.getEnableUniqueKeyMergeOnWrite(), storagePolicy,
idGeneratorBuffer);
olapTable.addPartition(partition);
}
} else {
throw new DdlException("Unsupported partition method: " + partitionInfo.getType().name());
}
Pair<Boolean, Boolean> result = db.createTableWithLock(olapTable, false, stmt.isSetIfNotExists());
if (!result.first) {
ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
}
if (result.second) {
if (Env.getCurrentColocateIndex().isColocateTable(tableId)) {
Env.getCurrentColocateIndex().removeTable(tableId);
}
for (Long tabletId : tabletIdSet) {
Env.getCurrentInvertedIndex().deleteTablet(tabletId);
}
LOG.info("duplicate create table[{};{}], skip next steps", tableName, tableId);
} else {
if (Env.getCurrentColocateIndex().isColocateTable(tableId)) {
GroupId groupId = Env.getCurrentColocateIndex().getGroup(tableId);
Map<Tag, List<List<Long>>> backendsPerBucketSeq = Env.getCurrentColocateIndex()
.getBackendsPerBucketSeq(groupId);
ColocatePersistInfo info = ColocatePersistInfo.createForAddTable(groupId, tableId,
backendsPerBucketSeq);
Env.getCurrentEnv().getEditLog().logColocateAddTable(info);
}
LOG.info("successfully create table[{};{}]", tableName, tableId);
DynamicPartitionUtil.registerOrRemoveDynamicPartitionTable(db.getId(), olapTable, false);
Env.getCurrentEnv().getDynamicPartitionScheduler()
.createOrUpdateRuntimeInfo(tableId, DynamicPartitionScheduler.LAST_UPDATE_TIME,
TimeUtils.getCurrentFormatTime());
}
} catch (DdlException e) {
for (Long tabletId : tabletIdSet) {
Env.getCurrentInvertedIndex().deleteTablet(tabletId);
}
if (Env.getCurrentColocateIndex().isColocateTable(tableId)) {
Env.getCurrentColocateIndex().removeTable(tableId);
}
throw e;
}
} | class InternalDataSource implements DataSourceIf<Database> {
    // Name and fixed id of the built-in ("internal") datasource.
    public static final String INTERNAL_DS_NAME = "internal";
    public static final long INTERNAL_DS_ID = 0L;
    private static final Logger LOG = LogManager.getLogger(InternalDataSource.class);
    // Fair lock guarding catalog-level structural changes (create/drop/rename db, cluster ops).
    private QueryableReentrantLock lock = new QueryableReentrantLock(true);
    // Both maps index the same Database instances, by id and by fully-qualified name.
    private ConcurrentHashMap<Long, Database> idToDb = new ConcurrentHashMap<>();
    private ConcurrentHashMap<String, Database> fullNameToDb = new ConcurrentHashMap<>();
    // Cluster bookkeeping, indexed by id and by name.
    private ConcurrentHashMap<Long, Cluster> idToCluster = new ConcurrentHashMap<>();
    private ConcurrentHashMap<String, Cluster> nameToCluster = new ConcurrentHashMap<>();
    @Getter
    private EsRepository esRepository = new EsRepository();
    @Getter
    private IcebergTableCreationRecordMgr icebergTableCreationRecordMgr = new IcebergTableCreationRecordMgr();
    @Override
    public long getId() {
        // The built-in datasource always has the fixed id 0.
        return INTERNAL_DS_ID;
    }
    @Override
    public String getType() {
        // Type discriminator used by the datasource framework.
        return "internal";
    }
    @Override
    public String getName() {
        // The built-in datasource always has the fixed name "internal".
        return INTERNAL_DS_NAME;
    }
@Override
public List<String> getDbNames() {
return Lists.newArrayList(fullNameToDb.keySet());
}
@Nullable
@Override
public Database getDbNullable(String dbName) {
if (fullNameToDb.containsKey(dbName)) {
return fullNameToDb.get(dbName);
} else {
String fullName = ClusterNamespace.getNameFromFullName(dbName);
if (fullName.equalsIgnoreCase(InfoSchemaDb.DATABASE_NAME)) {
String clusterName = ClusterNamespace.getClusterNameFromFullName(dbName);
fullName = ClusterNamespace.getFullName(clusterName, fullName.toLowerCase());
return fullNameToDb.get(fullName);
}
}
return null;
}
    /** Look up a database by id, or null if absent. */
    @Nullable
    @Override
    public Database getDbNullable(long dbId) {
        return idToDb.get(dbId);
    }
    @Override
    public Map<String, String> getProperties() {
        // The built-in datasource has no user-configurable properties.
        return Maps.newHashMap();
    }
    @Override
    public void modifyDatasourceName(String name) {
        // The built-in datasource cannot be renamed; log and ignore.
        LOG.warn("Ignore the modify datasource name in build-in datasource.");
    }
    @Override
    public void modifyDatasourceProps(Map<String, String> props) {
        // The built-in datasource has no modifiable properties; log and ignore.
        LOG.warn("Ignore the modify datasource props in build-in datasource.");
    }
    /**
     * Try to acquire the catalog lock within the configured timeout.
     *
     * @param mustLock if true, retry (potentially forever) until the lock is held;
     *                 if false, give up after one timed attempt
     * @return true if the lock is held by the current thread on return
     */
    private boolean tryLock(boolean mustLock) {
        while (true) {
            try {
                if (!lock.tryLock(Config.catalog_try_lock_timeout_ms, TimeUnit.MILLISECONDS)) {
                    // Timed out: dump the current owner's stack to help diagnose contention.
                    if (LOG.isDebugEnabled()) {
                        Thread owner = lock.getOwner();
                        if (owner != null) {
                            LOG.debug("catalog lock is held by: {}", Util.dumpThread(owner, 10));
                        }
                    }
                    if (mustLock) {
                        continue;
                    } else {
                        return false;
                    }
                }
                return true;
            } catch (InterruptedException e) {
                LOG.warn("got exception while getting catalog lock", e);
                if (mustLock) {
                    continue;
                } else {
                    // We may have been interrupted after acquiring the lock; report the true state.
                    return lock.isHeldByCurrentThread();
                }
            }
        }
    }
public List<Long> getDbIds() {
return Lists.newArrayList(idToDb.keySet());
}
    // Release the catalog lock if (and only if) the current thread holds it.
    private void unlock() {
        if (lock.isHeldByCurrentThread()) {
            this.lock.unlock();
        }
    }
/**
* create the tablet inverted index from metadata.
*/
public void recreateTabletInvertIndex() {
if (Env.isCheckpointThread()) {
return;
}
TabletInvertedIndex invertedIndex = Env.getCurrentInvertedIndex();
for (Database db : this.fullNameToDb.values()) {
long dbId = db.getId();
for (Table table : db.getTables()) {
if (table.getType() != TableType.OLAP) {
continue;
}
OlapTable olapTable = (OlapTable) table;
long tableId = olapTable.getId();
Collection<Partition> allPartitions = olapTable.getAllPartitions();
for (Partition partition : allPartitions) {
long partitionId = partition.getId();
TStorageMedium medium = olapTable.getPartitionInfo().getDataProperty(partitionId)
.getStorageMedium();
for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {
long indexId = index.getId();
int schemaHash = olapTable.getSchemaHashByIndexId(indexId);
TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, schemaHash, medium);
for (Tablet tablet : index.getTablets()) {
long tabletId = tablet.getId();
invertedIndex.addTablet(tabletId, tabletMeta);
for (Replica replica : tablet.getReplicas()) {
invertedIndex.addReplica(tabletId, replica);
}
}
}
}
}
}
}
/**
* Entry of creating a database.
*
* @param stmt
* @throws DdlException
*/
public void createDb(CreateDbStmt stmt) throws DdlException {
final String clusterName = stmt.getClusterName();
String fullDbName = stmt.getFullDbName();
Map<String, String> properties = stmt.getProperties();
long id = Env.getCurrentEnv().getNextId();
Database db = new Database(id, fullDbName);
db.setClusterName(clusterName);
db.setDbProperties(new DatabaseProperty(properties).checkAndBuildProperties());
if (!tryLock(false)) {
throw new DdlException("Failed to acquire catalog lock. Try again");
}
try {
if (!nameToCluster.containsKey(clusterName)) {
ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_SELECT_CLUSTER, clusterName);
}
if (fullNameToDb.containsKey(fullDbName)) {
if (stmt.isSetIfNotExists()) {
LOG.info("create database[{}] which already exists", fullDbName);
return;
} else {
ErrorReport.reportDdlException(ErrorCode.ERR_DB_CREATE_EXISTS, fullDbName);
}
} else {
unprotectCreateDb(db);
Env.getCurrentEnv().getEditLog().logCreateDb(db);
}
} finally {
unlock();
}
LOG.info("createDb dbName = " + fullDbName + ", id = " + id);
if (db.getDbProperties().getIcebergProperty().isExist()) {
icebergTableCreationRecordMgr.registerDb(db);
}
}
    /**
     * For replaying creating database.
     *
     * Registers the db in both lookup maps, attaches it to its cluster and creates
     * its transaction manager. Caller must guarantee exclusive access (catalog lock
     * held, or single-threaded replay).
     *
     * @param db the database to register
     */
    public void unprotectCreateDb(Database db) {
        idToDb.put(db.getId(), db);
        fullNameToDb.put(db.getFullName(), db);
        final Cluster cluster = nameToCluster.get(db.getClusterName());
        cluster.addDb(db.getFullName(), db.getId());
        Env.getCurrentGlobalTransactionMgr().addDatabaseTransactionMgr(db.getId());
    }
    // Register a cluster in both the name and id indexes.
    public void addCluster(Cluster cluster) {
        nameToCluster.put(cluster.getName(), cluster);
        idToCluster.put(cluster.getId(), cluster);
    }
    /**
     * Replay a create-database edit log entry.
     *
     * @param db the database recovered from the edit log
     */
    public void replayCreateDb(Database db) {
        tryLock(true);
        try {
            unprotectCreateDb(db);
        } finally {
            unlock();
        }
    }
    /**
     * Drop a database, recycling its tables unless FORCE was given.
     *
     * Lock order: catalog lock -> db write lock -> all table write locks (id order).
     *
     * @param stmt the DROP DATABASE statement
     * @throws DdlException if the db is missing (without IF EXISTS), is in LINK/MOVE
     *         state, still has committed transactions, or a table is not NORMAL
     */
    public void dropDb(DropDbStmt stmt) throws DdlException {
        String dbName = stmt.getDbName();
        if (!tryLock(false)) {
            throw new DdlException("Failed to acquire catalog lock. Try again");
        }
        try {
            if (!fullNameToDb.containsKey(dbName)) {
                if (stmt.isSetIfExists()) {
                    LOG.info("drop database[{}] which does not exist", dbName);
                    return;
                } else {
                    ErrorReport.reportDdlException(ErrorCode.ERR_DB_DROP_EXISTS, dbName);
                }
            }
            Database db = this.fullNameToDb.get(dbName);
            db.writeLock();
            try {
                if (!stmt.isForceDrop()) {
                    // Committed-but-incomplete transactions block a non-force drop.
                    if (Env.getCurrentEnv().getGlobalTransactionMgr().existCommittedTxns(db.getId(), null, null)) {
                        throw new DdlException(
                                "There are still some transactions in the COMMITTED state waiting to be completed. "
                                        + "The database [" + dbName
                                        + "] cannot be dropped. If you want to forcibly drop(cannot be recovered),"
                                        + " please use \"DROP database FORCE\".");
                    }
                }
                // Dropping the linked alias of a LINK-state db only removes the link.
                if (db.getDbState() == DbState.LINK && dbName.equals(db.getAttachDb())) {
                    final DropLinkDbAndUpdateDbInfo info = new DropLinkDbAndUpdateDbInfo();
                    fullNameToDb.remove(db.getAttachDb());
                    db.setDbState(DbState.NORMAL);
                    info.setUpdateDbState(DbState.NORMAL);
                    final Cluster cluster = nameToCluster.get(
                            ClusterNamespace.getClusterNameFromFullName(db.getAttachDb()));
                    final BaseParam param = new BaseParam();
                    param.addStringParam(db.getAttachDb());
                    param.addLongParam(db.getId());
                    cluster.removeLinkDb(param);
                    info.setDropDbCluster(cluster.getName());
                    info.setDropDbId(db.getId());
                    info.setDropDbName(db.getAttachDb());
                    Env.getCurrentEnv().getEditLog().logDropLinkDb(info);
                    return;
                }
                // A db that is being linked or migrated cannot be dropped.
                if (db.getDbState() == DbState.LINK && dbName.equals(db.getFullName())) {
                    ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE,
                            ClusterNamespace.getNameFromFullName(dbName));
                    return;
                }
                if (dbName.equals(db.getAttachDb()) && db.getDbState() == DbState.MOVE) {
                    ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE,
                            ClusterNamespace.getNameFromFullName(dbName));
                    return;
                }
                Set<String> tableNames = db.getTableNamesWithLock();
                List<Table> tableList = db.getTablesOnIdOrder();
                MetaLockUtils.writeLockTables(tableList);
                try {
                    if (!stmt.isForceDrop()) {
                        // Non-force drop requires every olap table to be in NORMAL state.
                        for (Table table : tableList) {
                            if (table.getType() == TableType.OLAP) {
                                OlapTable olapTable = (OlapTable) table;
                                if (olapTable.getState() != OlapTableState.NORMAL) {
                                    throw new DdlException("The table [" + olapTable.getState() + "]'s state is "
                                            + olapTable.getState() + ", cannot be dropped."
                                            + " please cancel the operation on olap table firstly."
                                            + " If you want to forcibly drop(cannot be recovered),"
                                            + " please use \"DROP table FORCE\".");
                                }
                            }
                        }
                    }
                    unprotectDropDb(db, stmt.isForceDrop(), false);
                } finally {
                    MetaLockUtils.writeUnlockTables(tableList);
                }
                if (!stmt.isForceDrop()) {
                    Env.getCurrentRecycleBin().recycleDatabase(db, tableNames);
                } else {
                    Env.getCurrentEnv().eraseDatabase(db.getId(), false);
                }
            } finally {
                db.writeUnlock();
            }
            idToDb.remove(db.getId());
            fullNameToDb.remove(db.getFullName());
            final Cluster cluster = nameToCluster.get(db.getClusterName());
            cluster.removeDb(dbName, db.getId());
            DropDbInfo info = new DropDbInfo(dbName, stmt.isForceDrop());
            Env.getCurrentEnv().getEditLog().logDropDb(info);
        } finally {
            unlock();
        }
        LOG.info("finish drop database[{}], is force : {}", dbName, stmt.isForceDrop());
    }
public void unprotectDropDb(Database db, boolean isForeDrop, boolean isReplay) {
if (db.getDbProperties().getIcebergProperty().isExist()) {
icebergTableCreationRecordMgr.deregisterDb(db);
}
for (Table table : db.getTables()) {
unprotectDropTable(db, table, isForeDrop, isReplay);
}
db.markDropped();
}
    /**
     * Replay a drop-link-db edit log entry: remove the linked alias and restore
     * the database's state recorded in the log.
     */
    public void replayDropLinkDb(DropLinkDbAndUpdateDbInfo info) {
        tryLock(true);
        try {
            final Database db = this.fullNameToDb.remove(info.getDropDbName());
            db.setDbState(info.getUpdateDbState());
            final Cluster cluster = nameToCluster.get(info.getDropDbCluster());
            final BaseParam param = new BaseParam();
            param.addStringParam(db.getAttachDb());
            param.addLongParam(db.getId());
            cluster.removeLinkDb(param);
        } finally {
            unlock();
        }
    }
    /**
     * Replay a drop-database edit log entry, mirroring {@code dropDb} without
     * re-validating or re-logging.
     *
     * @param dbName fully-qualified database name
     * @param isForceDrop whether the original drop was FORCE
     */
    public void replayDropDb(String dbName, boolean isForceDrop) throws DdlException {
        tryLock(true);
        try {
            Database db = fullNameToDb.get(dbName);
            db.writeLock();
            try {
                Set<String> tableNames = db.getTableNamesWithLock();
                List<Table> tableList = db.getTablesOnIdOrder();
                MetaLockUtils.writeLockTables(tableList);
                try {
                    unprotectDropDb(db, isForceDrop, true);
                } finally {
                    MetaLockUtils.writeUnlockTables(tableList);
                }
                if (!isForceDrop) {
                    Env.getCurrentRecycleBin().recycleDatabase(db, tableNames);
                } else {
                    Env.getCurrentEnv().eraseDatabase(db.getId(), false);
                }
            } finally {
                db.writeUnlock();
            }
            fullNameToDb.remove(dbName);
            idToDb.remove(db.getId());
            final Cluster cluster = nameToCluster.get(db.getClusterName());
            cluster.removeDb(dbName, db.getId());
        } finally {
            unlock();
        }
    }
    /**
     * Recover a previously-dropped database from the recycle bin.
     *
     * @throws DdlException if a database with the same name already exists or the
     *         catalog lock cannot be acquired
     */
    public void recoverDatabase(RecoverDbStmt recoverStmt) throws DdlException {
        if (getDb(recoverStmt.getDbName()).isPresent()) {
            throw new DdlException("Database[" + recoverStmt.getDbName() + "] already exist.");
        }
        Database db = Env.getCurrentRecycleBin().recoverDatabase(recoverStmt.getDbName());
        if (!tryLock(false)) {
            throw new DdlException("Failed to acquire catalog lock. Try again");
        }
        db.writeLock();
        List<Table> tableList = db.getTablesOnIdOrder();
        MetaLockUtils.writeLockTables(tableList);
        try {
            // Re-check under the lock: another session may have created the name meanwhile.
            if (fullNameToDb.containsKey(db.getFullName())) {
                throw new DdlException("Database[" + db.getFullName() + "] already exist.");
            }
            fullNameToDb.put(db.getFullName(), db);
            idToDb.put(db.getId(), db);
            final Cluster cluster = nameToCluster.get(db.getClusterName());
            cluster.addDb(db.getFullName(), db.getId());
            // -1L table/partition ids mean "whole database" in the recover log entry.
            RecoverInfo recoverInfo = new RecoverInfo(db.getId(), -1L, -1L);
            Env.getCurrentEnv().getEditLog().logRecoverDb(recoverInfo);
            db.unmarkDropped();
        } finally {
            MetaLockUtils.writeUnlockTables(tableList);
            db.writeUnlock();
            unlock();
        }
        LOG.info("recover database[{}]", db.getId());
    }
    /**
     * Recover a previously-dropped table from the recycle bin.
     *
     * @throws DdlException if the table name already exists in the db, or no such
     *         table is in the recycle bin
     */
    public void recoverTable(RecoverTableStmt recoverStmt) throws DdlException {
        String dbName = recoverStmt.getDbName();
        String tableName = recoverStmt.getTableName();
        Database db = (Database) getDbOrDdlException(dbName);
        db.writeLockOrDdlException();
        try {
            if (db.getTable(tableName).isPresent()) {
                ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
            }
            if (!Env.getCurrentRecycleBin().recoverTable(db, tableName)) {
                ErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_TABLE, tableName, dbName);
            }
        } finally {
            db.writeUnlock();
        }
    }
    /**
     * Recover a previously-dropped partition of an olap table from the recycle bin.
     *
     * @throws DdlException if the partition name already exists in the table
     */
    public void recoverPartition(RecoverPartitionStmt recoverStmt) throws DdlException {
        String dbName = recoverStmt.getDbName();
        String tableName = recoverStmt.getTableName();
        Database db = getDbOrDdlException(dbName);
        OlapTable olapTable = db.getOlapTableOrDdlException(tableName);
        olapTable.writeLockOrDdlException();
        try {
            String partitionName = recoverStmt.getPartitionName();
            if (olapTable.getPartition(partitionName) != null) {
                throw new DdlException("partition[" + partitionName + "] already exist in table[" + tableName + "]");
            }
            Env.getCurrentRecycleBin().recoverPartition(db.getId(), olapTable, partitionName);
        } finally {
            olapTable.writeUnlock();
        }
    }
    // Replay an erase-database edit log entry (permanent removal from the recycle bin).
    public void replayEraseDatabase(long dbId) throws DdlException {
        Env.getCurrentRecycleBin().replayEraseDatabase(dbId);
    }
    // Replay a recover-database edit log entry: pull the db back from the recycle
    // bin and re-register it in the catalog maps.
    public void replayRecoverDatabase(RecoverInfo info) {
        long dbId = info.getDbId();
        Database db = Env.getCurrentRecycleBin().replayRecoverDatabase(dbId);
        replayCreateDb(db);
        LOG.info("replay recover db[{}]", dbId);
    }
    /**
     * Alter a database's data or replica quota and persist the change.
     *
     * @param stmt carries the db name, quota type (DATA or REPLICA) and new value
     */
    public void alterDatabaseQuota(AlterDatabaseQuotaStmt stmt) throws DdlException {
        String dbName = stmt.getDbName();
        Database db = (Database) getDbOrDdlException(dbName);
        QuotaType quotaType = stmt.getQuotaType();
        db.writeLockOrDdlException();
        try {
            if (quotaType == QuotaType.DATA) {
                db.setDataQuota(stmt.getQuota());
            } else if (quotaType == QuotaType.REPLICA) {
                db.setReplicaQuota(stmt.getQuota());
            }
            long quota = stmt.getQuota();
            DatabaseInfo dbInfo = new DatabaseInfo(dbName, "", quota, quotaType);
            Env.getCurrentEnv().getEditLog().logAlterDb(dbInfo);
        } finally {
            db.writeUnlock();
        }
    }
    /**
     * Replay an alter-database-quota edit log entry.
     */
    public void replayAlterDatabaseQuota(String dbName, long quota, QuotaType quotaType) throws MetaNotFoundException {
        Database db = (Database) getDbOrMetaException(dbName);
        db.writeLock();
        try {
            if (quotaType == QuotaType.DATA) {
                db.setDataQuota(quota);
            } else if (quotaType == QuotaType.REPLICA) {
                db.setReplicaQuota(quota);
            }
        } finally {
            db.writeUnlock();
        }
    }
    /**
     * Rename a database and persist the change.
     *
     * @throws DdlException if the names are equal, the cluster or db is missing,
     *         the db is in LINK/MOVE state, or the new name is taken
     */
    public void renameDatabase(AlterDatabaseRename stmt) throws DdlException {
        String fullDbName = stmt.getDbName();
        String newFullDbName = stmt.getNewDbName();
        String clusterName = stmt.getClusterName();
        if (fullDbName.equals(newFullDbName)) {
            throw new DdlException("Same database name");
        }
        Database db = null;
        Cluster cluster = null;
        if (!tryLock(false)) {
            throw new DdlException("Failed to acquire catalog lock. Try again");
        }
        try {
            cluster = nameToCluster.get(clusterName);
            if (cluster == null) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_EXISTS, clusterName);
            }
            db = fullNameToDb.get(fullDbName);
            if (db == null) {
                ErrorReport.reportDdlException(ErrorCode.ERR_BAD_DB_ERROR, fullDbName);
            }
            // A db being linked or migrated cannot be renamed.
            if (db.getDbState() == DbState.LINK || db.getDbState() == DbState.MOVE) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_RENAME_DB_ERR, fullDbName);
            }
            if (fullNameToDb.get(newFullDbName) != null) {
                throw new DdlException("Database name[" + newFullDbName + "] is already used");
            }
            // Re-register under the new name in cluster and catalog maps, then log.
            cluster.removeDb(db.getFullName(), db.getId());
            cluster.addDb(newFullDbName, db.getId());
            db.setNameWithLock(newFullDbName);
            fullNameToDb.remove(fullDbName);
            fullNameToDb.put(newFullDbName, db);
            DatabaseInfo dbInfo = new DatabaseInfo(fullDbName, newFullDbName, -1L, QuotaType.NONE);
            Env.getCurrentEnv().getEditLog().logDatabaseRename(dbInfo);
        } finally {
            unlock();
        }
        LOG.info("rename database[{}] to [{}]", fullDbName, newFullDbName);
    }
    /**
     * Replay a rename-database edit log entry.
     */
    public void replayRenameDatabase(String dbName, String newDbName) {
        tryLock(true);
        try {
            Database db = fullNameToDb.get(dbName);
            Cluster cluster = nameToCluster.get(db.getClusterName());
            cluster.removeDb(db.getFullName(), db.getId());
            db.setName(newDbName);
            cluster.addDb(newDbName, db.getId());
            fullNameToDb.remove(dbName);
            fullNameToDb.put(newDbName, db);
        } finally {
            unlock();
        }
        LOG.info("replay rename database {} to {}", dbName, newDbName);
    }
    /**
     * Drop a table or view, recycling it unless FORCE was given.
     *
     * Lock order: db write lock -> table write lock.
     *
     * @throws DdlException if the table is missing (without IF EXISTS), the object
     *         kind mismatches (TABLE vs VIEW), committed transactions remain, or an
     *         olap table is not in NORMAL state
     */
    public void dropTable(DropTableStmt stmt) throws DdlException {
        String dbName = stmt.getDbName();
        String tableName = stmt.getTableName();
        Database db = (Database) getDbOrDdlException(dbName);
        db.writeLockOrDdlException();
        try {
            Table table = db.getTableNullable(tableName);
            if (table == null) {
                if (stmt.isSetIfExists()) {
                    LOG.info("drop table[{}] which does not exist", tableName);
                    return;
                } else {
                    ErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_TABLE, tableName, dbName);
                }
            }
            // DROP VIEW must target a view; DROP TABLE must not.
            if (stmt.isView()) {
                if (!(table instanceof View)) {
                    ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "VIEW");
                }
            } else {
                if (table instanceof View) {
                    ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "TABLE");
                }
            }
            if (!stmt.isForceDrop()) {
                // Committed-but-incomplete transactions block a non-force drop.
                if (Env.getCurrentEnv().getGlobalTransactionMgr().existCommittedTxns(db.getId(), table.getId(), null)) {
                    throw new DdlException(
                            "There are still some transactions in the COMMITTED state waiting to be completed. "
                                    + "The table [" + tableName
                                    + "] cannot be dropped. If you want to forcibly drop(cannot be recovered),"
                                    + " please use \"DROP table FORCE\".");
                }
            }
            DropInfo info = new DropInfo(db.getId(), table.getId(), -1L, stmt.isForceDrop());
            table.writeLock();
            try {
                if (table instanceof OlapTable && !stmt.isForceDrop()) {
                    OlapTable olapTable = (OlapTable) table;
                    if ((olapTable.getState() != OlapTableState.NORMAL)) {
                        throw new DdlException("The table [" + tableName + "]'s state is " + olapTable.getState()
                                + ", cannot be dropped." + " please cancel the operation on olap table firstly."
                                + " If you want to forcibly drop(cannot be recovered),"
                                + " please use \"DROP table FORCE\".");
                    }
                }
                unprotectDropTable(db, table, stmt.isForceDrop(), false);
            } finally {
                table.writeUnlock();
            }
            Env.getCurrentEnv().getEditLog().logDropTable(info);
        } finally {
            db.writeUnlock();
        }
        LOG.info("finished dropping table: {} from db: {}, is force: {}", tableName, dbName, stmt.isForceDrop());
    }
    /**
     * Remove a table from its database without locking or edit-logging.
     * Caller must hold the db and table write locks.
     *
     * @param isForceDrop if true, olap tables are erased immediately; otherwise
     *        the table is moved to the recycle bin
     * @param isReplay whether this is edit-log replay
     * @return always true
     */
    public boolean unprotectDropTable(Database db, Table table, boolean isForceDrop, boolean isReplay) {
        // Per-engine cleanup before detaching the table from the db.
        if (table.getType() == TableType.ELASTICSEARCH) {
            esRepository.deRegisterTable(table.getId());
        } else if (table.getType() == TableType.OLAP) {
            // Temp partitions are never recycled; drop them outright.
            ((OlapTable) table).dropAllTempPartitions();
        } else if (table.getType() == TableType.ICEBERG) {
            icebergTableCreationRecordMgr.deregisterTable(db, (IcebergTable) table);
        }
        db.dropTable(table.getName());
        if (!isForceDrop) {
            Env.getCurrentRecycleBin().recycleTable(db.getId(), table, isReplay);
        } else {
            if (table.getType() == TableType.OLAP) {
                Env.getCurrentEnv().onEraseOlapTable((OlapTable) table, isReplay);
            }
        }
        LOG.info("finished dropping table[{}] in db[{}]", table.getName(), db.getFullName());
        return true;
    }
    /**
     * Replay a drop-table edit log entry.
     */
    public void replayDropTable(Database db, long tableId, boolean isForceDrop) throws MetaNotFoundException {
        Table table = db.getTableOrMetaException(tableId);
        db.writeLock();
        table.writeLock();
        try {
            unprotectDropTable(db, table, isForceDrop, true);
        } finally {
            table.writeUnlock();
            db.writeUnlock();
        }
    }
    // Replay an erase-table edit log entry (permanent removal from the recycle bin).
    public void replayEraseTable(long tableId) {
        Env.getCurrentRecycleBin().replayEraseTable(tableId);
    }
    /**
     * Replay a recover-table edit log entry.
     */
    public void replayRecoverTable(RecoverInfo info) throws MetaNotFoundException {
        Database db = (Database) getDbOrMetaException(info.getDbId());
        db.writeLock();
        try {
            Env.getCurrentRecycleBin().replayRecoverTable(db, info.getTableId());
        } finally {
            db.writeUnlock();
        }
    }
    /**
     * Add a replica recorded in an edit log entry to its tablet.
     * Caller must hold the table write lock.
     */
    private void unprotectAddReplica(OlapTable olapTable, ReplicaPersistInfo info) {
        LOG.debug("replay add a replica {}", info);
        Partition partition = olapTable.getPartition(info.getPartitionId());
        MaterializedIndex materializedIndex = partition.getIndex(info.getIndexId());
        Tablet tablet = materializedIndex.getTablet(info.getTabletId());
        int schemaHash = info.getSchemaHash();
        // Older log entries may carry -1; fall back to the index's current schema hash.
        if (schemaHash == -1) {
            schemaHash = olapTable.getSchemaHashByIndexId(info.getIndexId());
        }
        Replica replica =
                new Replica(info.getReplicaId(), info.getBackendId(), info.getVersion(), schemaHash, info.getDataSize(),
                        info.getRemoteDataSize(), info.getRowCount(), ReplicaState.NORMAL, info.getLastFailedVersion(),
                        info.getLastSuccessVersion());
        tablet.addReplica(replica);
    }
    /**
     * Update an existing replica's version info from an edit log entry.
     * Caller must hold the table write lock.
     */
    private void unprotectUpdateReplica(OlapTable olapTable, ReplicaPersistInfo info) {
        LOG.debug("replay update a replica {}", info);
        Partition partition = olapTable.getPartition(info.getPartitionId());
        MaterializedIndex materializedIndex = partition.getIndex(info.getIndexId());
        Tablet tablet = materializedIndex.getTablet(info.getTabletId());
        Replica replica = tablet.getReplicaByBackendId(info.getBackendId());
        Preconditions.checkNotNull(replica, info);
        replica.updateVersionInfo(info.getVersion(), info.getDataSize(), info.getRemoteDataSize(), info.getRowCount());
        // A successfully updated replica is no longer considered bad.
        replica.setBad(false);
    }
    /**
     * Replay an add-replica edit log entry.
     */
    public void replayAddReplica(ReplicaPersistInfo info) throws MetaNotFoundException {
        Database db = (Database) getDbOrMetaException(info.getDbId());
        OlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
        olapTable.writeLock();
        try {
            unprotectAddReplica(olapTable, info);
        } finally {
            olapTable.writeUnlock();
        }
    }
    /**
     * Replay an update-replica edit log entry.
     */
    public void replayUpdateReplica(ReplicaPersistInfo info) throws MetaNotFoundException {
        Database db = (Database) getDbOrMetaException(info.getDbId());
        OlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
        olapTable.writeLock();
        try {
            unprotectUpdateReplica(olapTable, info);
        } finally {
            olapTable.writeUnlock();
        }
    }
/**
 * Removes the replica on the given backend from its tablet during edit-log replay.
 * No locking is done here; the caller must already hold the table write lock.
 */
public void unprotectDeleteReplica(OlapTable olapTable, ReplicaPersistInfo info) {
    MaterializedIndex index = olapTable.getPartition(info.getPartitionId()).getIndex(info.getIndexId());
    index.getTablet(info.getTabletId()).deleteReplicaByBackendId(info.getBackendId());
}
/** Replays a delete-replica edit-log entry under the table write lock. */
public void replayDeleteReplica(ReplicaPersistInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable tbl = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    tbl.writeLock();
    try {
        unprotectDeleteReplica(tbl, info);
    } finally {
        tbl.writeUnlock();
    }
}
/**
* Following is the step to create an olap table:
* 1. create columns
* 2. create partition info
* 3. create distribution info
* 4. set table id and base index id
* 5. set bloom filter columns
* 6. set and build TableProperty includes:
* 6.1. dynamicProperty
* 6.2. replicationNum
* 6.3. inMemory
* 6.4. storageFormat
* 6.5. compressionType
* 7. set index meta
* 8. check colocation properties
* 9. create tablet in BE
* 10. add this table to FE's meta
* 11. add this table to ColocateGroup if necessary
*/
/**
 * Entry point for CREATE TABLE: performs common existence/quota checks and then
 * dispatches to the engine-specific creation routine.
 */
public void createTable(CreateTableStmt stmt) throws UserException {
    String engineName = stmt.getEngineName();
    String tableName = stmt.getTableName();
    Database db = (Database) getDbOrDdlException(stmt.getDbName());

    // Capacity/quota checks only apply to tables whose data is managed by this system.
    if (!stmt.isExternal()) {
        Env.getCurrentSystemInfo().checkClusterCapacity(stmt.getClusterName());
        db.checkQuota();
    }

    if (db.getTable(tableName).isPresent()) {
        if (stmt.isSetIfNotExists()) {
            LOG.info("create table[{}] which already exists", tableName);
            return;
        }
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }

    // NOTE: the first four engines are matched case-sensitively while the rest use
    // equalsIgnoreCase; this preserves the historical matching behavior.
    if (engineName.equals("olap")) {
        createOlapTable(db, stmt);
    } else if (engineName.equals("odbc")) {
        createOdbcTable(db, stmt);
    } else if (engineName.equals("mysql")) {
        createMysqlTable(db, stmt);
    } else if (engineName.equals("broker")) {
        createBrokerTable(db, stmt);
    } else if (engineName.equalsIgnoreCase("elasticsearch") || engineName.equalsIgnoreCase("es")) {
        createEsTable(db, stmt);
    } else if (engineName.equalsIgnoreCase("hive")) {
        createHiveTable(db, stmt);
    } else if (engineName.equalsIgnoreCase("iceberg")) {
        IcebergCatalogMgr.createIcebergTable(db, stmt);
    } else if (engineName.equalsIgnoreCase("hudi")) {
        createHudiTable(db, stmt);
    } else {
        ErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_STORAGE_ENGINE, engineName);
        // reportDdlException always throws; this guards against it ever returning.
        Preconditions.checkState(false);
    }
}
/**
 * Handles CREATE TABLE LIKE: renders the existing table's DDL under its read lock,
 * re-parses it into a CreateTableStmt, retargets it to the new table name, and runs
 * the normal create-table path.
 */
public void createTableLike(CreateTableLikeStmt stmt) throws DdlException {
    try {
        DatabaseIf db = getDbOrDdlException(stmt.getExistedDbName());
        TableIf table = db.getTableOrDdlException(stmt.getExistedTableName());
        if (table.getType() == TableType.VIEW) {
            throw new DdlException("Not support create table from a View");
        }
        List<String> createTableStmt = Lists.newArrayList();
        // Render DDL under the read lock so the schema cannot change mid-copy.
        table.readLock();
        try {
            if (table.getType() == TableType.OLAP) {
                // Requested rollups must actually exist on the source table.
                if (!CollectionUtils.isEmpty(stmt.getRollupNames())) {
                    OlapTable olapTable = (OlapTable) table;
                    for (String rollupIndexName : stmt.getRollupNames()) {
                        if (!olapTable.hasMaterializedIndex(rollupIndexName)) {
                            throw new DdlException("Rollup index[" + rollupIndexName + "] not exists in Table["
                                    + olapTable.getName() + "]");
                        }
                    }
                }
            } else if (!CollectionUtils.isEmpty(stmt.getRollupNames()) || stmt.isWithAllRollup()) {
                // Only OLAP tables have rollups.
                throw new DdlException("Table[" + table.getName() + "] is external, not support rollup copy");
            }
            Env.getDdlStmt(stmt, stmt.getDbName(), table, createTableStmt, null, null, false, false, true);
            if (createTableStmt.isEmpty()) {
                ErrorReport.reportDdlException(ErrorCode.ERROR_CREATE_TABLE_LIKE_EMPTY, "CREATE");
            }
        } finally {
            table.readUnlock();
        }
        // Round-trip through the parser so the copy goes through regular analysis.
        CreateTableStmt parsedCreateTableStmt = (CreateTableStmt) SqlParserUtils.parseAndAnalyzeStmt(
                createTableStmt.get(0), ConnectContext.get());
        parsedCreateTableStmt.setTableName(stmt.getTableName());
        parsedCreateTableStmt.setIfNotExists(stmt.isIfNotExists());
        createTable(parsedCreateTableStmt);
    } catch (UserException e) {
        throw new DdlException("Failed to execute CREATE TABLE LIKE " + stmt.getExistedTableName() + ". Reason: "
                + e.getMessage());
    }
}
/**
 * Handles CREATE TABLE AS SELECT: derives column definitions from the query's result
 * expressions, synthesizes/normalizes types, analyzes the resulting CREATE TABLE
 * statement, and creates the table.
 */
public void createTableAsSelect(CreateTableAsSelectStmt stmt) throws DdlException {
    try {
        List<String> columnNames = stmt.getColumnNames();
        CreateTableStmt createTableStmt = stmt.getCreateTableStmt();
        QueryStmt queryStmt = stmt.getQueryStmt();
        ArrayList<Expr> resultExprs = queryStmt.getResultExprs();
        ArrayList<String> colLabels = queryStmt.getColLabels();
        int size = resultExprs.size();
        int colNameIndex = 0;
        for (int i = 0; i < size; ++i) {
            String name;
            // Prefer user-specified column names; otherwise use the query's labels.
            if (columnNames != null) {
                name = columnNames.get(i);
            } else {
                name = colLabels.get(i);
            }
            try {
                FeNameFormat.checkColumnName(name);
            } catch (AnalysisException exception) {
                // Label is not a legal column name (e.g. an expression); synthesize one.
                name = "_col" + (colNameIndex++);
            }
            TypeDef typeDef;
            Expr resultExpr = resultExprs.get(i);
            Type resultType = resultExpr.getType();
            // Normalize result types that cannot be used directly as column types.
            if (resultType.isStringType() && resultType.getLength() < 0) {
                // Unknown-length string result -> STRING column.
                typeDef = new TypeDef(Type.STRING);
            } else if (resultType.isDecimalV2() && resultType.equals(ScalarType.DECIMALV2)) {
                typeDef = new TypeDef(ScalarType.createDecimalType(27, 9));
            } else if (resultType.isDecimalV3()) {
                typeDef = new TypeDef(ScalarType.createDecimalType(resultType.getPrecision(),
                        ((ScalarType) resultType).getScalarScale()));
            } else {
                typeDef = new TypeDef(resultExpr.getType());
            }
            ColumnDef columnDef;
            if (resultExpr.getSrcSlotRef() == null) {
                // Derived expression: a nullable non-key column with no default value.
                columnDef = new ColumnDef(name, typeDef, false, null, true, new DefaultValue(false, null), "");
            } else {
                // Plain column reference: inherit key flag, aggregation, nullability,
                // default value and comment from the source column.
                Column column = resultExpr.getSrcSlotRef().getDesc().getColumn();
                boolean setDefault = StringUtils.isNotBlank(column.getDefaultValue());
                columnDef = new ColumnDef(name, typeDef, column.isKey(), column.getAggregationType(),
                        column.isAllowNull(), new DefaultValue(setDefault, column.getDefaultValue()),
                        column.getComment());
            }
            createTableStmt.addColumnDef(columnDef);
            // Without an explicit distribution, hash-distribute by the first column.
            if (createTableStmt.getDistributionDesc() == null && i == 0) {
                createTableStmt.setDistributionDesc(new HashDistributionDesc(10, Lists.newArrayList(name)));
            }
        }
        Analyzer dummyRootAnalyzer = new Analyzer(Env.getCurrentEnv(), ConnectContext.get());
        createTableStmt.analyze(dummyRootAnalyzer);
        createTable(createTableStmt);
    } catch (UserException e) {
        throw new DdlException("Failed to execute CTAS Reason: " + e.getMessage());
    }
}
/**
 * Replays a CREATE TABLE edit-log entry: registers the table in the database and,
 * when not running in a checkpoint thread, rebuilds the tablet inverted index and
 * dynamic-partition registration for OLAP tables.
 */
public void replayCreateTable(String dbName, Table table) throws MetaNotFoundException {
    Database db = this.fullNameToDb.get(dbName);
    try {
        db.createTableWithLock(table, true, false);
    } catch (DdlException e) {
        throw new MetaNotFoundException(e.getMessage());
    }
    if (!Env.isCheckpointThread()) {
        if (table.getType() == TableType.OLAP) {
            // Re-register every tablet and replica of the table in the inverted index.
            TabletInvertedIndex invertedIndex = Env.getCurrentInvertedIndex();
            OlapTable olapTable = (OlapTable) table;
            long dbId = db.getId();
            long tableId = table.getId();
            for (Partition partition : olapTable.getAllPartitions()) {
                long partitionId = partition.getId();
                TStorageMedium medium = olapTable.getPartitionInfo().getDataProperty(partitionId)
                        .getStorageMedium();
                for (MaterializedIndex mIndex : partition.getMaterializedIndices(IndexExtState.ALL)) {
                    long indexId = mIndex.getId();
                    int schemaHash = olapTable.getSchemaHashByIndexId(indexId);
                    TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, schemaHash, medium);
                    for (Tablet tablet : mIndex.getTablets()) {
                        long tabletId = tablet.getId();
                        invertedIndex.addTablet(tabletId, tabletMeta);
                        for (Replica replica : tablet.getReplicas()) {
                            invertedIndex.addReplica(tabletId, replica);
                        }
                    }
                }
            }
            DynamicPartitionUtil.registerOrRemoveDynamicPartitionTable(dbId, olapTable, true);
        }
    }
}
/**
 * Adds a partition to an OLAP table (ALTER TABLE ... ADD PARTITION).
 *
 * Flow:
 *  1. Under the table READ lock: validate the request and snapshot the metadata
 *     (index metas, bloom-filter columns, distribution info) needed to build tablets.
 *  2. With NO lock held: create the partition and its tablets on the backends
 *     (potentially slow, so it runs outside the lock).
 *  3. Under the table WRITE lock: verify the table meta did not change in between,
 *     attach the partition and write the edit log; on failure, clean up the tablets
 *     that were registered in the inverted index.
 */
public void addPartition(Database db, String tableName, AddPartitionClause addPartitionClause) throws DdlException {
    SinglePartitionDesc singlePartitionDesc = addPartitionClause.getSingeRangePartitionDesc();
    DistributionDesc distributionDesc = addPartitionClause.getDistributionDesc();
    boolean isTempPartition = addPartitionClause.isTempPartition();
    DistributionInfo distributionInfo;
    Map<Long, MaterializedIndexMeta> indexIdToMeta;
    Set<String> bfColumns;
    String partitionName = singlePartitionDesc.getPartitionName();
    Table table = db.getOlapTableOrDdlException(tableName);
    OlapTable olapTable = (OlapTable) table;
    // Phase 1: validate and copy metadata under the read lock.
    olapTable.readLock();
    try {
        if (olapTable.getState() != OlapTableState.NORMAL) {
            throw new DdlException("Table[" + tableName + "]'s state is not NORMAL");
        }
        PartitionInfo partitionInfo = olapTable.getPartitionInfo();
        if (partitionInfo.getType() != PartitionType.RANGE && partitionInfo.getType() != PartitionType.LIST) {
            throw new DdlException("Only support adding partition to range and list partitioned table");
        }
        if (olapTable.checkPartitionNameExist(partitionName)) {
            if (singlePartitionDesc.isSetIfNotExists()) {
                LOG.info("add partition[{}] which already exists", partitionName);
                return;
            } else {
                ErrorReport.reportDdlException(ErrorCode.ERR_SAME_NAME_PARTITION, partitionName);
            }
        }
        Map<String, String> properties = singlePartitionDesc.getProperties();
        // Fill in table-level defaults for properties the user did not specify.
        ReplicaAllocation replicaAlloc = olapTable.getDefaultReplicaAllocation();
        if (!properties.containsKey(PropertyAnalyzer.PROPERTIES_REPLICATION_NUM) && !properties.containsKey(
                PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION)) {
            properties.put(PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION, replicaAlloc.toCreateStmt());
        }
        if (!properties.containsKey(PropertyAnalyzer.PROPERTIES_INMEMORY)) {
            properties.put(PropertyAnalyzer.PROPERTIES_INMEMORY, olapTable.isInMemory().toString());
        }
        singlePartitionDesc.analyze(partitionInfo.getPartitionColumns().size(), properties);
        partitionInfo.createAndCheckPartitionItem(singlePartitionDesc, isTempPartition);
        List<Column> baseSchema = olapTable.getBaseSchema();
        DistributionInfo defaultDistributionInfo = olapTable.getDefaultDistributionInfo();
        if (distributionDesc != null) {
            // A per-partition distribution must agree with the table default in type
            // and (for hash) in distribution columns.
            distributionInfo = distributionDesc.toDistributionInfo(baseSchema);
            if (distributionInfo.getType() != defaultDistributionInfo.getType()) {
                throw new DdlException("Cannot assign different distribution type. default is: "
                        + defaultDistributionInfo.getType());
            }
            if (distributionInfo.getType() == DistributionInfoType.HASH) {
                HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo;
                List<Column> newDistriCols = hashDistributionInfo.getDistributionColumns();
                List<Column> defaultDistriCols
                        = ((HashDistributionInfo) defaultDistributionInfo).getDistributionColumns();
                if (!newDistriCols.equals(defaultDistriCols)) {
                    throw new DdlException(
                            "Cannot assign hash distribution with different distribution cols. " + "default is: "
                                    + defaultDistriCols);
                }
                if (hashDistributionInfo.getBucketNum() <= 0) {
                    throw new DdlException("Cannot assign hash distribution buckets less than 1");
                }
            }
        } else {
            distributionInfo = defaultDistributionInfo.toDistributionDesc().toDistributionInfo(baseSchema);
        }
        // Colocate tables additionally constrain distribution and replica allocation.
        if (Env.getCurrentColocateIndex().isColocateTable(olapTable.getId())) {
            String fullGroupName = db.getId() + "_" + olapTable.getColocateGroup();
            ColocateGroupSchema groupSchema = Env.getCurrentColocateIndex().getGroupSchema(fullGroupName);
            Preconditions.checkNotNull(groupSchema);
            groupSchema.checkDistribution(distributionInfo);
            groupSchema.checkReplicaAllocation(singlePartitionDesc.getReplicaAlloc());
        }
        // Snapshot meta so phase 2 can run without holding the lock.
        indexIdToMeta = olapTable.getCopiedIndexIdToMeta();
        bfColumns = olapTable.getCopiedBfColumns();
    } catch (AnalysisException e) {
        throw new DdlException(e.getMessage());
    } finally {
        olapTable.readUnlock();
    }
    Preconditions.checkNotNull(distributionInfo);
    Preconditions.checkNotNull(olapTable);
    Preconditions.checkNotNull(indexIdToMeta);
    DataProperty dataProperty = singlePartitionDesc.getPartitionDataProperty();
    Preconditions.checkNotNull(dataProperty);
    // Check the replica quota before creating anything.
    long indexNum = indexIdToMeta.size();
    long bucketNum = distributionInfo.getBucketNum();
    long replicaNum = singlePartitionDesc.getReplicaAlloc().getTotalReplicaNum();
    long totalReplicaNum = indexNum * bucketNum * replicaNum;
    if (totalReplicaNum >= db.getReplicaQuotaLeftWithLock()) {
        throw new DdlException("Database " + db.getFullName() + " table " + tableName + " add partition increasing "
                + totalReplicaNum + " of replica exceeds quota[" + db.getReplicaQuota() + "]");
    }
    Set<Long> tabletIdSet = new HashSet<Long>();
    try {
        // Phase 2: create the partition and its tablets without holding the table lock.
        long partitionId = Env.getCurrentEnv().getNextId();
        Partition partition = createPartitionWithIndices(db.getClusterName(), db.getId(), olapTable.getId(),
                olapTable.getBaseIndexId(), partitionId, partitionName, indexIdToMeta, distributionInfo,
                dataProperty.getStorageMedium(), singlePartitionDesc.getReplicaAlloc(),
                singlePartitionDesc.getVersionInfo(), bfColumns, olapTable.getBfFpp(), tabletIdSet,
                olapTable.getCopiedIndexes(), singlePartitionDesc.isInMemory(), olapTable.getStorageFormat(),
                singlePartitionDesc.getTabletType(), olapTable.getCompressionType(), olapTable.getDataSortInfo(),
                olapTable.getEnableUniqueKeyMergeOnWrite(), olapTable.getStoragePolicy());
        // Phase 3: re-acquire under the write lock and attach the new partition.
        table = db.getOlapTableOrDdlException(tableName);
        table.writeLockOrDdlException();
        try {
            olapTable = (OlapTable) table;
            if (olapTable.getState() != OlapTableState.NORMAL) {
                throw new DdlException("Table[" + tableName + "]'s state is not NORMAL");
            }
            if (olapTable.checkPartitionNameExist(partitionName)) {
                if (singlePartitionDesc.isSetIfNotExists()) {
                    LOG.info("add partition[{}] which already exists", partitionName);
                    return;
                } else {
                    ErrorReport.reportDdlException(ErrorCode.ERR_SAME_NAME_PARTITION, partitionName);
                }
            }
            // Detect schema changes that happened while the lock was released.
            boolean metaChanged = false;
            if (olapTable.getIndexNameToId().size() != indexIdToMeta.size()) {
                metaChanged = true;
            } else {
                for (Map.Entry<Long, MaterializedIndexMeta> entry : olapTable.getIndexIdToMeta().entrySet()) {
                    long indexId = entry.getKey();
                    if (!indexIdToMeta.containsKey(indexId)) {
                        metaChanged = true;
                        break;
                    }
                    if (indexIdToMeta.get(indexId).getSchemaHash() != entry.getValue().getSchemaHash()) {
                        metaChanged = true;
                        break;
                    }
                }
            }
            if (metaChanged) {
                throw new DdlException("Table[" + tableName + "]'s meta has been changed. try again.");
            }
            PartitionInfo partitionInfo = olapTable.getPartitionInfo();
            if (partitionInfo.getType() != PartitionType.RANGE && partitionInfo.getType() != PartitionType.LIST) {
                throw new DdlException("Only support adding partition to range and list partitioned table");
            }
            partitionInfo.handleNewSinglePartitionDesc(singlePartitionDesc, partitionId, isTempPartition);
            if (isTempPartition) {
                olapTable.addTempPartition(partition);
            } else {
                olapTable.addPartition(partition);
            }
            // Persist the change so it can be replayed on other FEs / after restart.
            PartitionPersistInfo info = null;
            if (partitionInfo.getType() == PartitionType.RANGE) {
                info = new PartitionPersistInfo(db.getId(), olapTable.getId(), partition,
                        partitionInfo.getItem(partitionId).getItems(), ListPartitionItem.DUMMY_ITEM, dataProperty,
                        partitionInfo.getReplicaAllocation(partitionId), partitionInfo.getIsInMemory(partitionId),
                        isTempPartition);
            } else if (partitionInfo.getType() == PartitionType.LIST) {
                info = new PartitionPersistInfo(db.getId(), olapTable.getId(), partition,
                        RangePartitionItem.DUMMY_ITEM, partitionInfo.getItem(partitionId), dataProperty,
                        partitionInfo.getReplicaAllocation(partitionId), partitionInfo.getIsInMemory(partitionId),
                        isTempPartition);
            }
            Env.getCurrentEnv().getEditLog().logAddPartition(info);
            LOG.info("succeed in creating partition[{}], temp: {}", partitionId, isTempPartition);
        } finally {
            table.writeUnlock();
        }
    } catch (DdlException e) {
        // Roll back inverted-index registrations for any tablets already created.
        for (Long tabletId : tabletIdSet) {
            Env.getCurrentInvertedIndex().deleteTablet(tabletId);
        }
        throw e;
    }
}
/**
 * Replays an add-partition edit-log entry: attaches the partition to the table,
 * restores the partition info entry, and (outside checkpoint threads) re-registers
 * its tablets and replicas in the inverted index.
 */
public void replayAddPartition(PartitionPersistInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    olapTable.writeLock();
    try {
        Partition partition = info.getPartition();
        PartitionInfo partitionInfo = olapTable.getPartitionInfo();
        if (info.isTempPartition()) {
            olapTable.addTempPartition(partition);
        } else {
            olapTable.addPartition(partition);
        }
        // Rebuild the partition item matching the table's partitioning type.
        PartitionItem partitionItem = null;
        if (partitionInfo.getType() == PartitionType.RANGE) {
            partitionItem = new RangePartitionItem(info.getRange());
        } else if (partitionInfo.getType() == PartitionType.LIST) {
            partitionItem = info.getListPartitionItem();
        }
        partitionInfo.unprotectHandleNewSinglePartitionDesc(partition.getId(), info.isTempPartition(),
                partitionItem, info.getDataProperty(), info.getReplicaAlloc(), info.isInMemory());
        if (!Env.isCheckpointThread()) {
            // Register all tablets/replicas of the replayed partition.
            TabletInvertedIndex invertedIndex = Env.getCurrentInvertedIndex();
            for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {
                long indexId = index.getId();
                int schemaHash = olapTable.getSchemaHashByIndexId(indexId);
                TabletMeta tabletMeta = new TabletMeta(info.getDbId(), info.getTableId(), partition.getId(),
                        index.getId(), schemaHash, info.getDataProperty().getStorageMedium());
                for (Tablet tablet : index.getTablets()) {
                    long tabletId = tablet.getId();
                    invertedIndex.addTablet(tabletId, tabletMeta);
                    for (Replica replica : tablet.getReplicas()) {
                        invertedIndex.addReplica(tabletId, replica);
                    }
                }
            }
        }
    } finally {
        olapTable.writeUnlock();
    }
}
/**
 * Drops a (temp) partition from an OLAP table and writes the edit log.
 * The caller must already hold the table write lock (checked below).
 */
public void dropPartition(Database db, OlapTable olapTable, DropPartitionClause clause) throws DdlException {
    Preconditions.checkArgument(olapTable.isWriteLockHeldByCurrentThread());
    String partitionName = clause.getPartitionName();
    boolean isTempPartition = clause.isTempPartition();
    if (olapTable.getState() != OlapTableState.NORMAL) {
        throw new DdlException("Table[" + olapTable.getName() + "]'s state is not NORMAL");
    }
    if (!olapTable.checkPartitionNameExist(partitionName, isTempPartition)) {
        if (clause.isSetIfExists()) {
            LOG.info("drop partition[{}] which does not exist", partitionName);
            return;
        } else {
            ErrorReport.reportDdlException(ErrorCode.ERR_DROP_PARTITION_NON_EXISTENT, partitionName);
        }
    }
    PartitionInfo partitionInfo = olapTable.getPartitionInfo();
    if (partitionInfo.getType() != PartitionType.RANGE && partitionInfo.getType() != PartitionType.LIST) {
        throw new DdlException("Alter table [" + olapTable.getName() + "] failed. Not a partitioned table");
    }
    if (isTempPartition) {
        olapTable.dropTempPartition(partitionName, true);
    } else {
        // A non-forced drop is refused while committed-but-unfinished transactions
        // still reference the partition (forced drop skips this safety check).
        if (!clause.isForceDrop()) {
            Partition partition = olapTable.getPartition(partitionName);
            if (partition != null) {
                if (Env.getCurrentEnv().getGlobalTransactionMgr()
                        .existCommittedTxns(db.getId(), olapTable.getId(), partition.getId())) {
                    throw new DdlException(
                            "There are still some transactions in the COMMITTED state waiting to be completed."
                                    + " The partition [" + partitionName
                                    + "] cannot be dropped. If you want to forcibly drop(cannot be recovered),"
                                    + " please use \"DROP partition FORCE\".");
                }
            }
        }
        olapTable.dropPartition(db.getId(), partitionName, clause.isForceDrop());
    }
    // Persist the drop so it can be replayed.
    DropPartitionInfo info = new DropPartitionInfo(db.getId(), olapTable.getId(), partitionName, isTempPartition,
            clause.isForceDrop());
    Env.getCurrentEnv().getEditLog().logDropPartition(info);
    LOG.info("succeed in dropping partition[{}], is temp : {}, is force : {}", partitionName, isTempPartition,
            clause.isForceDrop());
}
/** Replays a drop-partition edit-log entry under the table write lock. */
public void replayDropPartition(DropPartitionInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable tbl = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    tbl.writeLock();
    try {
        if (info.isTempPartition()) {
            tbl.dropTempPartition(info.getPartitionName(), true);
        } else {
            tbl.dropPartition(info.getDbId(), info.getPartitionName(), info.isForceDrop());
        }
    } finally {
        tbl.writeUnlock();
    }
}
/** Replays erasing a partition from the recycle bin (edit-log replay path). */
public void replayErasePartition(long partitionId) {
    Env.getCurrentRecycleBin().replayErasePartition(partitionId);
}
/** Replays recovering a partition from the recycle bin back into its table. */
public void replayRecoverPartition(RecoverInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable tbl = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    tbl.writeLock();
    try {
        Env.getCurrentRecycleBin().replayRecoverPartition(tbl, info.getPartitionId());
    } finally {
        tbl.writeUnlock();
    }
}
/**
 * Creates a new partition together with all of its materialized indices and tablets,
 * sends create-replica tasks to the backends, and waits for them to finish.
 *
 * Fix: the overall-timeout cap previously computed
 * {@code Config.max_create_table_timeout_second * 1000} in int arithmetic, which can
 * silently overflow for large config values (the per-task line above already used
 * {@code 1000L}); the multiplication is now done in long arithmetic.
 *
 * @return the fully built partition (rollup indices attached, base index inside)
 * @throws DdlException if tablet creation fails or times out on any backend
 */
private Partition createPartitionWithIndices(String clusterName, long dbId, long tableId, long baseIndexId,
        long partitionId, String partitionName, Map<Long, MaterializedIndexMeta> indexIdToMeta,
        DistributionInfo distributionInfo, TStorageMedium storageMedium, ReplicaAllocation replicaAlloc,
        Long versionInfo, Set<String> bfColumns, double bfFpp, Set<Long> tabletIdSet, List<Index> indexes,
        boolean isInMemory, TStorageFormat storageFormat, TTabletType tabletType, TCompressionType compressionType,
        DataSortInfo dataSortInfo, boolean enableUniqueKeyMergeOnWrite, String storagePolicy) throws DdlException {
    Preconditions.checkArgument(baseIndexId != -1);
    MaterializedIndex baseIndex = new MaterializedIndex(baseIndexId, IndexState.NORMAL);
    Partition partition = new Partition(partitionId, partitionName, baseIndex, distributionInfo);
    // Collect the base index plus all rollup indices to create.
    Map<Long, MaterializedIndex> indexMap = new HashMap<>();
    indexMap.put(baseIndexId, baseIndex);
    for (long indexId : indexIdToMeta.keySet()) {
        if (indexId == baseIndexId) {
            continue;
        }
        MaterializedIndex rollup = new MaterializedIndex(indexId, IndexState.NORMAL);
        indexMap.put(indexId, rollup);
    }
    // An explicit version (e.g. from restore) overrides the default initial version.
    if (versionInfo != null) {
        partition.updateVisibleVersion(versionInfo);
    }
    long version = partition.getVisibleVersion();
    short totalReplicaNum = replicaAlloc.getTotalReplicaNum();
    for (Map.Entry<Long, MaterializedIndex> entry : indexMap.entrySet()) {
        long indexId = entry.getKey();
        MaterializedIndex index = entry.getValue();
        MaterializedIndexMeta indexMeta = indexIdToMeta.get(indexId);
        int schemaHash = indexMeta.getSchemaHash();
        TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, schemaHash, storageMedium);
        // Create the tablets in FE meta and choose backends for their replicas.
        createTablets(clusterName, index, ReplicaState.NORMAL, distributionInfo, version, replicaAlloc, tabletMeta,
                tabletIdSet);
        boolean ok = false;
        String errMsg = null;
        short shortKeyColumnCount = indexMeta.getShortKeyColumnCount();
        TStorageType storageType = indexMeta.getStorageType();
        List<Column> schema = indexMeta.getSchema();
        KeysType keysType = indexMeta.getKeysType();
        // One create-replica task per replica; the latch tracks their completion.
        int totalTaskNum = index.getTablets().size() * totalReplicaNum;
        MarkedCountDownLatch<Long, Long> countDownLatch = new MarkedCountDownLatch<Long, Long>(totalTaskNum);
        AgentBatchTask batchTask = new AgentBatchTask();
        for (Tablet tablet : index.getTablets()) {
            long tabletId = tablet.getId();
            for (Replica replica : tablet.getReplicas()) {
                long backendId = replica.getBackendId();
                long replicaId = replica.getId();
                countDownLatch.addMark(backendId, tabletId);
                CreateReplicaTask task = new CreateReplicaTask(backendId, dbId, tableId, partitionId, indexId,
                        tabletId, replicaId, shortKeyColumnCount, schemaHash, version, keysType, storageType,
                        storageMedium, schema, bfColumns, bfFpp, countDownLatch, indexes, isInMemory, tabletType,
                        dataSortInfo, compressionType, enableUniqueKeyMergeOnWrite, storagePolicy);
                task.setStorageFormat(storageFormat);
                batchTask.addTask(task);
                AgentTaskQueue.addTask(task);
            }
        }
        AgentTaskExecutor.submit(batchTask);
        // Timeout scales with the task count but is capped by the global maximum.
        // Both multiplications must be done in long arithmetic to avoid int overflow.
        long timeout = Config.tablet_create_timeout_second * 1000L * totalTaskNum;
        timeout = Math.min(timeout, Config.max_create_table_timeout_second * 1000L);
        try {
            ok = countDownLatch.await(timeout, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            LOG.warn("InterruptedException: ", e);
            ok = false;
        }
        if (!ok || !countDownLatch.getStatus().ok()) {
            errMsg = "Failed to create partition[" + partitionName + "]. Timeout.";
            // Clean up undelivered tasks so they don't linger in the queue.
            AgentTaskQueue.removeBatchTask(batchTask, TTaskType.CREATE);
            if (!countDownLatch.getStatus().ok()) {
                errMsg += " Error: " + countDownLatch.getStatus().getErrorMsg();
            } else {
                // Report up to 3 unfinished (backendId, tabletId) marks for diagnosis.
                List<Entry<Long, Long>> unfinishedMarks = countDownLatch.getLeftMarks();
                List<Entry<Long, Long>> subList = unfinishedMarks.subList(0, Math.min(unfinishedMarks.size(), 3));
                if (!subList.isEmpty()) {
                    errMsg += " Unfinished mark: " + Joiner.on(", ").join(subList);
                }
            }
            LOG.warn(errMsg);
            throw new DdlException(errMsg);
        }
        if (index.getId() != baseIndexId) {
            partition.createRollupIndex(index);
        }
    }
    return partition;
}
/** Creates an external MySQL-mapped table and registers it in the database. */
private void createMysqlTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    long tableId = Env.getCurrentEnv().getNextId();
    MysqlTable mysqlTable = new MysqlTable(tableId, tableName, stmt.getColumns(), stmt.getProperties());
    mysqlTable.setComment(stmt.getComment());
    if (!db.createTableWithLock(mysqlTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/** Creates an external ODBC-mapped table and registers it in the database. */
private void createOdbcTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    long tableId = Env.getCurrentEnv().getNextId();
    OdbcTable odbcTable = new OdbcTable(tableId, tableName, stmt.getColumns(), stmt.getProperties());
    odbcTable.setComment(stmt.getComment());
    if (!db.createTableWithLock(odbcTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates an Elasticsearch-backed external table: derives the schema from ES when no
 * columns are given, sets up partition info, syncs remote metadata, and registers
 * the table in the database.
 *
 * @return the created EsTable
 */
private Table createEsTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    EsTable esTable = new EsTable(tableName, stmt.getProperties());
    List<Column> baseSchema = stmt.getColumns();
    // When no columns are specified, derive the schema from the ES mapping.
    if (baseSchema.isEmpty()) {
        baseSchema = esTable.genColumnsFromEs();
    }
    validateColumns(baseSchema);
    esTable.setNewFullSchema(baseSchema);
    PartitionDesc partitionDesc = stmt.getPartitionDesc();
    PartitionInfo partitionInfo;
    Map<String, Long> partitionNameToId = Maps.newHashMap();
    if (partitionDesc != null) {
        partitionInfo = partitionDesc.toPartitionInfo(baseSchema, partitionNameToId, false);
    } else {
        // Unpartitioned: a single implicit partition named after the table.
        long partitionId = Env.getCurrentEnv().getNextId();
        partitionNameToId.put(tableName, partitionId);
        partitionInfo = new SinglePartitionInfo();
    }
    esTable.setPartitionInfo(partitionInfo);
    long tableId = Env.getCurrentEnv().getNextId();
    esTable.setId(tableId);
    esTable.setComment(stmt.getComment());
    // Pull remote metadata before the table becomes visible.
    esTable.syncTableMetaData();
    if (!db.createTableWithLock(esTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table{} with id {}", tableName, tableId);
    return esTable;
}
/** Creates an external broker table and registers it in the database. */
private void createBrokerTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    long tableId = Env.getCurrentEnv().getNextId();
    BrokerTable brokerTable = new BrokerTable(tableId, tableName, stmt.getColumns(), stmt.getProperties());
    brokerTable.setComment(stmt.getComment());
    brokerTable.setBrokerProperties(stmt.getExtProperties());
    if (!db.createTableWithLock(brokerTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates an external Hive-mapped table: verifies the target table exists in the
 * Hive Metastore and registers the mapping in the database.
 *
 * Fix: corrected the "dose not exist" typo in the user-facing error message.
 */
private void createHiveTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    List<Column> columns = stmt.getColumns();
    long tableId = Env.getCurrentEnv().getNextId();
    HiveTable hiveTable = new HiveTable(tableId, tableName, columns, stmt.getProperties());
    hiveTable.setComment(stmt.getComment());
    // Verify the referenced table actually exists in the Hive Metastore.
    HiveMetaStoreClient hiveMetaStoreClient = HiveMetaStoreClientHelper.getClient(
            hiveTable.getHiveProperties().get(HiveTable.HIVE_METASTORE_URIS));
    if (!HiveMetaStoreClientHelper.tableExists(hiveMetaStoreClient, hiveTable.getHiveDb(),
            hiveTable.getHiveTable())) {
        throw new DdlException(String.format("Table [%s] does not exist in Hive.", hiveTable.getHiveDbTable()));
    }
    if (!db.createTableWithLock(hiveTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates an external Hudi-mapped table: validates properties, verifies the table
 * exists in the Hive Metastore and is a supported Hudi table type, then registers
 * the mapping in the database.
 *
 * Fixes: the "realtime table" error used String.format with an argument but no
 * placeholder (the table name was silently dropped); also corrected the
 * "dose not exist" typo in the existence-check message.
 */
private void createHudiTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    List<Column> columns = stmt.getColumns();
    long tableId = Env.getCurrentEnv().getNextId();
    HudiTable hudiTable = new HudiTable(tableId, tableName, columns, stmt.getProperties());
    hudiTable.setComment(stmt.getComment());
    // Check properties (metastore URIs etc.) before touching the metastore.
    HudiUtils.validateCreateTable(hudiTable);
    String metastoreUris = hudiTable.getTableProperties().get(HudiProperty.HUDI_HIVE_METASTORE_URIS);
    HiveMetaStoreClient hiveMetaStoreClient = HiveMetaStoreClientHelper.getClient(metastoreUris);
    if (!HiveMetaStoreClientHelper.tableExists(hiveMetaStoreClient, hudiTable.getHmsDatabaseName(),
            hudiTable.getHmsTableName())) {
        throw new DdlException(
                String.format("Table [%s] does not exist in Hive Metastore.", hudiTable.getHmsTableIdentifer()));
    }
    org.apache.hadoop.hive.metastore.api.Table hiveTable = HiveMetaStoreClientHelper.getTable(
            hudiTable.getHmsDatabaseName(), hudiTable.getHmsTableName(), metastoreUris);
    if (!HudiUtils.isHudiTable(hiveTable)) {
        throw new DdlException(String.format("Table [%s] is not a hudi table.", hudiTable.getHmsTableIdentifer()));
    }
    if (HudiUtils.isHudiRealtimeTable(hiveTable)) {
        throw new DdlException(
                String.format("Can not support hudi realtime table [%s].", hudiTable.getHmsTableName()));
    }
    // Only validate columns when the user supplied an explicit schema.
    if (!hudiTable.getFullSchema().isEmpty()) {
        HudiUtils.validateColumns(hudiTable, hiveTable);
    }
    switch (hiveTable.getTableType()) {
        case "EXTERNAL_TABLE":
        case "MANAGED_TABLE":
            break;
        case "VIRTUAL_VIEW":
        default:
            throw new DdlException("unsupported hudi table type [" + hiveTable.getTableType() + "].");
    }
    if (!db.createTableWithLock(hudiTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates the tablets of one materialized index and assigns backends for their
 * replicas. For colocate tables the backend-per-bucket sequence is reused (or, for
 * the first table in the group, recorded and persisted); otherwise backends are
 * chosen freshly per bucket.
 */
private void createTablets(String clusterName, MaterializedIndex index, ReplicaState replicaState,
        DistributionInfo distributionInfo, long version, ReplicaAllocation replicaAlloc, TabletMeta tabletMeta,
        Set<Long> tabletIdSet) throws DdlException {
    ColocateTableIndex colocateIndex = Env.getCurrentColocateIndex();
    Map<Tag, List<List<Long>>> backendsPerBucketSeq = null;
    GroupId groupId = null;
    if (colocateIndex.isColocateTable(tabletMeta.getTableId())) {
        if (distributionInfo.getType() == DistributionInfoType.RANDOM) {
            throw new DdlException("Random distribution for colocate table is unsupported");
        }
        // Reuse the group's existing bucket -> backends mapping, if any.
        groupId = colocateIndex.getGroup(tabletMeta.getTableId());
        backendsPerBucketSeq = colocateIndex.getBackendsPerBucketSeq(groupId);
    }
    // Arbitrary choice applies to non-colocate tables and to the first table of a
    // colocate group (whose bucket sequence has not been recorded yet).
    boolean chooseBackendsArbitrary = backendsPerBucketSeq == null || backendsPerBucketSeq.isEmpty();
    if (chooseBackendsArbitrary) {
        backendsPerBucketSeq = Maps.newHashMap();
    }
    for (int i = 0; i < distributionInfo.getBucketNum(); ++i) {
        Tablet tablet = new Tablet(Env.getCurrentEnv().getNextId());
        index.addTablet(tablet, tabletMeta);
        // Track created tablet ids so the caller can roll back on failure.
        tabletIdSet.add(tablet.getId());
        Map<Tag, List<Long>> chosenBackendIds;
        if (chooseBackendsArbitrary) {
            if (!Config.disable_storage_medium_check) {
                chosenBackendIds = Env.getCurrentSystemInfo()
                        .selectBackendIdsForReplicaCreation(replicaAlloc, clusterName,
                                tabletMeta.getStorageMedium());
            } else {
                chosenBackendIds = Env.getCurrentSystemInfo()
                        .selectBackendIdsForReplicaCreation(replicaAlloc, clusterName, null);
            }
            // Record this bucket's choice so colocate siblings can reuse it.
            for (Map.Entry<Tag, List<Long>> entry : chosenBackendIds.entrySet()) {
                backendsPerBucketSeq.putIfAbsent(entry.getKey(), Lists.newArrayList());
                backendsPerBucketSeq.get(entry.getKey()).add(entry.getValue());
            }
        } else {
            // Colocate table with an established sequence: take bucket i's backends.
            chosenBackendIds = Maps.newHashMap();
            for (Map.Entry<Tag, List<List<Long>>> entry : backendsPerBucketSeq.entrySet()) {
                chosenBackendIds.put(entry.getKey(), entry.getValue().get(i));
            }
        }
        short totalReplicaNum = (short) 0;
        for (List<Long> backendIds : chosenBackendIds.values()) {
            for (long backendId : backendIds) {
                long replicaId = Env.getCurrentEnv().getNextId();
                Replica replica = new Replica(replicaId, backendId, replicaState, version,
                        tabletMeta.getOldSchemaHash());
                tablet.addReplica(replica);
                totalReplicaNum++;
            }
        }
        Preconditions.checkState(totalReplicaNum == replicaAlloc.getTotalReplicaNum(),
                totalReplicaNum + " vs. " + replicaAlloc.getTotalReplicaNum());
    }
    // First table of a colocate group: persist the newly chosen bucket sequence.
    if (groupId != null && chooseBackendsArbitrary) {
        colocateIndex.addBackendsPerBucketSeq(groupId, backendsPerBucketSeq);
        ColocatePersistInfo info = ColocatePersistInfo.createForBackendsPerBucketSeq(groupId, backendsPerBucketSeq);
        Env.getCurrentEnv().getEditLog().logColocateBackendsPerBucketSeq(info);
    }
}
/*
 * Check columns' order (all key columns must precede value columns) and that at
 * least one key column exists.
 */
/**
 * Validates a schema's column layout: the list must be non-empty, contain at
 * least one key column, and every key column must precede all value columns.
 *
 * @param columns full column list in declared order
 * @throws DdlException if the list is empty, has no key column, or a key
 *         column appears after a value column
 */
private void validateColumns(List<Column> columns) throws DdlException {
    if (columns.isEmpty()) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_MUST_HAVE_COLUMNS);
    }
    boolean seenValueColumn = false;
    boolean seenKeyColumn = false;
    for (Column col : columns) {
        if (!col.isKey()) {
            seenValueColumn = true;
            continue;
        }
        // A key column appearing after any value column violates the required ordering.
        if (seenValueColumn) {
            ErrorReport.reportDdlException(ErrorCode.ERR_OLAP_KEY_MUST_BEFORE_VALUE);
        }
        seenKeyColumn = true;
    }
    if (!seenKeyColumn) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_MUST_HAVE_KEYS);
    }
}
/*
* Truncate specified table or partitions.
* The main idea is:
*
* 1. using the same schema to create new table(partitions)
* 2. use the new created table(partitions) to replace the old ones.
*
* if no partition specified, it will truncate all partitions of this table, including all temp partitions,
* otherwise, it will only truncate those specified partitions.
*
*/
// Truncates a table or a subset of its partitions by rebuilding them as fresh, empty
// partitions. Locking protocol, in three phases:
//   1. table READ lock: snapshot the target partitions and selectively copy the table meta;
//   2. NO lock: create the replacement partitions (the expensive part);
//   3. table WRITE lock: verify the meta did not change concurrently, swap partitions in,
//      and write the edit log for replay on followers.
public void truncateTable(TruncateTableStmt truncateTableStmt) throws DdlException {
    TableRef tblRef = truncateTableStmt.getTblRef();
    TableName dbTbl = tblRef.getName();
    // partition name -> old partition id, captured under the read lock
    Map<String, Long> origPartitions = Maps.newHashMap();
    Map<Long, DistributionInfo> partitionsDistributionInfo = Maps.newHashMap();
    OlapTable copiedTbl;
    // No partition list means truncate every partition (temp partitions are dropped too).
    boolean truncateEntireTable = tblRef.getPartitionNames() == null;
    Database db = (Database) getDbOrDdlException(dbTbl.getDb());
    OlapTable olapTable = db.getOlapTableOrDdlException(dbTbl.getTbl());
    // ---- Phase 1: snapshot under the read lock ----
    olapTable.readLock();
    try {
        if (olapTable.getState() != OlapTableState.NORMAL) {
            throw new DdlException("Table' state is not NORMAL: " + olapTable.getState());
        }
        if (!truncateEntireTable) {
            for (String partName : tblRef.getPartitionNames().getPartitionNames()) {
                Partition partition = olapTable.getPartition(partName);
                if (partition == null) {
                    throw new DdlException("Partition " + partName + " does not exist");
                }
                origPartitions.put(partName, partition.getId());
                partitionsDistributionInfo.put(partition.getId(), partition.getDistributionInfo());
            }
        } else {
            for (Partition partition : olapTable.getPartitions()) {
                origPartitions.put(partition.getName(), partition.getId());
                partitionsDistributionInfo.put(partition.getId(), partition.getDistributionInfo());
            }
        }
        // Copy only the selected partitions' meta; only VISIBLE indexes are kept.
        copiedTbl = olapTable.selectiveCopy(origPartitions.keySet(), IndexExtState.VISIBLE, false);
    } finally {
        olapTable.readUnlock();
    }
    // ---- Phase 2: build replacement partitions without holding any lock ----
    List<Partition> newPartitions = Lists.newArrayList();
    // tablet ids registered in the inverted index so far, for rollback on failure
    Set<Long> tabletIdSet = Sets.newHashSet();
    try {
        for (Map.Entry<String, Long> entry : origPartitions.entrySet()) {
            long oldPartitionId = entry.getValue();
            long newPartitionId = Env.getCurrentEnv().getNextId();
            Partition newPartition = createPartitionWithIndices(db.getClusterName(), db.getId(), copiedTbl.getId(),
                    copiedTbl.getBaseIndexId(), newPartitionId, entry.getKey(), copiedTbl.getIndexIdToMeta(),
                    partitionsDistributionInfo.get(oldPartitionId),
                    copiedTbl.getPartitionInfo().getDataProperty(oldPartitionId).getStorageMedium(),
                    copiedTbl.getPartitionInfo().getReplicaAllocation(oldPartitionId), null /* version info */,
                    copiedTbl.getCopiedBfColumns(), copiedTbl.getBfFpp(), tabletIdSet, copiedTbl.getCopiedIndexes(),
                    copiedTbl.isInMemory(), copiedTbl.getStorageFormat(),
                    copiedTbl.getPartitionInfo().getTabletType(oldPartitionId), copiedTbl.getCompressionType(),
                    copiedTbl.getDataSortInfo(), copiedTbl.getEnableUniqueKeyMergeOnWrite(),
                    olapTable.getStoragePolicy());
            newPartitions.add(newPartition);
        }
    } catch (DdlException e) {
        // Roll back: drop any tablets already registered in the inverted index.
        for (Long tabletId : tabletIdSet) {
            Env.getCurrentInvertedIndex().deleteTablet(tabletId);
        }
        throw e;
    }
    Preconditions.checkState(origPartitions.size() == newPartitions.size());
    // ---- Phase 3: validate and swap under the write lock ----
    // Re-fetch by id: the table may have been dropped/recreated while unlocked.
    olapTable = (OlapTable) db.getTableOrDdlException(copiedTbl.getId());
    olapTable.writeLockOrDdlException();
    try {
        if (olapTable.getState() != OlapTableState.NORMAL) {
            throw new DdlException("Table' state is not NORMAL: " + olapTable.getState());
        }
        // The selected partitions must still exist with the same ids and names.
        for (Map.Entry<String, Long> entry : origPartitions.entrySet()) {
            Partition partition = copiedTbl.getPartition(entry.getValue());
            if (partition == null || !partition.getName().equalsIgnoreCase(entry.getKey())) {
                throw new DdlException("Partition [" + entry.getKey() + "] is changed");
            }
        }
        // The index set and every index's schema hash must be unchanged, otherwise
        // the freshly built partitions no longer match the live schema.
        boolean metaChanged = false;
        if (olapTable.getIndexNameToId().size() != copiedTbl.getIndexNameToId().size()) {
            metaChanged = true;
        } else {
            Map<Long, Integer> copiedIndexIdToSchemaHash = copiedTbl.getIndexIdToSchemaHash();
            for (Map.Entry<Long, Integer> entry : olapTable.getIndexIdToSchemaHash().entrySet()) {
                long indexId = entry.getKey();
                if (!copiedIndexIdToSchemaHash.containsKey(indexId)) {
                    metaChanged = true;
                    break;
                }
                if (!copiedIndexIdToSchemaHash.get(indexId).equals(entry.getValue())) {
                    metaChanged = true;
                    break;
                }
            }
        }
        if (metaChanged) {
            throw new DdlException("Table[" + copiedTbl.getName() + "]'s meta has been changed. try again.");
        }
        truncateTableInternal(olapTable, newPartitions, truncateEntireTable);
        // Persist the swap so followers replay it via replayTruncateTable().
        TruncateTableInfo info = new TruncateTableInfo(db.getId(), olapTable.getId(), newPartitions,
                truncateEntireTable);
        Env.getCurrentEnv().getEditLog().logTruncateTable(info);
    } finally {
        olapTable.writeUnlock();
    }
    LOG.info("finished to truncate table {}, partitions: {}", tblRef.getName().toSql(), tblRef.getPartitionNames());
}
/**
 * Swaps the freshly created empty partitions into the table and removes the
 * replaced partitions' tablets from the inverted index. When the entire table
 * is being truncated, all temp partitions are dropped as well.
 */
private void truncateTableInternal(OlapTable olapTable, List<Partition> newPartitions, boolean isEntireTable) {
    Set<Long> replacedTabletIds = Sets.newHashSet();
    for (Partition freshPartition : newPartitions) {
        // replacePartition() returns the partition that was swapped out.
        Partition replaced = olapTable.replacePartition(freshPartition);
        for (MaterializedIndex index : replaced.getMaterializedIndices(IndexExtState.ALL)) {
            for (Tablet tablet : index.getTablets()) {
                replacedTabletIds.add(tablet.getId());
            }
        }
    }
    if (isEntireTable) {
        olapTable.dropAllTempPartitions();
    }
    for (Long tabletId : replacedTabletIds) {
        Env.getCurrentInvertedIndex().deleteTablet(tabletId);
    }
}
// Replays a truncate-table operation from the edit log: swaps in the logged new
// partitions and, on non-checkpoint threads, re-registers all of their tablets and
// replicas in the tablet inverted index (checkpoint threads do not maintain it).
public void replayTruncateTable(TruncateTableInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTblId(), TableType.OLAP);
    olapTable.writeLock();
    try {
        truncateTableInternal(olapTable, info.getPartitions(), info.isEntireTable());
        if (!Env.isCheckpointThread()) {
            TabletInvertedIndex invertedIndex = Env.getCurrentInvertedIndex();
            for (Partition partition : info.getPartitions()) {
                long partitionId = partition.getId();
                TStorageMedium medium = olapTable.getPartitionInfo().getDataProperty(partitionId)
                        .getStorageMedium();
                for (MaterializedIndex mIndex : partition.getMaterializedIndices(IndexExtState.ALL)) {
                    long indexId = mIndex.getId();
                    int schemaHash = olapTable.getSchemaHashByIndexId(indexId);
                    TabletMeta tabletMeta = new TabletMeta(db.getId(), olapTable.getId(), partitionId, indexId,
                            schemaHash, medium);
                    // Register each new tablet and all of its replicas.
                    for (Tablet tablet : mIndex.getTablets()) {
                        long tabletId = tablet.getId();
                        invertedIndex.addTablet(tabletId, tabletMeta);
                        for (Replica replica : tablet.getReplicas()) {
                            invertedIndex.addReplica(tabletId, replica);
                        }
                    }
                }
            }
        }
    } finally {
        olapTable.writeUnlock();
    }
}
/**
 * Replays an external-table schema change from the edit log by installing the
 * new full schema under the table's write lock.
 */
public void replayAlterExternalTableSchema(String dbName, String tableName, List<Column> newSchema)
        throws MetaNotFoundException {
    Database database = (Database) getDbOrMetaException(dbName);
    Table targetTable = database.getTableOrMetaException(tableName);
    targetTable.writeLock();
    try {
        targetTable.setNewFullSchema(newSchema);
    } finally {
        targetTable.writeUnlock();
    }
}
/**
 * Drops every cached database entry from both lookup maps. The null checks are
 * purely defensive; both maps are initialized at field declaration. The two
 * clears are independent, so their order does not matter.
 */
public void clearDbs() {
    if (fullNameToDb != null) {
        fullNameToDb.clear();
    }
    if (idToDb != null) {
        idToDb.clear();
    }
}
/**
* create cluster
*
* @param stmt
* @throws DdlException
*/
// Creates a new cluster: allocates the requested number of backends, registers the
// cluster under the catalog lock, writes the edit log, and finally (outside the lock)
// creates the cluster's admin user.
public void createCluster(CreateClusterStmt stmt) throws DdlException {
    final String clusterName = stmt.getClusterName();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        if (nameToCluster.containsKey(clusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_HAS_EXIST, clusterName);
        } else {
            // Ask SystemInfoService for free backends; null means not enough backends,
            // which is only acceptable when zero instances were requested.
            List<Long> backendList = Env.getCurrentSystemInfo().createCluster(clusterName, stmt.getInstanceNum());
            if (backendList != null || stmt.getInstanceNum() == 0) {
                final long id = Env.getCurrentEnv().getNextId();
                final Cluster cluster = new Cluster(clusterName, id);
                cluster.setBackendIdList(backendList);
                unprotectCreateCluster(cluster);
                // The default cluster adopts every existing db that belongs to it.
                if (clusterName.equals(SystemInfoService.DEFAULT_CLUSTER)) {
                    for (Database db : idToDb.values()) {
                        if (db.getClusterName().equals(SystemInfoService.DEFAULT_CLUSTER)) {
                            cluster.addDb(db.getFullName(), db.getId());
                        }
                    }
                }
                Env.getCurrentEnv().getEditLog().logCreateCluster(cluster);
                LOG.info("finish to create cluster: {}", clusterName);
            } else {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_BE_NOT_ENOUGH);
            }
        }
    } finally {
        unlock();
    }
    // Create the per-cluster admin user after releasing the catalog lock.
    UserIdentity adminUser = new UserIdentity(PaloAuth.ADMIN_USER, "%");
    try {
        adminUser.analyze(stmt.getClusterName());
    } catch (AnalysisException e) {
        // analyze() of a fixed, well-formed identity is not expected to fail
        LOG.error("should not happen", e);
    }
    Env.getCurrentEnv().getAuth().createUser(new CreateUserStmt(new UserDesc(adminUser, "", true)));
}
/**
 * Registers a cluster in memory without taking the catalog lock or writing the
 * edit log. Callers must hold the catalog lock or be replaying the log.
 */
private void unprotectCreateCluster(Cluster cluster) {
    String clusterName = cluster.getName();
    // Claim every backend assigned to this cluster.
    for (Long backendId : cluster.getBackendIdList()) {
        Backend backend = Env.getCurrentSystemInfo().getBackend(backendId);
        backend.setOwnerClusterName(clusterName);
        backend.setBackendState(BackendState.using);
    }
    idToCluster.put(cluster.getId(), cluster);
    nameToCluster.put(clusterName, cluster);
    // Every cluster gets its own information_schema database.
    InfoSchemaDb infoSchemaDb = new InfoSchemaDb(clusterName);
    infoSchemaDb.setClusterName(clusterName);
    unprotectCreateDb(infoSchemaDb);
    if (clusterName.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
        Env.getCurrentEnv().setDefaultClusterCreated(true);
    }
}
/**
* replay create cluster
*
* @param cluster
*/
// Replays a cluster creation from the edit log. tryLock(true) blocks until the
// catalog lock is held; no edit log is written during replay.
public void replayCreateCluster(Cluster cluster) {
    tryLock(true);
    try {
        unprotectCreateCluster(cluster);
    } finally {
        unlock();
    }
}
/**
* drop cluster and cluster's db must be have deleted
*
* @param stmt
* @throws DdlException
*/
// Drops a cluster. Preconditions checked under the catalog lock: the cluster exists,
// none of its backends is mid-decommission, and it holds no user database (only its
// information_schema db may remain). Backends are released, the in-memory maps are
// updated, and the drop is logged; the cluster's users are dropped after unlocking.
public void dropCluster(DropClusterStmt stmt) throws DdlException {
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        final String clusterName = stmt.getClusterName();
        final Cluster cluster = nameToCluster.get(clusterName);
        if (cluster == null) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_EXISTS, clusterName);
        }
        final List<Backend> backends = Env.getCurrentSystemInfo().getClusterBackends(clusterName);
        for (Backend backend : backends) {
            if (backend.isDecommissioned()) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_ALTER_BE_IN_DECOMMISSION, clusterName);
            }
        }
        // size() > 1 because the cluster always contains its own information_schema db.
        if (cluster.getDbNames().size() > 1) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DELETE_DB_EXIST, clusterName);
        }
        Env.getCurrentSystemInfo().releaseBackends(clusterName, false /* is not replay */);
        final ClusterInfo info = new ClusterInfo(clusterName, cluster.getId());
        unprotectDropCluster(info, false /* is not replay */);
        Env.getCurrentEnv().getEditLog().logDropCluster(info);
    } finally {
        unlock();
    }
    // Remove the cluster's users outside the catalog lock.
    Env.getCurrentEnv().getAuth().dropUserOfCluster(stmt.getClusterName(), true /* is replay */);
}
/**
 * Removes a cluster and its information_schema database from the in-memory
 * maps, releasing the cluster's backends first. No locking, no edit log —
 * callers must hold the catalog lock or be replaying.
 */
private void unprotectDropCluster(ClusterInfo info, boolean isReplay) {
    Env.getCurrentSystemInfo().releaseBackends(info.getClusterName(), isReplay);
    idToCluster.remove(info.getClusterId());
    nameToCluster.remove(info.getClusterName());
    // The cluster's information_schema db is dropped along with it.
    String infoDbFullName = InfoSchemaDb.getFullInfoSchemaDbName(info.getClusterName());
    Database infoSchemaDb = fullNameToDb.get(infoDbFullName);
    fullNameToDb.remove(infoSchemaDb.getFullName());
    idToDb.remove(infoSchemaDb.getId());
}
// Replays a cluster drop from the edit log, then drops the cluster's users
// outside the catalog lock (mirroring dropCluster()).
public void replayDropCluster(ClusterInfo info) throws DdlException {
    tryLock(true);
    try {
        unprotectDropCluster(info, true/* is replay */);
    } finally {
        unlock();
    }
    Env.getCurrentEnv().getAuth().dropUserOfCluster(info.getClusterName(), true /* is replay */);
}
/**
 * Replays a cluster expansion: attaches the logged backend ids to the cluster
 * and marks each still-known backend as owned by it and in use.
 */
public void replayExpandCluster(ClusterInfo info) {
    tryLock(true);
    try {
        Cluster cluster = nameToCluster.get(info.getClusterName());
        cluster.addBackends(info.getBackendIdList());
        for (Long backendId : info.getBackendIdList()) {
            Backend backend = Env.getCurrentSystemInfo().getBackend(backendId);
            // The backend may have been dropped since the log entry was written.
            if (backend == null) {
                continue;
            }
            backend.setOwnerClusterName(info.getClusterName());
            backend.setBackendState(BackendState.using);
        }
    } finally {
        unlock();
    }
}
/**
* modify cluster: Expansion or shrink
*
* @param stmt
* @throws DdlException
*/
// Resizes a cluster. Expansion: grab free backends, attach them, write the edit log.
// Shrink: pick backends to remove and start a cluster decommission via an internal
// ALTER SYSTEM statement (the actual removal completes asynchronously). Requesting
// the current size is an error.
public void processModifyCluster(AlterClusterStmt stmt) throws UserException {
    final String clusterName = stmt.getAlterClusterName();
    final int newInstanceNum = stmt.getInstanceNum();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        Cluster cluster = nameToCluster.get(clusterName);
        if (cluster == null) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_EXISTS, clusterName);
        }
        // A resize is refused while any backend is still decommissioning.
        final List<Long> backendIdsInCluster = cluster.getBackendIdList();
        for (Long beId : backendIdsInCluster) {
            Backend be = Env.getCurrentSystemInfo().getBackend(beId);
            if (be.isDecommissioned()) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_ALTER_BE_IN_DECOMMISSION, clusterName);
            }
        }
        final int oldInstanceNum = backendIdsInCluster.size();
        if (newInstanceNum > oldInstanceNum) {
            // Expansion: allocate the additional backends and log the change.
            final List<Long> expandBackendIds = Env.getCurrentSystemInfo()
                    .calculateExpansionBackends(clusterName, newInstanceNum - oldInstanceNum);
            if (expandBackendIds == null) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_BE_NOT_ENOUGH);
            }
            cluster.addBackends(expandBackendIds);
            final ClusterInfo info = new ClusterInfo(clusterName, cluster.getId(), expandBackendIds);
            Env.getCurrentEnv().getEditLog().logExpandCluster(info);
        } else if (newInstanceNum < oldInstanceNum) {
            // Shrink: choose backends to drop and decommission them through the
            // regular ALTER SYSTEM machinery (tablets migrate off them first).
            final List<Long> decomBackendIds = Env.getCurrentSystemInfo()
                    .calculateDecommissionBackends(clusterName, oldInstanceNum - newInstanceNum);
            if (decomBackendIds == null || decomBackendIds.size() == 0) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_BACKEND_ERROR);
            }
            // DecommissionBackendClause addresses backends as "host:heartbeatPort".
            List<String> hostPortList = Lists.newArrayList();
            for (Long id : decomBackendIds) {
                final Backend backend = Env.getCurrentSystemInfo().getBackend(id);
                hostPortList.add(
                        new StringBuilder().append(backend.getHost()).append(":").append(backend.getHeartbeatPort())
                                .toString());
            }
            final DecommissionBackendClause clause = new DecommissionBackendClause(hostPortList);
            try {
                clause.analyze(null);
                clause.setType(DecommissionType.ClusterDecommission);
                AlterSystemStmt alterStmt = new AlterSystemStmt(clause);
                alterStmt.setClusterName(clusterName);
                Env.getCurrentEnv().getAlterInstance().processAlterCluster(alterStmt);
            } catch (AnalysisException e) {
                // The clause is built from known-good host:port strings.
                Preconditions.checkState(false, "should not happened: " + e.getMessage());
            }
        } else {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_ALTER_BE_NO_CHANGE, newInstanceNum);
        }
    } finally {
        unlock();
    }
}
/**
* @param ctx
* @param clusterName
* @throws DdlException
*/
// Switches the session's current cluster after verifying the connected user may
// enter it and the cluster exists. Both checks throw via ErrorReport on failure.
public void changeCluster(ConnectContext ctx, String clusterName) throws DdlException {
    if (!Env.getCurrentEnv().getAuth().checkCanEnterCluster(ConnectContext.get(), clusterName)) {
        ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_AUTHORITY, ConnectContext.get().getQualifiedUser(),
                "enter");
    }
    if (!nameToCluster.containsKey(clusterName)) {
        ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_EXISTS, clusterName);
    }
    ctx.setCluster(clusterName);
}
/**
* migrate db to link dest cluster
*
* @param stmt
* @throws DdlException
*/
// Migrates a previously linked database from its source cluster to the destination
// cluster. The db must already be in LINK state with a link registered on the
// destination; migration flips it to MOVE state (the clone checker finishes the move
// asynchronously — see getMigrations()). All map surgery happens under the catalog
// lock; the db's own fields are updated under its write lock; the move is logged.
public void migrateDb(MigrateDbStmt stmt) throws DdlException {
    final String srcClusterName = stmt.getSrcCluster();
    final String destClusterName = stmt.getDestCluster();
    final String srcDbName = stmt.getSrcDb();
    final String destDbName = stmt.getDestDb();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        // Validate clusters, the link, and destination capacity before mutating anything.
        if (!nameToCluster.containsKey(srcClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_SRC_CLUSTER_NOT_EXIST, srcClusterName);
        }
        if (!nameToCluster.containsKey(destClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DEST_CLUSTER_NOT_EXIST, destClusterName);
        }
        if (srcClusterName.equals(destClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATE_SAME_CLUSTER);
        }
        final Cluster srcCluster = this.nameToCluster.get(srcClusterName);
        if (!srcCluster.containDb(srcDbName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_SRC_DB_NOT_EXIST, srcDbName);
        }
        final Cluster destCluster = this.nameToCluster.get(destClusterName);
        if (!destCluster.containLink(destDbName, srcDbName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATION_NO_LINK, srcDbName, destDbName);
        }
        final Database db = fullNameToDb.get(srcDbName);
        // The destination must have enough backends for the db's largest replication factor.
        final int maxReplicationNum = db.getMaxReplicationNum();
        if (maxReplicationNum > destCluster.getBackendIdList().size()) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATE_BE_NOT_ENOUGH, destClusterName);
        }
        if (db.getDbState() == DbState.LINK) {
            // Param layout consumed by replayMigrateDb(): 0 = dest db name,
            // 1 = src db name, 2 = dest cluster, 3 = src cluster.
            final BaseParam param = new BaseParam();
            param.addStringParam(destDbName);
            param.addLongParam(db.getId());
            param.addStringParam(srcDbName);
            param.addStringParam(destClusterName);
            param.addStringParam(srcClusterName);
            // Re-home the db: drop from source maps, replace the dest link with real ownership.
            fullNameToDb.remove(db.getFullName());
            srcCluster.removeDb(db.getFullName(), db.getId());
            destCluster.removeLinkDb(param);
            destCluster.addDb(destDbName, db.getId());
            db.writeLock();
            try {
                db.setDbState(DbState.MOVE);
                db.setClusterName(destClusterName);
                db.setName(destDbName);
                db.setAttachDb(srcDbName);
            } finally {
                db.writeUnlock();
            }
            Env.getCurrentEnv().getEditLog().logMigrateCluster(param);
        } else {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATION_NO_LINK, srcDbName, destDbName);
        }
    } finally {
        unlock();
    }
}
/**
 * Replays a database migration from the edit log, mirroring migrateDb(): moves
 * the db from the source cluster to the destination cluster, renames it, and
 * flips it to MOVE state.
 *
 * Param layout (written by migrateDb()): string 0 = dest db name,
 * string 1 = src db name, string 2 = dest cluster, string 3 = src cluster.
 */
public void replayMigrateDb(BaseParam param) {
    final String desDbName = param.getStringParam();
    final String srcDbName = param.getStringParam(1);
    final String desClusterName = param.getStringParam(2);
    final String srcClusterName = param.getStringParam(3);
    tryLock(true);
    try {
        final Cluster desCluster = this.nameToCluster.get(desClusterName);
        final Cluster srcCluster = this.nameToCluster.get(srcClusterName);
        final Database db = fullNameToDb.get(srcDbName);
        if (db.getDbState() == DbState.LINK) {
            fullNameToDb.remove(db.getFullName());
            srcCluster.removeDb(db.getFullName(), db.getId());
            desCluster.removeLinkDb(param);
            desCluster.addDb(param.getStringParam(), db.getId());
            db.writeLock();
            // BUGFIX: release the db write lock in a finally block so it is not
            // leaked if a setter throws (consistent with migrateDb()).
            try {
                db.setName(desDbName);
                db.setAttachDb(srcDbName);
                db.setDbState(DbState.MOVE);
                db.setClusterName(desClusterName);
            } finally {
                db.writeUnlock();
            }
        }
    } finally {
        unlock();
    }
}
/**
 * Replays a database hard-link from the edit log, mirroring linkDb(): marks the
 * source db as LINKed, records its attach name, and exposes it in the
 * destination cluster under the destination name.
 *
 * Param layout (written by linkDb()): string 0 = dest db name,
 * string 1 = src db name, string 2 = dest cluster.
 */
public void replayLinkDb(BaseParam param) {
    final String desClusterName = param.getStringParam(2);
    final String srcDbName = param.getStringParam(1);
    final String desDbName = param.getStringParam();
    tryLock(true);
    try {
        final Cluster desCluster = this.nameToCluster.get(desClusterName);
        final Database srcDb = fullNameToDb.get(srcDbName);
        srcDb.writeLock();
        // BUGFIX: release the db write lock in a finally block so it is not
        // leaked if a setter throws (consistent with linkDb()).
        try {
            srcDb.setDbState(DbState.LINK);
            srcDb.setAttachDb(desDbName);
        } finally {
            srcDb.writeUnlock();
        }
        desCluster.addLinkDb(param);
        fullNameToDb.put(desDbName, srcDb);
    } finally {
        unlock();
    }
}
/**
* link src db to dest db. we use java's quotation Mechanism to realize db hard links
*
* @param stmt
* @throws DdlException
*/
// Creates a hard link to a database in another cluster: the same Database object
// becomes reachable under the destination name, the src db enters LINK state, the
// link is registered on the destination cluster, and the operation is logged for
// replay via replayLinkDb().
public void linkDb(LinkDbStmt stmt) throws DdlException {
    final String srcClusterName = stmt.getSrcCluster();
    final String destClusterName = stmt.getDestCluster();
    final String srcDbName = stmt.getSrcDb();
    final String destDbName = stmt.getDestDb();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        // Validate both clusters, the name collision, and the source db's state.
        if (!nameToCluster.containsKey(srcClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_SRC_CLUSTER_NOT_EXIST, srcClusterName);
        }
        if (!nameToCluster.containsKey(destClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DEST_CLUSTER_NOT_EXIST, destClusterName);
        }
        if (srcClusterName.equals(destClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATE_SAME_CLUSTER);
        }
        if (fullNameToDb.containsKey(destDbName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_DB_CREATE_EXISTS, destDbName);
        }
        final Cluster srcCluster = this.nameToCluster.get(srcClusterName);
        final Cluster destCluster = this.nameToCluster.get(destClusterName);
        if (!srcCluster.containDb(srcDbName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_SRC_DB_NOT_EXIST, srcDbName);
        }
        final Database srcDb = fullNameToDb.get(srcDbName);
        // A db can carry only one link/migration at a time.
        if (srcDb.getDbState() != DbState.NORMAL) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE,
                    ClusterNamespace.getNameFromFullName(srcDbName));
        }
        srcDb.writeLock();
        try {
            srcDb.setDbState(DbState.LINK);
            srcDb.setAttachDb(destDbName);
        } finally {
            srcDb.writeUnlock();
        }
        // Param layout consumed by replayLinkDb(): string 0 = dest db name,
        // string 1 = src db name, string 2 = dest cluster, string 3 = src cluster.
        final long id = Env.getCurrentEnv().getNextId();
        final BaseParam param = new BaseParam();
        param.addStringParam(destDbName);
        param.addStringParam(srcDbName);
        param.addLongParam(id);
        param.addLongParam(srcDb.getId());
        param.addStringParam(destClusterName);
        param.addStringParam(srcClusterName);
        destCluster.addLinkDb(param);
        // Both names now resolve to the same Database instance.
        fullNameToDb.put(destDbName, srcDb);
        Env.getCurrentEnv().getEditLog().logLinkCluster(param);
    } finally {
        unlock();
    }
}
/**
 * Returns the cluster with the given name, or null if it does not exist.
 */
public Cluster getCluster(String clusterName) {
    return nameToCluster.get(clusterName);
}
/**
 * Returns a snapshot list of all known cluster names.
 */
public List<String> getClusterNames() {
    return Lists.newArrayList(nameToCluster.keySet());
}
/**
* get migrate progress , when finish migration, next cloneCheck will reset dbState
*
* @return
*/
// Reports progress for every database currently in MOVE state. For each such db, the
// progress is the fraction of tablets that have reached replica quorum on the
// destination cluster's backends (per-tablet contribution is capped at the quorum).
// Each returned BaseParam carries: cluster name, attach db name, full db name, and
// the progress as a float in [0, 1].
public Set<BaseParam> getMigrations() {
    final Set<BaseParam> infos = Sets.newHashSet();
    for (Database db : fullNameToDb.values()) {
        db.readLock();
        try {
            if (db.getDbState() == DbState.MOVE) {
                int tabletTotal = 0;
                int tabletQuorum = 0;
                // Backends of the db's (destination) cluster; only replicas on these count.
                final Set<Long> beIds = Sets.newHashSet(
                        Env.getCurrentSystemInfo().getClusterBackendIds(db.getClusterName()));
                final Set<String> tableNames = db.getTableNamesWithLock();
                for (String tableName : tableNames) {
                    Table table = db.getTableNullable(tableName);
                    if (table == null || table.getType() != TableType.OLAP) {
                        continue;
                    }
                    OlapTable olapTable = (OlapTable) table;
                    olapTable.readLock();
                    try {
                        for (Partition partition : olapTable.getPartitions()) {
                            ReplicaAllocation replicaAlloc = olapTable.getPartitionInfo()
                                    .getReplicaAllocation(partition.getId());
                            short totalReplicaNum = replicaAlloc.getTotalReplicaNum();
                            for (MaterializedIndex materializedIndex : partition.getMaterializedIndices(
                                    IndexExtState.ALL)) {
                                if (materializedIndex.getState() != IndexState.NORMAL) {
                                    continue;
                                }
                                for (Tablet tablet : materializedIndex.getTablets()) {
                                    // Count healthy (non-CLONE) replicas already on destination backends.
                                    int replicaNum = 0;
                                    int quorum = totalReplicaNum / 2 + 1;
                                    for (Replica replica : tablet.getReplicas()) {
                                        if (replica.getState() != ReplicaState.CLONE && beIds.contains(
                                                replica.getBackendId())) {
                                            replicaNum++;
                                        }
                                    }
                                    // Cap at quorum so a fully migrated tablet contributes exactly 100%.
                                    if (replicaNum > quorum) {
                                        replicaNum = quorum;
                                    }
                                    tabletQuorum = tabletQuorum + replicaNum;
                                    tabletTotal = tabletTotal + quorum;
                                }
                            }
                        }
                    } finally {
                        olapTable.readUnlock();
                    }
                }
                final BaseParam info = new BaseParam();
                info.addStringParam(db.getClusterName());
                info.addStringParam(db.getAttachDb());
                info.addStringParam(db.getFullName());
                // Guard against a db with no tablets (avoid division by zero).
                final float percentage = tabletTotal > 0 ? (float) tabletQuorum / (float) tabletTotal : 0f;
                info.addFloatParam(percentage);
                infos.add(info);
            }
        } finally {
            db.readUnlock();
        }
    }
    return infos;
}
/**
 * Loads clusters from an image during metadata replay, folding the cluster
 * count and ids into the checksum. Each cluster's backend list is refreshed
 * from the live SystemInfoService (the image's list may be stale), and its
 * information_schema db is (re)created and registered in the catalog maps.
 *
 * @return the updated checksum
 */
public long loadCluster(DataInputStream dis, long checksum) throws IOException, DdlException {
    int clusterCount = dis.readInt();
    checksum ^= clusterCount;
    for (long i = 0; i < clusterCount; ++i) {
        final Cluster cluster = Cluster.read(dis);
        checksum ^= cluster.getId();
        List<Long> latestBackendIds = Env.getCurrentSystemInfo().getClusterBackendIds(cluster.getName());
        if (latestBackendIds.size() != cluster.getBackendIdList().size()) {
            // BUGFIX: the second count previously logged the image's backend count
            // twice; it must be the live SystemInfoService count.
            LOG.warn(
                    "Cluster:" + cluster.getName() + ", backends in Cluster is " + cluster.getBackendIdList().size()
                            + ", backends in SystemInfoService is " + latestBackendIds.size());
        }
        // Trust the live backend membership over the image.
        cluster.setBackendIdList(latestBackendIds);
        String dbName = InfoSchemaDb.getFullInfoSchemaDbName(cluster.getName());
        InfoSchemaDb db = (InfoSchemaDb) Env.getServingEnv().getInternalDataSource().getDbNullable(dbName);
        if (db == null) {
            db = new InfoSchemaDb(cluster.getName());
            db.setClusterName(cluster.getName());
        }
        // InfoSchemaDb ids are reserved below NEXT_ID_INIT_VALUE; anything else means
        // the image is inconsistent.
        String errMsg = "InfoSchemaDb id shouldn't larger than 10000, please restart your FE server";
        Preconditions.checkState(db.getId() < Env.NEXT_ID_INIT_VALUE, errMsg);
        idToDb.put(db.getId(), db);
        fullNameToDb.put(db.getFullName(), db);
        cluster.addDb(dbName, db.getId());
        idToCluster.put(cluster.getId(), cluster);
        nameToCluster.put(cluster.getName(), cluster);
    }
    LOG.info("finished replay cluster from image");
    return checksum;
}
/**
 * Bootstraps the default cluster from all currently registered backends,
 * assigns every existing database to it, and logs the creation. Exits the
 * process if two backends share a host, which the default cluster forbids.
 */
public void initDefaultCluster() {
    final List<Long> backendIds = Lists.newArrayList();
    final List<Backend> defaultClusterBackends = Env.getCurrentSystemInfo()
            .getClusterBackends(SystemInfoService.DEFAULT_CLUSTER);
    for (Backend backend : defaultClusterBackends) {
        backendIds.add(backend.getId());
    }
    final Cluster cluster = new Cluster(SystemInfoService.DEFAULT_CLUSTER, Env.getCurrentEnv().getNextId());
    // Refuse to start when two backends live on the same host.
    final Set<String> seenHosts = Sets.newHashSet();
    for (Backend backend : defaultClusterBackends) {
        if (!seenHosts.add(backend.getHost())) {
            LOG.error("found more than one backends in same host: {}", backend.getHost());
            System.exit(-1);
        }
    }
    cluster.setBackendIdList(backendIds);
    unprotectCreateCluster(cluster);
    // Every pre-existing database is adopted by the default cluster.
    for (Database db : idToDb.values()) {
        db.setClusterName(SystemInfoService.DEFAULT_CLUSTER);
        cluster.addDb(db.getFullName(), db.getId());
    }
    Env.getCurrentEnv().setDefaultClusterCreated(true);
    Env.getCurrentEnv().getEditLog().logCreateCluster(cluster);
}
/**
 * Replays a database cluster/state update from the edit log.
 */
public void replayUpdateDb(DatabaseInfo info) {
    Database target = fullNameToDb.get(info.getDbName());
    target.setClusterName(info.getClusterName());
    target.setDbState(info.getDbState());
}
/**
 * Serializes all user-created clusters (ids at or above NEXT_ID_INIT_VALUE) to
 * the image stream, folding the cluster count and each written cluster's id
 * into the checksum.
 *
 * @return the updated checksum
 */
public long saveCluster(CountingDataOutputStream dos, long checksum) throws IOException {
    final int clusterCount = idToCluster.size();
    checksum ^= clusterCount;
    dos.writeInt(clusterCount);
    for (Map.Entry<Long, Cluster> entry : idToCluster.entrySet()) {
        final long id = entry.getKey();
        // Ids below NEXT_ID_INIT_VALUE are reserved and never written to the image.
        if (id < Env.NEXT_ID_INIT_VALUE) {
            continue;
        }
        checksum ^= id;
        entry.getValue().write(dos);
    }
    return checksum;
}
/**
 * Replays the release of backends from their owning clusters: each listed
 * backend is detached from its cluster, un-decommissioned, and marked free.
 */
public void replayUpdateClusterAndBackends(BackendIdsUpdateInfo info) {
    for (long backendId : info.getBackendList()) {
        Backend backend = Env.getCurrentSystemInfo().getBackend(backendId);
        Cluster ownerCluster = nameToCluster.get(backend.getOwnerClusterName());
        ownerCluster.removeBackend(backendId);
        backend.setDecommissioned(false);
        backend.clearClusterName();
        backend.setBackendState(BackendState.free);
    }
}
/**
 * Returns the names of all databases in the given cluster.
 *
 * @throws AnalysisException if the cluster does not exist
 */
public List<String> getClusterDbNames(String clusterName) throws AnalysisException {
    Cluster cluster = nameToCluster.get(clusterName);
    if (cluster != null) {
        return Lists.newArrayList(cluster.getDbNames());
    }
    throw new AnalysisException("No cluster selected");
}
/**
 * Serializes every user database to the image stream, folding the db count and
 * each written db's id into the checksum. information_schema dbs are skipped —
 * they are recreated by loadCluster() on startup.
 *
 * @return the updated checksum
 */
public long saveDb(CountingDataOutputStream dos, long checksum) throws IOException {
    // One information_schema db exists per cluster and none of them is persisted.
    int dbCount = idToDb.size() - nameToCluster.keySet().size();
    checksum ^= dbCount;
    dos.writeInt(dbCount);
    for (Map.Entry<Long, Database> entry : idToDb.entrySet()) {
        Database db = entry.getValue();
        if (InfoSchemaDb.isInfoSchemaDb(db.getFullName())) {
            continue;
        }
        checksum ^= entry.getKey();
        db.write(dos);
    }
    return checksum;
}
// Loads user databases from an image during metadata replay, folding the db count and
// ids into the checksum, then rebuilds the tablet inverted index and reloads ES
// external tables.
public long loadDb(DataInputStream dis, long checksum) throws IOException, DdlException {
    int dbCount = dis.readInt();
    long newChecksum = checksum ^ dbCount;
    for (long i = 0; i < dbCount; ++i) {
        Database db = new Database();
        db.readFields(dis);
        newChecksum ^= db.getId();
        idToDb.put(db.getId(), db);
        fullNameToDb.put(db.getFullName(), db);
        // A LINKed db is reachable under its attach (link) name as well.
        if (db.getDbState() == DbState.LINK) {
            fullNameToDb.put(db.getAttachDb(), db);
        }
        // Every db needs its per-db transaction manager registered.
        Env.getCurrentGlobalTransactionMgr().addDatabaseTransactionMgr(db.getId());
    }
    recreateTabletInvertIndex();
    getEsRepository().loadTableFromCatalog();
    LOG.info("finished replay databases from image");
    return newChecksum;
}
} | class InternalDataSource implements DataSourceIf<Database> {
public static final String INTERNAL_DS_NAME = "internal";
public static final long INTERNAL_DS_ID = 0L;
private static final Logger LOG = LogManager.getLogger(InternalDataSource.class);
private QueryableReentrantLock lock = new QueryableReentrantLock(true);
private ConcurrentHashMap<Long, Database> idToDb = new ConcurrentHashMap<>();
private ConcurrentHashMap<String, Database> fullNameToDb = new ConcurrentHashMap<>();
private ConcurrentHashMap<Long, Cluster> idToCluster = new ConcurrentHashMap<>();
private ConcurrentHashMap<String, Cluster> nameToCluster = new ConcurrentHashMap<>();
@Getter
private EsRepository esRepository = new EsRepository();
@Getter
private IcebergTableCreationRecordMgr icebergTableCreationRecordMgr = new IcebergTableCreationRecordMgr();
@Override
public long getId() {
return INTERNAL_DS_ID;
}
@Override
public String getType() {
return "internal";
}
@Override
public String getName() {
return INTERNAL_DS_NAME;
}
@Override
public List<String> getDbNames() {
return Lists.newArrayList(fullNameToDb.keySet());
}
@Nullable
@Override
public Database getDbNullable(String dbName) {
if (fullNameToDb.containsKey(dbName)) {
return fullNameToDb.get(dbName);
} else {
String fullName = ClusterNamespace.getNameFromFullName(dbName);
if (fullName.equalsIgnoreCase(InfoSchemaDb.DATABASE_NAME)) {
String clusterName = ClusterNamespace.getClusterNameFromFullName(dbName);
fullName = ClusterNamespace.getFullName(clusterName, fullName.toLowerCase());
return fullNameToDb.get(fullName);
}
}
return null;
}
@Nullable
@Override
public Database getDbNullable(long dbId) {
return idToDb.get(dbId);
}
@Override
public Map<String, String> getProperties() {
return Maps.newHashMap();
}
@Override
public void modifyDatasourceName(String name) {
LOG.warn("Ignore the modify datasource name in build-in datasource.");
}
@Override
public void modifyDatasourceProps(Map<String, String> props) {
LOG.warn("Ignore the modify datasource props in build-in datasource.");
}
private boolean tryLock(boolean mustLock) {
while (true) {
try {
if (!lock.tryLock(Config.catalog_try_lock_timeout_ms, TimeUnit.MILLISECONDS)) {
if (LOG.isDebugEnabled()) {
Thread owner = lock.getOwner();
if (owner != null) {
LOG.debug("catalog lock is held by: {}", Util.dumpThread(owner, 10));
}
}
if (mustLock) {
continue;
} else {
return false;
}
}
return true;
} catch (InterruptedException e) {
LOG.warn("got exception while getting catalog lock", e);
if (mustLock) {
continue;
} else {
return lock.isHeldByCurrentThread();
}
}
}
}
public List<Long> getDbIds() {
return Lists.newArrayList(idToDb.keySet());
}
private void unlock() {
if (lock.isHeldByCurrentThread()) {
this.lock.unlock();
}
}
/**
* create the tablet inverted index from metadata.
*/
public void recreateTabletInvertIndex() {
if (Env.isCheckpointThread()) {
return;
}
TabletInvertedIndex invertedIndex = Env.getCurrentInvertedIndex();
for (Database db : this.fullNameToDb.values()) {
long dbId = db.getId();
for (Table table : db.getTables()) {
if (table.getType() != TableType.OLAP) {
continue;
}
OlapTable olapTable = (OlapTable) table;
long tableId = olapTable.getId();
Collection<Partition> allPartitions = olapTable.getAllPartitions();
for (Partition partition : allPartitions) {
long partitionId = partition.getId();
TStorageMedium medium = olapTable.getPartitionInfo().getDataProperty(partitionId)
.getStorageMedium();
for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {
long indexId = index.getId();
int schemaHash = olapTable.getSchemaHashByIndexId(indexId);
TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, schemaHash, medium);
for (Tablet tablet : index.getTablets()) {
long tabletId = tablet.getId();
invertedIndex.addTablet(tabletId, tabletMeta);
for (Replica replica : tablet.getReplicas()) {
invertedIndex.addReplica(tabletId, replica);
}
}
}
}
}
}
}
/**
 * Entry of creating a database.
 *
 * @param stmt the analyzed CREATE DATABASE statement
 * @throws DdlException if the target cluster does not exist, the database already
 *         exists (without IF NOT EXISTS), or the catalog lock cannot be acquired
 */
public void createDb(CreateDbStmt stmt) throws DdlException {
    final String clusterName = stmt.getClusterName();
    String fullDbName = stmt.getFullDbName();
    Map<String, String> properties = stmt.getProperties();
    // Allocate the id and build the Database object before taking the catalog lock.
    long id = Env.getCurrentEnv().getNextId();
    Database db = new Database(id, fullDbName);
    db.setClusterName(clusterName);
    db.setDbProperties(new DatabaseProperty(properties).checkAndBuildProperties());
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        if (!nameToCluster.containsKey(clusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_SELECT_CLUSTER, clusterName);
        }
        if (fullNameToDb.containsKey(fullDbName)) {
            if (stmt.isSetIfNotExists()) {
                LOG.info("create database[{}] which already exists", fullDbName);
                return;
            } else {
                ErrorReport.reportDdlException(ErrorCode.ERR_DB_CREATE_EXISTS, fullDbName);
            }
        } else {
            // Register in memory first, then persist to the edit log.
            unprotectCreateDb(db);
            Env.getCurrentEnv().getEditLog().logCreateDb(db);
        }
    } finally {
        unlock();
    }
    LOG.info("createDb dbName = " + fullDbName + ", id = " + id);
    // Databases carrying iceberg properties start background table-creation tracking.
    if (db.getDbProperties().getIcebergProperty().isExist()) {
        icebergTableCreationRecordMgr.registerDb(db);
    }
}
/**
 * Register a database in memory without locking or edit-log persistence
 * (hence "unprotect" — the caller must hold the catalog lock).
 * Used by both the DDL path and edit-log replay.
 *
 * @param db the database to register
 */
public void unprotectCreateDb(Database db) {
    idToDb.put(db.getId(), db);
    fullNameToDb.put(db.getFullName(), db);
    final Cluster cluster = nameToCluster.get(db.getClusterName());
    cluster.addDb(db.getFullName(), db.getId());
    // Each database gets its own transaction manager entry.
    Env.getCurrentGlobalTransactionMgr().addDatabaseTransactionMgr(db.getId());
}
/** Register a cluster in both the name and id indexes. */
public void addCluster(Cluster cluster) {
    nameToCluster.put(cluster.getName(), cluster);
    idToCluster.put(cluster.getId(), cluster);
}
/**
 * Replay a CREATE DATABASE edit-log entry.
 *
 * @param db the database recorded in the edit log
 */
public void replayCreateDb(Database db) {
    tryLock(true); // replay must not fail to lock; block until acquired
    try {
        unprotectCreateDb(db);
    } finally {
        unlock();
    }
}
/**
 * Drop a database, either into the recycle bin (normal drop) or permanently (FORCE).
 *
 * <p>Lock order: catalog lock -> db write lock -> all table write locks (by id order).
 *
 * @param stmt the analyzed DROP DATABASE statement
 * @throws DdlException if the db does not exist (without IF EXISTS), has pending
 *         COMMITTED transactions, contains a non-NORMAL OLAP table, is in a
 *         LINK/MOVE state that forbids dropping, or the catalog lock is unavailable
 */
public void dropDb(DropDbStmt stmt) throws DdlException {
    String dbName = stmt.getDbName();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        if (!fullNameToDb.containsKey(dbName)) {
            if (stmt.isSetIfExists()) {
                LOG.info("drop database[{}] which does not exist", dbName);
                return;
            } else {
                ErrorReport.reportDdlException(ErrorCode.ERR_DB_DROP_EXISTS, dbName);
            }
        }
        Database db = this.fullNameToDb.get(dbName);
        db.writeLock();
        try {
            // A non-force drop must not discard data of transactions that are
            // COMMITTED but not yet visible.
            if (!stmt.isForceDrop()) {
                if (Env.getCurrentEnv().getGlobalTransactionMgr().existCommittedTxns(db.getId(), null, null)) {
                    throw new DdlException(
                            "There are still some transactions in the COMMITTED state waiting to be completed. "
                                    + "The database [" + dbName
                                    + "] cannot be dropped. If you want to forcibly drop(cannot be recovered),"
                                    + " please use \"DROP database FORCE\".");
                }
            }
            // Dropping the "attach" alias of a linked database only removes the link.
            if (db.getDbState() == DbState.LINK && dbName.equals(db.getAttachDb())) {
                final DropLinkDbAndUpdateDbInfo info = new DropLinkDbAndUpdateDbInfo();
                fullNameToDb.remove(db.getAttachDb());
                db.setDbState(DbState.NORMAL);
                info.setUpdateDbState(DbState.NORMAL);
                final Cluster cluster = nameToCluster.get(
                        ClusterNamespace.getClusterNameFromFullName(db.getAttachDb()));
                final BaseParam param = new BaseParam();
                param.addStringParam(db.getAttachDb());
                param.addLongParam(db.getId());
                cluster.removeLinkDb(param);
                info.setDropDbCluster(cluster.getName());
                info.setDropDbId(db.getId());
                info.setDropDbName(db.getAttachDb());
                Env.getCurrentEnv().getEditLog().logDropLinkDb(info);
                return;
            }
            // The source side of a LINK and either side of a MOVE cannot be dropped.
            if (db.getDbState() == DbState.LINK && dbName.equals(db.getFullName())) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE,
                        ClusterNamespace.getNameFromFullName(dbName));
                return;
            }
            if (dbName.equals(db.getAttachDb()) && db.getDbState() == DbState.MOVE) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE,
                        ClusterNamespace.getNameFromFullName(dbName));
                return;
            }
            Set<String> tableNames = db.getTableNamesWithLock();
            List<Table> tableList = db.getTablesOnIdOrder();
            MetaLockUtils.writeLockTables(tableList);
            try {
                if (!stmt.isForceDrop()) {
                    for (Table table : tableList) {
                        if (table.getType() == TableType.OLAP) {
                            OlapTable olapTable = (OlapTable) table;
                            if (olapTable.getState() != OlapTableState.NORMAL) {
                                // BUGFIX: the message previously printed the state twice
                                // ("The table [" + olapTable.getState() + ...); it should
                                // name the offending table, as dropTable() does.
                                throw new DdlException("The table [" + olapTable.getName() + "]'s state is "
                                        + olapTable.getState() + ", cannot be dropped."
                                        + " please cancel the operation on olap table firstly."
                                        + " If you want to forcibly drop(cannot be recovered),"
                                        + " please use \"DROP table FORCE\".");
                            }
                        }
                    }
                }
                unprotectDropDb(db, stmt.isForceDrop(), false);
            } finally {
                MetaLockUtils.writeUnlockTables(tableList);
            }
            if (!stmt.isForceDrop()) {
                Env.getCurrentRecycleBin().recycleDatabase(db, tableNames);
            } else {
                Env.getCurrentEnv().eraseDatabase(db.getId(), false);
            }
        } finally {
            db.writeUnlock();
        }
        // Remove from catalog-level indexes and persist the drop.
        idToDb.remove(db.getId());
        fullNameToDb.remove(db.getFullName());
        final Cluster cluster = nameToCluster.get(db.getClusterName());
        cluster.removeDb(dbName, db.getId());
        DropDbInfo info = new DropDbInfo(dbName, stmt.isForceDrop());
        Env.getCurrentEnv().getEditLog().logDropDb(info);
    } finally {
        unlock();
    }
    LOG.info("finish drop database[{}], is force : {}", dbName, stmt.isForceDrop());
}
/**
 * Drop every table of a database in memory and mark the database dropped.
 * Caller must hold the required locks (hence "unprotect").
 *
 * @param db the database being dropped
 * @param isForceDrop whether tables bypass the recycle bin
 * @param isReplay true when invoked from edit-log replay
 */
public void unprotectDropDb(Database db, boolean isForceDrop, boolean isReplay) {
    // Stop iceberg background table creation for this db before tearing it down.
    if (db.getDbProperties().getIcebergProperty().isExist()) {
        icebergTableCreationRecordMgr.deregisterDb(db);
    }
    for (Table tbl : db.getTables()) {
        unprotectDropTable(db, tbl, isForceDrop, isReplay);
    }
    db.markDropped();
}
/**
 * Replay a "drop link db" edit-log entry: remove the link alias and restore
 * the source database's state.
 */
public void replayDropLinkDb(DropLinkDbAndUpdateDbInfo info) {
    tryLock(true); // replay must block until the lock is acquired
    try {
        final Database db = this.fullNameToDb.remove(info.getDropDbName());
        db.setDbState(info.getUpdateDbState());
        final Cluster cluster = nameToCluster.get(info.getDropDbCluster());
        final BaseParam param = new BaseParam();
        param.addStringParam(db.getAttachDb());
        param.addLongParam(db.getId());
        cluster.removeLinkDb(param);
    } finally {
        unlock();
    }
}
/**
 * Replay a DROP DATABASE edit-log entry. Mirrors the in-memory portion of
 * {@code dropDb} without re-logging.
 *
 * @param dbName full name of the database to drop
 * @param isForceDrop whether the original drop bypassed the recycle bin
 */
public void replayDropDb(String dbName, boolean isForceDrop) throws DdlException {
    tryLock(true);
    try {
        Database db = fullNameToDb.get(dbName);
        db.writeLock();
        try {
            Set<String> tableNames = db.getTableNamesWithLock();
            List<Table> tableList = db.getTablesOnIdOrder();
            MetaLockUtils.writeLockTables(tableList);
            try {
                unprotectDropDb(db, isForceDrop, true);
            } finally {
                MetaLockUtils.writeUnlockTables(tableList);
            }
            if (!isForceDrop) {
                Env.getCurrentRecycleBin().recycleDatabase(db, tableNames);
            } else {
                Env.getCurrentEnv().eraseDatabase(db.getId(), false);
            }
        } finally {
            db.writeUnlock();
        }
        // Remove from catalog-level indexes after releasing the db lock.
        fullNameToDb.remove(dbName);
        idToDb.remove(db.getId());
        final Cluster cluster = nameToCluster.get(db.getClusterName());
        cluster.removeDb(dbName, db.getId());
    } finally {
        unlock();
    }
}
/**
 * Recover a previously dropped database from the recycle bin and re-register it.
 *
 * @throws DdlException if a database with the same name already exists or the
 *         catalog lock cannot be acquired
 */
public void recoverDatabase(RecoverDbStmt recoverStmt) throws DdlException {
    if (getDb(recoverStmt.getDbName()).isPresent()) {
        throw new DdlException("Database[" + recoverStmt.getDbName() + "] already exist.");
    }
    Database db = Env.getCurrentRecycleBin().recoverDatabase(recoverStmt.getDbName());
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    // NOTE(review): db.writeLock()/writeLockTables are taken outside the try block;
    // if either throws, the catalog lock leaks — presumably assumed never to throw. Confirm.
    db.writeLock();
    List<Table> tableList = db.getTablesOnIdOrder();
    MetaLockUtils.writeLockTables(tableList);
    try {
        // Re-check under the lock: another thread may have created the name meanwhile.
        if (fullNameToDb.containsKey(db.getFullName())) {
            throw new DdlException("Database[" + db.getFullName() + "] already exist.");
        }
        fullNameToDb.put(db.getFullName(), db);
        idToDb.put(db.getId(), db);
        final Cluster cluster = nameToCluster.get(db.getClusterName());
        cluster.addDb(db.getFullName(), db.getId());
        // -1L table/partition ids mean "whole database" in the recover log record.
        RecoverInfo recoverInfo = new RecoverInfo(db.getId(), -1L, -1L);
        Env.getCurrentEnv().getEditLog().logRecoverDb(recoverInfo);
        db.unmarkDropped();
    } finally {
        MetaLockUtils.writeUnlockTables(tableList);
        db.writeUnlock();
        unlock();
    }
    LOG.info("recover database[{}]", db.getId());
}
/**
 * Recover a previously dropped table from the recycle bin back into its database.
 *
 * @throws DdlException if the table already exists or is not found in the recycle bin
 */
public void recoverTable(RecoverTableStmt recoverStmt) throws DdlException {
    String dbName = recoverStmt.getDbName();
    String tableName = recoverStmt.getTableName();
    Database db = (Database) getDbOrDdlException(dbName);
    db.writeLockOrDdlException();
    try {
        if (db.getTable(tableName).isPresent()) {
            ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
        }
        // The recycle bin performs the actual re-registration; false means not found.
        if (!Env.getCurrentRecycleBin().recoverTable(db, tableName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_TABLE, tableName, dbName);
        }
    } finally {
        db.writeUnlock();
    }
}
/**
 * Recover a previously dropped partition from the recycle bin back into its OLAP table.
 *
 * @throws DdlException if the partition already exists in the table
 */
public void recoverPartition(RecoverPartitionStmt recoverStmt) throws DdlException {
    String dbName = recoverStmt.getDbName();
    String tableName = recoverStmt.getTableName();
    Database db = getDbOrDdlException(dbName);
    OlapTable olapTable = db.getOlapTableOrDdlException(tableName);
    olapTable.writeLockOrDdlException();
    try {
        String partitionName = recoverStmt.getPartitionName();
        if (olapTable.getPartition(partitionName) != null) {
            throw new DdlException("partition[" + partitionName + "] already exist in table[" + tableName + "]");
        }
        Env.getCurrentRecycleBin().recoverPartition(db.getId(), olapTable, partitionName);
    } finally {
        olapTable.writeUnlock();
    }
}
/** Replay a permanent database erase; delegates to the recycle bin. */
public void replayEraseDatabase(long dbId) throws DdlException {
    Env.getCurrentRecycleBin().replayEraseDatabase(dbId);
}
/** Replay a database recovery: pull it back from the recycle bin and re-register it. */
public void replayRecoverDatabase(RecoverInfo info) {
    final long recoveredDbId = info.getDbId();
    Database recovered = Env.getCurrentRecycleBin().replayRecoverDatabase(recoveredDbId);
    replayCreateDb(recovered);
    LOG.info("replay recover db[{}]", recoveredDbId);
}
/**
 * Alter a database's data or replica quota and persist the change to the edit log.
 *
 * @throws DdlException if the database does not exist or its lock cannot be taken
 */
public void alterDatabaseQuota(AlterDatabaseQuotaStmt stmt) throws DdlException {
    String dbName = stmt.getDbName();
    Database db = (Database) getDbOrDdlException(dbName);
    QuotaType quotaType = stmt.getQuotaType();
    db.writeLockOrDdlException();
    try {
        long quota = stmt.getQuota();
        switch (quotaType) {
            case DATA:
                db.setDataQuota(quota);
                break;
            case REPLICA:
                db.setReplicaQuota(quota);
                break;
            default:
                break; // other quota types: nothing to update in memory
        }
        DatabaseInfo dbInfo = new DatabaseInfo(dbName, "", quota, quotaType);
        Env.getCurrentEnv().getEditLog().logAlterDb(dbInfo);
    } finally {
        db.writeUnlock();
    }
}
/**
 * Replay a quota change recorded in the edit log.
 *
 * @throws MetaNotFoundException if the database no longer exists
 */
public void replayAlterDatabaseQuota(String dbName, long quota, QuotaType quotaType) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(dbName);
    db.writeLock();
    try {
        switch (quotaType) {
            case DATA:
                db.setDataQuota(quota);
                break;
            case REPLICA:
                db.setReplicaQuota(quota);
                break;
            default:
                break; // other quota types: nothing to replay
        }
    } finally {
        db.writeUnlock();
    }
}
/**
 * Rename a database, updating both the cluster registry and the name index,
 * then persist the rename to the edit log.
 *
 * @throws DdlException if names are equal, the cluster/db is missing, the db is
 *         in LINK/MOVE state, the new name is taken, or the catalog lock is unavailable
 */
public void renameDatabase(AlterDatabaseRename stmt) throws DdlException {
    String fullDbName = stmt.getDbName();
    String newFullDbName = stmt.getNewDbName();
    String clusterName = stmt.getClusterName();
    if (fullDbName.equals(newFullDbName)) {
        throw new DdlException("Same database name");
    }
    Database db = null;
    Cluster cluster = null;
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        cluster = nameToCluster.get(clusterName);
        if (cluster == null) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_EXISTS, clusterName);
        }
        db = fullNameToDb.get(fullDbName);
        if (db == null) {
            ErrorReport.reportDdlException(ErrorCode.ERR_BAD_DB_ERROR, fullDbName);
        }
        // Linked or migrating databases keep their names until the operation completes.
        if (db.getDbState() == DbState.LINK || db.getDbState() == DbState.MOVE) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_RENAME_DB_ERR, fullDbName);
        }
        if (fullNameToDb.get(newFullDbName) != null) {
            throw new DdlException("Database name[" + newFullDbName + "] is already used");
        }
        // Re-register under the new name in the cluster, the db object, and the index.
        cluster.removeDb(db.getFullName(), db.getId());
        cluster.addDb(newFullDbName, db.getId());
        db.setNameWithLock(newFullDbName);
        fullNameToDb.remove(fullDbName);
        fullNameToDb.put(newFullDbName, db);
        DatabaseInfo dbInfo = new DatabaseInfo(fullDbName, newFullDbName, -1L, QuotaType.NONE);
        Env.getCurrentEnv().getEditLog().logDatabaseRename(dbInfo);
    } finally {
        unlock();
    }
    LOG.info("rename database[{}] to [{}]", fullDbName, newFullDbName);
}
/**
 * Replay a database rename recorded in the edit log.
 *
 * @param dbName old full name
 * @param newDbName new full name
 */
public void replayRenameDatabase(String dbName, String newDbName) {
    tryLock(true);
    try {
        Database db = fullNameToDb.get(dbName);
        Cluster cluster = nameToCluster.get(db.getClusterName());
        cluster.removeDb(db.getFullName(), db.getId());
        db.setName(newDbName);
        cluster.addDb(newDbName, db.getId());
        fullNameToDb.remove(dbName);
        fullNameToDb.put(newDbName, db);
    } finally {
        unlock();
    }
    LOG.info("replay rename database {} to {}", dbName, newDbName);
}
/**
 * Drop a table (or view), either into the recycle bin or permanently (FORCE).
 *
 * <p>Lock order: db write lock -> table write lock.
 *
 * @throws DdlException if the table does not exist (without IF EXISTS), the
 *         object kind does not match (TABLE vs VIEW), there are pending
 *         COMMITTED transactions, or the OLAP table state is not NORMAL
 */
public void dropTable(DropTableStmt stmt) throws DdlException {
    String dbName = stmt.getDbName();
    String tableName = stmt.getTableName();
    Database db = (Database) getDbOrDdlException(dbName);
    db.writeLockOrDdlException();
    try {
        Table table = db.getTableNullable(tableName);
        if (table == null) {
            if (stmt.isSetIfExists()) {
                LOG.info("drop table[{}] which does not exist", tableName);
                return;
            } else {
                ErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_TABLE, tableName, dbName);
            }
        }
        // DROP VIEW must target a view; DROP TABLE must not.
        if (stmt.isView()) {
            if (!(table instanceof View)) {
                ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "VIEW");
            }
        } else {
            if (table instanceof View) {
                ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "TABLE");
            }
        }
        // A non-force drop must not discard data of COMMITTED-but-not-visible txns.
        if (!stmt.isForceDrop()) {
            if (Env.getCurrentEnv().getGlobalTransactionMgr().existCommittedTxns(db.getId(), table.getId(), null)) {
                throw new DdlException(
                        "There are still some transactions in the COMMITTED state waiting to be completed. "
                                + "The table [" + tableName
                                + "] cannot be dropped. If you want to forcibly drop(cannot be recovered),"
                                + " please use \"DROP table FORCE\".");
            }
        }
        DropInfo info = new DropInfo(db.getId(), table.getId(), -1L, stmt.isForceDrop());
        table.writeLock();
        try {
            if (table instanceof OlapTable && !stmt.isForceDrop()) {
                OlapTable olapTable = (OlapTable) table;
                if ((olapTable.getState() != OlapTableState.NORMAL)) {
                    throw new DdlException("The table [" + tableName + "]'s state is " + olapTable.getState()
                            + ", cannot be dropped." + " please cancel the operation on olap table firstly."
                            + " If you want to forcibly drop(cannot be recovered),"
                            + " please use \"DROP table FORCE\".");
                }
            }
            unprotectDropTable(db, table, stmt.isForceDrop(), false);
        } finally {
            table.writeUnlock();
        }
        Env.getCurrentEnv().getEditLog().logDropTable(info);
    } finally {
        db.writeUnlock();
    }
    LOG.info("finished dropping table: {} from db: {}, is force: {}", tableName, dbName, stmt.isForceDrop());
}
/**
 * Remove a table from its database in memory, routing it to the recycle bin
 * unless force-dropped. Caller must hold the required locks (hence "unprotect").
 *
 * @return always true
 */
public boolean unprotectDropTable(Database db, Table table, boolean isForceDrop, boolean isReplay) {
    switch (table.getType()) {
        case ELASTICSEARCH:
            esRepository.deRegisterTable(table.getId());
            break;
        case OLAP:
            // Temp partitions are never recycled; discard them before the drop.
            ((OlapTable) table).dropAllTempPartitions();
            break;
        case ICEBERG:
            icebergTableCreationRecordMgr.deregisterTable(db, (IcebergTable) table);
            break;
        default:
            break;
    }
    db.dropTable(table.getName());
    if (isForceDrop) {
        // Force drop: erase OLAP data immediately; no recovery possible.
        if (table.getType() == TableType.OLAP) {
            Env.getCurrentEnv().onEraseOlapTable((OlapTable) table, isReplay);
        }
    } else {
        Env.getCurrentRecycleBin().recycleTable(db.getId(), table, isReplay);
    }
    LOG.info("finished dropping table[{}] in db[{}]", table.getName(), db.getFullName());
    return true;
}
/**
 * Replay a DROP TABLE edit-log entry.
 *
 * @throws MetaNotFoundException if the table id is no longer present in the db
 */
public void replayDropTable(Database db, long tableId, boolean isForceDrop) throws MetaNotFoundException {
    Table table = db.getTableOrMetaException(tableId);
    db.writeLock();
    table.writeLock();
    try {
        unprotectDropTable(db, table, isForceDrop, true);
    } finally {
        table.writeUnlock();
        db.writeUnlock();
    }
}
/** Replay a permanent table erase; delegates to the recycle bin. */
public void replayEraseTable(long tableId) {
    Env.getCurrentRecycleBin().replayEraseTable(tableId);
}
/**
 * Replay a table recovery from the recycle bin.
 *
 * @throws MetaNotFoundException if the database no longer exists
 */
public void replayRecoverTable(RecoverInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    db.writeLock();
    try {
        Env.getCurrentRecycleBin().replayRecoverTable(db, info.getTableId());
    } finally {
        db.writeUnlock();
    }
}
/**
 * Add a replica to a tablet during replay. Caller must hold the table lock.
 */
private void unprotectAddReplica(OlapTable olapTable, ReplicaPersistInfo info) {
    LOG.debug("replay add a replica {}", info);
    Partition partition = olapTable.getPartition(info.getPartitionId());
    MaterializedIndex materializedIndex = partition.getIndex(info.getIndexId());
    Tablet tablet = materializedIndex.getTablet(info.getTabletId());
    // -1 means the log record carries no schema hash; fall back to the index's current one.
    int schemaHash = info.getSchemaHash();
    if (schemaHash == -1) {
        schemaHash = olapTable.getSchemaHashByIndexId(info.getIndexId());
    }
    Replica replica =
            new Replica(info.getReplicaId(), info.getBackendId(), info.getVersion(), schemaHash, info.getDataSize(),
                    info.getRemoteDataSize(), info.getRowCount(), ReplicaState.NORMAL, info.getLastFailedVersion(),
                    info.getLastSuccessVersion());
    tablet.addReplica(replica);
}
/**
 * Update an existing replica's version/size/row-count during replay and clear
 * its "bad" flag. Caller must hold the table lock.
 */
private void unprotectUpdateReplica(OlapTable olapTable, ReplicaPersistInfo info) {
    LOG.debug("replay update a replica {}", info);
    Partition partition = olapTable.getPartition(info.getPartitionId());
    MaterializedIndex materializedIndex = partition.getIndex(info.getIndexId());
    Tablet tablet = materializedIndex.getTablet(info.getTabletId());
    Replica replica = tablet.getReplicaByBackendId(info.getBackendId());
    Preconditions.checkNotNull(replica, info);
    replica.updateVersionInfo(info.getVersion(), info.getDataSize(), info.getRemoteDataSize(), info.getRowCount());
    replica.setBad(false);
}
/**
 * Replay an "add replica" edit-log entry under the table's write lock.
 *
 * @throws MetaNotFoundException if the db or OLAP table no longer exists
 */
public void replayAddReplica(ReplicaPersistInfo info) throws MetaNotFoundException {
    Database database = (Database) getDbOrMetaException(info.getDbId());
    OlapTable targetTable = (OlapTable) database.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    targetTable.writeLock();
    try {
        unprotectAddReplica(targetTable, info);
    } finally {
        targetTable.writeUnlock();
    }
}
/**
 * Replay an "update replica" edit-log entry under the table's write lock.
 *
 * @throws MetaNotFoundException if the db or OLAP table no longer exists
 */
public void replayUpdateReplica(ReplicaPersistInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    olapTable.writeLock();
    try {
        unprotectUpdateReplica(olapTable, info);
    } finally {
        olapTable.writeUnlock();
    }
}
/**
 * Delete the replica on the given backend from its tablet.
 * Caller must hold the table lock.
 */
public void unprotectDeleteReplica(OlapTable olapTable, ReplicaPersistInfo info) {
    Partition partition = olapTable.getPartition(info.getPartitionId());
    MaterializedIndex index = partition.getIndex(info.getIndexId());
    index.getTablet(info.getTabletId()).deleteReplicaByBackendId(info.getBackendId());
}
/**
 * Replay a "delete replica" edit-log entry under the table's write lock.
 *
 * @throws MetaNotFoundException if the db or OLAP table no longer exists
 */
public void replayDeleteReplica(ReplicaPersistInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    olapTable.writeLock();
    try {
        unprotectDeleteReplica(olapTable, info);
    } finally {
        olapTable.writeUnlock();
    }
}
/**
 * Following is the step to create an olap table:
 * 1. create columns
 * 2. create partition info
 * 3. create distribution info
 * 4. set table id and base index id
 * 5. set bloom filter columns
 * 6. set and build TableProperty includes:
 * 6.1. dynamicProperty
 * 6.2. replicationNum
 * 6.3. inMemory
 * 6.4. storageFormat
 * 6.5. compressionType
 * 7. set index meta
 * 8. check colocation properties
 * 9. create tablet in BE
 * 10. add this table to FE's meta
 * 11. add this table to ColocateGroup if necessary
 *
 * This entry only dispatches on the engine name; each engine has its own creator.
 *
 * @throws UserException if quota/capacity checks fail, the table exists
 *         (without IF NOT EXISTS), or the engine is unknown
 */
public void createTable(CreateTableStmt stmt) throws UserException {
    String engineName = stmt.getEngineName();
    String dbName = stmt.getDbName();
    String tableName = stmt.getTableName();
    Database db = (Database) getDbOrDdlException(dbName);
    // Only internal (non-external) tables consume cluster capacity and db quota.
    if (!stmt.isExternal()) {
        Env.getCurrentSystemInfo().checkClusterCapacity(stmt.getClusterName());
        db.checkQuota();
    }
    if (db.getTable(tableName).isPresent()) {
        if (stmt.isSetIfNotExists()) {
            LOG.info("create table[{}] which already exists", tableName);
            return;
        } else {
            ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
        }
    }
    // Dispatch by engine. NOTE(review): the first four engines are matched
    // case-sensitively while the rest use equalsIgnoreCase — presumably the
    // analyzer lower-cases engineName earlier; confirm before relying on it.
    if (engineName.equals("olap")) {
        createOlapTable(db, stmt);
        return;
    } else if (engineName.equals("odbc")) {
        createOdbcTable(db, stmt);
        return;
    } else if (engineName.equals("mysql")) {
        createMysqlTable(db, stmt);
        return;
    } else if (engineName.equals("broker")) {
        createBrokerTable(db, stmt);
        return;
    } else if (engineName.equalsIgnoreCase("elasticsearch") || engineName.equalsIgnoreCase("es")) {
        createEsTable(db, stmt);
        return;
    } else if (engineName.equalsIgnoreCase("hive")) {
        createHiveTable(db, stmt);
        return;
    } else if (engineName.equalsIgnoreCase("iceberg")) {
        IcebergCatalogMgr.createIcebergTable(db, stmt);
        return;
    } else if (engineName.equalsIgnoreCase("hudi")) {
        createHudiTable(db, stmt);
        return;
    } else {
        ErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_STORAGE_ENGINE, engineName);
    }
    // Unreachable: every branch above returns or throws.
    Preconditions.checkState(false);
}
/**
 * CREATE TABLE LIKE: render the existing table's DDL, re-parse it, and create
 * the copy under the new name.
 *
 * @throws DdlException if the source is a view, a requested rollup is missing,
 *         rollup copy is requested on an external table, or creation fails
 */
public void createTableLike(CreateTableLikeStmt stmt) throws DdlException {
    try {
        DatabaseIf db = getDbOrDdlException(stmt.getExistedDbName());
        TableIf table = db.getTableOrDdlException(stmt.getExistedTableName());
        if (table.getType() == TableType.VIEW) {
            throw new DdlException("Not support create table from a View");
        }
        List<String> createTableStmt = Lists.newArrayList();
        table.readLock();
        try {
            if (table.getType() == TableType.OLAP) {
                // Every explicitly requested rollup must exist on the source table.
                if (!CollectionUtils.isEmpty(stmt.getRollupNames())) {
                    OlapTable olapTable = (OlapTable) table;
                    for (String rollupIndexName : stmt.getRollupNames()) {
                        if (!olapTable.hasMaterializedIndex(rollupIndexName)) {
                            throw new DdlException("Rollup index[" + rollupIndexName + "] not exists in Table["
                                    + olapTable.getName() + "]");
                        }
                    }
                }
            } else if (!CollectionUtils.isEmpty(stmt.getRollupNames()) || stmt.isWithAllRollup()) {
                throw new DdlException("Table[" + table.getName() + "] is external, not support rollup copy");
            }
            // Render the source table's CREATE TABLE statement into createTableStmt.
            Env.getDdlStmt(stmt, stmt.getDbName(), table, createTableStmt, null, null, false, false, true);
            if (createTableStmt.isEmpty()) {
                ErrorReport.reportDdlException(ErrorCode.ERROR_CREATE_TABLE_LIKE_EMPTY, "CREATE");
            }
        } finally {
            table.readUnlock();
        }
        // Re-parse the rendered DDL, retarget it at the new name, and create it.
        CreateTableStmt parsedCreateTableStmt = (CreateTableStmt) SqlParserUtils.parseAndAnalyzeStmt(
                createTableStmt.get(0), ConnectContext.get());
        parsedCreateTableStmt.setTableName(stmt.getTableName());
        parsedCreateTableStmt.setIfNotExists(stmt.isIfNotExists());
        createTable(parsedCreateTableStmt);
    } catch (UserException e) {
        throw new DdlException("Failed to execute CREATE TABLE LIKE " + stmt.getExistedTableName() + ". Reason: "
                + e.getMessage());
    }
}
/**
 * Create table for select (CTAS): derive column definitions from the query's
 * result expressions, then create the table.
 *
 * @throws DdlException if analysis of the derived CREATE TABLE fails
 */
public void createTableAsSelect(CreateTableAsSelectStmt stmt) throws DdlException {
    try {
        List<String> columnNames = stmt.getColumnNames();
        CreateTableStmt createTableStmt = stmt.getCreateTableStmt();
        QueryStmt queryStmt = stmt.getQueryStmt();
        ArrayList<Expr> resultExprs = queryStmt.getResultExprs();
        ArrayList<String> colLabels = queryStmt.getColLabels();
        int size = resultExprs.size();
        int colNameIndex = 0;
        for (int i = 0; i < size; ++i) {
            String name;
            // Prefer user-supplied column names; fall back to the query's labels.
            if (columnNames != null) {
                name = columnNames.get(i);
            } else {
                name = colLabels.get(i);
            }
            try {
                FeNameFormat.checkColumnName(name);
            } catch (AnalysisException exception) {
                // Invalid labels (e.g. expressions) get synthetic names _col0, _col1, ...
                name = "_col" + (colNameIndex++);
            }
            TypeDef typeDef;
            Expr resultExpr = resultExprs.get(i);
            Type resultType = resultExpr.getType();
            // Normalize result types into storable column types.
            if (resultType.isStringType() && resultType.getLength() < 0) {
                typeDef = new TypeDef(Type.STRING);
            } else if (resultType.isDecimalV2() && resultType.equals(ScalarType.DECIMALV2)) {
                typeDef = new TypeDef(ScalarType.createDecimalType(27, 9));
            } else if (resultType.isDecimalV3()) {
                typeDef = new TypeDef(ScalarType.createDecimalType(resultType.getPrecision(),
                        ((ScalarType) resultType).getScalarScale()));
            } else {
                typeDef = new TypeDef(resultExpr.getType());
            }
            ColumnDef columnDef;
            // When the expression maps to a source column, inherit its column attributes.
            if (resultExpr.getSrcSlotRef() == null) {
                columnDef = new ColumnDef(name, typeDef, false, null, true, new DefaultValue(false, null), "");
            } else {
                Column column = resultExpr.getSrcSlotRef().getDesc().getColumn();
                boolean setDefault = StringUtils.isNotBlank(column.getDefaultValue());
                columnDef = new ColumnDef(name, typeDef, column.isKey(), column.getAggregationType(),
                        column.isAllowNull(), new DefaultValue(setDefault, column.getDefaultValue()),
                        column.getComment());
            }
            createTableStmt.addColumnDef(columnDef);
            // Default distribution: hash on the first column with 10 buckets.
            if (createTableStmt.getDistributionDesc() == null && i == 0) {
                createTableStmt.setDistributionDesc(new HashDistributionDesc(10, Lists.newArrayList(name)));
            }
        }
        Analyzer dummyRootAnalyzer = new Analyzer(Env.getCurrentEnv(), ConnectContext.get());
        createTableStmt.analyze(dummyRootAnalyzer);
        createTable(createTableStmt);
    } catch (UserException e) {
        throw new DdlException("Failed to execute CTAS Reason: " + e.getMessage());
    }
}
/**
 * Replay a CREATE TABLE edit-log entry; also rebuilds the tablet inverted index
 * for OLAP tables (skipped on the checkpoint thread).
 *
 * @throws MetaNotFoundException if the table cannot be registered in the db
 */
public void replayCreateTable(String dbName, Table table) throws MetaNotFoundException {
    Database db = this.fullNameToDb.get(dbName);
    try {
        db.createTableWithLock(table, true, false);
    } catch (DdlException e) {
        throw new MetaNotFoundException(e.getMessage());
    }
    if (!Env.isCheckpointThread()) {
        if (table.getType() == TableType.OLAP) {
            // Register every tablet/replica of the new table in the inverted index.
            TabletInvertedIndex invertedIndex = Env.getCurrentInvertedIndex();
            OlapTable olapTable = (OlapTable) table;
            long dbId = db.getId();
            long tableId = table.getId();
            for (Partition partition : olapTable.getAllPartitions()) {
                long partitionId = partition.getId();
                TStorageMedium medium = olapTable.getPartitionInfo().getDataProperty(partitionId)
                        .getStorageMedium();
                for (MaterializedIndex mIndex : partition.getMaterializedIndices(IndexExtState.ALL)) {
                    long indexId = mIndex.getId();
                    int schemaHash = olapTable.getSchemaHashByIndexId(indexId);
                    TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, schemaHash, medium);
                    for (Tablet tablet : mIndex.getTablets()) {
                        long tabletId = tablet.getId();
                        invertedIndex.addTablet(tabletId, tabletMeta);
                        for (Replica replica : tablet.getReplicas()) {
                            invertedIndex.addReplica(tabletId, replica);
                        }
                    }
                }
            }
            DynamicPartitionUtil.registerOrRemoveDynamicPartitionTable(dbId, olapTable, true);
        }
    }
}
/**
 * Add a partition to a range/list-partitioned OLAP table.
 *
 * <p>Three phases:
 * 1. Under the table READ lock: validate state, name, distribution, colocation,
 *    and snapshot the index metadata.
 * 2. Without the lock: create the partition and its tablets on BEs (slow).
 * 3. Under the table WRITE lock: re-validate that nothing changed, attach the
 *    partition, and write the edit log.
 *
 * @throws DdlException on any validation failure, quota overflow, or if the
 *         table's meta changed between phase 1 and phase 3
 */
public void addPartition(Database db, String tableName, AddPartitionClause addPartitionClause) throws DdlException {
    SinglePartitionDesc singlePartitionDesc = addPartitionClause.getSingeRangePartitionDesc();
    DistributionDesc distributionDesc = addPartitionClause.getDistributionDesc();
    boolean isTempPartition = addPartitionClause.isTempPartition();
    DistributionInfo distributionInfo;
    Map<Long, MaterializedIndexMeta> indexIdToMeta;
    Set<String> bfColumns;
    String partitionName = singlePartitionDesc.getPartitionName();
    Table table = db.getOlapTableOrDdlException(tableName);
    OlapTable olapTable = (OlapTable) table;
    // Phase 1: validate under the read lock and snapshot what we need.
    olapTable.readLock();
    try {
        if (olapTable.getState() != OlapTableState.NORMAL) {
            throw new DdlException("Table[" + tableName + "]'s state is not NORMAL");
        }
        PartitionInfo partitionInfo = olapTable.getPartitionInfo();
        if (partitionInfo.getType() != PartitionType.RANGE && partitionInfo.getType() != PartitionType.LIST) {
            throw new DdlException("Only support adding partition to range and list partitioned table");
        }
        if (olapTable.checkPartitionNameExist(partitionName)) {
            if (singlePartitionDesc.isSetIfNotExists()) {
                LOG.info("add partition[{}] which already exists", partitionName);
                return;
            } else {
                ErrorReport.reportDdlException(ErrorCode.ERR_SAME_NAME_PARTITION, partitionName);
            }
        }
        // Inherit table-level replication/in-memory settings unless overridden.
        Map<String, String> properties = singlePartitionDesc.getProperties();
        ReplicaAllocation replicaAlloc = olapTable.getDefaultReplicaAllocation();
        if (!properties.containsKey(PropertyAnalyzer.PROPERTIES_REPLICATION_NUM) && !properties.containsKey(
                PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION)) {
            properties.put(PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION, replicaAlloc.toCreateStmt());
        }
        if (!properties.containsKey(PropertyAnalyzer.PROPERTIES_INMEMORY)) {
            properties.put(PropertyAnalyzer.PROPERTIES_INMEMORY, olapTable.isInMemory().toString());
        }
        singlePartitionDesc.analyze(partitionInfo.getPartitionColumns().size(), properties);
        partitionInfo.createAndCheckPartitionItem(singlePartitionDesc, isTempPartition);
        // The new partition's distribution must be compatible with the table default.
        List<Column> baseSchema = olapTable.getBaseSchema();
        DistributionInfo defaultDistributionInfo = olapTable.getDefaultDistributionInfo();
        if (distributionDesc != null) {
            distributionInfo = distributionDesc.toDistributionInfo(baseSchema);
            if (distributionInfo.getType() != defaultDistributionInfo.getType()) {
                throw new DdlException("Cannot assign different distribution type. default is: "
                        + defaultDistributionInfo.getType());
            }
            if (distributionInfo.getType() == DistributionInfoType.HASH) {
                HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo;
                List<Column> newDistriCols = hashDistributionInfo.getDistributionColumns();
                List<Column> defaultDistriCols
                        = ((HashDistributionInfo) defaultDistributionInfo).getDistributionColumns();
                if (!newDistriCols.equals(defaultDistriCols)) {
                    throw new DdlException(
                            "Cannot assign hash distribution with different distribution cols. " + "default is: "
                                    + defaultDistriCols);
                }
                if (hashDistributionInfo.getBucketNum() <= 0) {
                    throw new DdlException("Cannot assign hash distribution buckets less than 1");
                }
            }
        } else {
            distributionInfo = defaultDistributionInfo.toDistributionDesc().toDistributionInfo(baseSchema);
        }
        // Colocated tables must keep the group's distribution and replica allocation.
        if (Env.getCurrentColocateIndex().isColocateTable(olapTable.getId())) {
            String fullGroupName = db.getId() + "_" + olapTable.getColocateGroup();
            ColocateGroupSchema groupSchema = Env.getCurrentColocateIndex().getGroupSchema(fullGroupName);
            Preconditions.checkNotNull(groupSchema);
            groupSchema.checkDistribution(distributionInfo);
            groupSchema.checkReplicaAllocation(singlePartitionDesc.getReplicaAlloc());
        }
        // Snapshot index meta so phase 3 can detect concurrent schema changes.
        indexIdToMeta = olapTable.getCopiedIndexIdToMeta();
        bfColumns = olapTable.getCopiedBfColumns();
    } catch (AnalysisException e) {
        throw new DdlException(e.getMessage());
    } finally {
        olapTable.readUnlock();
    }
    Preconditions.checkNotNull(distributionInfo);
    Preconditions.checkNotNull(olapTable);
    Preconditions.checkNotNull(indexIdToMeta);
    DataProperty dataProperty = singlePartitionDesc.getPartitionDataProperty();
    Preconditions.checkNotNull(dataProperty);
    // Quota check: indexes * buckets * replicas new tablets will be created.
    long indexNum = indexIdToMeta.size();
    long bucketNum = distributionInfo.getBucketNum();
    long replicaNum = singlePartitionDesc.getReplicaAlloc().getTotalReplicaNum();
    long totalReplicaNum = indexNum * bucketNum * replicaNum;
    if (totalReplicaNum >= db.getReplicaQuotaLeftWithLock()) {
        throw new DdlException("Database " + db.getFullName() + " table " + tableName + " add partition increasing "
                + totalReplicaNum + " of replica exceeds quota[" + db.getReplicaQuota() + "]");
    }
    Set<Long> tabletIdSet = new HashSet<>();
    // Pre-reserve ids: 1 partition + one per replica + one per tablet.
    long bufferSize = 1 + totalReplicaNum + indexNum * bucketNum;
    IdGeneratorBuffer idGeneratorBuffer = Env.getCurrentEnv().getIdGeneratorBuffer(bufferSize);
    try {
        // Phase 2: create the partition and its tablets on BEs without holding locks.
        long partitionId = idGeneratorBuffer.getNextId();
        Partition partition = createPartitionWithIndices(db.getClusterName(), db.getId(), olapTable.getId(),
                olapTable.getBaseIndexId(), partitionId, partitionName, indexIdToMeta, distributionInfo,
                dataProperty.getStorageMedium(), singlePartitionDesc.getReplicaAlloc(),
                singlePartitionDesc.getVersionInfo(), bfColumns, olapTable.getBfFpp(), tabletIdSet,
                olapTable.getCopiedIndexes(), singlePartitionDesc.isInMemory(), olapTable.getStorageFormat(),
                singlePartitionDesc.getTabletType(), olapTable.getCompressionType(), olapTable.getDataSortInfo(),
                olapTable.getEnableUniqueKeyMergeOnWrite(), olapTable.getStoragePolicy(), idGeneratorBuffer);
        // Phase 3: re-fetch and re-validate under the write lock, then attach.
        table = db.getOlapTableOrDdlException(tableName);
        table.writeLockOrDdlException();
        try {
            olapTable = (OlapTable) table;
            if (olapTable.getState() != OlapTableState.NORMAL) {
                throw new DdlException("Table[" + tableName + "]'s state is not NORMAL");
            }
            if (olapTable.checkPartitionNameExist(partitionName)) {
                if (singlePartitionDesc.isSetIfNotExists()) {
                    LOG.info("add partition[{}] which already exists", partitionName);
                    return;
                } else {
                    ErrorReport.reportDdlException(ErrorCode.ERR_SAME_NAME_PARTITION, partitionName);
                }
            }
            // Detect any schema change that happened while we were not holding the lock.
            boolean metaChanged = false;
            if (olapTable.getIndexNameToId().size() != indexIdToMeta.size()) {
                metaChanged = true;
            } else {
                for (Map.Entry<Long, MaterializedIndexMeta> entry : olapTable.getIndexIdToMeta().entrySet()) {
                    long indexId = entry.getKey();
                    if (!indexIdToMeta.containsKey(indexId)) {
                        metaChanged = true;
                        break;
                    }
                    if (indexIdToMeta.get(indexId).getSchemaHash() != entry.getValue().getSchemaHash()) {
                        metaChanged = true;
                        break;
                    }
                }
            }
            if (metaChanged) {
                throw new DdlException("Table[" + tableName + "]'s meta has been changed. try again.");
            }
            PartitionInfo partitionInfo = olapTable.getPartitionInfo();
            if (partitionInfo.getType() != PartitionType.RANGE && partitionInfo.getType() != PartitionType.LIST) {
                throw new DdlException("Only support adding partition to range and list partitioned table");
            }
            partitionInfo.handleNewSinglePartitionDesc(singlePartitionDesc, partitionId, isTempPartition);
            if (isTempPartition) {
                olapTable.addTempPartition(partition);
            } else {
                olapTable.addPartition(partition);
            }
            PartitionPersistInfo info = null;
            if (partitionInfo.getType() == PartitionType.RANGE) {
                info = new PartitionPersistInfo(db.getId(), olapTable.getId(), partition,
                        partitionInfo.getItem(partitionId).getItems(), ListPartitionItem.DUMMY_ITEM, dataProperty,
                        partitionInfo.getReplicaAllocation(partitionId), partitionInfo.getIsInMemory(partitionId),
                        isTempPartition);
            } else if (partitionInfo.getType() == PartitionType.LIST) {
                info = new PartitionPersistInfo(db.getId(), olapTable.getId(), partition,
                        RangePartitionItem.DUMMY_ITEM, partitionInfo.getItem(partitionId), dataProperty,
                        partitionInfo.getReplicaAllocation(partitionId), partitionInfo.getIsInMemory(partitionId),
                        isTempPartition);
            }
            Env.getCurrentEnv().getEditLog().logAddPartition(info);
            LOG.info("succeed in creating partition[{}], temp: {}", partitionId, isTempPartition);
        } finally {
            table.writeUnlock();
        }
    } catch (DdlException e) {
        // Roll back: drop the tablets we registered in the inverted index.
        for (Long tabletId : tabletIdSet) {
            Env.getCurrentInvertedIndex().deleteTablet(tabletId);
        }
        throw e;
    }
}
/**
 * Replays an add-partition edit log entry (follower / log-replay path).
 * Re-attaches the partition to its table, restores the partition item,
 * data property, replica allocation and in-memory flag, and — except on
 * the checkpoint thread — rebuilds the tablet inverted index entries for
 * every materialized index in the partition.
 *
 * @throws MetaNotFoundException if the database or table no longer exists
 */
public void replayAddPartition(PartitionPersistInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    olapTable.writeLock();
    try {
        Partition partition = info.getPartition();
        PartitionInfo partitionInfo = olapTable.getPartitionInfo();
        if (info.isTempPartition()) {
            olapTable.addTempPartition(partition);
        } else {
            olapTable.addPartition(partition);
        }
        // Rebuild the partition item according to the table's partition type.
        PartitionItem partitionItem = null;
        if (partitionInfo.getType() == PartitionType.RANGE) {
            partitionItem = new RangePartitionItem(info.getRange());
        } else if (partitionInfo.getType() == PartitionType.LIST) {
            partitionItem = info.getListPartitionItem();
        }
        partitionInfo.unprotectHandleNewSinglePartitionDesc(partition.getId(), info.isTempPartition(),
                partitionItem, info.getDataProperty(), info.getReplicaAlloc(), info.isInMemory());
        // The checkpoint thread does not maintain the tablet inverted index.
        if (!Env.isCheckpointThread()) {
            TabletInvertedIndex invertedIndex = Env.getCurrentInvertedIndex();
            for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {
                long indexId = index.getId();
                int schemaHash = olapTable.getSchemaHashByIndexId(indexId);
                TabletMeta tabletMeta = new TabletMeta(info.getDbId(), info.getTableId(), partition.getId(),
                        index.getId(), schemaHash, info.getDataProperty().getStorageMedium());
                for (Tablet tablet : index.getTablets()) {
                    long tabletId = tablet.getId();
                    invertedIndex.addTablet(tabletId, tabletMeta);
                    for (Replica replica : tablet.getReplicas()) {
                        invertedIndex.addReplica(tabletId, replica);
                    }
                }
            }
        }
    } finally {
        olapTable.writeUnlock();
    }
}
/**
 * Drops a partition (or temp partition) from an OLAP table and logs the
 * operation. The caller must already hold the table's write lock (checked
 * below). Non-forced drops are rejected while COMMITTED transactions on the
 * partition are still pending; per the error message, only a forced drop is
 * unrecoverable.
 *
 * @throws DdlException on invalid state, missing partition, or pending txns
 */
public void dropPartition(Database db, OlapTable olapTable, DropPartitionClause clause) throws DdlException {
    Preconditions.checkArgument(olapTable.isWriteLockHeldByCurrentThread());
    String partitionName = clause.getPartitionName();
    boolean isTempPartition = clause.isTempPartition();
    if (olapTable.getState() != OlapTableState.NORMAL) {
        throw new DdlException("Table[" + olapTable.getName() + "]'s state is not NORMAL");
    }
    if (!olapTable.checkPartitionNameExist(partitionName, isTempPartition)) {
        // "IF EXISTS" makes a missing partition a no-op instead of an error.
        if (clause.isSetIfExists()) {
            LOG.info("drop partition[{}] which does not exist", partitionName);
            return;
        } else {
            ErrorReport.reportDdlException(ErrorCode.ERR_DROP_PARTITION_NON_EXISTENT, partitionName);
        }
    }
    PartitionInfo partitionInfo = olapTable.getPartitionInfo();
    if (partitionInfo.getType() != PartitionType.RANGE && partitionInfo.getType() != PartitionType.LIST) {
        throw new DdlException("Alter table [" + olapTable.getName() + "] failed. Not a partitioned table");
    }
    if (isTempPartition) {
        olapTable.dropTempPartition(partitionName, true);
    } else {
        if (!clause.isForceDrop()) {
            // Block non-forced drops while committed txns on this partition
            // are still waiting to be finished.
            Partition partition = olapTable.getPartition(partitionName);
            if (partition != null) {
                if (Env.getCurrentEnv().getGlobalTransactionMgr()
                        .existCommittedTxns(db.getId(), olapTable.getId(), partition.getId())) {
                    throw new DdlException(
                            "There are still some transactions in the COMMITTED state waiting to be completed."
                                    + " The partition [" + partitionName
                                    + "] cannot be dropped. If you want to forcibly drop(cannot be recovered),"
                                    + " please use \"DROP partition FORCE\".");
                }
            }
        }
        olapTable.dropPartition(db.getId(), partitionName, clause.isForceDrop());
    }
    // Persist the drop in the edit log so followers replay it.
    DropPartitionInfo info = new DropPartitionInfo(db.getId(), olapTable.getId(), partitionName, isTempPartition,
            clause.isForceDrop());
    Env.getCurrentEnv().getEditLog().logDropPartition(info);
    LOG.info("succeed in dropping partition[{}], is temp : {}, is force : {}", partitionName, isTempPartition,
            clause.isForceDrop());
}
/**
 * Replays a drop-partition edit log entry on the table named by {@code info},
 * under the table's write lock.
 */
public void replayDropPartition(DropPartitionInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable table = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    table.writeLock();
    try {
        String partitionName = info.getPartitionName();
        if (info.isTempPartition()) {
            table.dropTempPartition(partitionName, true);
        } else {
            table.dropPartition(info.getDbId(), partitionName, info.isForceDrop());
        }
    } finally {
        table.writeUnlock();
    }
}
/**
 * Replays the final erasure of a partition, delegating to the recycle bin.
 */
public void replayErasePartition(long partitionId) {
    Env.getCurrentRecycleBin().replayErasePartition(partitionId);
}
/**
 * Replays the recovery of a dropped partition from the recycle bin, under
 * the owning table's write lock.
 */
public void replayRecoverPartition(RecoverInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable table = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);
    table.writeLock();
    try {
        Env.getCurrentRecycleBin().replayRecoverPartition(table, info.getPartitionId());
    } finally {
        table.writeUnlock();
    }
}
/**
 * Creates a partition together with all of its materialized indexes (the
 * base index plus every rollup listed in {@code indexIdToMeta}), creates
 * the tablets/replicas for each index, and synchronously dispatches
 * CreateReplicaTask batches to the backends, waiting for completion.
 *
 * @param versionInfo if non-null, used as the partition's initial visible version
 * @param tabletIdSet every created tablet id is added here so the caller can
 *        clean up the inverted index if partition creation fails
 * @return the fully built partition with rollup indexes attached
 * @throws DdlException if replica creation fails or times out on any backend
 */
private Partition createPartitionWithIndices(String clusterName, long dbId, long tableId, long baseIndexId,
        long partitionId, String partitionName, Map<Long, MaterializedIndexMeta> indexIdToMeta,
        DistributionInfo distributionInfo, TStorageMedium storageMedium, ReplicaAllocation replicaAlloc,
        Long versionInfo, Set<String> bfColumns, double bfFpp, Set<Long> tabletIdSet, List<Index> indexes,
        boolean isInMemory, TStorageFormat storageFormat, TTabletType tabletType, TCompressionType compressionType,
        DataSortInfo dataSortInfo, boolean enableUniqueKeyMergeOnWrite, String storagePolicy,
        IdGeneratorBuffer idGeneratorBuffer) throws DdlException {
    Preconditions.checkArgument(baseIndexId != -1);
    MaterializedIndex baseIndex = new MaterializedIndex(baseIndexId, IndexState.NORMAL);

    // Create the partition with its base index, then register each rollup index.
    Partition partition = new Partition(partitionId, partitionName, baseIndex, distributionInfo);
    Map<Long, MaterializedIndex> indexMap = new HashMap<>();
    indexMap.put(baseIndexId, baseIndex);
    for (long indexId : indexIdToMeta.keySet()) {
        if (indexId == baseIndexId) {
            continue;
        }
        MaterializedIndex rollup = new MaterializedIndex(indexId, IndexState.NORMAL);
        indexMap.put(indexId, rollup);
    }

    if (versionInfo != null) {
        partition.updateVisibleVersion(versionInfo);
    }
    long version = partition.getVisibleVersion();

    short totalReplicaNum = replicaAlloc.getTotalReplicaNum();
    for (Map.Entry<Long, MaterializedIndex> entry : indexMap.entrySet()) {
        long indexId = entry.getKey();
        MaterializedIndex index = entry.getValue();
        MaterializedIndexMeta indexMeta = indexIdToMeta.get(indexId);

        // Create tablets and replicas for this index; ids go into tabletIdSet.
        int schemaHash = indexMeta.getSchemaHash();
        TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, schemaHash, storageMedium);
        createTablets(clusterName, index, ReplicaState.NORMAL, distributionInfo, version, replicaAlloc, tabletMeta,
                tabletIdSet, idGeneratorBuffer);

        boolean ok = false;
        String errMsg = null;

        // Build one CreateReplicaTask per replica and dispatch them in a batch,
        // waiting on a marked latch (mark = backendId/tabletId).
        short shortKeyColumnCount = indexMeta.getShortKeyColumnCount();
        TStorageType storageType = indexMeta.getStorageType();
        List<Column> schema = indexMeta.getSchema();
        KeysType keysType = indexMeta.getKeysType();
        int totalTaskNum = index.getTablets().size() * totalReplicaNum;
        MarkedCountDownLatch<Long, Long> countDownLatch = new MarkedCountDownLatch<Long, Long>(totalTaskNum);
        AgentBatchTask batchTask = new AgentBatchTask();
        for (Tablet tablet : index.getTablets()) {
            long tabletId = tablet.getId();
            for (Replica replica : tablet.getReplicas()) {
                long backendId = replica.getBackendId();
                long replicaId = replica.getId();
                countDownLatch.addMark(backendId, tabletId);
                CreateReplicaTask task = new CreateReplicaTask(backendId, dbId, tableId, partitionId, indexId,
                        tabletId, replicaId, shortKeyColumnCount, schemaHash, version, keysType, storageType,
                        storageMedium, schema, bfColumns, bfFpp, countDownLatch, indexes, isInMemory, tabletType,
                        dataSortInfo, compressionType, enableUniqueKeyMergeOnWrite, storagePolicy);
                task.setStorageFormat(storageFormat);
                batchTask.addTask(task);
                // Also track the task in the global agent task queue.
                AgentTaskQueue.addTask(task);
            }
        }
        AgentTaskExecutor.submit(batchTask);

        // Timeout scales with the task count but is capped by the table-level
        // limit. Use 1000L (not 1000) so the multiplication is done in long
        // arithmetic; a large max_create_table_timeout_second would otherwise
        // overflow int before widening.
        long timeout = Config.tablet_create_timeout_second * 1000L * totalTaskNum;
        timeout = Math.min(timeout, Config.max_create_table_timeout_second * 1000L);
        try {
            ok = countDownLatch.await(timeout, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            LOG.warn("InterruptedException: ", e);
            ok = false;
            // Restore the interrupt status so upstream code can observe it.
            Thread.currentThread().interrupt();
        }

        if (!ok || !countDownLatch.getStatus().ok()) {
            errMsg = "Failed to create partition[" + partitionName + "]. Timeout.";
            // Clear pending create tasks; they are useless once creation failed.
            AgentTaskQueue.removeBatchTask(batchTask, TTaskType.CREATE);
            if (!countDownLatch.getStatus().ok()) {
                errMsg += " Error: " + countDownLatch.getStatus().getErrorMsg();
            } else {
                // Report up to 3 unfinished (backendId, tabletId) marks for diagnosis.
                List<Entry<Long, Long>> unfinishedMarks = countDownLatch.getLeftMarks();
                List<Entry<Long, Long>> subList = unfinishedMarks.subList(0, Math.min(unfinishedMarks.size(), 3));
                if (!subList.isEmpty()) {
                    errMsg += " Unfinished mark: " + Joiner.on(", ").join(subList);
                }
            }
            LOG.warn(errMsg);
            throw new DdlException(errMsg);
        }

        if (index.getId() != baseIndexId) {
            partition.createRollupIndex(index);
        }
    }
    return partition;
}
/**
 * Creates an external MySQL-mapped table from the CREATE TABLE statement
 * and registers it in the database.
 */
private void createMysqlTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    long tableId = Env.getCurrentEnv().getNextId();
    MysqlTable mysqlTable = new MysqlTable(tableId, tableName, stmt.getColumns(), stmt.getProperties());
    mysqlTable.setComment(stmt.getComment());
    boolean created = db.createTableWithLock(mysqlTable, false, stmt.isSetIfNotExists()).first;
    if (!created) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates an external ODBC-mapped table from the CREATE TABLE statement
 * and registers it in the database.
 */
private void createOdbcTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    long tableId = Env.getCurrentEnv().getNextId();
    OdbcTable odbcTable = new OdbcTable(tableId, tableName, stmt.getColumns(), stmt.getProperties());
    odbcTable.setComment(stmt.getComment());
    boolean created = db.createTableWithLock(odbcTable, false, stmt.isSetIfNotExists()).first;
    if (!created) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates an Elasticsearch external table. When the statement declares no
 * columns, the schema is derived from ES itself; the table may carry a
 * partition spec, otherwise a single unnamed partition id is allocated.
 */
private Table createEsTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    EsTable esTable = new EsTable(tableName, stmt.getProperties());

    // Use the declared schema, or fetch one from ES when none was given.
    List<Column> baseSchema = stmt.getColumns();
    if (baseSchema.isEmpty()) {
        baseSchema = esTable.genColumnsFromEs();
    }
    validateColumns(baseSchema);
    esTable.setNewFullSchema(baseSchema);

    Map<String, Long> partitionNameToId = Maps.newHashMap();
    PartitionDesc partitionDesc = stmt.getPartitionDesc();
    PartitionInfo partitionInfo;
    if (partitionDesc == null) {
        partitionNameToId.put(tableName, Env.getCurrentEnv().getNextId());
        partitionInfo = new SinglePartitionInfo();
    } else {
        partitionInfo = partitionDesc.toPartitionInfo(baseSchema, partitionNameToId, false);
    }
    esTable.setPartitionInfo(partitionInfo);

    long tableId = Env.getCurrentEnv().getNextId();
    esTable.setId(tableId);
    esTable.setComment(stmt.getComment());
    esTable.syncTableMetaData();

    if (!db.createTableWithLock(esTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table{} with id {}", tableName, tableId);
    return esTable;
}
/**
 * Creates an external broker table (with broker-specific extra properties)
 * and registers it in the database.
 */
private void createBrokerTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    long tableId = Env.getCurrentEnv().getNextId();
    BrokerTable brokerTable = new BrokerTable(tableId, tableName, stmt.getColumns(), stmt.getProperties());
    brokerTable.setComment(stmt.getComment());
    brokerTable.setBrokerProperties(stmt.getExtProperties());
    boolean created = db.createTableWithLock(brokerTable, false, stmt.isSetIfNotExists()).first;
    if (!created) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates a Hive external table mapping after verifying the referenced
 * table exists in the Hive Metastore.
 */
private void createHiveTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    long tableId = Env.getCurrentEnv().getNextId();
    HiveTable hiveTable = new HiveTable(tableId, tableName, stmt.getColumns(), stmt.getProperties());
    hiveTable.setComment(stmt.getComment());

    // Check the referenced table really exists in the Hive Metastore.
    String metastoreUris = hiveTable.getHiveProperties().get(HiveTable.HIVE_METASTORE_URIS);
    HiveMetaStoreClient client = HiveMetaStoreClientHelper.getClient(metastoreUris);
    boolean exists = HiveMetaStoreClientHelper.tableExists(client, hiveTable.getHiveDb(),
            hiveTable.getHiveTable());
    if (!exists) {
        throw new DdlException(String.format("Table [%s] dose not exist in Hive.", hiveTable.getHiveDbTable()));
    }

    if (!db.createTableWithLock(hiveTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates a Hudi external table mapping. Validates the statement's hudi
 * properties, verifies the referenced table exists in the Hive Metastore,
 * is a Hudi table of a supported type (external/managed, not realtime),
 * and — when columns were declared — checks them against the metastore
 * schema before registering the table.
 *
 * @throws DdlException if validation fails or the table already exists
 */
private void createHudiTable(Database db, CreateTableStmt stmt) throws DdlException {
    String tableName = stmt.getTableName();
    List<Column> columns = stmt.getColumns();
    long tableId = Env.getCurrentEnv().getNextId();
    HudiTable hudiTable = new HudiTable(tableId, tableName, columns, stmt.getProperties());
    hudiTable.setComment(stmt.getComment());
    // Validate the hudi properties declared in the CREATE TABLE statement.
    HudiUtils.validateCreateTable(hudiTable);
    // Verify the table exists in the Hive Metastore.
    String metastoreUris = hudiTable.getTableProperties().get(HudiProperty.HUDI_HIVE_METASTORE_URIS);
    HiveMetaStoreClient hiveMetaStoreClient = HiveMetaStoreClientHelper.getClient(metastoreUris);
    if (!HiveMetaStoreClientHelper.tableExists(hiveMetaStoreClient, hudiTable.getHmsDatabaseName(),
            hudiTable.getHmsTableName())) {
        throw new DdlException(
                String.format("Table [%s] dose not exist in Hive Metastore.", hudiTable.getHmsTableIdentifer()));
    }
    org.apache.hadoop.hive.metastore.api.Table hiveTable = HiveMetaStoreClientHelper.getTable(
            hudiTable.getHmsDatabaseName(), hudiTable.getHmsTableName(), metastoreUris);
    if (!HudiUtils.isHudiTable(hiveTable)) {
        throw new DdlException(String.format("Table [%s] is not a hudi table.", hudiTable.getHmsTableIdentifer()));
    }
    if (HudiUtils.isHudiRealtimeTable(hiveTable)) {
        // Fixed: the format string was missing its %s placeholder, so the
        // table-name argument was silently dropped from the error message.
        throw new DdlException(
                String.format("Can not support hudi realtime table [%s].", hudiTable.getHmsTableName()));
    }
    // Check the declared schema against the metastore when columns were given.
    if (!hudiTable.getFullSchema().isEmpty()) {
        HudiUtils.validateColumns(hudiTable, hiveTable);
    }
    switch (hiveTable.getTableType()) {
        case "EXTERNAL_TABLE":
        case "MANAGED_TABLE":
            break;
        case "VIRTUAL_VIEW":
        default:
            throw new DdlException("unsupported hudi table type [" + hiveTable.getTableType() + "].");
    }
    if (!db.createTableWithLock(hudiTable, false, stmt.isSetIfNotExists()).first) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
    }
    LOG.info("successfully create table[{}-{}]", tableName, tableId);
}
/**
 * Creates the tablets (and their replicas) for one materialized index.
 * For a colocate table with an already-established group, the group's
 * persisted backend-per-bucket sequence is reused so buckets line up across
 * tables; otherwise backends are chosen per bucket, and for the first table
 * of a colocate group the chosen sequence is recorded and logged.
 *
 * @throws DdlException if backend selection fails or the distribution is
 *         unsupported for colocation
 */
private void createTablets(String clusterName, MaterializedIndex index, ReplicaState replicaState,
        DistributionInfo distributionInfo, long version, ReplicaAllocation replicaAlloc, TabletMeta tabletMeta,
        Set<Long> tabletIdSet, IdGeneratorBuffer idGeneratorBuffer) throws DdlException {
    ColocateTableIndex colocateIndex = Env.getCurrentColocateIndex();
    Map<Tag, List<List<Long>>> backendsPerBucketSeq = null;
    GroupId groupId = null;
    if (colocateIndex.isColocateTable(tabletMeta.getTableId())) {
        if (distributionInfo.getType() == DistributionInfoType.RANDOM) {
            throw new DdlException("Random distribution for colocate table is unsupported");
        }
        groupId = colocateIndex.getGroup(tabletMeta.getTableId());
        backendsPerBucketSeq = colocateIndex.getBackendsPerBucketSeq(groupId);
    }
    // Empty/absent sequence means this is not a colocate table, or it is the
    // first table of its colocate group: choose backends freely per bucket.
    boolean chooseBackendsArbitrary = backendsPerBucketSeq == null || backendsPerBucketSeq.isEmpty();
    if (chooseBackendsArbitrary) {
        backendsPerBucketSeq = Maps.newHashMap();
    }
    for (int i = 0; i < distributionInfo.getBucketNum(); ++i) {
        // One tablet per bucket.
        Tablet tablet = new Tablet(idGeneratorBuffer.getNextId());
        index.addTablet(tablet, tabletMeta);
        tabletIdSet.add(tablet.getId());
        Map<Tag, List<Long>> chosenBackendIds;
        if (chooseBackendsArbitrary) {
            if (!Config.disable_storage_medium_check) {
                chosenBackendIds = Env.getCurrentSystemInfo()
                        .selectBackendIdsForReplicaCreation(replicaAlloc, clusterName,
                                tabletMeta.getStorageMedium());
            } else {
                chosenBackendIds = Env.getCurrentSystemInfo()
                        .selectBackendIdsForReplicaCreation(replicaAlloc, clusterName, null);
            }
            // Record the choice so a colocate group can persist it below.
            for (Map.Entry<Tag, List<Long>> entry : chosenBackendIds.entrySet()) {
                backendsPerBucketSeq.putIfAbsent(entry.getKey(), Lists.newArrayList());
                backendsPerBucketSeq.get(entry.getKey()).add(entry.getValue());
            }
        } else {
            // Reuse bucket i's backends from the colocate group's sequence.
            chosenBackendIds = Maps.newHashMap();
            for (Map.Entry<Tag, List<List<Long>>> entry : backendsPerBucketSeq.entrySet()) {
                chosenBackendIds.put(entry.getKey(), entry.getValue().get(i));
            }
        }
        // One replica per chosen backend.
        short totalReplicaNum = (short) 0;
        for (List<Long> backendIds : chosenBackendIds.values()) {
            for (long backendId : backendIds) {
                long replicaId = idGeneratorBuffer.getNextId();
                Replica replica = new Replica(replicaId, backendId, replicaState, version,
                        tabletMeta.getOldSchemaHash());
                tablet.addReplica(replica);
                totalReplicaNum++;
            }
        }
        Preconditions.checkState(totalReplicaNum == replicaAlloc.getTotalReplicaNum(),
                totalReplicaNum + " vs. " + replicaAlloc.getTotalReplicaNum());
    }
    // First table of a colocate group: persist and log the bucket sequence.
    if (groupId != null && chooseBackendsArbitrary) {
        colocateIndex.addBackendsPerBucketSeq(groupId, backendsPerBucketSeq);
        ColocatePersistInfo info = ColocatePersistInfo.createForBackendsPerBucketSeq(groupId, backendsPerBucketSeq);
        Env.getCurrentEnv().getEditLog().logColocateBackendsPerBucketSeq(info);
    }
}
/*
 * Checks that the column list is non-empty, that all key columns precede
 * value columns, and that at least one key column exists.
 */
private void validateColumns(List<Column> columns) throws DdlException {
    if (columns.isEmpty()) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_MUST_HAVE_COLUMNS);
    }
    boolean keySeen = false;
    boolean valueSeen = false;
    for (Column column : columns) {
        if (!column.isKey()) {
            valueSeen = true;
            continue;
        }
        // A key column appearing after a value column is invalid.
        if (valueSeen) {
            ErrorReport.reportDdlException(ErrorCode.ERR_OLAP_KEY_MUST_BEFORE_VALUE);
        }
        keySeen = true;
    }
    if (!keySeen) {
        ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_MUST_HAVE_KEYS);
    }
}
/*
* Truncate specified table or partitions.
* The main idea is:
*
* 1. using the same schema to create new table(partitions)
* 2. use the new created table(partitions) to replace the old ones.
*
* if no partition specified, it will truncate all partitions of this table, including all temp partitions,
* otherwise, it will only truncate those specified partitions.
*
*/
public void truncateTable(TruncateTableStmt truncateTableStmt) throws DdlException {
    TableRef tblRef = truncateTableStmt.getTblRef();
    TableName dbTbl = tblRef.getName();

    // partition name -> old partition id; old partition id -> distribution.
    Map<String, Long> origPartitions = Maps.newHashMap();
    Map<Long, DistributionInfo> partitionsDistributionInfo = Maps.newHashMap();
    OlapTable copiedTbl;
    boolean truncateEntireTable = tblRef.getPartitionNames() == null;
    Database db = (Database) getDbOrDdlException(dbTbl.getDb());
    OlapTable olapTable = db.getOlapTableOrDdlException(dbTbl.getTbl());

    // Phase 1 (read lock): record the partitions to truncate and take a
    // selective metadata copy to build the replacement partitions from.
    olapTable.readLock();
    try {
        if (olapTable.getState() != OlapTableState.NORMAL) {
            throw new DdlException("Table' state is not NORMAL: " + olapTable.getState());
        }
        if (!truncateEntireTable) {
            for (String partName : tblRef.getPartitionNames().getPartitionNames()) {
                Partition partition = olapTable.getPartition(partName);
                if (partition == null) {
                    throw new DdlException("Partition " + partName + " does not exist");
                }
                origPartitions.put(partName, partition.getId());
                partitionsDistributionInfo.put(partition.getId(), partition.getDistributionInfo());
            }
        } else {
            for (Partition partition : olapTable.getPartitions()) {
                origPartitions.put(partition.getName(), partition.getId());
                partitionsDistributionInfo.put(partition.getId(), partition.getDistributionInfo());
            }
        }
        copiedTbl = olapTable.selectiveCopy(origPartitions.keySet(), IndexExtState.VISIBLE, false);
    } finally {
        olapTable.readUnlock();
    }

    // Phase 2 (no lock): create empty replacement partitions with the same
    // schema. On failure, remove the newly created tablets from the index.
    List<Partition> newPartitions = Lists.newArrayList();
    Set<Long> tabletIdSet = Sets.newHashSet();
    long bufferSize = IdGeneratorUtil.getBufferSize(copiedTbl, origPartitions.values());
    IdGeneratorBuffer idGeneratorBuffer = Env.getCurrentEnv().getIdGeneratorBuffer(bufferSize);
    try {
        for (Map.Entry<String, Long> entry : origPartitions.entrySet()) {
            long oldPartitionId = entry.getValue();
            long newPartitionId = idGeneratorBuffer.getNextId();
            Partition newPartition = createPartitionWithIndices(db.getClusterName(), db.getId(), copiedTbl.getId(),
                    copiedTbl.getBaseIndexId(), newPartitionId, entry.getKey(), copiedTbl.getIndexIdToMeta(),
                    partitionsDistributionInfo.get(oldPartitionId),
                    copiedTbl.getPartitionInfo().getDataProperty(oldPartitionId).getStorageMedium(),
                    copiedTbl.getPartitionInfo().getReplicaAllocation(oldPartitionId), null /* version info */,
                    copiedTbl.getCopiedBfColumns(), copiedTbl.getBfFpp(), tabletIdSet, copiedTbl.getCopiedIndexes(),
                    copiedTbl.isInMemory(), copiedTbl.getStorageFormat(),
                    copiedTbl.getPartitionInfo().getTabletType(oldPartitionId), copiedTbl.getCompressionType(),
                    copiedTbl.getDataSortInfo(), copiedTbl.getEnableUniqueKeyMergeOnWrite(),
                    olapTable.getStoragePolicy(), idGeneratorBuffer);
            newPartitions.add(newPartition);
        }
    } catch (DdlException e) {
        for (Long tabletId : tabletIdSet) {
            Env.getCurrentInvertedIndex().deleteTablet(tabletId);
        }
        throw e;
    }
    Preconditions.checkState(origPartitions.size() == newPartitions.size());

    // Phase 3 (write lock): verify the table's metadata did not change while
    // unlocked, then swap in the new partitions and write the edit log.
    olapTable = (OlapTable) db.getTableOrDdlException(copiedTbl.getId());
    olapTable.writeLockOrDdlException();
    try {
        if (olapTable.getState() != OlapTableState.NORMAL) {
            throw new DdlException("Table' state is not NORMAL: " + olapTable.getState());
        }
        for (Map.Entry<String, Long> entry : origPartitions.entrySet()) {
            Partition partition = copiedTbl.getPartition(entry.getValue());
            if (partition == null || !partition.getName().equalsIgnoreCase(entry.getKey())) {
                throw new DdlException("Partition [" + entry.getKey() + "] is changed");
            }
        }
        // Index set or any schema hash changing means a concurrent alter.
        boolean metaChanged = false;
        if (olapTable.getIndexNameToId().size() != copiedTbl.getIndexNameToId().size()) {
            metaChanged = true;
        } else {
            Map<Long, Integer> copiedIndexIdToSchemaHash = copiedTbl.getIndexIdToSchemaHash();
            for (Map.Entry<Long, Integer> entry : olapTable.getIndexIdToSchemaHash().entrySet()) {
                long indexId = entry.getKey();
                if (!copiedIndexIdToSchemaHash.containsKey(indexId)) {
                    metaChanged = true;
                    break;
                }
                if (!copiedIndexIdToSchemaHash.get(indexId).equals(entry.getValue())) {
                    metaChanged = true;
                    break;
                }
            }
        }
        if (metaChanged) {
            throw new DdlException("Table[" + copiedTbl.getName() + "]'s meta has been changed. try again.");
        }
        truncateTableInternal(olapTable, newPartitions, truncateEntireTable);
        TruncateTableInfo info = new TruncateTableInfo(db.getId(), olapTable.getId(), newPartitions,
                truncateEntireTable);
        Env.getCurrentEnv().getEditLog().logTruncateTable(info);
    } finally {
        olapTable.writeUnlock();
    }
    LOG.info("finished to truncate table {}, partitions: {}", tblRef.getName().toSql(), tblRef.getPartitionNames());
}
/**
 * Swaps the given new (empty) partitions into the table and removes the
 * replaced partitions' tablets from the inverted index. When the whole
 * table is being truncated, temp partitions are dropped as well.
 */
private void truncateTableInternal(OlapTable olapTable, List<Partition> newPartitions, boolean isEntireTable) {
    Set<Long> staleTabletIds = Sets.newHashSet();
    for (Partition freshPartition : newPartitions) {
        Partition replaced = olapTable.replacePartition(freshPartition);
        // Collect every tablet of every index of the replaced partition.
        for (MaterializedIndex index : replaced.getMaterializedIndices(IndexExtState.ALL)) {
            for (Tablet tablet : index.getTablets()) {
                staleTabletIds.add(tablet.getId());
            }
        }
    }
    if (isEntireTable) {
        olapTable.dropAllTempPartitions();
    }
    for (Long staleTabletId : staleTabletIds) {
        Env.getCurrentInvertedIndex().deleteTablet(staleTabletId);
    }
}
/**
 * Replays a truncate-table edit log entry: swaps in the new partitions and,
 * except on the checkpoint thread, rebuilds the tablet inverted index for
 * every materialized index of each new partition.
 *
 * @throws MetaNotFoundException if the database or table no longer exists
 */
public void replayTruncateTable(TruncateTableInfo info) throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(info.getDbId());
    OlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTblId(), TableType.OLAP);
    olapTable.writeLock();
    try {
        truncateTableInternal(olapTable, info.getPartitions(), info.isEntireTable());
        // The checkpoint thread does not maintain the tablet inverted index.
        if (!Env.isCheckpointThread()) {
            TabletInvertedIndex invertedIndex = Env.getCurrentInvertedIndex();
            for (Partition partition : info.getPartitions()) {
                long partitionId = partition.getId();
                TStorageMedium medium = olapTable.getPartitionInfo().getDataProperty(partitionId)
                        .getStorageMedium();
                for (MaterializedIndex mIndex : partition.getMaterializedIndices(IndexExtState.ALL)) {
                    long indexId = mIndex.getId();
                    int schemaHash = olapTable.getSchemaHashByIndexId(indexId);
                    TabletMeta tabletMeta = new TabletMeta(db.getId(), olapTable.getId(), partitionId, indexId,
                            schemaHash, medium);
                    for (Tablet tablet : mIndex.getTablets()) {
                        long tabletId = tablet.getId();
                        invertedIndex.addTablet(tabletId, tabletMeta);
                        for (Replica replica : tablet.getReplicas()) {
                            invertedIndex.addReplica(tabletId, replica);
                        }
                    }
                }
            }
        }
    } finally {
        olapTable.writeUnlock();
    }
}
/**
 * Replays a schema change of an external table by installing the new full
 * schema under the table's write lock.
 */
public void replayAlterExternalTableSchema(String dbName, String tableName, List<Column> newSchema)
        throws MetaNotFoundException {
    Database db = (Database) getDbOrMetaException(dbName);
    Table target = db.getTableOrMetaException(tableName);
    target.writeLock();
    try {
        target.setNewFullSchema(newSchema);
    } finally {
        target.writeUnlock();
    }
}
/**
 * Clears the in-memory database maps. The null checks are defensive —
 * presumably guarding calls made before the maps are initialized (TODO
 * confirm against callers).
 */
public void clearDbs() {
    if (idToDb != null) {
        idToDb.clear();
    }
    if (fullNameToDb != null) {
        fullNameToDb.clear();
    }
}
/**
 * Creates a new cluster with the requested number of backend instances,
 * then creates the cluster's admin user.
 *
 * @param stmt
 * @throws DdlException
 */
public void createCluster(CreateClusterStmt stmt) throws DdlException {
    final String clusterName = stmt.getClusterName();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        if (nameToCluster.containsKey(clusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_HAS_EXIST, clusterName);
        } else {
            List<Long> backendList = Env.getCurrentSystemInfo().createCluster(clusterName, stmt.getInstanceNum());
            // A cluster with 0 requested instances is allowed even if no
            // backends were assigned.
            if (backendList != null || stmt.getInstanceNum() == 0) {
                final long id = Env.getCurrentEnv().getNextId();
                final Cluster cluster = new Cluster(clusterName, id);
                cluster.setBackendIdList(backendList);
                unprotectCreateCluster(cluster);
                // The default cluster adopts all pre-existing dbs that belong
                // to the default cluster.
                if (clusterName.equals(SystemInfoService.DEFAULT_CLUSTER)) {
                    for (Database db : idToDb.values()) {
                        if (db.getClusterName().equals(SystemInfoService.DEFAULT_CLUSTER)) {
                            cluster.addDb(db.getFullName(), db.getId());
                        }
                    }
                }
                Env.getCurrentEnv().getEditLog().logCreateCluster(cluster);
                LOG.info("finish to create cluster: {}", clusterName);
            } else {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_BE_NOT_ENOUGH);
            }
        }
    } finally {
        unlock();
    }

    // Create the cluster's admin user, after releasing the catalog lock.
    UserIdentity adminUser = new UserIdentity(PaloAuth.ADMIN_USER, "%");
    try {
        adminUser.analyze(stmt.getClusterName());
    } catch (AnalysisException e) {
        LOG.error("should not happen", e);
    }
    Env.getCurrentEnv().getAuth().createUser(new CreateUserStmt(new UserDesc(adminUser, "", true)));
}
/**
 * Registers a cluster in memory: binds its backends, adds it to the id/name
 * maps, and creates its information_schema database. Writes no edit log;
 * callers (createCluster, replayCreateCluster) hold the catalog lock.
 */
private void unprotectCreateCluster(Cluster cluster) {
    // Bind each backend to this cluster.
    for (Long id : cluster.getBackendIdList()) {
        final Backend backend = Env.getCurrentSystemInfo().getBackend(id);
        backend.setOwnerClusterName(cluster.getName());
        backend.setBackendState(BackendState.using);
    }
    idToCluster.put(cluster.getId(), cluster);
    nameToCluster.put(cluster.getName(), cluster);

    // Every cluster gets its own information_schema database.
    final InfoSchemaDb infoDb = new InfoSchemaDb(cluster.getName());
    infoDb.setClusterName(cluster.getName());
    unprotectCreateDb(infoDb);

    if (cluster.getName().equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
        Env.getCurrentEnv().setDefaultClusterCreated(true);
    }
}
/**
* replay create cluster
*
* @param cluster
*/
public void replayCreateCluster(Cluster cluster) {
    // The return value of tryLock(true) is intentionally ignored here.
    tryLock(true);
    try {
        unprotectCreateCluster(cluster);
    } finally {
        unlock();
    }
}
/**
 * Drops a cluster. All user databases in the cluster must have been
 * dropped beforehand.
 *
 * @param stmt
 * @throws DdlException
 */
public void dropCluster(DropClusterStmt stmt) throws DdlException {
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        final String clusterName = stmt.getClusterName();
        final Cluster cluster = nameToCluster.get(clusterName);
        if (cluster == null) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_EXISTS, clusterName);
        }
        // Refuse to drop while any backend is still decommissioning.
        final List<Backend> backends = Env.getCurrentSystemInfo().getClusterBackends(clusterName);
        for (Backend backend : backends) {
            if (backend.isDecommissioned()) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_ALTER_BE_IN_DECOMMISSION, clusterName);
            }
        }
        // More than one db means user dbs remain (each cluster always holds
        // its own information_schema db — see unprotectCreateCluster).
        if (cluster.getDbNames().size() > 1) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DELETE_DB_EXIST, clusterName);
        }
        Env.getCurrentSystemInfo().releaseBackends(clusterName, false /* is not replay */);
        final ClusterInfo info = new ClusterInfo(clusterName, cluster.getId());
        unprotectDropCluster(info, false /* is not replay */);
        Env.getCurrentEnv().getEditLog().logDropCluster(info);
    } finally {
        unlock();
    }
    // Drop the cluster's users after releasing the catalog lock.
    Env.getCurrentEnv().getAuth().dropUserOfCluster(stmt.getClusterName(), true /* is replay */);
}
/**
 * Removes a cluster from memory: releases its backends, drops it from the
 * id/name maps, and removes its information_schema database. Writes no
 * edit log; the caller holds the catalog lock.
 */
private void unprotectDropCluster(ClusterInfo info, boolean isReplay) {
    String clusterName = info.getClusterName();
    Env.getCurrentSystemInfo().releaseBackends(clusterName, isReplay);
    idToCluster.remove(info.getClusterId());
    nameToCluster.remove(clusterName);
    // Also drop the cluster's information_schema database.
    Database infoSchemaDb = fullNameToDb.get(InfoSchemaDb.getFullInfoSchemaDbName(clusterName));
    fullNameToDb.remove(infoSchemaDb.getFullName());
    idToDb.remove(infoSchemaDb.getId());
}
/**
 * Replays a drop-cluster edit log entry, removing the cluster and then its
 * users.
 */
public void replayDropCluster(ClusterInfo info) throws DdlException {
    // The return value of tryLock(true) is intentionally ignored here.
    tryLock(true);
    try {
        unprotectDropCluster(info, true/* is replay */);
    } finally {
        unlock();
    }
    Env.getCurrentEnv().getAuth().dropUserOfCluster(info.getClusterName(), true /* is replay */);
}
/**
 * Replays a cluster expansion: adds the logged backends to the cluster and
 * binds each still-known backend to it.
 */
public void replayExpandCluster(ClusterInfo info) {
    tryLock(true);
    try {
        final Cluster cluster = nameToCluster.get(info.getClusterName());
        cluster.addBackends(info.getBackendIdList());
        for (Long backendId : info.getBackendIdList()) {
            Backend backend = Env.getCurrentSystemInfo().getBackend(backendId);
            // Skip backends that no longer exist.
            if (backend != null) {
                backend.setOwnerClusterName(info.getClusterName());
                backend.setBackendState(BackendState.using);
            }
        }
    } finally {
        unlock();
    }
}
/**
 * Modifies a cluster: expands or shrinks its backend instance count.
 *
 * @param stmt
 * @throws DdlException
 */
public void processModifyCluster(AlterClusterStmt stmt) throws UserException {
    final String clusterName = stmt.getAlterClusterName();
    final int newInstanceNum = stmt.getInstanceNum();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        Cluster cluster = nameToCluster.get(clusterName);
        if (cluster == null) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_EXISTS, clusterName);
        }
        // Refuse to resize while any backend is still decommissioning.
        final List<Long> backendIdsInCluster = cluster.getBackendIdList();
        for (Long beId : backendIdsInCluster) {
            Backend be = Env.getCurrentSystemInfo().getBackend(beId);
            if (be.isDecommissioned()) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_ALTER_BE_IN_DECOMMISSION, clusterName);
            }
        }
        final int oldInstanceNum = backendIdsInCluster.size();
        if (newInstanceNum > oldInstanceNum) {
            // Expansion: select additional backends and log the change directly.
            final List<Long> expandBackendIds = Env.getCurrentSystemInfo()
                    .calculateExpansionBackends(clusterName, newInstanceNum - oldInstanceNum);
            if (expandBackendIds == null) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_BE_NOT_ENOUGH);
            }
            cluster.addBackends(expandBackendIds);
            final ClusterInfo info = new ClusterInfo(clusterName, cluster.getId(), expandBackendIds);
            Env.getCurrentEnv().getEditLog().logExpandCluster(info);
        } else if (newInstanceNum < oldInstanceNum) {
            // Shrink: decommission the surplus backends via a synthesized
            // ALTER SYSTEM DECOMMISSION statement.
            final List<Long> decomBackendIds = Env.getCurrentSystemInfo()
                    .calculateDecommissionBackends(clusterName, oldInstanceNum - newInstanceNum);
            if (decomBackendIds == null || decomBackendIds.size() == 0) {
                ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_BACKEND_ERROR);
            }
            List<String> hostPortList = Lists.newArrayList();
            for (Long id : decomBackendIds) {
                final Backend backend = Env.getCurrentSystemInfo().getBackend(id);
                hostPortList.add(
                        new StringBuilder().append(backend.getHost()).append(":").append(backend.getHeartbeatPort())
                                .toString());
            }
            final DecommissionBackendClause clause = new DecommissionBackendClause(hostPortList);
            try {
                clause.analyze(null);
                clause.setType(DecommissionType.ClusterDecommission);
                AlterSystemStmt alterStmt = new AlterSystemStmt(clause);
                alterStmt.setClusterName(clusterName);
                Env.getCurrentEnv().getAlterInstance().processAlterCluster(alterStmt);
            } catch (AnalysisException e) {
                // clause.analyze(null) on a locally built clause is expected to
                // always succeed.
                Preconditions.checkState(false, "should not happened: " + e.getMessage());
            }
        } else {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_ALTER_BE_NO_CHANGE, newInstanceNum);
        }
    } finally {
        unlock();
    }
}
/**
 * Switches the connection's current cluster after verifying ENTER
 * privilege and that the target cluster exists.
 *
 * @param ctx
 * @param clusterName
 * @throws DdlException
 */
public void changeCluster(ConnectContext ctx, String clusterName) throws DdlException {
if (!Env.getCurrentEnv().getAuth().checkCanEnterCluster(ConnectContext.get(), clusterName)) {
ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_AUTHORITY, ConnectContext.get().getQualifiedUser(),
"enter");
}
if (!nameToCluster.containsKey(clusterName)) {
ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_NO_EXISTS, clusterName);
}
ctx.setCluster(clusterName);
}
/**
 * Migrates a db from the src cluster to the dest cluster it is linked to.
 * Only a db in LINK state can be migrated; this method moves the db's metadata,
 * sets it to MOVE state, and logs the operation to the edit log. The physical
 * data movement completes later (progress is visible via getMigrations()).
 *
 * @param stmt carries src/dest cluster names and src/dest db names
 * @throws DdlException on any validation failure or if the catalog lock cannot be taken
 */
public void migrateDb(MigrateDbStmt stmt) throws DdlException {
    final String srcClusterName = stmt.getSrcCluster();
    final String destClusterName = stmt.getDestCluster();
    final String srcDbName = stmt.getSrcDb();
    final String destDbName = stmt.getDestDb();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        // Validate both clusters exist and differ.
        if (!nameToCluster.containsKey(srcClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_SRC_CLUSTER_NOT_EXIST, srcClusterName);
        }
        if (!nameToCluster.containsKey(destClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DEST_CLUSTER_NOT_EXIST, destClusterName);
        }
        if (srcClusterName.equals(destClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATE_SAME_CLUSTER);
        }
        final Cluster srcCluster = this.nameToCluster.get(srcClusterName);
        if (!srcCluster.containDb(srcDbName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_SRC_DB_NOT_EXIST, srcDbName);
        }
        // A migration is only legal over an existing link created by linkDb().
        final Cluster destCluster = this.nameToCluster.get(destClusterName);
        if (!destCluster.containLink(destDbName, srcDbName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATION_NO_LINK, srcDbName, destDbName);
        }
        final Database db = fullNameToDb.get(srcDbName);
        // Dest cluster must have at least as many backends as the db's max replication number.
        final int maxReplicationNum = db.getMaxReplicationNum();
        if (maxReplicationNum > destCluster.getBackendIdList().size()) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATE_BE_NOT_ENOUGH, destClusterName);
        }
        if (db.getDbState() == DbState.LINK) {
            // Param layout consumed by replayMigrateDb():
            // [0]=dest db name, (long)=db id, [1]=src db name, [2]=dest cluster, [3]=src cluster.
            final BaseParam param = new BaseParam();
            param.addStringParam(destDbName);
            param.addLongParam(db.getId());
            param.addStringParam(srcDbName);
            param.addStringParam(destClusterName);
            param.addStringParam(srcClusterName);
            // Re-home the db: drop it from the src cluster, remove the link, add it to dest.
            fullNameToDb.remove(db.getFullName());
            srcCluster.removeDb(db.getFullName(), db.getId());
            destCluster.removeLinkDb(param);
            destCluster.addDb(destDbName, db.getId());
            db.writeLock();
            try {
                db.setDbState(DbState.MOVE);
                db.setClusterName(destClusterName);
                db.setName(destDbName);
                db.setAttachDb(srcDbName);
            } finally {
                db.writeUnlock();
            }
            Env.getCurrentEnv().getEditLog().logMigrateCluster(param);
        } else {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATION_NO_LINK, srcDbName, destDbName);
        }
    } finally {
        unlock();
    }
}
/**
 * Replays a db migration recorded in the edit log (see {@link #migrateDb}).
 * Param layout: [0]=dest db name, [1]=src db name, [2]=dest cluster, [3]=src cluster.
 *
 * @param param the edit-log record written by migrateDb()
 */
public void replayMigrateDb(BaseParam param) {
    final String desDbName = param.getStringParam();
    final String srcDbName = param.getStringParam(1);
    final String desClusterName = param.getStringParam(2);
    final String srcClusterName = param.getStringParam(3);
    tryLock(true);
    try {
        final Cluster desCluster = this.nameToCluster.get(desClusterName);
        final Cluster srcCluster = this.nameToCluster.get(srcClusterName);
        final Database db = fullNameToDb.get(srcDbName);
        if (db.getDbState() == DbState.LINK) {
            // Mirror the forward operation: re-home the db from src to dest cluster.
            fullNameToDb.remove(db.getFullName());
            srcCluster.removeDb(db.getFullName(), db.getId());
            desCluster.removeLinkDb(param);
            desCluster.addDb(param.getStringParam(), db.getId());
            db.writeLock();
            try {
                db.setName(desDbName);
                db.setAttachDb(srcDbName);
                db.setDbState(DbState.MOVE);
                db.setClusterName(desClusterName);
            } finally {
                // FIX: release the db write lock in a finally block (the original
                // leaked the lock if a setter threw), matching migrateDb().
                db.writeUnlock();
            }
        }
    } finally {
        unlock();
    }
}
/**
 * Replays a db link recorded in the edit log (see {@link #linkDb}).
 * Param layout: [0]=dest db name, [1]=src db name, [2]=dest cluster name.
 *
 * @param param the edit-log record written by linkDb()
 */
public void replayLinkDb(BaseParam param) {
    final String desClusterName = param.getStringParam(2);
    final String srcDbName = param.getStringParam(1);
    final String desDbName = param.getStringParam();
    tryLock(true);
    try {
        final Cluster desCluster = this.nameToCluster.get(desClusterName);
        final Database srcDb = fullNameToDb.get(srcDbName);
        srcDb.writeLock();
        try {
            srcDb.setDbState(DbState.LINK);
            srcDb.setAttachDb(desDbName);
        } finally {
            // FIX: release the db write lock in a finally block (the original
            // leaked the lock if a setter threw), matching linkDb().
            srcDb.writeUnlock();
        }
        desCluster.addLinkDb(param);
        // The dest db name is an alias pointing at the same Database object.
        fullNameToDb.put(desDbName, srcDb);
    } finally {
        unlock();
    }
}
/**
 * link src db to dest db. we use java's quotation Mechanism to realize db hard links
 * The dest db name becomes an alias in fullNameToDb pointing at the same
 * Database object; the src db is switched to LINK state and the operation is
 * logged to the edit log.
 *
 * @param stmt carries src/dest cluster names and src/dest db names
 * @throws DdlException on any validation failure or if the catalog lock cannot be taken
 */
public void linkDb(LinkDbStmt stmt) throws DdlException {
    final String srcClusterName = stmt.getSrcCluster();
    final String destClusterName = stmt.getDestCluster();
    final String srcDbName = stmt.getSrcDb();
    final String destDbName = stmt.getDestDb();
    if (!tryLock(false)) {
        throw new DdlException("Failed to acquire catalog lock. Try again");
    }
    try {
        // Validate clusters exist, differ, and the dest db name is free.
        if (!nameToCluster.containsKey(srcClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_SRC_CLUSTER_NOT_EXIST, srcClusterName);
        }
        if (!nameToCluster.containsKey(destClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DEST_CLUSTER_NOT_EXIST, destClusterName);
        }
        if (srcClusterName.equals(destClusterName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_MIGRATE_SAME_CLUSTER);
        }
        if (fullNameToDb.containsKey(destDbName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_DB_CREATE_EXISTS, destDbName);
        }
        final Cluster srcCluster = this.nameToCluster.get(srcClusterName);
        final Cluster destCluster = this.nameToCluster.get(destClusterName);
        if (!srcCluster.containDb(srcDbName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_SRC_DB_NOT_EXIST, srcDbName);
        }
        // A db that is already linked or being migrated cannot be linked again.
        final Database srcDb = fullNameToDb.get(srcDbName);
        if (srcDb.getDbState() != DbState.NORMAL) {
            ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE,
                    ClusterNamespace.getNameFromFullName(srcDbName));
        }
        srcDb.writeLock();
        try {
            srcDb.setDbState(DbState.LINK);
            srcDb.setAttachDb(destDbName);
        } finally {
            srcDb.writeUnlock();
        }
        // Param layout consumed by replayLinkDb():
        // [0]=dest db name, [1]=src db name, (longs)=link id, src db id, [2]=dest cluster, [3]=src cluster.
        final long id = Env.getCurrentEnv().getNextId();
        final BaseParam param = new BaseParam();
        param.addStringParam(destDbName);
        param.addStringParam(srcDbName);
        param.addLongParam(id);
        param.addLongParam(srcDb.getId());
        param.addStringParam(destClusterName);
        param.addStringParam(srcClusterName);
        destCluster.addLinkDb(param);
        fullNameToDb.put(destDbName, srcDb);
        Env.getCurrentEnv().getEditLog().logLinkCluster(param);
    } finally {
        unlock();
    }
}
/**
 * Returns the cluster with the given name, or null if it does not exist.
 */
public Cluster getCluster(String clusterName) {
    return nameToCluster.get(clusterName);
}

/**
 * Returns a snapshot copy of all cluster names.
 */
public List<String> getClusterNames() {
    return new ArrayList<String>(nameToCluster.keySet());
}
/**
 * get migrate progress , when finish migration, next cloneCheck will reset dbState
 * For every db in MOVE state, the progress is estimated as the fraction of
 * tablet replica quorums already present on backends of the destination cluster.
 *
 * @return one BaseParam per migrating db: [cluster name, attach db, full name, progress in 0..1]
 */
public Set<BaseParam> getMigrations() {
    final Set<BaseParam> infos = Sets.newHashSet();
    for (Database db : fullNameToDb.values()) {
        db.readLock();
        try {
            if (db.getDbState() == DbState.MOVE) {
                int tabletTotal = 0;
                int tabletQuorum = 0;
                // Backends belonging to the db's (destination) cluster.
                final Set<Long> beIds = Sets.newHashSet(
                        Env.getCurrentSystemInfo().getClusterBackendIds(db.getClusterName()));
                final Set<String> tableNames = db.getTableNamesWithLock();
                for (String tableName : tableNames) {
                    Table table = db.getTableNullable(tableName);
                    if (table == null || table.getType() != TableType.OLAP) {
                        continue;
                    }
                    OlapTable olapTable = (OlapTable) table;
                    olapTable.readLock();
                    try {
                        for (Partition partition : olapTable.getPartitions()) {
                            ReplicaAllocation replicaAlloc = olapTable.getPartitionInfo()
                                    .getReplicaAllocation(partition.getId());
                            short totalReplicaNum = replicaAlloc.getTotalReplicaNum();
                            for (MaterializedIndex materializedIndex : partition.getMaterializedIndices(
                                    IndexExtState.ALL)) {
                                if (materializedIndex.getState() != IndexState.NORMAL) {
                                    continue;
                                }
                                for (Tablet tablet : materializedIndex.getTablets()) {
                                    // Count non-CLONE replicas already on dest-cluster backends,
                                    // capped at the majority quorum for this tablet.
                                    int replicaNum = 0;
                                    int quorum = totalReplicaNum / 2 + 1;
                                    for (Replica replica : tablet.getReplicas()) {
                                        if (replica.getState() != ReplicaState.CLONE && beIds.contains(
                                                replica.getBackendId())) {
                                            replicaNum++;
                                        }
                                    }
                                    if (replicaNum > quorum) {
                                        replicaNum = quorum;
                                    }
                                    tabletQuorum = tabletQuorum + replicaNum;
                                    tabletTotal = tabletTotal + quorum;
                                }
                            }
                        }
                    } finally {
                        olapTable.readUnlock();
                    }
                }
                final BaseParam info = new BaseParam();
                info.addStringParam(db.getClusterName());
                info.addStringParam(db.getAttachDb());
                info.addStringParam(db.getFullName());
                final float percentage = tabletTotal > 0 ? (float) tabletQuorum / (float) tabletTotal : 0f;
                info.addFloatParam(percentage);
                infos.add(info);
            }
        } finally {
            db.readUnlock();
        }
    }
    return infos;
}
/**
 * Replays all clusters from a metadata image. Each cluster's backend list is
 * refreshed from the live SystemInfoService, and the cluster's
 * information_schema db is re-created if absent.
 *
 * @param dis input stream positioned at the cluster section of the image
 * @param checksum running image checksum
 * @return the updated checksum (xor-folded with cluster count and ids)
 * @throws IOException on stream errors
 * @throws DdlException propagated from cluster deserialization
 */
public long loadCluster(DataInputStream dis, long checksum) throws IOException, DdlException {
    int clusterCount = dis.readInt();
    checksum ^= clusterCount;
    for (long i = 0; i < clusterCount; ++i) {
        final Cluster cluster = Cluster.read(dis);
        checksum ^= cluster.getId();
        // Backends may have changed since the image was written; trust SystemInfoService.
        List<Long> latestBackendIds = Env.getCurrentSystemInfo().getClusterBackendIds(cluster.getName());
        if (latestBackendIds.size() != cluster.getBackendIdList().size()) {
            // FIX: the second count previously printed cluster.getBackendIdList().size()
            // again, making the warning useless; report the SystemInfoService count.
            LOG.warn(
                    "Cluster:" + cluster.getName() + ", backends in Cluster is " + cluster.getBackendIdList().size()
                            + ", backends in SystemInfoService is " + latestBackendIds.size());
        }
        cluster.setBackendIdList(latestBackendIds);
        // Ensure the per-cluster information_schema db exists and is registered.
        String dbName = InfoSchemaDb.getFullInfoSchemaDbName(cluster.getName());
        InfoSchemaDb db = (InfoSchemaDb) Env.getServingEnv().getInternalDataSource().getDbNullable(dbName);
        if (db == null) {
            db = new InfoSchemaDb(cluster.getName());
            db.setClusterName(cluster.getName());
        }
        // InfoSchemaDb must keep a reserved (small) id; a larger id indicates a corrupt image.
        String errMsg = "InfoSchemaDb id shouldn't larger than 10000, please restart your FE server";
        Preconditions.checkState(db.getId() < Env.NEXT_ID_INIT_VALUE, errMsg);
        idToDb.put(db.getId(), db);
        fullNameToDb.put(db.getFullName(), db);
        cluster.addDb(dbName, db.getId());
        idToCluster.put(cluster.getId(), cluster);
        nameToCluster.put(cluster.getName(), cluster);
    }
    LOG.info("finished replay cluster from image");
    return checksum;
}
/**
 * Creates the default cluster from all backends currently registered under
 * SystemInfoService.DEFAULT_CLUSTER, attaches every existing db to it, and
 * logs the creation to the edit log.
 * Exits the process if two backends share a host, since the default cluster
 * requires one backend per host here.
 */
public void initDefaultCluster() {
    final List<Long> backendList = Lists.newArrayList();
    final List<Backend> defaultClusterBackends = Env.getCurrentSystemInfo()
            .getClusterBackends(SystemInfoService.DEFAULT_CLUSTER);
    for (Backend backend : defaultClusterBackends) {
        backendList.add(backend.getId());
    }
    final long id = Env.getCurrentEnv().getNextId();
    final Cluster cluster = new Cluster(SystemInfoService.DEFAULT_CLUSTER, id);
    // Reject duplicate hosts: abort FE startup outright.
    Set<String> beHost = Sets.newHashSet();
    for (Backend be : defaultClusterBackends) {
        if (beHost.contains(be.getHost())) {
            LOG.error("found more than one backends in same host: {}", be.getHost());
            System.exit(-1);
        } else {
            beHost.add(be.getHost());
        }
    }
    cluster.setBackendIdList(backendList);
    unprotectCreateCluster(cluster);
    // Re-home every known db into the default cluster.
    for (Database db : idToDb.values()) {
        db.setClusterName(SystemInfoService.DEFAULT_CLUSTER);
        cluster.addDb(db.getFullName(), db.getId());
    }
    Env.getCurrentEnv().setDefaultClusterCreated(true);
    Env.getCurrentEnv().getEditLog().logCreateCluster(cluster);
}
/**
 * Replays an edit-log record that updated a db's cluster name and state.
 */
public void replayUpdateDb(DatabaseInfo info) {
    final Database db = fullNameToDb.get(info.getDbName());
    db.setClusterName(info.getClusterName());
    db.setDbState(info.getDbState());
}
/**
 * Serializes all non-reserved clusters into a metadata image.
 * Clusters with ids below Env.NEXT_ID_INIT_VALUE are reserved and skipped,
 * but the total count written still includes them — loadCluster() mirrors
 * this checksum scheme.
 *
 * @param dos output stream for the image
 * @param checksum running image checksum
 * @return the updated checksum
 */
public long saveCluster(CountingDataOutputStream dos, long checksum) throws IOException {
    final int clusterCount = idToCluster.size();
    checksum ^= clusterCount;
    dos.writeInt(clusterCount);
    for (Map.Entry<Long, Cluster> entry : idToCluster.entrySet()) {
        long clusterId = entry.getKey();
        if (clusterId >= Env.NEXT_ID_INIT_VALUE) {
            checksum ^= clusterId;
            final Cluster cluster = entry.getValue();
            cluster.write(dos);
        }
    }
    return checksum;
}
/**
 * Replays an edit-log record that released backends from their cluster:
 * each backend is removed from its owner cluster, un-decommissioned, and
 * returned to the free pool.
 */
public void replayUpdateClusterAndBackends(BackendIdsUpdateInfo info) {
    for (long id : info.getBackendList()) {
        final Backend backend = Env.getCurrentSystemInfo().getBackend(id);
        final Cluster cluster = nameToCluster.get(backend.getOwnerClusterName());
        cluster.removeBackend(id);
        backend.setDecommissioned(false);
        backend.clearClusterName();
        backend.setBackendState(BackendState.free);
    }
}
/**
 * Returns a snapshot of the db names belonging to the given cluster.
 *
 * @throws AnalysisException if no cluster with that name exists
 */
public List<String> getClusterDbNames(String clusterName) throws AnalysisException {
    Cluster target = nameToCluster.get(clusterName);
    if (target != null) {
        return Lists.newArrayList(target.getDbNames());
    }
    throw new AnalysisException("No cluster selected");
}
/**
 * Serializes all user dbs into a metadata image, skipping the per-cluster
 * information_schema dbs (which are re-created on load by loadCluster()).
 * The written count excludes one db per cluster to account for them.
 *
 * @param dos output stream for the image
 * @param checksum running image checksum
 * @return the updated checksum
 */
public long saveDb(CountingDataOutputStream dos, long checksum) throws IOException {
    int dbCount = idToDb.size() - nameToCluster.keySet().size();
    checksum ^= dbCount;
    dos.writeInt(dbCount);
    for (Map.Entry<Long, Database> entry : idToDb.entrySet()) {
        Database db = entry.getValue();
        String dbName = db.getFullName();
        if (!InfoSchemaDb.isInfoSchemaDb(dbName)) {
            checksum ^= entry.getKey();
            db.write(dos);
        }
    }
    return checksum;
}
/**
 * Replays all dbs from a metadata image, registering each with the
 * transaction manager and re-adding link aliases for dbs in LINK state.
 * Rebuilds the tablet inverted index and reloads ES tables afterwards.
 *
 * @param dis input stream positioned at the db section of the image
 * @param checksum running image checksum
 * @return the updated checksum
 */
public long loadDb(DataInputStream dis, long checksum) throws IOException, DdlException {
    int dbCount = dis.readInt();
    long newChecksum = checksum ^ dbCount;
    for (long i = 0; i < dbCount; ++i) {
        Database db = new Database();
        db.readFields(dis);
        newChecksum ^= db.getId();
        idToDb.put(db.getId(), db);
        fullNameToDb.put(db.getFullName(), db);
        if (db.getDbState() == DbState.LINK) {
            // A linked db is reachable under its attach (alias) name too.
            fullNameToDb.put(db.getAttachDb(), db);
        }
        Env.getCurrentGlobalTransactionMgr().addDatabaseTransactionMgr(db.getId());
    }
    recreateTabletInvertIndex();
    getEsRepository().loadTableFromCatalog();
    LOG.info("finished replay databases from image");
    return newChecksum;
}
} |
According to the similar code in `computeFragmentExecParams`, this code block decides how to shuffle data to the destination based on whether it is the right child of a broadcast join. Since a `MultiCastFragment` will never be the right child of a broadcast join, I think the right way to fix this problem is to remove the if branch. | private void computeMultiCastFragmentParams() throws Exception {
// Compute exchange senders and destinations for every MultiCastPlanFragment:
// each of its dest fragments receives the multicast output through its own
// DataStreamSink / destination list.
for (FragmentExecParams params : fragmentExecParamsMap.values()) {
    if (!(params.fragment instanceof MultiCastPlanFragment)) {
        continue;
    }
    MultiCastPlanFragment multi = (MultiCastPlanFragment) params.fragment;
    Preconditions.checkState(multi.getSink() instanceof MultiCastDataSink);
    MultiCastDataSink multiSink = (MultiCastDataSink) multi.getSink();
    for (int i = 0; i < multi.getDestFragmentList().size(); i++) {
        PlanFragment destFragment = multi.getDestFragmentList().get(i);
        DataStreamSink sink = multiSink.getDataStreamSinks().get(i);
        if (destFragment == null) {
            continue;
        }
        FragmentExecParams destParams = fragmentExecParamsMap.get(destFragment.getFragmentId());
        multi.getDestFragmentList().get(i).setOutputPartition(params.fragment.getOutputPartition());
        PlanNodeId exchId = sink.getExchNodeId();
        // Accumulate the number of sender instances feeding this exchange node.
        Preconditions.checkState(!destParams.perExchNumSenders.containsKey(exchId.asInt()));
        if (destParams.perExchNumSenders.get(exchId.asInt()) == null) {
            destParams.perExchNumSenders.put(exchId.asInt(), params.instanceExecParams.size());
        } else {
            destParams.perExchNumSenders.put(exchId.asInt(),
                    params.instanceExecParams.size() + destParams.perExchNumSenders.get(exchId.asInt()));
        }
        List<TPlanFragmentDestination> destinations = multiSink.getDestinations().get(i);
        // NOTE(review): checking destFragment.isRightChildOfBroadcastHashJoin() here is
        // wrong -- per the review comment above, a MultiCastPlanFragment is never the
        // right child of a broadcast join, so this branch should be decided by the
        // receiving ExchangeNode instead (see the corrected revision of this method).
        if (enablePipelineEngine && enableShareHashTableForBroadcastJoin
                && destFragment.isRightChildOfBroadcastHashJoin()) {
            // Shared-hash-table mode: one destination per host; the chosen instance
            // builds the hash table, its co-located instances share it.
            Map<TNetworkAddress, FInstanceExecParam> destHosts = new HashMap<>();
            destParams.instanceExecParams.forEach(param -> {
                if (destHosts.containsKey(param.host)) {
                    destHosts.get(param.host).instancesSharingHashTable.add(param.instanceId);
                } else {
                    destHosts.put(param.host, param);
                    param.buildHashTableForBroadcastJoin = true;
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = param.instanceId;
                    try {
                        dest.server = toRpcHost(param.host);
                        dest.setBrpcServer(toBrpcHost(param.host));
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    destinations.add(dest);
                }
            });
        } else {
            // Normal mode: one destination per receiving instance.
            for (int j = 0; j < destParams.instanceExecParams.size(); ++j) {
                TPlanFragmentDestination dest = new TPlanFragmentDestination();
                dest.fragment_instance_id = destParams.instanceExecParams.get(j).instanceId;
                dest.server = toRpcHost(destParams.instanceExecParams.get(j).host);
                dest.brpc_server = toBrpcHost(destParams.instanceExecParams.get(j).host);
                destinations.add(dest);
            }
        }
    }
}
}
} | && destFragment.isRightChildOfBroadcastHashJoin()) { | private void computeMultiCastFragmentParams() throws Exception {
// Corrected revision: the broadcast-right-child decision is taken from the
// receiving ExchangeNode (looked up by the sink's exchange id) rather than
// from the dest fragment, fixing the issue described in the review comment.
for (FragmentExecParams params : fragmentExecParamsMap.values()) {
    if (!(params.fragment instanceof MultiCastPlanFragment)) {
        continue;
    }
    MultiCastPlanFragment multi = (MultiCastPlanFragment) params.fragment;
    Preconditions.checkState(multi.getSink() instanceof MultiCastDataSink);
    MultiCastDataSink multiSink = (MultiCastDataSink) multi.getSink();
    for (int i = 0; i < multi.getDestFragmentList().size(); i++) {
        PlanFragment destFragment = multi.getDestFragmentList().get(i);
        DataStreamSink sink = multiSink.getDataStreamSinks().get(i);
        if (destFragment == null) {
            continue;
        }
        FragmentExecParams destParams = fragmentExecParamsMap.get(destFragment.getFragmentId());
        multi.getDestFragmentList().get(i).setOutputPartition(params.fragment.getOutputPartition());
        // Locate the exchange node this sink feeds inside the dest fragment's plan tree.
        PlanNodeId exchId = sink.getExchNodeId();
        PlanNode exchNode = PlanNode.findPlanNodeFromPlanNodeId(destFragment.getPlanRoot(), exchId);
        Preconditions.checkState(!destParams.perExchNumSenders.containsKey(exchId.asInt()));
        Preconditions.checkState(exchNode != null, "exchNode is null");
        Preconditions.checkState(exchNode instanceof ExchangeNode,
                "exchNode is not ExchangeNode" + exchNode.getId().toString());
        // Accumulate the number of sender instances feeding this exchange node.
        if (destParams.perExchNumSenders.get(exchId.asInt()) == null) {
            destParams.perExchNumSenders.put(exchId.asInt(), params.instanceExecParams.size());
        } else {
            destParams.perExchNumSenders.put(exchId.asInt(),
                    params.instanceExecParams.size() + destParams.perExchNumSenders.get(exchId.asInt()));
        }
        List<TPlanFragmentDestination> destinations = multiSink.getDestinations().get(i);
        if (enablePipelineEngine && enableShareHashTableForBroadcastJoin
                && ((ExchangeNode) exchNode).isRightChildOfBroadcastHashJoin()) {
            // Shared-hash-table mode: one destination per host; the chosen instance
            // builds the hash table, its co-located instances share it.
            Map<TNetworkAddress, FInstanceExecParam> destHosts = new HashMap<>();
            destParams.instanceExecParams.forEach(param -> {
                if (destHosts.containsKey(param.host)) {
                    destHosts.get(param.host).instancesSharingHashTable.add(param.instanceId);
                } else {
                    destHosts.put(param.host, param);
                    param.buildHashTableForBroadcastJoin = true;
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = param.instanceId;
                    try {
                        dest.server = toRpcHost(param.host);
                        dest.setBrpcServer(toBrpcHost(param.host));
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    destinations.add(dest);
                }
            });
        } else {
            // Normal mode: one destination per receiving instance.
            for (int j = 0; j < destParams.instanceExecParams.size(); ++j) {
                TPlanFragmentDestination dest = new TPlanFragmentDestination();
                dest.fragment_instance_id = destParams.instanceExecParams.get(j).instanceId;
                dest.server = toRpcHost(destParams.instanceExecParams.get(j).host);
                dest.brpc_server = toBrpcHost(destParams.instanceExecParams.get(j).host);
                destinations.add(dest);
            }
        }
    }
}
}
} | class Coordinator {
private static final Logger LOG = LogManager.getLogger(Coordinator.class);
private static final String localIP = FrontendOptions.getLocalHostAddress();
private static final Random instanceRandom = new Random();
// Overall query status; updated as backend reports arrive.
Status queryStatus = new Status();
Map<TNetworkAddress, Long> addressToBackendID = Maps.newHashMap();
private ImmutableMap<Long, Backend> idToBackend = ImmutableMap.of();
// Thrift descriptor table shipped to every fragment instance.
private final TDescriptorTable descTable;
private Map<Integer, TFileScanRangeParams> fileScanRangeParamsMap = Maps.newHashMap();
// Query-global settings (time zone, timestamps, load tolerance) and per-query options.
private final TQueryGlobals queryGlobals = new TQueryGlobals();
private TQueryOptions queryOptions;
private TNetworkAddress coordAddress;
// Protects coordinator state shared with report/cancel paths.
private final Lock lock = new ReentrantLock();
private boolean returnedAllResults;
// Per-fragment execution parameters, keyed by fragment id.
private final Map<PlanFragmentId, FragmentExecParams> fragmentExecParamsMap = Maps.newHashMap();
private final List<PlanFragment> fragments;
private int instanceTotalNum;
// Execution state grouped by backend, for the legacy and pipeline engines respectively.
private Map<Long, BackendExecStates> beToExecStates = Maps.newHashMap();
private Map<Long, PipelineExecContexts> beToPipelineExecCtxs = Maps.newHashMap();
private final List<BackendExecState> backendExecStates = Lists.newArrayList();
private final Map<Pair<Integer, Long>, PipelineExecContext> pipelineExecContexts = new HashMap<>();
// Subset of states whose backends must be health-checked (load jobs).
private final List<BackendExecState> needCheckBackendExecStates = Lists.newArrayList();
private final List<PipelineExecContext> needCheckPipelineExecContexts = Lists.newArrayList();
private ResultReceiver receiver;
private final List<ScanNode> scanNodes;
private int scanRangeNum = 0;
// Instance ids of all fragment instances of this query.
private final Set<TUniqueId> instanceIds = Sets.newHashSet();
private final boolean isBlockQuery;
private int numReceivedRows = 0;
// Load-job bookkeeping filled from backend reports.
private List<String> deltaUrls;
private Map<String, String> loadCounters;
private String trackingUrl;
private List<String> exportFiles;
private final List<TTabletCommitInfo> commitInfos = Lists.newArrayList();
private final List<TErrorTabletInfo> errorTabletInfos = Lists.newArrayList();
// -1 means this coordinator serves a query, not a load job.
private long jobId = -1;
private TUniqueId queryId;
private final boolean needReport;
// Pre-computed first instance id (queryId.lo + 1).
private final TUniqueId nextInstanceId;
private long timeoutDeadline;
private boolean enableShareHashTableForBroadcastJoin = false;
private boolean enablePipelineEngine = false;
// Runtime-filter merge coordination state.
public TNetworkAddress runtimeFilterMergeAddr;
public TUniqueId runtimeFilterMergeInstanceId;
public Map<RuntimeFilterId, List<FRuntimeFilterTargetParam>> ridToTargetParam = Maps.newHashMap();
public List<RuntimeFilter> assignedRuntimeFilters = new ArrayList<>();
public Map<RuntimeFilterId, Integer> ridToBuilderNum = Maps.newHashMap();
// Short-circuit point-query path (single-tablet key lookup).
private boolean isPointQuery = false;
private PointQueryExec pointExec = null;
private StatsErrorEstimator statsErrorEstimator;
/**
 * Sets the workload groups this query's pipeline instances run under.
 */
public void setTWorkloadGroups(List<TPipelineWorkloadGroup> tWorkloadGroups) {
    this.tWorkloadGroups = tWorkloadGroups;
}

private List<TPipelineWorkloadGroup> tWorkloadGroups = Lists.newArrayList();

// Profile collecting per-fragment execution statistics for this query.
private final ExecutionProfile executionProfile;

public ExecutionProfile getExecutionProfile() {
    return executionProfile;
}

private boolean isAllExternalScan = true;
/**
 * Query constructor variant that additionally records a statistics error
 * estimator for plan-quality feedback.
 */
public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner,
        StatsErrorEstimator statsErrorEstimator) {
    this(context, analyzer, planner);
    this.statsErrorEstimator = statsErrorEstimator;
}
/**
 * Constructs a coordinator for a regular query: captures the planner's
 * fragments/scan nodes, detects the point-query fast path, resolves the
 * descriptor table (from a prepared statement if present), and initializes
 * query options and globals from the session.
 */
public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner) {
    this.isBlockQuery = planner.isBlockQuery();
    this.queryId = context.queryId();
    this.fragments = planner.getFragments();
    this.scanNodes = planner.getScanNodes();
    // Point-query fast path: a single OlapScanNode that qualifies as a key lookup.
    if (this.scanNodes.size() == 1 && this.scanNodes.get(0) instanceof OlapScanNode) {
        OlapScanNode olapScanNode = (OlapScanNode) (this.scanNodes.get(0));
        isPointQuery = olapScanNode.isPointQuery();
        if (isPointQuery) {
            PlanFragment fragment = fragments.get(0);
            LOG.debug("execPointGet fragment {}", fragment);
            OlapScanNode planRoot = (OlapScanNode) fragment.getPlanRoot();
            Preconditions.checkNotNull(planRoot);
            pointExec = new PointQueryExec(planRoot.getPointQueryEqualPredicates(),
                    planRoot.getDescTable(), fragment.getOutputExprs());
        }
    }
    // Prepared statements carry pre-serialized descriptor/output-expr blobs.
    PrepareStmt prepareStmt = analyzer == null ? null : analyzer.getPrepareStmt();
    if (prepareStmt != null) {
        this.descTable = prepareStmt.getDescTable();
        if (pointExec != null) {
            pointExec.setCacheID(prepareStmt.getID());
            pointExec.setSerializedDescTable(prepareStmt.getSerializedDescTable());
            pointExec.setSerializedOutputExpr(prepareStmt.getSerializedOutputExprs());
            pointExec.setBinaryProtocol(prepareStmt.isBinaryProtocol());
        }
    } else {
        this.descTable = planner.getDescTable().toThrift();
    }
    this.returnedAllResults = false;
    this.enableShareHashTableForBroadcastJoin = context.getSessionVariable().enableShareHashTableForBroadcastJoin;
    // Pipeline engine is only used when the top sink returns results to the client.
    this.enablePipelineEngine = context.getSessionVariable().getEnablePipelineEngine()
            && (fragments.size() > 0 && fragments.get(0).getSink() instanceof ResultSink);
    initQueryOptions(context);
    setFromUserProperty(context);
    this.queryGlobals.setNowString(TimeUtils.DATETIME_FORMAT.format(LocalDateTime.now()));
    this.queryGlobals.setTimestampMs(System.currentTimeMillis());
    this.queryGlobals.setNanoSeconds(LocalDateTime.now().getNano());
    this.queryGlobals.setLoadZeroTolerance(false);
    // "CST" is ambiguous; map it to the system default time zone.
    if (context.getSessionVariable().getTimeZone().equals("CST")) {
        this.queryGlobals.setTimeZone(TimeUtils.DEFAULT_TIME_ZONE);
    } else {
        this.queryGlobals.setTimeZone(context.getSessionVariable().getTimeZone());
    }
    this.needReport = context.getSessionVariable().enableProfile();
    // First fragment instance id is derived from the query id.
    this.nextInstanceId = new TUniqueId();
    nextInstanceId.setHi(queryId.hi);
    nextInstanceId.setLo(queryId.lo + 1);
    this.assignedRuntimeFilters = planner.getRuntimeFilters();
    this.executionProfile = new ExecutionProfile(queryId, fragments.size());
}
/**
 * Constructs a coordinator for a load job (broker load etc.): no session
 * context, options built from scratch, reporting always enabled.
 */
public Coordinator(Long jobId, TUniqueId queryId, DescriptorTable descTable, List<PlanFragment> fragments,
        List<ScanNode> scanNodes, String timezone, boolean loadZeroTolerance) {
    this.isBlockQuery = true;
    this.jobId = jobId;
    this.queryId = queryId;
    this.descTable = descTable.toThrift();
    this.fragments = fragments;
    this.scanNodes = scanNodes;
    this.queryOptions = new TQueryOptions();
    this.queryGlobals.setNowString(TimeUtils.DATETIME_FORMAT.format(LocalDateTime.now()));
    this.queryGlobals.setTimestampMs(System.currentTimeMillis());
    this.queryGlobals.setTimeZone(timezone);
    this.queryGlobals.setLoadZeroTolerance(loadZeroTolerance);
    this.queryOptions.setBeExecVersion(Config.be_exec_version);
    this.needReport = true;
    // First fragment instance id is derived from the query id.
    this.nextInstanceId = new TUniqueId();
    nextInstanceId.setHi(queryId.hi);
    nextInstanceId.setLo(queryId.lo + 1);
    this.executionProfile = new ExecutionProfile(queryId, fragments.size());
}
/**
 * Applies per-user resource limits from user properties onto this query's
 * options. Non-positive limits leave the options untouched.
 */
private void setFromUserProperty(ConnectContext connectContext) {
    final String user = connectContext.getQualifiedUser();
    // CPU hard limit configured for this user, if any.
    final int userCpuLimit = Env.getCurrentEnv().getAuth().getCpuResourceLimit(user);
    if (userCpuLimit > 0) {
        TResourceLimit limit = new TResourceLimit();
        limit.setCpuLimit(userCpuLimit);
        this.queryOptions.setResourceLimit(limit);
    }
    // The exec memory limit feeds several related option fields at once.
    final long userMemLimit = Env.getCurrentEnv().getAuth().getExecMemLimit(user);
    if (userMemLimit > 0) {
        this.queryOptions.setMemLimit(userMemLimit);
        this.queryOptions.setMaxReservation(userMemLimit);
        this.queryOptions.setInitialReservationTotalClaims(userMemLimit);
        this.queryOptions.setBufferPoolLimit(userMemLimit);
    }
}
/**
 * Initializes query options from the session variables, overriding the
 * pipeline flag, BE exec version, and timeouts with current values.
 */
private void initQueryOptions(ConnectContext context) {
    this.queryOptions = context.getSessionVariable().toThrift();
    this.queryOptions.setEnablePipelineEngine(SessionVariable.enablePipelineEngine());
    this.queryOptions.setBeExecVersion(Config.be_exec_version);
    this.queryOptions.setQueryTimeout(context.getExecTimeout());
    this.queryOptions.setExecutionTimeout(context.getExecTimeout());
    this.queryOptions.setEnableScanNodeRunSerial(context.getSessionVariable().isEnableScanRunSerial());
}
// ---- simple accessors and option forwarders ----

public long getJobId() {
    return jobId;
}

public TUniqueId getQueryId() {
    return queryId;
}

public int getScanRangeNum() {
    return scanRangeNum;
}

public void setQueryId(TUniqueId queryId) {
    this.queryId = queryId;
}

public void setQueryType(TQueryType type) {
    this.queryOptions.setQueryType(type);
}

public void setExecPipEngine(boolean vec) {
    this.queryOptions.setEnablePipelineEngine(vec);
}

public Status getExecStatus() {
    return queryStatus;
}

public List<String> getDeltaUrls() {
    return deltaUrls;
}

public Map<String, String> getLoadCounters() {
    return loadCounters;
}

public String getTrackingUrl() {
    return trackingUrl;
}

public void setExecMemoryLimit(long execMemoryLimit) {
    this.queryOptions.setMemLimit(execMemoryLimit);
}

public void setLoadMemLimit(long loadMemLimit) {
    this.queryOptions.setLoadMemLimit(loadMemLimit);
}

// Sets both the query timeout and the execution timeout, in seconds.
public void setTimeout(int timeout) {
    this.queryOptions.setQueryTimeout(timeout);
    this.queryOptions.setExecutionTimeout(timeout);
}

public void setLoadZeroTolerance(boolean loadZeroTolerance) {
    this.queryGlobals.setLoadZeroTolerance(loadZeroTolerance);
}
/**
 * Resets per-attempt execution state so an export can be retried:
 * clears backend states, export file list, and the query status.
 */
public void clearExportStatus() {
    lock.lock();
    try {
        this.backendExecStates.clear();
        this.pipelineExecContexts.clear();
        this.queryStatus.setStatus(new Status());
        if (this.exportFiles == null) {
            this.exportFiles = Lists.newArrayList();
        }
        this.exportFiles.clear();
        this.needCheckBackendExecStates.clear();
        this.needCheckPipelineExecContexts.clear();
    } finally {
        lock.unlock();
    }
}
// Tablet commit infos collected from backend reports (load path).
public List<TTabletCommitInfo> getCommitInfos() {
    return commitInfos;
}

// Error tablet infos collected from backend reports (load path).
public List<TErrorTabletInfo> getErrorTabletInfos() {
    return errorTabletInfos;
}
/**
 * Returns a sorted map of "host:brpcPort" to the number of fragment
 * instances scheduled on that backend, for whichever engine is active.
 */
public Map<String, Integer> getBeToInstancesNum() {
    Map<String, Integer> instancesPerBe = Maps.newTreeMap();
    if (enablePipelineEngine) {
        for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
            String key = ctxs.brpcAddr.hostname.concat(":").concat("" + ctxs.brpcAddr.port);
            instancesPerBe.put(key, ctxs.getInstanceNumber());
        }
    } else {
        for (BackendExecStates states : beToExecStates.values()) {
            String key = states.brpcAddr.hostname.concat(":").concat("" + states.brpcAddr.port);
            instancesPerBe.put(key, states.states.size());
        }
    }
    return instancesPerBe;
}
// Total number of fragment instances dispatched for this query.
public int getInstanceTotalNum() {
    return instanceTotalNum;
}
/**
 * Initializes per-fragment exec params, wires each sending fragment to its
 * destination's input list, records the coordinator address, and snapshots
 * the current backend map.
 */
private void prepare() {
    for (PlanFragment fragment : fragments) {
        fragmentExecParamsMap.put(fragment.getFragmentId(), new FragmentExecParams(fragment));
    }
    // Register each DataStreamSink fragment as an input of its destination fragment.
    for (PlanFragment fragment : fragments) {
        if (!(fragment.getSink() instanceof DataStreamSink)) {
            continue;
        }
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getDestFragment().getFragmentId());
        params.inputFragments.add(fragment.getFragmentId());
    }
    coordAddress = new TNetworkAddress(localIP, Config.rpc_port);
    this.idToBackend = Env.getCurrentSystemInfo().getIdToBackend();
    if (LOG.isDebugEnabled()) {
        LOG.debug("idToBackend size={}", idToBackend.size());
        for (Map.Entry<Long, Backend> entry : idToBackend.entrySet()) {
            Long backendID = entry.getKey();
            Backend backend = entry.getValue();
            LOG.debug("backend: {}-{}-{}", backendID, backend.getHost(), backend.getBePort());
        }
    }
}
// Acquires/releases the coordinator's internal lock.
private void lock() {
    lock.lock();
}

private void unlock() {
    lock.unlock();
}
/**
 * Debug-logs a single line describing every fragment's exec params for this
 * query. No-op unless debug logging is enabled.
 */
private void traceInstance() {
    if (LOG.isDebugEnabled()) {
        StringBuilder sb = new StringBuilder();
        int idx = 0;
        sb.append("query id=").append(DebugUtil.printId(queryId)).append(",");
        sb.append("fragment=[");
        for (Map.Entry<PlanFragmentId, FragmentExecParams> entry : fragmentExecParamsMap.entrySet()) {
            if (idx++ != 0) {
                sb.append(",");
            }
            sb.append(entry.getKey());
            entry.getValue().appendTo(sb);
        }
        sb.append("]");
        LOG.debug(sb.toString());
    }
}
/**
 * Main entry point: computes scan-range assignment and fragment exec params,
 * sets up the result receiver (queries) or load bookkeeping (loads), then
 * dispatches fragment instances to backends — via the pipeline engine,
 * the legacy engine, or the point-query short-circuit path.
 *
 * @throws Exception on planning, RPC, or dispatch failures
 */
public void exec() throws Exception {
    if (LOG.isDebugEnabled() && !scanNodes.isEmpty()) {
        LOG.debug("debug: in Coordinator::exec. query id: {}, planNode: {}",
                DebugUtil.printId(queryId), scanNodes.get(0).treeToThrift());
    }
    if (LOG.isDebugEnabled() && !fragments.isEmpty()) {
        LOG.debug("debug: in Coordinator::exec. query id: {}, fragment: {}",
                DebugUtil.printId(queryId), fragments.get(0).toThrift());
    }
    // Phase 1: planning of execution parameters.
    prepare();
    computeScanRangeAssignment();
    computeFragmentExecParams();
    traceInstance();
    QeProcessorImpl.INSTANCE.registerInstances(queryId, instanceIds.size());
    // The top (first) fragment determines where results are produced.
    PlanFragmentId topId = fragments.get(0).getFragmentId();
    FragmentExecParams topParams = fragmentExecParamsMap.get(topId);
    DataSink topDataSink = topParams.fragment.getSink();
    this.timeoutDeadline = System.currentTimeMillis() + queryOptions.getExecutionTimeout() * 1000L;
    if (topDataSink instanceof ResultSink || topDataSink instanceof ResultFileSink) {
        // Query / outfile path: results flow back through the first instance's backend.
        TNetworkAddress execBeAddr = topParams.instanceExecParams.get(0).host;
        receiver = new ResultReceiver(queryId, topParams.instanceExecParams.get(0).instanceId,
                addressToBackendID.get(execBeAddr), toBrpcHost(execBeAddr), this.timeoutDeadline);
        if (LOG.isDebugEnabled()) {
            LOG.debug("dispatch query job: {} to {}", DebugUtil.printId(queryId),
                    topParams.instanceExecParams.get(0).host);
        }
        // Broker-backed outfile needs a broker address co-located with the exec backend.
        if (topDataSink instanceof ResultFileSink
                && ((ResultFileSink) topDataSink).getStorageType() == StorageBackend.StorageType.BROKER) {
            ResultFileSink topResultFileSink = (ResultFileSink) topDataSink;
            FsBroker broker = Env.getCurrentEnv().getBrokerMgr()
                    .getBroker(topResultFileSink.getBrokerName(), execBeAddr.getHostname());
            topResultFileSink.setBrokerAddr(broker.host, broker.port);
        }
    } else {
        // Load path: enable reporting and register job progress.
        this.queryOptions.setIsReportSuccess(true);
        deltaUrls = Lists.newArrayList();
        loadCounters = Maps.newHashMap();
        List<Long> relatedBackendIds = Lists.newArrayList(addressToBackendID.values());
        Env.getCurrentEnv().getLoadManager().initJobProgress(jobId, queryId, instanceIds,
                relatedBackendIds);
        Env.getCurrentEnv().getProgressManager().addTotalScanNums(String.valueOf(jobId), scanRangeNum);
        LOG.info("dispatch load job: {} to {}", DebugUtil.printId(queryId), addressToBackendID.keySet());
    }
    executionProfile.markInstances(instanceIds);
    // Phase 2: dispatch.
    if (!isPointQuery) {
        if (enablePipelineEngine) {
            sendPipelineCtx();
        } else {
            sendFragment();
        }
    } else {
        // Point query bypasses fragment dispatch entirely.
        OlapScanNode planRoot = (OlapScanNode) fragments.get(0).getPlanRoot();
        Preconditions.checkState(planRoot.getScanTabletIds().size() == 1);
        pointExec.setCandidateBackends(planRoot.getScanBackendIds());
        pointExec.setTabletId(planRoot.getScanTabletIds().get(0));
    }
}
/**
* The logic for sending query plan fragments is as follows:
* First, plan fragments are dependent. According to the order in "fragments" list,
* it must be ensured that on the BE side, the next fragment instance can be executed
* only after the previous fragment instance is ready,
* <p>
* In the previous logic, we will send fragment instances in sequence through RPC,
* and will wait for the RPC of the previous fragment instance to return successfully
* before sending the next one. But for some complex queries, this may lead to too many RPCs.
* <p>
* The optimized logic is as follows:
* 1. If the number of fragment instance is <= 2, the original logic is still used
* to complete the sending of fragments through at most 2 RPCs.
* 2. If the number of fragment instance is >= 3, first group all fragments by BE,
* and send all fragment instances to the corresponding BE node through the FIRST rpc,
* but these fragment instances will only perform the preparation phase but will not be actually executed.
* After that, the execution logic of all fragment instances is started through the SECOND RPC.
* <p>
* After optimization, a query on a BE node will only send two RPCs at most.
* Thereby reducing the "send fragment timeout" error caused by too many RPCs and BE unable to process in time.
*
* @throws TException
* @throws RpcException
* @throws UserException
*/
// Serializes every fragment's exec params to thrift and dispatches them to the BEs,
// grouped per backend; uses two-phase (prepare, then start) execution when there are
// at least two fragments. See the method-level Javadoc above for the full rationale.
private void sendFragment() throws TException, RpcException, UserException {
    lock();
    try {
        // Count instances per host so each BE can be told how many fragments
        // it will run in total (sent as fragmentNumOnHost below).
        Multiset<TNetworkAddress> hostCounter = HashMultiset.create();
        for (FragmentExecParams params : fragmentExecParamsMap.values()) {
            for (FInstanceExecParam fi : params.instanceExecParams) {
                hostCounter.add(fi.host);
            }
        }
        int backendIdx = 0;
        int profileFragmentId = 0;
        long memoryLimit = queryOptions.getMemLimit();
        beToExecStates.clear();
        // Two-phase execution only pays off with >= 2 fragments; with fewer,
        // the single prepare+start RPC path is used.
        boolean twoPhaseExecution = fragments.size() >= 2;
        for (PlanFragment fragment : fragments) {
            FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
            int instanceNum = params.instanceExecParams.size();
            Preconditions.checkState(instanceNum > 0);
            instanceTotalNum += instanceNum;
            List<TExecPlanFragmentParams> tParams = params.toThrift(backendIdx);
            // Colocate joins run multiple instances on one host: shrink the
            // per-instance memory limit so the host-level total stays bounded.
            if (colocateFragmentIds.contains(fragment.getFragmentId().asInt())) {
                int rate = Math.min(Config.query_colocate_join_memory_limit_penalty_factor, instanceNum);
                long newMemory = memoryLimit / rate;
                for (TExecPlanFragmentParams tParam : tParams) {
                    tParam.query_options.setMemLimit(newMemory);
                }
            }
            // For load jobs only the first (root) fragment's backends are liveness-checked.
            boolean needCheckBackendState = false;
            if (queryOptions.getQueryType() == TQueryType.LOAD && profileFragmentId == 0) {
                needCheckBackendState = true;
            }
            int instanceId = 0;
            for (TExecPlanFragmentParams tParam : tParams) {
                BackendExecState execState =
                        new BackendExecState(fragment.getFragmentId(), instanceId++,
                                profileFragmentId, tParam, this.addressToBackendID,
                                executionProfile.getLoadChannelProfile());
                tParam.setFragmentNumOnHost(hostCounter.count(execState.address));
                tParam.setBackendId(execState.backend.getId());
                // In two-phase mode the BE prepares but waits for the start RPC
                // before actually executing.
                tParam.setNeedWaitExecutionTrigger(twoPhaseExecution);
                backendExecStates.add(execState);
                if (needCheckBackendState) {
                    needCheckBackendExecStates.add(execState);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("add need check backend {} for fragment, {} job: {}",
                                execState.backend.getId(), fragment.getFragmentId().asInt(), jobId);
                    }
                }
                // Group exec states by backend so each BE receives at most one
                // (batched) exec RPC and one start RPC.
                BackendExecStates states = beToExecStates.get(execState.backend.getId());
                if (states == null) {
                    states = new BackendExecStates(execState.backend.getId(), execState.brpcAddress,
                            twoPhaseExecution);
                    beToExecStates.putIfAbsent(execState.backend.getId(), states);
                }
                states.addState(execState);
                ++backendIdx;
            }
            profileFragmentId += 1;
        }
        // Phase 1: fire all exec RPCs asynchronously, one per backend.
        List<Triple<BackendExecStates, BackendServiceProxy, Future<InternalService.PExecPlanFragmentResult>>>
                futures = Lists.newArrayList();
        Context parentSpanContext = Context.current();
        for (BackendExecStates states : beToExecStates.values()) {
            Span span = Telemetry.getNoopSpan();
            if (ConnectContext.get() != null) {
                span = ConnectContext.get().getTracer().spanBuilder("execRemoteFragmentsAsync")
                        .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
            }
            states.scopedSpan = new ScopedSpan(span);
            states.unsetFields();
            BackendServiceProxy proxy = BackendServiceProxy.getInstance();
            futures.add(ImmutableTriple.of(states, proxy, states.execRemoteFragmentsAsync(proxy)));
        }
        waitRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send fragments");
        if (twoPhaseExecution) {
            // Phase 2: all fragments are prepared on the BEs; trigger execution.
            futures.clear();
            for (BackendExecStates states : beToExecStates.values()) {
                Span span = Telemetry.getNoopSpan();
                if (ConnectContext.get() != null) {
                    span = ConnectContext.get().getTracer().spanBuilder("execPlanFragmentStartAsync")
                            .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                }
                states.scopedSpan = new ScopedSpan(span);
                BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                futures.add(ImmutableTriple.of(states, proxy, states.execPlanFragmentStartAsync(proxy)));
            }
            waitRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send execution start");
        }
        attachInstanceProfileToFragmentProfile();
    } finally {
        unlock();
    }
}
// Pipeline-engine counterpart of sendFragment(): builds TPipelineFragmentParams per
// (fragment, backend) pair and dispatches them with the same two-phase RPC scheme.
private void sendPipelineCtx() throws TException, RpcException, UserException {
    lock();
    try {
        // Count instances per host so each BE knows its total fragment count.
        Multiset<TNetworkAddress> hostCounter = HashMultiset.create();
        for (FragmentExecParams params : fragmentExecParamsMap.values()) {
            for (FInstanceExecParam fi : params.instanceExecParams) {
                hostCounter.add(fi.host);
            }
        }
        int backendIdx = 0;
        int profileFragmentId = 0;
        beToPipelineExecCtxs.clear();
        // Two-phase (prepare, then start) only when there are >= 2 fragments.
        boolean twoPhaseExecution = fragments.size() >= 2;
        for (PlanFragment fragment : fragments) {
            FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
            int instanceNum = params.instanceExecParams.size();
            Preconditions.checkState(instanceNum > 0);
            Map<TNetworkAddress, TPipelineFragmentParams> tParams = params.toTPipelineParams(backendIdx);
            // For load jobs only the first (root) fragment's backends are liveness-checked.
            boolean needCheckBackendState = false;
            if (queryOptions.getQueryType() == TQueryType.LOAD && profileFragmentId == 0) {
                needCheckBackendState = true;
            }
            // Pre-create one RuntimeProfile per instance; shared by all contexts
            // of this fragment so reports can be attached by instance id.
            Map<TUniqueId, RuntimeProfile> fragmentInstancesMap = new HashMap<TUniqueId, RuntimeProfile>();
            for (Map.Entry<TNetworkAddress, TPipelineFragmentParams> entry : tParams.entrySet()) {
                for (TPipelineInstanceParams instanceParam : entry.getValue().local_params) {
                    String name = "Instance " + DebugUtil.printId(instanceParam.fragment_instance_id)
                            + " (host=" + entry.getKey() + ")";
                    fragmentInstancesMap.put(instanceParam.fragment_instance_id, new RuntimeProfile(name));
                }
            }
            // One PipelineExecContext per (fragment, backend).
            for (Map.Entry<TNetworkAddress, TPipelineFragmentParams> entry : tParams.entrySet()) {
                Long backendId = this.addressToBackendID.get(entry.getKey());
                PipelineExecContext pipelineExecContext = new PipelineExecContext(fragment.getFragmentId(),
                        profileFragmentId, entry.getValue(), backendId, fragmentInstancesMap,
                        executionProfile.getLoadChannelProfile());
                entry.getValue().setFragmentNumOnHost(hostCounter.count(pipelineExecContext.address));
                entry.getValue().setBackendId(pipelineExecContext.backend.getId());
                entry.getValue().setNeedWaitExecutionTrigger(twoPhaseExecution);
                entry.getValue().setFragmentId(fragment.getFragmentId().asInt());
                pipelineExecContexts.put(Pair.of(fragment.getFragmentId().asInt(), backendId), pipelineExecContext);
                if (needCheckBackendState) {
                    needCheckPipelineExecContexts.add(pipelineExecContext);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("add need check backend {} for fragment, {} job: {}",
                                pipelineExecContext.backend.getId(), fragment.getFragmentId().asInt(), jobId);
                    }
                }
                // Group contexts by backend: at most one exec RPC and one start RPC per BE.
                PipelineExecContexts ctxs = beToPipelineExecCtxs.get(pipelineExecContext.backend.getId());
                if (ctxs == null) {
                    ctxs = new PipelineExecContexts(pipelineExecContext.backend.getId(),
                            pipelineExecContext.brpcAddress, twoPhaseExecution,
                            entry.getValue().getFragmentNumOnHost());
                    beToPipelineExecCtxs.putIfAbsent(pipelineExecContext.backend.getId(), ctxs);
                }
                ctxs.addContext(pipelineExecContext);
                ++backendIdx;
            }
            profileFragmentId += 1;
        }
        // Phase 1: send all fragments asynchronously, one batched RPC per backend.
        List<Triple<PipelineExecContexts, BackendServiceProxy, Future<InternalService.PExecPlanFragmentResult>>>
                futures = Lists.newArrayList();
        Context parentSpanContext = Context.current();
        for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
            Span span = Telemetry.getNoopSpan();
            if (ConnectContext.get() != null) {
                span = ConnectContext.get().getTracer().spanBuilder("execRemoteFragmentsAsync")
                        .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
            }
            ctxs.scopedSpan = new ScopedSpan(span);
            ctxs.unsetFields();
            BackendServiceProxy proxy = BackendServiceProxy.getInstance();
            futures.add(ImmutableTriple.of(ctxs, proxy, ctxs.execRemoteFragmentsAsync(proxy)));
        }
        waitPipelineRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send fragments");
        if (twoPhaseExecution) {
            // Phase 2: trigger execution of the prepared fragments.
            futures.clear();
            for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
                Span span = Telemetry.getNoopSpan();
                if (ConnectContext.get() != null) {
                    span = ConnectContext.get().getTracer().spanBuilder("execPlanFragmentStartAsync")
                            .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                }
                ctxs.scopedSpan = new ScopedSpan(span);
                BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                futures.add(ImmutableTriple.of(ctxs, proxy, ctxs.execPlanFragmentStartAsync(proxy)));
            }
            waitPipelineRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send execution start");
        }
        attachInstanceProfileToFragmentProfile();
    } finally {
        unlock();
    }
}
// Waits for the given batch of async exec RPC futures; on any failure, cancels the
// whole query and maps the failure to TIMEOUT / THRIFT_RPC_ERROR / user error.
private void waitRpc(List<Triple<BackendExecStates, BackendServiceProxy, Future<PExecPlanFragmentResult>>> futures,
        long leftTimeMs,
        String operation) throws RpcException, UserException {
    // Guard: the coordinator-level deadline already passed before waiting began.
    if (leftTimeMs <= 0) {
        throw new UserException("timeout before waiting for " + operation + " RPC. Elapse(sec): " + (
                (System.currentTimeMillis() - timeoutDeadline) / 1000 + queryOptions.getExecutionTimeout()));
    }
    // Each future waits at most the remaining time, capped by the per-RPC config limit.
    long timeoutMs = Math.min(leftTimeMs, Config.remote_fragment_exec_timeout_ms);
    for (Triple<BackendExecStates, BackendServiceProxy, Future<PExecPlanFragmentResult>> triple : futures) {
        TStatusCode code;
        String errMsg = null;
        Exception exception = null;
        Span span = triple.getLeft().scopedSpan.getSpan();
        try {
            PExecPlanFragmentResult result = triple.getRight().get(timeoutMs, TimeUnit.MILLISECONDS);
            code = TStatusCode.findByValue(result.getStatus().getStatusCode());
            if (code != TStatusCode.OK) {
                if (!result.getStatus().getErrorMsgsList().isEmpty()) {
                    errMsg = result.getStatus().getErrorMsgsList().get(0);
                } else {
                    errMsg = operation + " failed. backend id: " + triple.getLeft().beId;
                }
            }
        } catch (ExecutionException e) {
            // RPC-layer failure: drop the cached proxy so the next call reconnects.
            exception = e;
            code = TStatusCode.THRIFT_RPC_ERROR;
            triple.getMiddle().removeProxy(triple.getLeft().brpcAddr);
        } catch (InterruptedException e) {
            // NOTE(review): the interrupt flag is not restored here — confirm callers expect that.
            exception = e;
            code = TStatusCode.INTERNAL_ERROR;
        } catch (TimeoutException e) {
            exception = e;
            errMsg = "timeout when waiting for " + operation + " RPC. Wait(sec): " + timeoutMs / 1000;
            code = TStatusCode.TIMEOUT;
        }
        try {
            if (code != TStatusCode.OK) {
                if (exception != null && errMsg == null) {
                    errMsg = operation + " failed. " + exception.getMessage();
                }
                // One failed backend cancels the entire query before propagating the error.
                queryStatus.setStatus(errMsg);
                cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
                switch (code) {
                    case TIMEOUT:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    case THRIFT_RPC_ERROR:
                        // Transport failure additionally blacklists the backend for scheduling.
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        SimpleScheduler.addToBlacklist(triple.getLeft().beId, errMsg);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    default:
                        throw new UserException(errMsg, exception);
                }
            }
        } catch (Exception e) {
            // Record the failure on the tracing span before rethrowing.
            span.recordException(e);
            throw e;
        } finally {
            triple.getLeft().scopedSpan.endSpan();
        }
    }
}
// Pipeline-engine counterpart of waitRpc(): waits for the batched exec RPC futures and
// cancels the query on any failure, mapping it to TIMEOUT / THRIFT_RPC_ERROR / user error.
private void waitPipelineRpc(List<Triple<PipelineExecContexts, BackendServiceProxy,
        Future<PExecPlanFragmentResult>>> futures, long leftTimeMs,
        String operation) throws RpcException, UserException {
    // Guard: the coordinator-level deadline already passed before waiting began.
    // NOTE(review): this uses queryOptions.query_timeout while waitRpc() uses
    // getExecutionTimeout() — confirm whether the two are intended to differ.
    if (leftTimeMs <= 0) {
        throw new UserException("timeout before waiting for " + operation + " RPC. Elapse(sec): " + (
                (System.currentTimeMillis() - timeoutDeadline) / 1000 + queryOptions.query_timeout));
    }
    // Each future waits at most the remaining time, capped by the per-RPC config limit.
    long timeoutMs = Math.min(leftTimeMs, Config.remote_fragment_exec_timeout_ms);
    for (Triple<PipelineExecContexts, BackendServiceProxy, Future<PExecPlanFragmentResult>> triple : futures) {
        TStatusCode code;
        String errMsg = null;
        Exception exception = null;
        Span span = triple.getLeft().scopedSpan.getSpan();
        try {
            PExecPlanFragmentResult result = triple.getRight().get(timeoutMs, TimeUnit.MILLISECONDS);
            code = TStatusCode.findByValue(result.getStatus().getStatusCode());
            if (code != TStatusCode.OK) {
                if (!result.getStatus().getErrorMsgsList().isEmpty()) {
                    errMsg = result.getStatus().getErrorMsgsList().get(0);
                } else {
                    errMsg = operation + " failed. backend id: " + triple.getLeft().beId;
                }
            }
        } catch (ExecutionException e) {
            // RPC-layer failure: drop the cached proxy so the next call reconnects.
            exception = e;
            code = TStatusCode.THRIFT_RPC_ERROR;
            triple.getMiddle().removeProxy(triple.getLeft().brpcAddr);
        } catch (InterruptedException e) {
            // NOTE(review): the interrupt flag is not restored here — confirm callers expect that.
            exception = e;
            code = TStatusCode.INTERNAL_ERROR;
        } catch (TimeoutException e) {
            exception = e;
            errMsg = "timeout when waiting for " + operation + " RPC. Wait(sec): " + timeoutMs / 1000;
            code = TStatusCode.TIMEOUT;
        }
        try {
            if (code != TStatusCode.OK) {
                if (exception != null && errMsg == null) {
                    errMsg = operation + " failed. " + exception.getMessage();
                }
                // One failed backend cancels the entire query before propagating the error.
                queryStatus.setStatus(errMsg);
                cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
                switch (code) {
                    case TIMEOUT:
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    case THRIFT_RPC_ERROR:
                        // Transport failure additionally blacklists the backend for scheduling.
                        MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                .increase(1L);
                        SimpleScheduler.addToBlacklist(triple.getLeft().beId, errMsg);
                        throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                    default:
                        throw new UserException(errMsg, exception);
                }
            }
        } catch (Exception e) {
            // Record the failure on the tracing span before rethrowing.
            span.recordException(e);
            throw e;
        } finally {
            triple.getLeft().scopedSpan.endSpan();
        }
    }
}
/** Returns the export file paths reported by BEs; may be null before any report arrives. */
public List<String> getExportFiles() {
    return exportFiles;
}
// Appends newly reported export files under the coordinator lock;
// the list is created lazily on the first report.
void updateExportFiles(List<String> files) {
    lock.lock();
    try {
        if (exportFiles == null) {
            exportFiles = Lists.newArrayList();
        }
        exportFiles.addAll(files);
    } finally {
        lock.unlock();
    }
}
// Appends newly reported delta URLs under the coordinator lock.
void updateDeltas(List<String> urls) {
    lock.lock();
    try {
        deltaUrls.addAll(urls);
    } finally {
        lock.unlock();
    }
}
/**
 * Merges the incoming per-fragment load counters into the job-level totals
 * (normal rows, abnormal rows, unselected rows) under the coordinator lock.
 * Missing counters on either side are treated as 0.
 */
private void updateLoadCounters(Map<String, String> newLoadCounters) {
    lock.lock();
    try {
        // Old total + delta from this report, per counter key.
        long numRowsNormal = parseCounter(this.loadCounters, LoadEtlTask.DPP_NORMAL_ALL)
                + parseCounter(newLoadCounters, LoadEtlTask.DPP_NORMAL_ALL);
        long numRowsAbnormal = parseCounter(this.loadCounters, LoadEtlTask.DPP_ABNORMAL_ALL)
                + parseCounter(newLoadCounters, LoadEtlTask.DPP_ABNORMAL_ALL);
        long numRowsUnselected = parseCounter(this.loadCounters, LoadJob.UNSELECTED_ROWS)
                + parseCounter(newLoadCounters, LoadJob.UNSELECTED_ROWS);

        // Counters are stored as strings, matching the thrift report format.
        this.loadCounters.put(LoadEtlTask.DPP_NORMAL_ALL, "" + numRowsNormal);
        this.loadCounters.put(LoadEtlTask.DPP_ABNORMAL_ALL, "" + numRowsAbnormal);
        this.loadCounters.put(LoadJob.UNSELECTED_ROWS, "" + numRowsUnselected);
    } finally {
        lock.unlock();
    }
}

/** Parses the named counter from {@code counters}, treating a missing entry as 0. */
private static long parseCounter(Map<String, String> counters, String key) {
    String value = counters.get(key);
    return value == null ? 0L : Long.parseLong(value);
}
// Appends newly reported tablet commit infos under the coordinator lock.
private void updateCommitInfos(List<TTabletCommitInfo> commitInfos) {
    lock.lock();
    try {
        this.commitInfos.addAll(commitInfos);
    } finally {
        lock.unlock();
    }
}
// Appends reported error-tablet infos under the coordinator lock, capping the
// total retained at Config.max_error_tablet_of_broker_load to bound memory use.
private void updateErrorTabletInfos(List<TErrorTabletInfo> errorTabletInfos) {
    lock.lock();
    try {
        long capacityLeft = Config.max_error_tablet_of_broker_load - this.errorTabletInfos.size();
        if (capacityLeft >= 0) {
            // Keep only as many new entries as fit under the cap.
            errorTabletInfos.stream()
                    .limit(capacityLeft)
                    .forEach(this.errorTabletInfos::add);
        }
    } finally {
        lock.unlock();
    }
}
// Records the first failure reported by any instance and cancels the query.
// The guard order matters: later failures, OK reports, and cancellations after
// all results were returned are all ignored.
private void updateStatus(Status status, TUniqueId instanceId) {
    lock.lock();
    try {
        // Cancellations arriving after the client got all results are benign.
        if (returnedAllResults && status.isCancelled()) {
            return;
        }
        // Successful reports never overwrite anything.
        if (status.ok()) {
            return;
        }
        // Only the FIRST error is kept; queryStatus is sticky once failed.
        if (!queryStatus.ok()) {
            return;
        }
        queryStatus.setStatus(status);
        LOG.warn("one instance report fail throw updateStatus(), need cancel. job id: {},"
                + " query id: {}, instance id: {}, error message: {}",
                jobId, DebugUtil.printId(queryId), instanceId != null ? DebugUtil.printId(instanceId) : "NaN",
                status.getErrorMsg());
        // Propagate the matching cancel reason to all remote fragments.
        if (status.getErrorCode() == TStatusCode.TIMEOUT) {
            cancelInternal(Types.PPlanFragmentCancelReason.TIMEOUT);
        } else {
            cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
        }
    } finally {
        lock.unlock();
    }
}
// Fetches the next result batch from the result receiver (or the point-query
// executor), folding any reported failure into queryStatus and surfacing it.
public RowBatch getNext() throws Exception {
    if (receiver == null) {
        throw new UserException("There is no receiver.");
    }
    RowBatch resultBatch;
    Status status = new Status();
    if (!isPointQuery) {
        resultBatch = receiver.getNext(status);
    } else {
        resultBatch = pointExec.getNext(status);
    }
    if (!status.ok()) {
        LOG.warn("get next fail, need cancel. query id: {}", DebugUtil.printId(queryId));
    }
    // Merge the fetch status into the sticky query status (first error wins).
    updateStatus(status, null /* no instance id */);
    // Copy under lock so the error can be inspected without holding the lock.
    Status copyStatus = null;
    lock();
    try {
        copyStatus = new Status(queryStatus);
    } finally {
        unlock();
    }
    if (!copyStatus.ok()) {
        if (Strings.isNullOrEmpty(copyStatus.getErrorMsg())) {
            copyStatus.rewriteErrorMsg();
        }
        if (copyStatus.isRpcError()) {
            throw new RpcException(null, copyStatus.getErrorMsg());
        } else {
            String errMsg = copyStatus.getErrorMsg();
            LOG.warn("query failed: {}", errMsg);
            // Strip the trailing "host ..." detail before showing the error to the user.
            int hostIndex = errMsg.indexOf("host");
            if (hostIndex != -1) {
                errMsg = errMsg.substring(0, hostIndex);
            }
            throw new UserException(errMsg);
        }
    }
    if (resultBatch.isEos()) {
        this.returnedAllResults = true;
        // If a LIMIT has been satisfied, proactively cancel the remaining instances
        // instead of letting them run to completion.
        Long numLimitRows = fragments.get(0).getPlanRoot().getLimit();
        boolean hasLimit = numLimitRows > 0;
        if (!isBlockQuery && instanceIds.size() > 1 && hasLimit && numReceivedRows >= numLimitRows) {
            LOG.debug("no block query, return num >= limit rows, need cancel");
            cancelInternal(Types.PPlanFragmentCancelReason.LIMIT_REACH);
        }
        // Dry-run queries report only the row count carried by the EOS batch.
        if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().dryRunQuery) {
            numReceivedRows = 0;
            numReceivedRows += resultBatch.getQueryStatistics().getReturnedRows();
        }
    } else if (resultBatch.getBatch() != null) {
        numReceivedRows += resultBatch.getBatch().getRowsSize();
    }
    return resultBatch;
}
/** Cancels the query on behalf of the user (convenience overload). */
public void cancel() {
    cancel(Types.PPlanFragmentCancelReason.USER_CANCEL);
}
/**
 * Cancels the query with the given reason. Only the first cancellation takes
 * effect: once queryStatus is non-OK, later calls are no-ops.
 */
public void cancel(Types.PPlanFragmentCancelReason cancelReason) {
    lock();
    try {
        // A query that already failed or was cancelled stays in that state.
        if (!queryStatus.ok()) {
            return;
        }
        queryStatus.setStatus(Status.CANCELLED);
        LOG.warn("cancel execution of query, this is outside invoke");
        cancelInternal(cancelReason);
    } finally {
        unlock();
    }
}
// Cancels the local result receiver and all remote fragment instances.
// Caller must hold the coordinator lock (all call sites in this file do).
private void cancelInternal(Types.PPlanFragmentCancelReason cancelReason) {
    if (null != receiver) {
        receiver.cancel();
    }
    // Point queries have no remote fragments to cancel.
    // NOTE(review): this early return also skips executionProfile.onCancel()
    // for point queries — confirm that is intentional.
    if (null != pointExec) {
        pointExec.cancel();
        return;
    }
    cancelRemoteFragmentsAsync(cancelReason);
    executionProfile.onCancel();
}
// Fans the cancel request out to every in-flight fragment instance, using
// whichever bookkeeping structure matches the active execution engine.
private void cancelRemoteFragmentsAsync(Types.PPlanFragmentCancelReason cancelReason) {
    if (!enablePipelineEngine) {
        for (BackendExecState state : backendExecStates) {
            state.cancelFragmentInstance(cancelReason);
        }
        return;
    }
    for (PipelineExecContext ctx : pipelineExecContexts.values()) {
        ctx.cancelFragmentInstance(cancelReason);
    }
}
// Assigns hosts and instance ids to every fragment, then wires each fragment's
// data sink to the instances of its destination fragment (including the special
// bucket-shuffle and shared-hash-table broadcast cases).
private void computeFragmentExecParams() throws Exception {
    // Decide which host runs each instance first.
    computeFragmentHosts();
    // Derive per-instance ids from the query id: lo is offset by the running count.
    instanceIds.clear();
    for (FragmentExecParams params : fragmentExecParamsMap.values()) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("fragment {} has instances {}",
                    params.fragment.getFragmentId(), params.instanceExecParams.size());
        }
        for (int j = 0; j < params.instanceExecParams.size(); ++j) {
            TUniqueId instanceId = new TUniqueId();
            instanceId.setHi(queryId.hi);
            instanceId.setLo(queryId.lo + instanceIds.size() + 1);
            params.instanceExecParams.get(j).instanceId = instanceId;
            instanceIds.add(instanceId);
        }
    }
    computeMultiCastFragmentParams();
    assignRuntimeFilterAddr();
    // Wire each fragment's sink to the instances of its destination fragment.
    for (FragmentExecParams params : fragmentExecParamsMap.values()) {
        if (params.fragment instanceof MultiCastPlanFragment) {
            continue;
        }
        PlanFragment destFragment = params.fragment.getDestFragment();
        if (destFragment == null) {
            // Root fragment: results go to the coordinator, not another fragment.
            continue;
        }
        FragmentExecParams destParams = fragmentExecParamsMap.get(destFragment.getFragmentId());
        DataSink sink = params.fragment.getSink();
        // Accumulate the sender count per exchange node on the destination side.
        PlanNodeId exchId = sink.getExchNodeId();
        if (destParams.perExchNumSenders.get(exchId.asInt()) == null) {
            destParams.perExchNumSenders.put(exchId.asInt(), params.instanceExecParams.size());
        } else {
            destParams.perExchNumSenders.put(exchId.asInt(),
                    params.instanceExecParams.size() + destParams.perExchNumSenders.get(exchId.asInt()));
        }
        if (sink.getOutputPartition() != null
                && sink.getOutputPartition().isBucketShuffleHashPartition()) {
            // Bucket-shuffle: one destination slot per bucket sequence; buckets with
            // no owning instance get a dummy address and an invalid instance id.
            Preconditions.checkState(bucketShuffleJoinController
                    .isBucketShuffleJoin(destFragment.getFragmentId().asInt()), "Sink is"
                    + "Bucket Shuffle Partition, The destFragment must have bucket shuffle join node ");
            int bucketSeq = 0;
            int bucketNum = bucketShuffleJoinController.getFragmentBucketNum(destFragment.getFragmentId());
            // Degenerate case: a single destination instance with no buckets assigned.
            if (destParams.instanceExecParams.size() == 1 && (bucketNum == 0
                    || destParams.instanceExecParams.get(0).bucketSeqSet.isEmpty())) {
                bucketNum = 1;
                destParams.instanceExecParams.get(0).bucketSeqSet.add(0);
            }
            TNetworkAddress dummyServer = new TNetworkAddress("0.0.0.0", 0);
            while (bucketSeq < bucketNum) {
                TPlanFragmentDestination dest = new TPlanFragmentDestination();
                dest.fragment_instance_id = new TUniqueId(-1, -1);
                dest.server = dummyServer;
                dest.setBrpcServer(dummyServer);
                // Find the instance (if any) that owns this bucket.
                for (FInstanceExecParam instanceExecParams : destParams.instanceExecParams) {
                    if (instanceExecParams.bucketSeqSet.contains(bucketSeq)) {
                        dest.fragment_instance_id = instanceExecParams.instanceId;
                        dest.server = toRpcHost(instanceExecParams.host);
                        dest.setBrpcServer(toBrpcHost(instanceExecParams.host));
                        break;
                    }
                }
                bucketSeq++;
                params.destinations.add(dest);
            }
        } else {
            if (enablePipelineEngine && enableShareHashTableForBroadcastJoin
                    && params.fragment.isRightChildOfBroadcastHashJoin()) {
                // Shared broadcast hash table: send to only ONE instance per host;
                // that instance builds the table and its co-located siblings share it.
                Map<TNetworkAddress, FInstanceExecParam> destHosts = new HashMap<>();
                destParams.instanceExecParams.forEach(param -> {
                    if (destHosts.containsKey(param.host)) {
                        destHosts.get(param.host).instancesSharingHashTable.add(param.instanceId);
                    } else {
                        destHosts.put(param.host, param);
                        param.buildHashTableForBroadcastJoin = true;
                        TPlanFragmentDestination dest = new TPlanFragmentDestination();
                        dest.fragment_instance_id = param.instanceId;
                        try {
                            dest.server = toRpcHost(param.host);
                            dest.setBrpcServer(toBrpcHost(param.host));
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                        params.destinations.add(dest);
                    }
                });
            } else {
                // Default: every destination instance receives the data.
                for (int j = 0; j < destParams.instanceExecParams.size(); ++j) {
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = destParams.instanceExecParams.get(j).instanceId;
                    dest.server = toRpcHost(destParams.instanceExecParams.get(j).host);
                    dest.setBrpcServer(toBrpcHost(destParams.instanceExecParams.get(j).host));
                    params.destinations.add(dest);
                }
            }
        }
    }
}
// Resolves the BE registered at the given heartbeat address and returns its
// thrift RPC endpoint; fails if no such backend is alive.
private TNetworkAddress toRpcHost(TNetworkAddress host) throws Exception {
    Backend backend = Env.getCurrentSystemInfo()
            .getBackendWithBePort(host.getHostname(), host.getPort());
    if (backend == null) {
        throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
    }
    return new TNetworkAddress(backend.getHost(), backend.getBeRpcPort());
}
// Resolves the BE registered at the given heartbeat address and returns its
// brpc endpoint; returns null when the backend reports no valid brpc port.
private TNetworkAddress toBrpcHost(TNetworkAddress host) throws Exception {
    Backend backend = Env.getCurrentSystemInfo()
            .getBackendWithBePort(host.getHostname(), host.getPort());
    if (backend == null) {
        throw new UserException(SystemInfoService.NO_BACKEND_LOAD_AVAILABLE_MSG);
    }
    int brpcPort = backend.getBrpcPort();
    if (brpcPort < 0) {
        return null;
    }
    return new TNetworkAddress(backend.getHost(), brpcPort);
}
/**
 * Returns true if any node in the plan tree rooted at {@code node} is a UnionNode,
 * without crossing fragment boundaries (traversal stops at ExchangeNode children).
 */
private boolean containsUnionNode(PlanNode node) {
    if (node instanceof UnionNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        // Exchange marks a fragment boundary; do not search the child fragment.
        if (child instanceof ExchangeNode) {
            continue;
        }
        // Bug fix: the previous code returned the FIRST non-exchange child's
        // result directly, so a UnionNode under a later sibling was missed.
        if (containsUnionNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true if any node in the plan tree rooted at {@code node} is an IntersectNode,
 * without crossing fragment boundaries (traversal stops at ExchangeNode children).
 */
private boolean containsIntersectNode(PlanNode node) {
    if (node instanceof IntersectNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        // Exchange marks a fragment boundary; do not search the child fragment.
        if (child instanceof ExchangeNode) {
            continue;
        }
        // Bug fix: the previous code returned the FIRST non-exchange child's
        // result directly, so an IntersectNode under a later sibling was missed.
        if (containsIntersectNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true if any node in the plan tree rooted at {@code node} is an ExceptNode,
 * without crossing fragment boundaries (traversal stops at ExchangeNode children).
 */
private boolean containsExceptNode(PlanNode node) {
    if (node instanceof ExceptNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        // Exchange marks a fragment boundary; do not search the child fragment.
        if (child instanceof ExchangeNode) {
            continue;
        }
        // Bug fix: the previous code returned the FIRST non-exchange child's
        // result directly, so an ExceptNode under a later sibling was missed.
        if (containsExceptNode(child)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true if any node in the plan tree rooted at {@code node} is a SetOperationNode,
 * without crossing fragment boundaries (traversal stops at ExchangeNode children).
 */
private boolean containsSetOperationNode(PlanNode node) {
    if (node instanceof SetOperationNode) {
        return true;
    }
    for (PlanNode child : node.getChildren()) {
        // Exchange marks a fragment boundary; do not search the child fragment.
        if (child instanceof ExchangeNode) {
            continue;
        }
        // Bug fix: the previous code returned the FIRST non-exchange child's
        // result directly, so a SetOperationNode under a later sibling was missed.
        if (containsSetOperationNode(child)) {
            return true;
        }
    }
    return false;
}
// Chooses the execution host(s) for every fragment, walking bottom-up
// (fragments list is ordered so children precede parents from the back).
// Four cases: unpartitioned fragments, exchange-fed fragments, colocate /
// bucket-shuffle fragments, and ordinary scan fragments.
private void computeFragmentHosts() throws Exception {
    for (int i = fragments.size() - 1; i >= 0; --i) {
        PlanFragment fragment = fragments.get(i);
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
        if (fragment.getDataPartition() == DataPartition.UNPARTITIONED) {
            // Unpartitioned fragment: exactly one instance on a single host.
            Reference<Long> backendIdRef = new Reference<Long>();
            TNetworkAddress execHostport;
            // Prefer a host already chosen for this query when resource tags are set
            // or compute nodes are preferred for an all-external scan.
            if (((ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()) || (isAllExternalScan
                    && Config.prefer_compute_node_for_external_table)) && !addressToBackendID.isEmpty()) {
                execHostport = SimpleScheduler.getHostByCurrentBackend(addressToBackendID);
            } else {
                execHostport = SimpleScheduler.getHost(this.idToBackend, backendIdRef);
            }
            if (execHostport == null) {
                LOG.warn("DataPartition UNPARTITIONED, no scanNode Backend available");
                throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
            }
            if (backendIdRef.getRef() != null) {
                this.addressToBackendID.put(execHostport, backendIdRef.getRef());
            }
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport,
                    0, params);
            params.instanceExecParams.add(instanceParam);
            continue;
        }
        Pair<PlanNode, PlanNode> pairNodes = findLeftmostNode(fragment.getPlanRoot());
        PlanNode fatherNode = pairNodes.first;
        PlanNode leftMostNode = pairNodes.second;
        /*
         * Case A:
         * if the left most is ScanNode, which means there is no child fragment,
         * we should assign fragment instances on every scan node hosts.
         * Case B:
         * if not, there should be exchange nodes to collect all data from child fragments(input fragments),
         * so we should assign fragment instances corresponding to the child fragments' host
         */
        if (!(leftMostNode instanceof ScanNode)) {
            // Case B: place instances on the hosts of the widest input fragment.
            int inputFragmentIndex = 0;
            int maxParallelism = 0;
            int childrenCount = (fatherNode != null) ? fatherNode.getChildren().size() : 1;
            for (int j = 0; j < childrenCount; j++) {
                int currentChildFragmentParallelism
                        = fragmentExecParamsMap.get(fragment.getChild(j).getFragmentId()).instanceExecParams.size();
                if (currentChildFragmentParallelism > maxParallelism) {
                    maxParallelism = currentChildFragmentParallelism;
                    inputFragmentIndex = j;
                }
            }
            PlanFragmentId inputFragmentId = fragment.getChild(inputFragmentIndex).getFragmentId();
            // Session variable can cap the parallelism of exchange-fed fragments.
            int exchangeInstances = -1;
            if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable() != null) {
                exchangeInstances = ConnectContext.get().getSessionVariable().getExchangeInstanceParallel();
            }
            if (leftMostNode.getNumInstances() == 1) {
                exchangeInstances = 1;
            }
            if (exchangeInstances > 0 && fragmentExecParamsMap.get(inputFragmentId)
                    .instanceExecParams.size() > exchangeInstances) {
                // Capped: pick (shuffled) distinct hosts and round-robin instances onto them.
                Set<TNetworkAddress> hostSet = Sets.newHashSet();
                for (FInstanceExecParam execParams :
                        fragmentExecParamsMap.get(inputFragmentId).instanceExecParams) {
                    hostSet.add(execParams.host);
                }
                List<TNetworkAddress> hosts = Lists.newArrayList(hostSet);
                Collections.shuffle(hosts, instanceRandom);
                for (int index = 0; index < exchangeInstances; index++) {
                    FInstanceExecParam instanceParam = new FInstanceExecParam(null,
                            hosts.get(index % hosts.size()), 0, params);
                    params.instanceExecParams.add(instanceParam);
                }
            } else {
                // Uncapped: mirror the input fragment's instance placement one-to-one.
                for (FInstanceExecParam execParams
                        : fragmentExecParamsMap.get(inputFragmentId).instanceExecParams) {
                    FInstanceExecParam instanceParam = new FInstanceExecParam(null, execParams.host, 0, params);
                    params.instanceExecParams.add(instanceParam);
                }
            }
            // Shuffle to avoid always loading the first host in the list.
            Collections.shuffle(params.instanceExecParams, instanceRandom);
            continue;
        }
        // Case A: scan fragment.
        int parallelExecInstanceNum = fragment.getParallelExecNum();
        if ((isColocateFragment(fragment, fragment.getPlanRoot())
                && fragmentIdToSeqToAddressMap.containsKey(fragment.getFragmentId())
                && fragmentIdToSeqToAddressMap.get(fragment.getFragmentId()).size() > 0)) {
            computeColocateJoinInstanceParam(fragment.getFragmentId(), parallelExecInstanceNum, params);
        } else if (bucketShuffleJoinController.isBucketShuffleJoin(fragment.getFragmentId().asInt())) {
            bucketShuffleJoinController.computeInstanceParam(fragment.getFragmentId(),
                    parallelExecInstanceNum, params);
        } else {
            // Ordinary scan: split each host's scan ranges across instances.
            for (Entry<TNetworkAddress, Map<Integer, List<TScanRangeParams>>> entry : fragmentExecParamsMap.get(
                    fragment.getFragmentId()).scanRangeAssignment.entrySet()) {
                TNetworkAddress key = entry.getKey();
                Map<Integer, List<TScanRangeParams>> value = entry.getValue();
                for (Integer planNodeId : value.keySet()) {
                    List<TScanRangeParams> perNodeScanRanges = value.get(planNodeId);
                    List<List<TScanRangeParams>> perInstanceScanRanges = Lists.newArrayList();
                    List<Boolean> sharedScanOpts = Lists.newArrayList();
                    Optional<ScanNode> node = scanNodes.stream().filter(scanNode -> {
                        return scanNode.getId().asInt() == planNodeId;
                    }).findFirst();
                    // Shared scan (all instances see the full range list) is only used
                    // for the pipeline engine and when none of the opt-outs apply.
                    if (!enablePipelineEngine || perNodeScanRanges.size() > parallelExecInstanceNum
                            || (node.isPresent() && node.get().getShouldColoScan())
                            || (node.isPresent() && node.get() instanceof FileScanNode)
                            || Config.disable_shared_scan) {
                        int expectedInstanceNum = 1;
                        if (parallelExecInstanceNum > 1) {
                            // The scan instance num should not be larger than the tablets num.
                            expectedInstanceNum = Math.min(perNodeScanRanges.size(), parallelExecInstanceNum);
                        }
                        perInstanceScanRanges = ListUtil.splitBySize(perNodeScanRanges,
                                expectedInstanceNum);
                        sharedScanOpts = Collections.nCopies(perInstanceScanRanges.size(), false);
                    } else {
                        // Shared scan: every instance gets the full range list.
                        int expectedInstanceNum = Math.min(parallelExecInstanceNum,
                                leftMostNode.getNumInstances());
                        expectedInstanceNum = Math.max(expectedInstanceNum, 1);
                        perInstanceScanRanges = Collections.nCopies(expectedInstanceNum, perNodeScanRanges);
                        sharedScanOpts = Collections.nCopies(perInstanceScanRanges.size(), true);
                    }
                    LOG.debug("scan range number per instance is: {}", perInstanceScanRanges.size());
                    for (int j = 0; j < perInstanceScanRanges.size(); j++) {
                        List<TScanRangeParams> scanRangeParams = perInstanceScanRanges.get(j);
                        boolean sharedScan = sharedScanOpts.get(j);
                        FInstanceExecParam instanceParam = new FInstanceExecParam(null, key, 0, params);
                        instanceParam.perNodeScanRanges.put(planNodeId, scanRangeParams);
                        instanceParam.perNodeSharedScans.put(planNodeId, sharedScan);
                        params.instanceExecParams.add(instanceParam);
                    }
                }
            }
        }
        // Fallback: a scan fragment with no scan ranges still needs one instance.
        if (params.instanceExecParams.isEmpty()) {
            Reference<Long> backendIdRef = new Reference<Long>();
            TNetworkAddress execHostport;
            if (ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()
                    && !addressToBackendID.isEmpty()) {
                execHostport = SimpleScheduler.getHostByCurrentBackend(addressToBackendID);
            } else {
                execHostport = SimpleScheduler.getHost(this.idToBackend, backendIdRef);
            }
            if (execHostport == null) {
                throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
            }
            if (backendIdRef.getRef() != null) {
                this.addressToBackendID.put(execHostport, backendIdRef.getRef());
            }
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport, 0, params);
            params.instanceExecParams.add(instanceParam);
        }
    }
}
// Registers every fragment instance as a target of the runtime filters it consumes,
// counts the builder instances of each filter, and designates the first instance of
// the topmost fragment as the filter merge endpoint.
private void assignRuntimeFilterAddr() throws Exception {
    for (PlanFragment fragment : fragments) {
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
        for (RuntimeFilterId rid : fragment.getTargetRuntimeFilterIds()) {
            List<FRuntimeFilterTargetParam> targets = ridToTargetParam.get(rid);
            if (targets == null) {
                targets = new ArrayList<>();
                ridToTargetParam.put(rid, targets);
            }
            // Every instance of this fragment consumes the filter.
            for (final FInstanceExecParam instance : params.instanceExecParams) {
                targets.add(new FRuntimeFilterTargetParam(instance.instanceId, toBrpcHost(instance.host)));
            }
        }
        // Sum the builder instance count per filter across all fragments.
        for (RuntimeFilterId rid : fragment.getBuilderRuntimeFilterIds()) {
            Integer previous = ridToBuilderNum.get(rid);
            int added = params.instanceExecParams.size();
            ridToBuilderNum.put(rid, previous == null ? added : previous + added);
        }
    }
    // The merge endpoint lives on the first instance of the topmost (root) fragment.
    FragmentExecParams uppermostParams = fragmentExecParamsMap.get(fragments.get(0).getFragmentId());
    runtimeFilterMergeAddr = toBrpcHost(uppermostParams.instanceExecParams.get(0).host);
    runtimeFilterMergeInstanceId = uppermostParams.instanceExecParams.get(0).instanceId;
}
// Decides whether the fragment should be scheduled as a colocate fragment,
// honoring the per-session disable switch and caching positive results
// in colocateFragmentIds for later lookups.
private boolean isColocateFragment(PlanFragment planFragment, PlanNode node) {
    ConnectContext context = ConnectContext.get();
    if (context != null && context.getSessionVariable().isDisableColocatePlan()) {
        return false;
    }
    // Already known to be colocate from an earlier call.
    if (colocateFragmentIds.contains(node.getFragmentId().asInt())) {
        return true;
    }
    // Newly discovered colocate fragment: remember it.
    if (planFragment.hasColocatePlanNode()) {
        colocateFragmentIds.add(planFragment.getId().asInt());
        return true;
    }
    return false;
}
private Pair<PlanNode, PlanNode> findLeftmostNode(PlanNode plan) {
PlanNode newPlan = plan;
PlanNode fatherPlan = null;
while (newPlan.getChildren().size() != 0 && !(newPlan instanceof ExchangeNode)) {
fatherPlan = newPlan;
newPlan = newPlan.getChild(0);
}
return Pair.of(fatherPlan, newPlan);
}
private <K, V> V findOrInsert(Map<K, V> m, final K key, final V defaultVal) {
V value = m.get(key);
if (value == null) {
m.put(key, defaultVal);
value = defaultVal;
}
return value;
}
private List<TScanRangeParams> findOrInsert(Map<Integer, List<TScanRangeParams>> m, Integer key,
ArrayList<TScanRangeParams> defaultVal) {
List<TScanRangeParams> value = m.get(key);
if (value == null) {
m.put(key, defaultVal);
value = defaultVal;
}
return value;
}
    /**
     * Builds per-instance execution parameters for a colocate-join fragment.
     * Buckets that were previously assigned to the same backend address are
     * grouped together, then split into at most {@code parallelExecInstanceNum}
     * instances per address; each instance receives its buckets' scan ranges.
     */
    private void computeColocateJoinInstanceParam(PlanFragmentId fragmentId,
            int parallelExecInstanceNum, FragmentExecParams params) {
        Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(fragmentId);
        BucketSeqToScanRange bucketSeqToScanRange = fragmentIdTobucketSeqToScanRangeMap.get(fragmentId);
        Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.get(fragmentId);
        // Group (bucketSeq -> scan ranges) entries by the backend address chosen for each bucket.
        Map<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressToScanRanges
                = Maps.newHashMap();
        for (Map.Entry<Integer, Map<Integer, List<TScanRangeParams>>> scanRanges : bucketSeqToScanRange.entrySet()) {
            TNetworkAddress address = bucketSeqToAddress.get(scanRanges.getKey());
            Map<Integer, List<TScanRangeParams>> nodeScanRanges = scanRanges.getValue();
            // Keep only the scan nodes that belong to this fragment.
            Map<Integer, List<TScanRangeParams>> filteredNodeScanRanges = Maps.newHashMap();
            for (Integer scanNodeId : nodeScanRanges.keySet()) {
                if (scanNodeIds.contains(scanNodeId)) {
                    filteredNodeScanRanges.put(scanNodeId, nodeScanRanges.get(scanNodeId));
                }
            }
            Pair<Integer, Map<Integer, List<TScanRangeParams>>> filteredScanRanges
                    = Pair.of(scanRanges.getKey(), filteredNodeScanRanges);
            if (!addressToScanRanges.containsKey(address)) {
                addressToScanRanges.put(address, Lists.newArrayList());
            }
            addressToScanRanges.get(address).add(filteredScanRanges);
        }
        FragmentScanRangeAssignment assignment = params.scanRangeAssignment;
        for (Map.Entry<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressScanRange
                : addressToScanRanges.entrySet()) {
            List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> scanRange = addressScanRange.getValue();
            Map<Integer, List<TScanRangeParams>> range
                    = findOrInsert(assignment, addressScanRange.getKey(), new HashMap<>());
            // One instance per address by default; split into up to
            // parallelExecInstanceNum instances when parallelism is enabled.
            int expectedInstanceNum = 1;
            if (parallelExecInstanceNum > 1) {
                // The instance num should not larger than the bucket num of this address.
                expectedInstanceNum = Math.min(scanRange.size(), parallelExecInstanceNum);
            }
            List<List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> perInstanceScanRanges
                    = ListUtil.splitBySize(scanRange, expectedInstanceNum);
            for (List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> perInstanceScanRange
                    : perInstanceScanRanges) {
                FInstanceExecParam instanceParam = new FInstanceExecParam(null, addressScanRange.getKey(), 0, params);
                for (Pair<Integer, Map<Integer, List<TScanRangeParams>>> nodeScanRangeMap : perInstanceScanRange) {
                    instanceParam.bucketSeqSet.add(nodeScanRangeMap.first);
                    for (Map.Entry<Integer, List<TScanRangeParams>> nodeScanRange
                            : nodeScanRangeMap.second.entrySet()) {
                        // Accumulate into both the per-address assignment and the
                        // per-instance scan-range map.
                        if (!instanceParam.perNodeScanRanges.containsKey(nodeScanRange.getKey())) {
                            range.put(nodeScanRange.getKey(), Lists.newArrayList());
                            instanceParam.perNodeScanRanges.put(nodeScanRange.getKey(), Lists.newArrayList());
                        }
                        range.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                        instanceParam.perNodeScanRanges.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                    }
                }
                params.instanceExecParams.add(instanceParam);
            }
        }
    }
private Map<TNetworkAddress, Long> getReplicaNumPerHostForOlapTable() {
Map<TNetworkAddress, Long> replicaNumPerHost = Maps.newHashMap();
for (ScanNode scanNode : scanNodes) {
List<TScanRangeLocations> locationsList = scanNode.getScanRangeLocations(0);
for (TScanRangeLocations locations : locationsList) {
for (TScanRangeLocation location : locations.locations) {
if (replicaNumPerHost.containsKey(location.server)) {
replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) + 1L);
} else {
replicaNumPerHost.put(location.server, 1L);
}
}
}
}
return replicaNumPerHost;
}
    /**
     * Assigns every scan node's scan ranges to backends. Depending on the
     * fragment, the assignment is delegated to the colocate path, the
     * bucket-shuffle path, or the default scheduler path.
     */
    private void computeScanRangeAssignment() throws Exception {
        if (isPointQuery) {
            // Short circuit: a point query has a single OlapScanNode whose
            // locations are evaluated lazily; no assignment bookkeeping needed.
            List<TScanRangeLocations> locations = ((OlapScanNode) scanNodes.get(0)).lazyEvaluateRangeLocations();
            Preconditions.checkNotNull(locations);
            return;
        }
        Map<TNetworkAddress, Long> assignedBytesPerHost = Maps.newHashMap();
        Map<TNetworkAddress, Long> replicaNumPerHost = getReplicaNumPerHostForOlapTable();
        // Shuffle so repeated queries do not always favor the same hosts.
        Collections.shuffle(scanNodes);
        // set scan range in FragmentScanRangeAssignment for each scan node
        for (ScanNode scanNode : scanNodes) {
            if (!(scanNode instanceof ExternalScanNode)) {
                isAllExternalScan = false;
            }
            List<TScanRangeLocations> locations;
            locations = scanNode.getScanRangeLocations(0);
            if (locations == null) {
                // Node with no scan ranges (e.g. an empty source): nothing to assign.
                continue;
            }
            Collections.shuffle(locations);
            Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.computeIfAbsent(scanNode.getFragmentId(),
                    k -> Sets.newHashSet());
            scanNodeIds.add(scanNode.getId().asInt());
            if (scanNode instanceof FileQueryScanNode) {
                fileScanRangeParamsMap.put(
                        scanNode.getId().asInt(), ((FileQueryScanNode) scanNode).getFileScanRangeParams());
            }
            FragmentScanRangeAssignment assignment
                    = fragmentExecParamsMap.get(scanNode.getFragmentId()).scanRangeAssignment;
            boolean fragmentContainsColocateJoin = isColocateFragment(scanNode.getFragment(),
                    scanNode.getFragment().getPlanRoot());
            boolean fragmentContainsBucketShuffleJoin = bucketShuffleJoinController
                    .isBucketShuffleJoin(scanNode.getFragmentId().asInt(), scanNode.getFragment().getPlanRoot());
            // Colocate and bucket-shuffle assignment keep bucket -> backend
            // affinity; everything else goes through the generic scheduler.
            if (fragmentContainsColocateJoin) {
                computeScanRangeAssignmentByColocate((OlapScanNode) scanNode, assignedBytesPerHost, replicaNumPerHost);
            }
            if (fragmentContainsBucketShuffleJoin) {
                bucketShuffleJoinController.computeScanRangeAssignmentByBucket((OlapScanNode) scanNode,
                        idToBackend, addressToBackendID, replicaNumPerHost);
            }
            if (!(fragmentContainsColocateJoin || fragmentContainsBucketShuffleJoin)) {
                computeScanRangeAssignmentByScheduler(scanNode, locations, assignment, assignedBytesPerHost,
                        replicaNumPerHost);
            }
        }
    }
    /**
     * Assigns scan ranges for a colocate fragment: every bucket sequence is
     * pinned to one backend address (chosen once, round-robin by load), and all
     * scan ranges of that bucket are recorded against it.
     */
    private void computeScanRangeAssignmentByColocate(
            final OlapScanNode scanNode, Map<TNetworkAddress, Long> assignedBytesPerHost,
            Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
        if (!fragmentIdToSeqToAddressMap.containsKey(scanNode.getFragmentId())) {
            // First scan node of this fragment: initialize the per-fragment maps.
            fragmentIdToSeqToAddressMap.put(scanNode.getFragmentId(), new HashMap<>());
            fragmentIdTobucketSeqToScanRangeMap.put(scanNode.getFragmentId(), new BucketSeqToScanRange());
        }
        Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(scanNode.getFragmentId());
        BucketSeqToScanRange bucketSeqToScanRange = fragmentIdTobucketSeqToScanRangeMap.get(scanNode.getFragmentId());
        for (Integer bucketSeq : scanNode.bucketSeq2locations.keySet()) {
            // Each bucket may carry several tablet locations.
            List<TScanRangeLocations> locations = scanNode.bucketSeq2locations.get(bucketSeq);
            if (!bucketSeqToAddress.containsKey(bucketSeq)) {
                // Pick the backend for this bucket exactly once; later scan
                // nodes of the same fragment reuse the same address.
                getExecHostPortForFragmentIDAndBucketSeq(locations.get(0),
                        scanNode.getFragmentId(), bucketSeq, assignedBytesPerHost, replicaNumPerHost);
            }
            for (TScanRangeLocations location : locations) {
                Map<Integer, List<TScanRangeParams>> scanRanges =
                        findOrInsert(bucketSeqToScanRange, bucketSeq, new HashMap<>());
                List<TScanRangeParams> scanRangeParamsList =
                        findOrInsert(scanRanges, scanNode.getId().asInt(), new ArrayList<>());
                TScanRangeParams scanRangeParams = new TScanRangeParams();
                scanRangeParams.scan_range = location.scan_range;
                scanRangeParamsList.add(scanRangeParams);
                updateScanRangeNumByScanRange(scanRangeParams);
            }
        }
    }
private void getExecHostPortForFragmentIDAndBucketSeq(TScanRangeLocations seqLocation,
PlanFragmentId fragmentId, Integer bucketSeq, Map<TNetworkAddress, Long> assignedBytesPerHost,
Map<TNetworkAddress, Long> replicaNumPerHost)
throws Exception {
Reference<Long> backendIdRef = new Reference<Long>();
selectBackendsByRoundRobin(seqLocation, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
Backend backend = this.idToBackend.get(backendIdRef.getRef());
TNetworkAddress execHostPort = new TNetworkAddress(backend.getHost(), backend.getBePort());
this.addressToBackendID.put(execHostPort, backendIdRef.getRef());
this.fragmentIdToSeqToAddressMap.get(fragmentId).put(bucketSeq, execHostPort);
}
public TScanRangeLocation selectBackendsByRoundRobin(TScanRangeLocations seqLocation,
Map<TNetworkAddress, Long> assignedBytesPerHost,
Map<TNetworkAddress, Long> replicaNumPerHost,
Reference<Long> backendIdRef) throws UserException {
if (!Config.enable_local_replica_selection) {
return selectBackendsByRoundRobin(seqLocation.getLocations(), assignedBytesPerHost, replicaNumPerHost,
backendIdRef);
}
List<TScanRangeLocation> localLocations = new ArrayList<>();
List<TScanRangeLocation> nonlocalLocations = new ArrayList<>();
long localBeId = Env.getCurrentSystemInfo().getBackendIdByHost(FrontendOptions.getLocalHostAddress());
for (final TScanRangeLocation location : seqLocation.getLocations()) {
if (location.backend_id == localBeId) {
localLocations.add(location);
} else {
nonlocalLocations.add(location);
}
}
try {
return selectBackendsByRoundRobin(localLocations, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
} catch (UserException ue) {
if (!Config.enable_local_replica_selection_fallback) {
throw ue;
}
return selectBackendsByRoundRobin(nonlocalLocations, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
}
}
    /**
     * Picks the candidate replica on the host with the fewest assigned bytes,
     * breaking ties by the smaller remaining replica count. Side effects: every
     * candidate host's entry in replicaNumPerHost is decremented, and the
     * chosen host's assigned-bytes counter is incremented by {@code step}.
     *
     * @param backendIdRef out-parameter receiving the chosen backend id
     * @throws UserException when SimpleScheduler cannot provide a live backend
     */
    public TScanRangeLocation selectBackendsByRoundRobin(List<TScanRangeLocation> locations,
            Map<TNetworkAddress, Long> assignedBytesPerHost, Map<TNetworkAddress, Long> replicaNumPerHost,
            Reference<Long> backendIdRef) throws UserException {
        Long minAssignedBytes = Long.MAX_VALUE;
        Long minReplicaNum = Long.MAX_VALUE;
        TScanRangeLocation minLocation = null;
        // NOTE(review): step is a fixed unit of "assigned bytes" — presumably a
        // placeholder for the real scan-range size; confirm before tuning.
        Long step = 1L;
        for (final TScanRangeLocation location : locations) {
            // findOrInsert seeds unseen hosts with 0 assigned bytes.
            Long assignedBytes = findOrInsert(assignedBytesPerHost, location.server, 0L);
            if (assignedBytes < minAssignedBytes || (assignedBytes.equals(minAssignedBytes)
                    && replicaNumPerHost.get(location.server) < minReplicaNum)) {
                minAssignedBytes = assignedBytes;
                minReplicaNum = replicaNumPerHost.get(location.server);
                minLocation = location;
            }
        }
        // Each candidate host has one fewer pending replica to place.
        for (TScanRangeLocation location : locations) {
            replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) - 1);
        }
        // SimpleScheduler may substitute another location if the preferred
        // backend is blacklisted or down.
        TScanRangeLocation location = SimpleScheduler.getLocation(minLocation, locations,
                this.idToBackend, backendIdRef);
        assignedBytesPerHost.put(location.server, assignedBytesPerHost.get(location.server) + step);
        return location;
    }
    /**
     * Default (non-colocate, non-bucket-shuffle) scan-range assignment: each
     * scan range is given to the least-loaded candidate backend, and the
     * resulting per-host / per-scan-node range lists are recorded in
     * {@code assignment}.
     */
    private void computeScanRangeAssignmentByScheduler(
            final ScanNode scanNode,
            final List<TScanRangeLocations> locations,
            FragmentScanRangeAssignment assignment,
            Map<TNetworkAddress, Long> assignedBytesPerHost,
            Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
        for (TScanRangeLocations scanRangeLocations : locations) {
            Reference<Long> backendIdRef = new Reference<Long>();
            TScanRangeLocation minLocation = selectBackendsByRoundRobin(scanRangeLocations,
                    assignedBytesPerHost, replicaNumPerHost, backendIdRef);
            Backend backend = this.idToBackend.get(backendIdRef.getRef());
            TNetworkAddress execHostPort = new TNetworkAddress(backend.getHost(), backend.getBePort());
            this.addressToBackendID.put(execHostPort, backendIdRef.getRef());
            Map<Integer, List<TScanRangeParams>> scanRanges = findOrInsert(assignment, execHostPort,
                    new HashMap<Integer, List<TScanRangeParams>>());
            List<TScanRangeParams> scanRangeParamsList = findOrInsert(scanRanges, scanNode.getId().asInt(),
                    new ArrayList<TScanRangeParams>());
            // add scan range params
            TScanRangeParams scanRangeParams = new TScanRangeParams();
            scanRangeParams.scan_range = scanRangeLocations.scan_range;
            // Volume id tells BE which disk holds the chosen replica.
            scanRangeParams.setVolumeId(minLocation.volume_id);
            scanRangeParamsList.add(scanRangeParams);
            updateScanRangeNumByScanRange(scanRangeParams);
        }
    }
private void updateScanRangeNumByScanRange(TScanRangeParams param) {
TScanRange scanRange = param.getScanRange();
if (scanRange == null) {
return;
}
TBrokerScanRange brokerScanRange = scanRange.getBrokerScanRange();
if (brokerScanRange != null) {
scanRangeNum += brokerScanRange.getRanges().size();
}
TExternalScanRange externalScanRange = scanRange.getExtScanRange();
if (externalScanRange != null) {
TFileScanRange fileScanRange = externalScanRange.getFileScanRange();
if (fileScanRange != null) {
scanRangeNum += fileScanRange.getRanges().size();
}
}
TPaloScanRange paloScanRange = scanRange.getPaloScanRange();
if (paloScanRange != null) {
scanRangeNum = scanRangeNum + 1;
}
}
public void updateFragmentExecStatus(TReportExecStatusParams params) {
if (enablePipelineEngine) {
PipelineExecContext ctx = pipelineExecContexts.get(Pair.of(params.getFragmentId(), params.getBackendId()));
if (!ctx.updateProfile(params)) {
return;
}
if (LOG.isDebugEnabled()) {
StringBuilder builder = new StringBuilder();
ctx.printProfile(builder);
LOG.debug("profile for query_id={} instance_id={}\n{}",
DebugUtil.printId(queryId),
DebugUtil.printId(params.getFragmentInstanceId()),
builder.toString());
}
Status status = new Status(params.status);
if (!(returnedAllResults && status.isCancelled()) && !status.ok()) {
LOG.warn("one instance report fail, query_id={} instance_id={}, error message: {}",
DebugUtil.printId(queryId), DebugUtil.printId(params.getFragmentInstanceId()),
status.getErrorMsg());
updateStatus(status, params.getFragmentInstanceId());
}
if (ctx.fragmentInstancesMap.get(params.fragment_instance_id).getIsDone()) {
if (params.isSetDeltaUrls()) {
updateDeltas(params.getDeltaUrls());
}
if (params.isSetLoadCounters()) {
updateLoadCounters(params.getLoadCounters());
}
if (params.isSetTrackingUrl()) {
trackingUrl = params.getTrackingUrl();
}
if (params.isSetExportFiles()) {
updateExportFiles(params.getExportFiles());
}
if (params.isSetCommitInfos()) {
updateCommitInfos(params.getCommitInfos());
}
if (params.isSetErrorTabletInfos()) {
updateErrorTabletInfos(params.getErrorTabletInfos());
}
executionProfile.markOneInstanceDone(params.getFragmentInstanceId());
}
} else {
if (params.backend_num >= backendExecStates.size()) {
LOG.warn("unknown backend number: {}, expected less than: {}",
params.backend_num, backendExecStates.size());
return;
}
BackendExecState execState = backendExecStates.get(params.backend_num);
if (!execState.updateProfile(params)) {
return;
}
if (LOG.isDebugEnabled()) {
StringBuilder builder = new StringBuilder();
execState.printProfile(builder);
LOG.debug("profile for query_id={} instance_id={}\n{}",
DebugUtil.printId(queryId),
DebugUtil.printId(params.getFragmentInstanceId()),
builder.toString());
}
Status status = new Status(params.status);
if (!(returnedAllResults && status.isCancelled()) && !status.ok()) {
LOG.warn("one instance report fail, query_id={} instance_id={}, error message: {}",
DebugUtil.printId(queryId), DebugUtil.printId(params.getFragmentInstanceId()),
status.getErrorMsg());
updateStatus(status, params.getFragmentInstanceId());
}
if (execState.done) {
if (params.isSetDeltaUrls()) {
updateDeltas(params.getDeltaUrls());
}
if (params.isSetLoadCounters()) {
updateLoadCounters(params.getLoadCounters());
}
if (params.isSetTrackingUrl()) {
trackingUrl = params.getTrackingUrl();
}
if (params.isSetExportFiles()) {
updateExportFiles(params.getExportFiles());
}
if (params.isSetCommitInfos()) {
updateCommitInfos(params.getCommitInfos());
}
if (params.isSetErrorTabletInfos()) {
updateErrorTabletInfos(params.getErrorTabletInfos());
}
executionProfile.markOneInstanceDone(params.getFragmentInstanceId());
}
}
if (params.isSetLoadedRows() && jobId != -1) {
Env.getCurrentEnv().getLoadManager().updateJobProgress(
jobId, params.getBackendId(), params.getQueryId(), params.getFragmentInstanceId(),
params.getLoadedRows(), params.getLoadedBytes(), params.isDone());
Env.getCurrentEnv().getProgressManager().updateProgress(String.valueOf(jobId),
params.getQueryId(), params.getFragmentInstanceId(), params.getFinishedScanRanges());
}
}
/*
* Waiting the coordinator finish executing.
* return false if waiting timeout.
* return true otherwise.
* NOTICE: return true does not mean that coordinator executed success,
* the caller should check queryStatus for result.
*
* We divide the entire waiting process into multiple rounds,
* with a maximum of 30 seconds per round. And after each round of waiting,
* check the status of the BE. If the BE status is abnormal, the wait is ended
* and the result is returned. Otherwise, continue to the next round of waiting.
* This method mainly avoids the problem that the Coordinator waits for a long time
 * after some BE can no longer return the result due to some exception, such as BE is down.
*/
public boolean join(int timeoutS) {
final long fixedMaxWaitTime = 30;
long leftTimeoutS = timeoutS;
while (leftTimeoutS > 0) {
long waitTime = Math.min(leftTimeoutS, fixedMaxWaitTime);
boolean awaitRes = false;
try {
awaitRes = executionProfile.awaitAllInstancesDone(waitTime);
} catch (InterruptedException e) {
}
if (awaitRes) {
return true;
}
if (!checkBackendState()) {
return true;
}
leftTimeoutS -= waitTime;
}
return false;
}
/*
* Check the state of backends in needCheckBackendExecStates.
* return true if all of them are OK. Otherwise, return false.
*/
private boolean checkBackendState() {
if (enablePipelineEngine) {
for (PipelineExecContext ctx : needCheckPipelineExecContexts) {
if (!ctx.isBackendStateHealthy()) {
queryStatus = new Status(TStatusCode.INTERNAL_ERROR, "backend "
+ ctx.backend.getId() + " is down");
return false;
}
}
} else {
for (BackendExecState backendExecState : needCheckBackendExecStates) {
if (!backendExecState.isBackendStateHealthy()) {
queryStatus = new Status(TStatusCode.INTERNAL_ERROR, "backend "
+ backendExecState.backend.getId() + " is down");
return false;
}
}
}
return true;
}
    // True once every fragment instance has reported completion.
    public boolean isDone() {
        return executionProfile.isAllInstancesDone();
    }
    // Type alias: backend address -> (scan node id -> scan ranges assigned to
    // that scan node on that backend).
    class FragmentScanRangeAssignment
            extends HashMap<TNetworkAddress, Map<Integer, List<TScanRangeParams>>> {
    }
    // Type alias: bucket sequence -> (scan node id -> scan ranges of that
    // bucket for that scan node).
    class BucketSeqToScanRange extends HashMap<Integer, Map<Integer, List<TScanRangeParams>>> {
    }
    /**
     * Plans scan-range assignment for bucket-shuffle joins: detects
     * bucket-shuffle fragments, pins each bucket sequence to a backend while
     * balancing bucket counts per backend, and builds per-instance execution
     * parameters from that bucket-to-backend mapping.
     */
    class BucketShuffleJoinController {
        // fragment id -> (bucket seq -> (scan node id -> scan ranges))
        private final Map<PlanFragmentId, BucketSeqToScanRange> fragmentIdBucketSeqToScanRangeMap = Maps.newHashMap();
        // fragment id -> (bucket seq -> chosen backend address)
        private final Map<PlanFragmentId, Map<Integer, TNetworkAddress>> fragmentIdToSeqToAddressMap
                = Maps.newHashMap();
        // fragment id -> (backend id -> number of buckets assigned to it)
        private final Map<PlanFragmentId, Map<Long, Integer>> fragmentIdToBuckendIdBucketCountMap = Maps.newHashMap();
        // fragment id -> total bucket count of the fragment
        private final Map<PlanFragmentId, Integer> fragmentIdToBucketNumMap = Maps.newHashMap();

        // Fragment ids already classified as bucket-shuffle joins.
        private final Set<Integer> bucketShuffleFragmentIds = new HashSet<>();

        private final Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds;

        // TODO(cmy): Should refactor this Controller to unify bucket shuffle join and colocate join
        public BucketShuffleJoinController(Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds) {
            this.fragmentIdToScanNodeIds = fragmentIdToScanNodeIds;
        }

        /**
         * Returns true if the fragment rooted at {@code node} contains a
         * bucket-shuffle hash join. Discovered fragments are cached in
         * bucketShuffleFragmentIds.
         */
        private boolean isBucketShuffleJoin(int fragmentId, PlanNode node) {
            if (ConnectContext.get() != null) {
                if (!ConnectContext.get().getSessionVariable().isEnableBucketShuffleJoin()
                        && !ConnectContext.get().getSessionVariable().isEnableNereidsPlanner()) {
                    return false;
                }
            }

            // Only nodes belonging to this fragment are considered.
            if (fragmentId != node.getFragmentId().asInt()) {
                return false;
            }

            if (bucketShuffleFragmentIds.contains(fragmentId)) {
                return true;
            }

            if (node instanceof HashJoinNode) {
                HashJoinNode joinNode = (HashJoinNode) node;
                if (joinNode.isBucketShuffle()) {
                    bucketShuffleFragmentIds.add(joinNode.getFragmentId().asInt());
                    return true;
                }
            }

            // Recurse into children until a bucket-shuffle join is found.
            for (PlanNode childNode : node.getChildren()) {
                if (isBucketShuffleJoin(fragmentId, childNode)) {
                    return true;
                }
            }

            return false;
        }

        // Cache-only lookup: true if the fragment was already classified.
        private boolean isBucketShuffleJoin(int fragmentId) {
            return bucketShuffleFragmentIds.contains(fragmentId);
        }

        private int getFragmentBucketNum(PlanFragmentId fragmentId) {
            return fragmentIdToBucketNumMap.get(fragmentId);
        }

        /**
         * Picks the backend for one bucket: prefers the candidate backend with
         * the fewest buckets already assigned, tie-broken by the smaller
         * remaining replica count, then records the choice in the per-fragment
         * maps. SimpleScheduler may substitute another backend if the preferred
         * one is unavailable.
         */
        private void getExecHostPortForFragmentIDAndBucketSeq(TScanRangeLocations seqLocation,
                PlanFragmentId fragmentId, Integer bucketSeq, ImmutableMap<Long, Backend> idToBackend,
                Map<TNetworkAddress, Long> addressToBackendID,
                Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
            Map<Long, Integer> buckendIdToBucketCountMap = fragmentIdToBuckendIdBucketCountMap.get(fragmentId);
            int maxBucketNum = Integer.MAX_VALUE;
            long buckendId = Long.MAX_VALUE;
            Long minReplicaNum = Long.MAX_VALUE;
            for (TScanRangeLocation location : seqLocation.locations) {
                if (buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0) < maxBucketNum) {
                    maxBucketNum = buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0);
                    buckendId = location.backend_id;
                    minReplicaNum = replicaNumPerHost.get(location.server);
                } else if (buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0) == maxBucketNum
                        && replicaNumPerHost.get(location.server) < minReplicaNum) {
                    buckendId = location.backend_id;
                    minReplicaNum = replicaNumPerHost.get(location.server);
                }
            }
            Reference<Long> backendIdRef = new Reference<>();
            TNetworkAddress execHostPort = SimpleScheduler.getHost(buckendId,
                    seqLocation.locations, idToBackend, backendIdRef);
            // The scheduler may have chosen a different backend than preferred;
            // count the bucket against whichever backend was actually chosen.
            if (backendIdRef.getRef() != buckendId) {
                buckendIdToBucketCountMap.put(backendIdRef.getRef(),
                        buckendIdToBucketCountMap.getOrDefault(backendIdRef.getRef(), 0) + 1);
            } else {
                buckendIdToBucketCountMap.put(buckendId, buckendIdToBucketCountMap.getOrDefault(buckendId, 0) + 1);
            }
            for (TScanRangeLocation location : seqLocation.locations) {
                replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) - 1);
            }
            addressToBackendID.put(execHostPort, backendIdRef.getRef());
            this.fragmentIdToSeqToAddressMap.get(fragmentId).put(bucketSeq, execHostPort);
        }

        /**
         * Assigns scan ranges for a bucket-shuffle fragment: every bucket
         * sequence is pinned to one backend (chosen once), and all scan ranges
         * of that bucket are recorded against it.
         */
        private void computeScanRangeAssignmentByBucket(
                final OlapScanNode scanNode, ImmutableMap<Long, Backend> idToBackend,
                Map<TNetworkAddress, Long> addressToBackendID,
                Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
            if (!fragmentIdToSeqToAddressMap.containsKey(scanNode.getFragmentId())) {
                // Colocate tables have a fixed bucket count; otherwise the
                // tablet count of this scan defines the bucket count.
                int bucketNum = 0;
                if (scanNode.getOlapTable().isColocateTable()) {
                    bucketNum = scanNode.getOlapTable().getDefaultDistributionInfo().getBucketNum();
                } else {
                    bucketNum = (int) (scanNode.getTotalTabletsNum());
                }
                fragmentIdToBucketNumMap.put(scanNode.getFragmentId(), bucketNum);
                fragmentIdToSeqToAddressMap.put(scanNode.getFragmentId(), new HashMap<>());
                fragmentIdBucketSeqToScanRangeMap.put(scanNode.getFragmentId(), new BucketSeqToScanRange());
                fragmentIdToBuckendIdBucketCountMap.put(scanNode.getFragmentId(), new HashMap<>());
            }
            Map<Integer, TNetworkAddress> bucketSeqToAddress
                    = fragmentIdToSeqToAddressMap.get(scanNode.getFragmentId());
            BucketSeqToScanRange bucketSeqToScanRange = fragmentIdBucketSeqToScanRangeMap.get(scanNode.getFragmentId());

            for (Integer bucketSeq : scanNode.bucketSeq2locations.keySet()) {
                List<TScanRangeLocations> locations = scanNode.bucketSeq2locations.get(bucketSeq);
                if (!bucketSeqToAddress.containsKey(bucketSeq)) {
                    // Choose the backend for this bucket exactly once.
                    getExecHostPortForFragmentIDAndBucketSeq(locations.get(0), scanNode.getFragmentId(),
                            bucketSeq, idToBackend, addressToBackendID, replicaNumPerHost);
                }

                for (TScanRangeLocations location : locations) {
                    Map<Integer, List<TScanRangeParams>> scanRanges =
                            findOrInsert(bucketSeqToScanRange, bucketSeq, new HashMap<>());

                    List<TScanRangeParams> scanRangeParamsList =
                            findOrInsert(scanRanges, scanNode.getId().asInt(), new ArrayList<>());

                    // add scan range params
                    TScanRangeParams scanRangeParams = new TScanRangeParams();
                    scanRangeParams.scan_range = location.scan_range;
                    scanRangeParamsList.add(scanRangeParams);
                    updateScanRangeNumByScanRange(scanRangeParams);
                }
            }
        }

        /**
         * Builds per-instance execution parameters from the bucket-to-backend
         * mapping: buckets on the same backend address are grouped, split into
         * at most parallelExecInstanceNum instances, and each instance receives
         * its buckets' scan ranges.
         */
        private void computeInstanceParam(PlanFragmentId fragmentId,
                int parallelExecInstanceNum, FragmentExecParams params) {
            Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(fragmentId);
            BucketSeqToScanRange bucketSeqToScanRange = fragmentIdBucketSeqToScanRangeMap.get(fragmentId);
            Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.get(fragmentId);

            // 1. count each node in one fragment should scan how many tablet, gather them in one list
            Map<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressToScanRanges
                    = Maps.newHashMap();
            for (Map.Entry<Integer, Map<Integer, List<TScanRangeParams>>> scanRanges
                    : bucketSeqToScanRange.entrySet()) {
                TNetworkAddress address = bucketSeqToAddress.get(scanRanges.getKey());
                Map<Integer, List<TScanRangeParams>> nodeScanRanges = scanRanges.getValue();
                // Keep only the scan nodes that belong to this fragment.
                Map<Integer, List<TScanRangeParams>> filteredNodeScanRanges = Maps.newHashMap();
                for (Integer scanNodeId : nodeScanRanges.keySet()) {
                    if (scanNodeIds.contains(scanNodeId)) {
                        filteredNodeScanRanges.put(scanNodeId, nodeScanRanges.get(scanNodeId));
                    }
                }
                Pair<Integer, Map<Integer, List<TScanRangeParams>>> filteredScanRanges
                        = Pair.of(scanRanges.getKey(), filteredNodeScanRanges);

                if (!addressToScanRanges.containsKey(address)) {
                    addressToScanRanges.put(address, Lists.newArrayList());
                }
                addressToScanRanges.get(address).add(filteredScanRanges);
            }
            FragmentScanRangeAssignment assignment = params.scanRangeAssignment;
            for (Map.Entry<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressScanRange
                    : addressToScanRanges.entrySet()) {
                List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> scanRange = addressScanRange.getValue();
                Map<Integer, List<TScanRangeParams>> range
                        = findOrInsert(assignment, addressScanRange.getKey(), new HashMap<>());

                // One instance per address unless parallelism is enabled; the
                // instance count never exceeds this address's bucket count.
                int expectedInstanceNum = 1;
                if (parallelExecInstanceNum > 1) {
                    //the scan instance num should not larger than the tablets num
                    expectedInstanceNum = Math.min(scanRange.size(), parallelExecInstanceNum);
                }

                // 2. split how many scanRange one instance should scan
                List<List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> perInstanceScanRanges
                        = ListUtil.splitBySize(scanRange, expectedInstanceNum);

                // 3.construct instanceExecParam add the scanRange should be scan by instance
                for (List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> perInstanceScanRange
                        : perInstanceScanRanges) {
                    FInstanceExecParam instanceParam = new FInstanceExecParam(
                            null, addressScanRange.getKey(), 0, params);

                    for (Pair<Integer, Map<Integer, List<TScanRangeParams>>> nodeScanRangeMap : perInstanceScanRange) {
                        instanceParam.addBucketSeq(nodeScanRangeMap.first);
                        for (Map.Entry<Integer, List<TScanRangeParams>> nodeScanRange
                                : nodeScanRangeMap.second.entrySet()) {
                            if (!instanceParam.perNodeScanRanges.containsKey(nodeScanRange.getKey())) {
                                range.put(nodeScanRange.getKey(), Lists.newArrayList());
                                instanceParam.perNodeScanRanges.put(nodeScanRange.getKey(), Lists.newArrayList());
                            }
                            range.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                            instanceParam.perNodeScanRanges.get(nodeScanRange.getKey())
                                    .addAll(nodeScanRange.getValue());
                        }
                    }
                    params.instanceExecParams.add(instanceParam);
                }
            }
        }
    }
    // Colocate-join bookkeeping: fragment id -> (bucket seq -> scan ranges).
    private final Map<PlanFragmentId, BucketSeqToScanRange> fragmentIdTobucketSeqToScanRangeMap = Maps.newHashMap();
    // Colocate-join bookkeeping: fragment id -> (bucket seq -> chosen backend address).
    private final Map<PlanFragmentId, Map<Integer, TNetworkAddress>> fragmentIdToSeqToAddressMap = Maps.newHashMap();
    // Scan node ids contained in each fragment.
    private final Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds = Maps.newHashMap();
    // Fragment ids already classified as colocate fragments.
    private final Set<Integer> colocateFragmentIds = new HashSet<>();
    // Handles bucket-shuffle join planning; shares fragmentIdToScanNodeIds with this coordinator.
    private final BucketShuffleJoinController bucketShuffleJoinController
            = new BucketShuffleJoinController(fragmentIdToScanNodeIds);
    /**
     * Per-instance execution state for the non-pipeline engine: tracks the RPC
     * params sent to a backend, the instance's runtime profile, and the
     * cancel/done lifecycle of that single fragment instance.
     */
    public class BackendExecState {
        TExecPlanFragmentParams rpcParams;
        PlanFragmentId fragmentId;
        boolean initiated;           // true once the exec RPC has been sent
        volatile boolean done;       // true once the instance reported completion
        boolean hasCanceled;
        int profileFragmentId;
        RuntimeProfile instanceProfile;
        RuntimeProfile loadChannelProfile;
        TNetworkAddress brpcAddress;
        TNetworkAddress address;
        Backend backend;
        long lastMissingHeartbeatTime = -1;
        TUniqueId instanceId;

        public BackendExecState(PlanFragmentId fragmentId, int instanceId, int profileFragmentId,
                TExecPlanFragmentParams rpcParams, Map<TNetworkAddress, Long> addressToBackendID,
                RuntimeProfile loadChannelProfile) {
            this.profileFragmentId = profileFragmentId;
            this.fragmentId = fragmentId;
            this.rpcParams = rpcParams;
            this.initiated = false;
            this.done = false;
            // Resolve this instance's host/backend from the fragment exec params.
            FInstanceExecParam fi = fragmentExecParamsMap.get(fragmentId).instanceExecParams.get(instanceId);
            this.instanceId = fi.instanceId;
            this.address = fi.host;
            this.backend = idToBackend.get(addressToBackendID.get(address));
            this.brpcAddress = new TNetworkAddress(backend.getHost(), backend.getBrpcPort());

            String name = "Instance " + DebugUtil.printId(fi.instanceId) + " (host=" + address + ")";
            this.loadChannelProfile = loadChannelProfile;
            this.instanceProfile = new RuntimeProfile(name);
            this.hasCanceled = false;
            // Baseline for later health checks in isBackendStateHealthy().
            this.lastMissingHeartbeatTime = backend.getLastMissingHeartbeatTime();
        }

        /**
         * Some information common to all Fragments does not need to be sent repeatedly.
         * Therefore, when we confirm that a certain BE has accepted the information,
         * we will delete the information in the subsequent Fragment to avoid repeated sending.
         * This information can be obtained from the cache of BE.
         */
        public void unsetFields() {
            this.rpcParams.unsetDescTbl();
            this.rpcParams.unsetFileScanParams();
            this.rpcParams.unsetCoord();
            this.rpcParams.unsetQueryGlobals();
            this.rpcParams.unsetResourceInfo();
            this.rpcParams.setIsSimplifiedParam(true);
        }

        /**
         * Merges a status report into this instance's profiles.
         *
         * @return false if this instance already finished (report ignored)
         */
        public synchronized boolean updateProfile(TReportExecStatusParams params) {
            if (this.done) {
                // duplicate packet
                return false;
            }
            if (params.isSetProfile()) {
                instanceProfile.update(params.profile);
            }
            if (params.isSetLoadChannelProfile()) {
                loadChannelProfile.update(params.loadChannelProfile);
            }
            this.done = params.done;
            if (statsErrorEstimator != null) {
                statsErrorEstimator.updateExactReturnedRows(params);
            }
            return true;
        }

        public synchronized void printProfile(StringBuilder builder) {
            this.instanceProfile.computeTimeInProfile();
            this.instanceProfile.prettyPrint(builder, "");
        }

        /**
         * Asks the backend to cancel this fragment instance.
         *
         * @return true if the cancel RPC was issued (or treated as issued);
         *         false when the instance was never started, already finished,
         *         or already canceled
         */
        public synchronized boolean cancelFragmentInstance(Types.PPlanFragmentCancelReason cancelReason) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("cancelRemoteFragments initiated={} done={} hasCanceled={} backend: {},"
                                + " fragment instance id={}, reason: {}",
                        this.initiated, this.done, this.hasCanceled, backend.getId(),
                        DebugUtil.printId(fragmentInstanceId()), cancelReason.name());
            }
            try {
                if (!this.initiated) {
                    return false;
                }
                // don't cancel if it is already finished
                if (this.done) {
                    return false;
                }
                if (this.hasCanceled) {
                    return false;
                }

                Span span = ConnectContext.get() != null
                        ? ConnectContext.get().getTracer().spanBuilder("cancelPlanFragmentAsync")
                        .setParent(Context.current()).setSpanKind(SpanKind.CLIENT).startSpan()
                        : Telemetry.getNoopSpan();
                try (Scope scope = span.makeCurrent()) {
                    BackendServiceProxy.getInstance().cancelPlanFragmentAsync(brpcAddress,
                            fragmentInstanceId(), cancelReason);
                } catch (RpcException e) {
                    span.recordException(e);
                    LOG.warn("cancel plan fragment get a exception, address={}:{}", brpcAddress.getHostname(),
                            brpcAddress.getPort());
                    // An unreachable backend is blacklisted for future scheduling.
                    SimpleScheduler.addToBlacklist(addressToBackendID.get(brpcAddress), e.getMessage());
                } finally {
                    span.end();
                }

                this.hasCanceled = true;
            } catch (Exception e) {
                LOG.warn("catch a exception", e);
                return false;
            }
            return true;
        }

        // Validates the profile fragment id before computing profile times.
        public synchronized boolean computeTimeInProfile(int maxFragmentId) {
            if (this.profileFragmentId < 0 || this.profileFragmentId > maxFragmentId) {
                LOG.warn("profileFragmentId {} should be in [0, {})", profileFragmentId, maxFragmentId);
                return false;
            }
            // profile name like:
            // build -- fragment 0
            instanceProfile.computeTimeInProfile();
            return true;
        }

        // A backend is unhealthy if it missed a heartbeat since this state was
        // created and is no longer alive.
        public boolean isBackendStateHealthy() {
            if (backend.getLastMissingHeartbeatTime() > lastMissingHeartbeatTime && !backend.isAlive()) {
                LOG.warn("backend {} is down while joining the coordinator. job id: {}",
                        backend.getId(), jobId);
                return false;
            }
            return true;
        }

        public FragmentInstanceInfo buildFragmentInstanceInfo() {
            return new QueryStatisticsItem.FragmentInstanceInfo.Builder().instanceId(fragmentInstanceId())
                    .fragmentId(String.valueOf(fragmentId)).address(this.address).build();
        }

        private TUniqueId fragmentInstanceId() {
            return this.rpcParams.params.getFragmentInstanceId();
        }
    }
/**
 * Execution context for all instances of one plan fragment running on a single
 * backend under the pipeline engine. Holds the thrift RPC parameters, the
 * per-instance runtime profiles and the initiated/done/canceled bookkeeping.
 * Inner class: reads the enclosing coordinator's {@code idToBackend},
 * {@code addressToBackendID} and {@code jobId}.
 */
public class PipelineExecContext {
    TPipelineFragmentParams rpcParams;   // thrift params covering all local instances on this BE
    PlanFragmentId fragmentId;
    boolean initiated;                   // true once the exec RPC has been sent
    volatile boolean done;               // true when every instance has reported done
    boolean hasCanceled;
    Map<TUniqueId, RuntimeProfile> fragmentInstancesMap;  // instance id -> its runtime profile
    RuntimeProfile loadChannelProfile;
    int cancelProgress = 0;
    int profileFragmentId;
    TNetworkAddress brpcAddress;         // backend brpc endpoint (RPCs)
    TNetworkAddress address;             // backend thrift endpoint
    Backend backend;
    // Snapshot of the backend's missing-heartbeat time taken at construction;
    // compared against the live value by isBackendStateHealthy().
    long lastMissingHeartbeatTime = -1;
    long profileReportProgress = 0;      // number of instances that have reported done
    private final int numInstances;

    public PipelineExecContext(PlanFragmentId fragmentId, int profileFragmentId,
            TPipelineFragmentParams rpcParams, Long backendId,
            Map<TUniqueId, RuntimeProfile> fragmentInstancesMap,
            RuntimeProfile loadChannelProfile) {
        this.profileFragmentId = profileFragmentId;
        this.fragmentId = fragmentId;
        this.rpcParams = rpcParams;
        this.numInstances = rpcParams.local_params.size();
        this.fragmentInstancesMap = fragmentInstancesMap;
        this.loadChannelProfile = loadChannelProfile;
        this.initiated = false;
        this.done = false;
        this.backend = idToBackend.get(backendId);
        this.address = new TNetworkAddress(backend.getHost(), backend.getBePort());
        this.brpcAddress = new TNetworkAddress(backend.getHost(), backend.getBrpcPort());
        this.hasCanceled = false;
        this.lastMissingHeartbeatTime = backend.getLastMissingHeartbeatTime();
    }

    /**
     * Some information common to all Fragments does not need to be sent repeatedly.
     * Therefore, when we confirm that a certain BE has accepted the information,
     * we will delete the information in the subsequent Fragment to avoid repeated sending.
     * This information can be obtained from the cache of BE.
     */
    public void unsetFields() {
        this.rpcParams.unsetDescTbl();
        this.rpcParams.unsetFileScanParams();
        this.rpcParams.unsetCoord();
        this.rpcParams.unsetQueryGlobals();
        this.rpcParams.unsetResourceInfo();
        this.rpcParams.setIsSimplifiedParam(true);
    }

    /**
     * Merges a BE status report into the matching instance profile.
     *
     * @return false when this is a duplicate "done" report for an instance that
     *         already finished; true when the report was applied.
     */
    public synchronized boolean updateProfile(TReportExecStatusParams params) {
        RuntimeProfile profile = fragmentInstancesMap.get(params.fragment_instance_id);
        // Ignore repeated done-reports for an already-finished instance.
        if (params.done && profile.getIsDone()) {
            return false;
        }
        if (params.isSetProfile()) {
            profile.update(params.profile);
        }
        if (params.isSetLoadChannelProfile()) {
            loadChannelProfile.update(params.loadChannelProfile);
        }
        if (params.done) {
            profile.setIsDone(true);
            profileReportProgress++;
        }
        // All instances reported done -> the whole context is done.
        if (profileReportProgress == numInstances) {
            this.done = true;
        }
        return true;
    }

    // Pretty-prints every instance profile into the builder (times recomputed first).
    public synchronized void printProfile(StringBuilder builder) {
        this.fragmentInstancesMap.values().stream().forEach(p -> {
            p.computeTimeInProfile();
            p.prettyPrint(builder, "");
        });
    }

    /**
     * Cancels every not-yet-finished instance of this fragment on the backend.
     *
     * @return true when cancellation was issued; false when the context was not
     *         started, already done/canceled, or an RPC setup failed.
     */
    public synchronized boolean cancelFragmentInstance(Types.PPlanFragmentCancelReason cancelReason) {
        if (!this.initiated) {
            return false;
        }
        if (this.done) {
            return false;
        }
        if (this.hasCanceled) {
            return false;
        }
        for (TPipelineInstanceParams localParam : rpcParams.local_params) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("cancelRemoteFragments initiated={} done={} hasCanceled={} backend: {},"
                        + " fragment instance id={}, reason: {}",
                        this.initiated, this.done, this.hasCanceled, backend.getId(),
                        DebugUtil.printId(localParam.fragment_instance_id), cancelReason.name());
            }
            RuntimeProfile profile = fragmentInstancesMap.get(localParam.fragment_instance_id);
            // Skip instances that already finished or were already canceled.
            if (profile.getIsDone() || profile.getIsCancel()) {
                continue;
            }
            // NOTE(review): set before the RPC, and not rolled back if the RPC
            // below throws — confirm this "at least one cancel attempted" semantic.
            this.hasCanceled = true;
            try {
                Span span = ConnectContext.get() != null
                        ? ConnectContext.get().getTracer().spanBuilder("cancelPlanFragmentAsync")
                        .setParent(Context.current()).setSpanKind(SpanKind.CLIENT).startSpan()
                        : Telemetry.getNoopSpan();
                try (Scope scope = span.makeCurrent()) {
                    BackendServiceProxy.getInstance().cancelPlanFragmentAsync(brpcAddress,
                            localParam.fragment_instance_id, cancelReason);
                } catch (RpcException e) {
                    span.recordException(e);
                    LOG.warn("cancel plan fragment get a exception, address={}:{}", brpcAddress.getHostname(),
                            brpcAddress.getPort());
                    // Blacklist the backend so the scheduler avoids it for new queries.
                    SimpleScheduler.addToBlacklist(addressToBackendID.get(brpcAddress), e.getMessage());
                } finally {
                    span.end();
                }
            } catch (Exception e) {
                LOG.warn("catch a exception", e);
                return false;
            }
        }
        if (!this.hasCanceled) {
            return false;
        }
        // Mark every instance profile as canceled so later reports are ignored.
        for (int i = 0; i < this.numInstances; i++) {
            fragmentInstancesMap.get(rpcParams.local_params.get(i).fragment_instance_id).setIsCancel(true);
        }
        cancelProgress = numInstances;
        return true;
    }

    /**
     * Validates profileFragmentId against the given bound.
     *
     * @return false when profileFragmentId is out of range, true otherwise
     */
    public synchronized boolean computeTimeInProfile(int maxFragmentId) {
        // NOTE(review): message says [0, maxFragmentId) but the check accepts
        // profileFragmentId == maxFragmentId — confirm which side is intended.
        if (this.profileFragmentId < 0 || this.profileFragmentId > maxFragmentId) {
            LOG.warn("profileFragmentId {} should be in [0, {})", profileFragmentId, maxFragmentId);
            return false;
        }
        return true;
    }

    /**
     * Returns false when the backend missed a heartbeat after this context was
     * created and is no longer alive.
     */
    public boolean isBackendStateHealthy() {
        if (backend.getLastMissingHeartbeatTime() > lastMissingHeartbeatTime && !backend.isAlive()) {
            LOG.warn("backend {} is down while joining the coordinator. job id: {}",
                    backend.getId(), jobId);
            return false;
        }
        return true;
    }

    // One FragmentInstanceInfo per local instance, for query-statistics reporting.
    public List<QueryStatisticsItem.FragmentInstanceInfo> buildFragmentInstanceInfo() {
        return this.rpcParams.local_params.stream().map(it -> new FragmentInstanceInfo.Builder()
                .instanceId(it.fragment_instance_id).fragmentId(String.valueOf(fragmentId))
                .address(this.address).build()).collect(Collectors.toList());
    }
}
/**
 * Groups every {@link BackendExecState} that targets the same backend so their
 * exec-plan-fragment RPCs can be batched into a single brpc call.
 */
public class BackendExecStates {
    long beId;
    TNetworkAddress brpcAddr;
    List<BackendExecState> states = Lists.newArrayList();
    boolean twoPhaseExecution = false;  // when true, fragments are sent first and started by a second RPC
    ScopedSpan scopedSpan = new ScopedSpan();

    public BackendExecStates(long beId, TNetworkAddress brpcAddr, boolean twoPhaseExecution) {
        this.beId = beId;
        this.brpcAddr = brpcAddr;
        this.twoPhaseExecution = twoPhaseExecution;
    }

    public void addState(BackendExecState state) {
        this.states.add(state);
    }

    /**
     * The BackendExecState in states are all send to the same BE.
     * So only the first BackendExecState need to carry some common fields, such as DescriptorTbl,
     * the other BackendExecState does not need those fields. Unset them to reduce size.
     */
    public void unsetFields() {
        boolean first = true;
        for (BackendExecState state : states) {
            if (first) {
                first = false;
                continue;
            }
            state.unsetFields();
        }
    }

    /** Sends all fragments in this group to the backend in one batched async RPC. */
    public Future<InternalService.PExecPlanFragmentResult> execRemoteFragmentsAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            TExecPlanFragmentParamsList paramsList = new TExecPlanFragmentParamsList();
            for (BackendExecState state : states) {
                state.initiated = true;
                paramsList.addToParamsList(state.rpcParams);
            }
            return proxy.execPlanFragmentsAsync(brpcAddr, paramsList, twoPhaseExecution);
        } catch (RpcException e) {
            // Surface the RPC failure through the normal Future result path.
            return futureWithException(e);
        }
    }

    /** Second phase of two-phase execution: tells the BE to start the fragments. */
    public Future<InternalService.PExecPlanFragmentResult> execPlanFragmentStartAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            PExecPlanFragmentStartRequest.Builder builder = PExecPlanFragmentStartRequest.newBuilder();
            PUniqueId qid = PUniqueId.newBuilder().setHi(queryId.hi).setLo(queryId.lo).build();
            builder.setQueryId(qid);
            return proxy.execPlanFragmentStartAsync(brpcAddr, builder.build());
        } catch (RpcException e) {
            return futureWithException(e);
        }
    }

    // Wraps an RpcException into an already-completed Future whose result carries
    // a THRIFT_RPC_ERROR status, so callers handle all outcomes via Future.get().
    @NotNull
    private Future<PExecPlanFragmentResult> futureWithException(RpcException e) {
        return new Future<PExecPlanFragmentResult>() {
            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                return false;
            }

            @Override
            public boolean isCancelled() {
                return false;
            }

            @Override
            public boolean isDone() {
                return true;
            }

            @Override
            public PExecPlanFragmentResult get() {
                PExecPlanFragmentResult result = PExecPlanFragmentResult.newBuilder().setStatus(
                        Types.PStatus.newBuilder().addErrorMsgs(e.getMessage())
                                .setStatusCode(TStatusCode.THRIFT_RPC_ERROR.getValue()).build()).build();
                return result;
            }

            @Override
            public PExecPlanFragmentResult get(long timeout, TimeUnit unit) {
                // Result is already available; the timeout is irrelevant.
                return get();
            }
        };
    }
}
/**
 * Groups every {@link PipelineExecContext} that targets the same backend so
 * their exec RPCs can be batched into a single brpc call. Pipeline-engine
 * counterpart of {@link BackendExecStates}.
 * NOTE(review): futureWithException is duplicated verbatim in BackendExecStates;
 * a shared helper would remove the copy.
 */
public class PipelineExecContexts {
    long beId;
    TNetworkAddress brpcAddr;
    List<PipelineExecContext> ctxs = Lists.newArrayList();
    boolean twoPhaseExecution = false;  // when true, fragments are sent first and started by a second RPC
    ScopedSpan scopedSpan = new ScopedSpan();
    int instanceNumber;

    public PipelineExecContexts(long beId, TNetworkAddress brpcAddr, boolean twoPhaseExecution,
            int instanceNumber) {
        this.beId = beId;
        this.brpcAddr = brpcAddr;
        this.twoPhaseExecution = twoPhaseExecution;
        this.instanceNumber = instanceNumber;
    }

    public void addContext(PipelineExecContext ctx) {
        this.ctxs.add(ctx);
    }

    public int getInstanceNumber() {
        return instanceNumber;
    }

    /**
     * The BackendExecState in states are all send to the same BE.
     * So only the first BackendExecState need to carry some common fields, such as DescriptorTbl,
     * the other BackendExecState does not need those fields. Unset them to reduce size.
     */
    public void unsetFields() {
        boolean first = true;
        for (PipelineExecContext ctx : ctxs) {
            if (first) {
                first = false;
                continue;
            }
            ctx.unsetFields();
        }
    }

    /** Sends all fragments in this group to the backend in one batched async RPC. */
    public Future<InternalService.PExecPlanFragmentResult> execRemoteFragmentsAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            TPipelineFragmentParamsList paramsList = new TPipelineFragmentParamsList();
            for (PipelineExecContext cts : ctxs) {
                cts.initiated = true;
                paramsList.addToParamsList(cts.rpcParams);
            }
            return proxy.execPlanFragmentsAsync(brpcAddr, paramsList, twoPhaseExecution);
        } catch (RpcException e) {
            // Surface the RPC failure through the normal Future result path.
            return futureWithException(e);
        }
    }

    /** Second phase of two-phase execution: tells the BE to start the fragments. */
    public Future<InternalService.PExecPlanFragmentResult> execPlanFragmentStartAsync(BackendServiceProxy proxy)
            throws TException {
        try {
            PExecPlanFragmentStartRequest.Builder builder = PExecPlanFragmentStartRequest.newBuilder();
            PUniqueId qid = PUniqueId.newBuilder().setHi(queryId.hi).setLo(queryId.lo).build();
            builder.setQueryId(qid);
            return proxy.execPlanFragmentStartAsync(brpcAddr, builder.build());
        } catch (RpcException e) {
            return futureWithException(e);
        }
    }

    // Wraps an RpcException into an already-completed Future whose result carries
    // a THRIFT_RPC_ERROR status, so callers handle all outcomes via Future.get().
    @NotNull
    private Future<PExecPlanFragmentResult> futureWithException(RpcException e) {
        return new Future<PExecPlanFragmentResult>() {
            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                return false;
            }

            @Override
            public boolean isCancelled() {
                return false;
            }

            @Override
            public boolean isDone() {
                return true;
            }

            @Override
            public PExecPlanFragmentResult get() {
                PExecPlanFragmentResult result = PExecPlanFragmentResult.newBuilder().setStatus(
                        Types.PStatus.newBuilder().addErrorMsgs(e.getMessage())
                                .setStatusCode(TStatusCode.THRIFT_RPC_ERROR.getValue()).build()).build();
                return result;
            }

            @Override
            public PExecPlanFragmentResult get(long timeout, TimeUnit unit) {
                // Result is already available; the timeout is irrelevant.
                return get();
            }
        };
    }
}
protected class FragmentExecParams {
public PlanFragment fragment;
public List<TPlanFragmentDestination> destinations = Lists.newArrayList();
public Map<Integer, Integer> perExchNumSenders = Maps.newHashMap();
public List<PlanFragmentId> inputFragments = Lists.newArrayList();
public List<FInstanceExecParam> instanceExecParams = Lists.newArrayList();
public FragmentScanRangeAssignment scanRangeAssignment = new FragmentScanRangeAssignment();
public FragmentExecParams(PlanFragment fragment) {
this.fragment = fragment;
}
List<TExecPlanFragmentParams> toThrift(int backendNum) {
List<TExecPlanFragmentParams> paramsList = Lists.newArrayList();
for (int i = 0; i < instanceExecParams.size(); ++i) {
final FInstanceExecParam instanceExecParam = instanceExecParams.get(i);
TExecPlanFragmentParams params = new TExecPlanFragmentParams();
params.setProtocolVersion(PaloInternalServiceVersion.V1);
params.setFragment(fragment.toThrift());
params.setDescTbl(descTable);
params.setParams(new TPlanFragmentExecParams());
params.setBuildHashTableForBroadcastJoin(instanceExecParam.buildHashTableForBroadcastJoin);
params.params.setQueryId(queryId);
params.params.setFragmentInstanceId(instanceExecParam.instanceId);
Map<Integer, List<TScanRangeParams>> scanRanges = instanceExecParam.perNodeScanRanges;
if (scanRanges == null) {
scanRanges = Maps.newHashMap();
}
params.params.setPerNodeScanRanges(scanRanges);
params.params.setPerExchNumSenders(perExchNumSenders);
params.params.setDestinations(destinations);
params.params.setSenderId(i);
params.params.setNumSenders(instanceExecParams.size());
params.setCoord(coordAddress);
params.setBackendNum(backendNum++);
params.setQueryGlobals(queryGlobals);
params.setQueryOptions(queryOptions);
params.query_options.setEnablePipelineEngine(false);
params.params.setSendQueryStatisticsWithEveryBatch(
fragment.isTransferQueryStatisticsWithEveryBatch());
params.params.setRuntimeFilterParams(new TRuntimeFilterParams());
params.params.runtime_filter_params.setRuntimeFilterMergeAddr(runtimeFilterMergeAddr);
if (instanceExecParam.instanceId.equals(runtimeFilterMergeInstanceId)) {
for (RuntimeFilter rf : assignedRuntimeFilters) {
if (!ridToTargetParam.containsKey(rf.getFilterId())) {
continue;
}
List<FRuntimeFilterTargetParam> fParams = ridToTargetParam.get(rf.getFilterId());
rf.computeUseRemoteRfOpt();
if (rf.getUseRemoteRfOpt()) {
Map<TNetworkAddress, TRuntimeFilterTargetParamsV2> targetParamsV2 = new HashMap<>();
for (FRuntimeFilterTargetParam targetParam : fParams) {
if (targetParamsV2.containsKey(targetParam.targetFragmentInstanceAddr)) {
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
.add(targetParam.targetFragmentInstanceId);
} else {
targetParamsV2.put(targetParam.targetFragmentInstanceAddr,
new TRuntimeFilterTargetParamsV2());
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_addr
= targetParam.targetFragmentInstanceAddr;
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
= new ArrayList<>();
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
.add(targetParam.targetFragmentInstanceId);
}
}
params.params.runtime_filter_params.putToRidToTargetParamv2(rf.getFilterId().asInt(),
new ArrayList<TRuntimeFilterTargetParamsV2>(targetParamsV2.values()));
} else {
List<TRuntimeFilterTargetParams> targetParams = Lists.newArrayList();
for (FRuntimeFilterTargetParam targetParam : fParams) {
targetParams.add(new TRuntimeFilterTargetParams(targetParam.targetFragmentInstanceId,
targetParam.targetFragmentInstanceAddr));
}
params.params.runtime_filter_params.putToRidToTargetParam(rf.getFilterId().asInt(),
targetParams);
}
}
for (Map.Entry<RuntimeFilterId, Integer> entry : ridToBuilderNum.entrySet()) {
params.params.runtime_filter_params.putToRuntimeFilterBuilderNum(
entry.getKey().asInt(), entry.getValue());
}
for (RuntimeFilter rf : assignedRuntimeFilters) {
params.params.runtime_filter_params.putToRidToRuntimeFilter(
rf.getFilterId().asInt(), rf.toThrift());
}
}
params.setFileScanParams(fileScanRangeParamsMap);
paramsList.add(params);
}
return paramsList;
}
Map<TNetworkAddress, TPipelineFragmentParams> toTPipelineParams(int backendNum) {
long memLimit = queryOptions.getMemLimit();
if (colocateFragmentIds.contains(fragment.getFragmentId().asInt())) {
int rate = Math.min(Config.query_colocate_join_memory_limit_penalty_factor, instanceExecParams.size());
memLimit = queryOptions.getMemLimit() / rate;
}
Map<TNetworkAddress, TPipelineFragmentParams> res = new HashMap();
for (int i = 0; i < instanceExecParams.size(); ++i) {
final FInstanceExecParam instanceExecParam = instanceExecParams.get(i);
if (!res.containsKey(instanceExecParam.host)) {
TPipelineFragmentParams params = new TPipelineFragmentParams();
params.setProtocolVersion(PaloInternalServiceVersion.V1);
params.setDescTbl(descTable);
params.setQueryId(queryId);
params.setPerExchNumSenders(perExchNumSenders);
params.setDestinations(destinations);
params.setNumSenders(instanceExecParams.size());
params.setCoord(coordAddress);
params.setQueryGlobals(queryGlobals);
params.setQueryOptions(queryOptions);
params.query_options.setEnablePipelineEngine(true);
params.query_options.setMemLimit(memLimit);
params.setSendQueryStatisticsWithEveryBatch(
fragment.isTransferQueryStatisticsWithEveryBatch());
params.setFragment(fragment.toThrift());
params.setLocalParams(Lists.newArrayList());
if (tWorkloadGroups != null) {
params.setWorkloadGroups(tWorkloadGroups);
}
params.setFileScanParams(fileScanRangeParamsMap);
res.put(instanceExecParam.host, params);
}
TPipelineFragmentParams params = res.get(instanceExecParam.host);
TPipelineInstanceParams localParams = new TPipelineInstanceParams();
localParams.setBuildHashTableForBroadcastJoin(instanceExecParam.buildHashTableForBroadcastJoin);
localParams.setFragmentInstanceId(instanceExecParam.instanceId);
Map<Integer, List<TScanRangeParams>> scanRanges = instanceExecParam.perNodeScanRanges;
Map<Integer, Boolean> perNodeSharedScans = instanceExecParam.perNodeSharedScans;
if (scanRanges == null) {
scanRanges = Maps.newHashMap();
perNodeSharedScans = Maps.newHashMap();
}
localParams.setPerNodeScanRanges(scanRanges);
localParams.setPerNodeSharedScans(perNodeSharedScans);
localParams.setSenderId(i);
localParams.setBackendNum(backendNum++);
localParams.setRuntimeFilterParams(new TRuntimeFilterParams());
localParams.runtime_filter_params.setRuntimeFilterMergeAddr(runtimeFilterMergeAddr);
if (instanceExecParam.instanceId.equals(runtimeFilterMergeInstanceId)) {
for (RuntimeFilter rf : assignedRuntimeFilters) {
if (!ridToTargetParam.containsKey(rf.getFilterId())) {
continue;
}
List<FRuntimeFilterTargetParam> fParams = ridToTargetParam.get(rf.getFilterId());
rf.computeUseRemoteRfOpt();
if (rf.getUseRemoteRfOpt()) {
Map<TNetworkAddress, TRuntimeFilterTargetParamsV2> targetParamsV2 = new HashMap<>();
for (FRuntimeFilterTargetParam targetParam : fParams) {
if (targetParamsV2.containsKey(targetParam.targetFragmentInstanceAddr)) {
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
.add(targetParam.targetFragmentInstanceId);
} else {
targetParamsV2.put(targetParam.targetFragmentInstanceAddr,
new TRuntimeFilterTargetParamsV2());
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_addr
= targetParam.targetFragmentInstanceAddr;
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
= new ArrayList<>();
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
.add(targetParam.targetFragmentInstanceId);
}
}
localParams.runtime_filter_params.putToRidToTargetParamv2(rf.getFilterId().asInt(),
new ArrayList<TRuntimeFilterTargetParamsV2>(targetParamsV2.values()));
} else {
List<TRuntimeFilterTargetParams> targetParams = Lists.newArrayList();
for (FRuntimeFilterTargetParam targetParam : fParams) {
targetParams.add(new TRuntimeFilterTargetParams(targetParam.targetFragmentInstanceId,
targetParam.targetFragmentInstanceAddr));
}
localParams.runtime_filter_params.putToRidToTargetParam(rf.getFilterId().asInt(),
targetParams);
}
}
for (Map.Entry<RuntimeFilterId, Integer> entry : ridToBuilderNum.entrySet()) {
localParams.runtime_filter_params.putToRuntimeFilterBuilderNum(
entry.getKey().asInt(), entry.getValue());
}
for (RuntimeFilter rf : assignedRuntimeFilters) {
localParams.runtime_filter_params.putToRidToRuntimeFilter(
rf.getFilterId().asInt(), rf.toThrift());
}
}
params.getLocalParams().add(localParams);
}
return res;
}
public void appendScanRange(StringBuilder sb, List<TScanRangeParams> params) {
sb.append("range=[");
int idx = 0;
for (TScanRangeParams range : params) {
TPaloScanRange paloScanRange = range.getScanRange().getPaloScanRange();
if (paloScanRange != null) {
if (idx++ != 0) {
sb.append(",");
}
sb.append("{tid=").append(paloScanRange.getTabletId())
.append(",ver=").append(paloScanRange.getVersion()).append("}");
}
TEsScanRange esScanRange = range.getScanRange().getEsScanRange();
if (esScanRange != null) {
sb.append("{ index=").append(esScanRange.getIndex())
.append(", shardid=").append(esScanRange.getShardId())
.append("}");
}
}
sb.append("]");
}
public void appendTo(StringBuilder sb) {
sb.append("{plan=");
fragment.getPlanRoot().appendTrace(sb);
sb.append(",instance=[");
for (int i = 0; i < instanceExecParams.size(); ++i) {
if (i != 0) {
sb.append(",");
}
TNetworkAddress address = instanceExecParams.get(i).host;
Map<Integer, List<TScanRangeParams>> scanRanges =
scanRangeAssignment.get(address);
sb.append("{");
sb.append("id=").append(DebugUtil.printId(instanceExecParams.get(i).instanceId));
sb.append(",host=").append(instanceExecParams.get(i).host);
if (scanRanges == null) {
sb.append("}");
continue;
}
sb.append(",range=[");
int eIdx = 0;
for (Map.Entry<Integer, List<TScanRangeParams>> entry : scanRanges.entrySet()) {
if (eIdx++ != 0) {
sb.append(",");
}
sb.append("id").append(entry.getKey()).append(",");
appendScanRange(sb, entry.getValue());
}
sb.append("]");
sb.append("}");
}
sb.append("]");
sb.append("}");
}
} | class Coordinator {
private static final Logger LOG = LogManager.getLogger(Coordinator.class);
private static final String localIP = FrontendOptions.getLocalHostAddress();
private static final Random instanceRandom = new Random();
// Overall query status; updated as backends report.
Status queryStatus = new Status();
// Backend thrift address -> backend id (used e.g. for blacklisting on RPC failure).
Map<TNetworkAddress, Long> addressToBackendID = Maps.newHashMap();
private ImmutableMap<Long, Backend> idToBackend = ImmutableMap.of();
private final TDescriptorTable descTable;
private Map<Integer, TFileScanRangeParams> fileScanRangeParamsMap = Maps.newHashMap();
// Global query parameters shared by every fragment (time, timezone, tolerance).
private final TQueryGlobals queryGlobals = new TQueryGlobals();
private TQueryOptions queryOptions;
private TNetworkAddress coordAddress;
// Protects coordinator mutable state (exec states, export files, status).
private final Lock lock = new ReentrantLock();
private boolean returnedAllResults;
private final Map<PlanFragmentId, FragmentExecParams> fragmentExecParamsMap = Maps.newHashMap();
private final List<PlanFragment> fragments;
private int instanceTotalNum;
// Per-backend groupings used to batch exec RPCs (row engine / pipeline engine).
private Map<Long, BackendExecStates> beToExecStates = Maps.newHashMap();
private Map<Long, PipelineExecContexts> beToPipelineExecCtxs = Maps.newHashMap();
private final List<BackendExecState> backendExecStates = Lists.newArrayList();
private final Map<Pair<Integer, Long>, PipelineExecContext> pipelineExecContexts = new HashMap<>();
// Subsets whose backend liveness must be monitored while the query runs.
private final List<BackendExecState> needCheckBackendExecStates = Lists.newArrayList();
private final List<PipelineExecContext> needCheckPipelineExecContexts = Lists.newArrayList();
private ResultReceiver receiver;
private final List<ScanNode> scanNodes;
private int scanRangeNum = 0;
// All fragment instance ids of this query; also used for progress registration.
private final Set<TUniqueId> instanceIds = Sets.newHashSet();
private final boolean isBlockQuery;
private int numReceivedRows = 0;
// Load-job bookkeeping filled in from backend reports.
private List<String> deltaUrls;
private Map<String, String> loadCounters;
private String trackingUrl;
private List<String> exportFiles;
private final List<TTabletCommitInfo> commitInfos = Lists.newArrayList();
private final List<TErrorTabletInfo> errorTabletInfos = Lists.newArrayList();
private long jobId = -1;
private TUniqueId queryId;
private final boolean needReport;
// Base id from which per-fragment instance ids are derived (queryId.lo + 1).
private final TUniqueId nextInstanceId;
private long timeoutDeadline;
private boolean enableShareHashTableForBroadcastJoin = false;
private boolean enablePipelineEngine = false;
// Runtime-filter merge endpoint plus routing/builder bookkeeping.
public TNetworkAddress runtimeFilterMergeAddr;
public TUniqueId runtimeFilterMergeInstanceId;
public Map<RuntimeFilterId, List<FRuntimeFilterTargetParam>> ridToTargetParam = Maps.newHashMap();
public List<RuntimeFilter> assignedRuntimeFilters = new ArrayList<>();
public Map<RuntimeFilterId, Integer> ridToBuilderNum = Maps.newHashMap();
// Short-circuit point-query path (single OlapScanNode primary-key lookup).
private boolean isPointQuery = false;
private PointQueryExec pointExec = null;
private StatsErrorEstimator statsErrorEstimator;

public void setTWorkloadGroups(List<TPipelineWorkloadGroup> tWorkloadGroups) {
    this.tWorkloadGroups = tWorkloadGroups;
}

private List<TPipelineWorkloadGroup> tWorkloadGroups = Lists.newArrayList();

private final ExecutionProfile executionProfile;

public ExecutionProfile getExecutionProfile() {
    return executionProfile;
}

private boolean isAllExternalScan = true;
/**
 * Query constructor that additionally wires in a stats error estimator.
 * Delegates all setup to the main query constructor.
 */
public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner,
        StatsErrorEstimator statsErrorEstimator) {
    this(context, analyzer, planner);
    this.statsErrorEstimator = statsErrorEstimator;
}
/**
 * Main constructor for interactive queries: captures the planned fragments and
 * scan nodes, detects the point-query fast path, and initializes query
 * options/globals from the session.
 */
public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner) {
    this.isBlockQuery = planner.isBlockQuery();
    this.queryId = context.queryId();
    this.fragments = planner.getFragments();
    this.scanNodes = planner.getScanNodes();
    // Single OlapScanNode that is a primary-key point lookup -> short-circuit executor.
    if (this.scanNodes.size() == 1 && this.scanNodes.get(0) instanceof OlapScanNode) {
        OlapScanNode olapScanNode = (OlapScanNode) (this.scanNodes.get(0));
        isPointQuery = olapScanNode.isPointQuery();
        if (isPointQuery) {
            PlanFragment fragment = fragments.get(0);
            LOG.debug("execPointGet fragment {}", fragment);
            OlapScanNode planRoot = (OlapScanNode) fragment.getPlanRoot();
            Preconditions.checkNotNull(planRoot);
            pointExec = new PointQueryExec(planRoot.getPointQueryEqualPredicates(),
                    planRoot.getDescTable(), fragment.getOutputExprs());
        }
    }
    // Prepared statements reuse the descriptor table / serialized exprs cached at prepare time.
    PrepareStmt prepareStmt = analyzer == null ? null : analyzer.getPrepareStmt();
    if (prepareStmt != null) {
        this.descTable = prepareStmt.getDescTable();
        if (pointExec != null) {
            pointExec.setCacheID(prepareStmt.getID());
            pointExec.setSerializedDescTable(prepareStmt.getSerializedDescTable());
            pointExec.setSerializedOutputExpr(prepareStmt.getSerializedOutputExprs());
            pointExec.setBinaryProtocol(prepareStmt.isBinaryProtocol());
        }
    } else {
        this.descTable = planner.getDescTable().toThrift();
    }
    this.returnedAllResults = false;
    this.enableShareHashTableForBroadcastJoin = context.getSessionVariable().enableShareHashTableForBroadcastJoin;
    // Pipeline engine is only enabled for plans whose top sink returns results to the client.
    this.enablePipelineEngine = context.getSessionVariable().getEnablePipelineEngine()
            && (fragments.size() > 0 && fragments.get(0).getSink() instanceof ResultSink);
    initQueryOptions(context);
    setFromUserProperty(context);
    this.queryGlobals.setNowString(TimeUtils.DATETIME_FORMAT.format(LocalDateTime.now()));
    this.queryGlobals.setTimestampMs(System.currentTimeMillis());
    this.queryGlobals.setNanoSeconds(LocalDateTime.now().getNano());
    this.queryGlobals.setLoadZeroTolerance(false);
    // "CST" is ambiguous; map it to the default zone instead of passing it through.
    if (context.getSessionVariable().getTimeZone().equals("CST")) {
        this.queryGlobals.setTimeZone(TimeUtils.DEFAULT_TIME_ZONE);
    } else {
        this.queryGlobals.setTimeZone(context.getSessionVariable().getTimeZone());
    }
    this.needReport = context.getSessionVariable().enableProfile();
    // Instance ids are derived from the query id (lo + 1 upwards).
    this.nextInstanceId = new TUniqueId();
    nextInstanceId.setHi(queryId.hi);
    nextInstanceId.setLo(queryId.lo + 1);
    this.assignedRuntimeFilters = planner.getRuntimeFilters();
    this.executionProfile = new ExecutionProfile(queryId, fragments.size());
}
/**
 * Constructor for load/ETL jobs (broker load etc.): no session context, so
 * query options/globals are filled with job-supplied values and defaults.
 * Profile reporting is always enabled for load jobs.
 */
public Coordinator(Long jobId, TUniqueId queryId, DescriptorTable descTable, List<PlanFragment> fragments,
        List<ScanNode> scanNodes, String timezone, boolean loadZeroTolerance) {
    this.isBlockQuery = true;
    this.jobId = jobId;
    this.queryId = queryId;
    this.descTable = descTable.toThrift();
    this.fragments = fragments;
    this.scanNodes = scanNodes;
    this.queryOptions = new TQueryOptions();
    this.queryGlobals.setNowString(TimeUtils.DATETIME_FORMAT.format(LocalDateTime.now()));
    this.queryGlobals.setTimestampMs(System.currentTimeMillis());
    this.queryGlobals.setTimeZone(timezone);
    this.queryGlobals.setLoadZeroTolerance(loadZeroTolerance);
    this.queryOptions.setBeExecVersion(Config.be_exec_version);
    this.needReport = true;
    // Instance ids are derived from the query id (lo + 1 upwards).
    this.nextInstanceId = new TUniqueId();
    nextInstanceId.setHi(queryId.hi);
    nextInstanceId.setLo(queryId.lo + 1);
    this.executionProfile = new ExecutionProfile(queryId, fragments.size());
}
/**
 * Applies per-user resource limits (CPU and exec memory) configured in the
 * auth system to this query's options. Non-positive limits mean "unset" and
 * leave the session defaults in place.
 */
private void setFromUserProperty(ConnectContext connectContext) {
    String qualifiedUser = connectContext.getQualifiedUser();
    // CPU hard limit for this user, if configured.
    int cpuLimit = Env.getCurrentEnv().getAuth().getCpuResourceLimit(qualifiedUser);
    if (cpuLimit > 0) {
        TResourceLimit resourceLimit = new TResourceLimit();
        resourceLimit.setCpuLimit(cpuLimit);
        this.queryOptions.setResourceLimit(resourceLimit);
    }
    // Exec memory limit for this user; mirrored into the reservation/buffer-pool knobs.
    long memLimit = Env.getCurrentEnv().getAuth().getExecMemLimit(qualifiedUser);
    if (memLimit > 0) {
        this.queryOptions.setMemLimit(memLimit);
        this.queryOptions.setMaxReservation(memLimit);
        this.queryOptions.setInitialReservationTotalClaims(memLimit);
        this.queryOptions.setBufferPoolLimit(memLimit);
    }
}
/**
 * Seeds queryOptions from the session variables and overlays coordinator-level
 * settings (BE exec version, timeouts, serial scan flag).
 */
private void initQueryOptions(ConnectContext context) {
    this.queryOptions = context.getSessionVariable().toThrift();
    this.queryOptions.setEnablePipelineEngine(SessionVariable.enablePipelineEngine());
    this.queryOptions.setBeExecVersion(Config.be_exec_version);
    // Both legacy query timeout and execution timeout are driven by the same value.
    this.queryOptions.setQueryTimeout(context.getExecTimeout());
    this.queryOptions.setExecutionTimeout(context.getExecTimeout());
    this.queryOptions.setEnableScanNodeRunSerial(context.getSessionVariable().isEnableScanRunSerial());
}
// ---- Simple accessors and option setters -------------------------------

/** Load job id, or -1 for interactive queries. */
public long getJobId() {
    return jobId;
}

public TUniqueId getQueryId() {
    return queryId;
}

public int getScanRangeNum() {
    return scanRangeNum;
}

public void setQueryId(TUniqueId queryId) {
    this.queryId = queryId;
}

public void setQueryType(TQueryType type) {
    this.queryOptions.setQueryType(type);
}

public void setExecPipEngine(boolean vec) {
    this.queryOptions.setEnablePipelineEngine(vec);
}

/** Overall query status as last updated from backend reports. */
public Status getExecStatus() {
    return queryStatus;
}

public List<String> getDeltaUrls() {
    return deltaUrls;
}

public Map<String, String> getLoadCounters() {
    return loadCounters;
}

public String getTrackingUrl() {
    return trackingUrl;
}

public void setExecMemoryLimit(long execMemoryLimit) {
    this.queryOptions.setMemLimit(execMemoryLimit);
}

public void setLoadMemLimit(long loadMemLimit) {
    this.queryOptions.setLoadMemLimit(loadMemLimit);
}

/** Sets both the legacy query timeout and the execution timeout, in seconds. */
public void setTimeout(int timeout) {
    this.queryOptions.setQueryTimeout(timeout);
    this.queryOptions.setExecutionTimeout(timeout);
}

public void setLoadZeroTolerance(boolean loadZeroTolerance) {
    this.queryGlobals.setLoadZeroTolerance(loadZeroTolerance);
}
/**
 * Resets per-run export state (exec states, status, collected export files)
 * under the coordinator lock so an export job can be retried cleanly.
 */
public void clearExportStatus() {
    lock.lock();
    try {
        this.backendExecStates.clear();
        this.pipelineExecContexts.clear();
        this.queryStatus.setStatus(new Status());
        if (this.exportFiles == null) {
            this.exportFiles = Lists.newArrayList();
        }
        this.exportFiles.clear();
        this.needCheckBackendExecStates.clear();
        this.needCheckPipelineExecContexts.clear();
    } finally {
        lock.unlock();
    }
}
/** Tablet commit infos accumulated from backend reports (load path). */
public List<TTabletCommitInfo> getCommitInfos() {
    return commitInfos;
}

/** Error tablet infos accumulated from backend reports (load path). */
public List<TErrorTabletInfo> getErrorTabletInfos() {
    return errorTabletInfos;
}
/**
 * Maps each backend ("host:brpcPort") to the number of fragment instances it
 * runs for this query, sorted by key. Reads the pipeline-engine groupings when
 * the pipeline engine is on, otherwise the row-engine groupings.
 */
public Map<String, Integer> getBeToInstancesNum() {
    Map<String, Integer> instancesPerBe = Maps.newTreeMap();
    if (enablePipelineEngine) {
        for (PipelineExecContexts ctxGroup : beToPipelineExecCtxs.values()) {
            String beKey = ctxGroup.brpcAddr.hostname + ":" + ctxGroup.brpcAddr.port;
            instancesPerBe.put(beKey, ctxGroup.getInstanceNumber());
        }
    } else {
        for (BackendExecStates stateGroup : beToExecStates.values()) {
            String beKey = stateGroup.brpcAddr.hostname + ":" + stateGroup.brpcAddr.port;
            instancesPerBe.put(beKey, stateGroup.states.size());
        }
    }
    return instancesPerBe;
}
/** Total number of fragment instances scheduled for this query. */
public int getInstanceTotalNum() {
    return instanceTotalNum;
}
/**
 * Builds the per-fragment exec-param map, links each streaming fragment to its
 * destination's input list, records the coordinator address and snapshots the
 * current backend set.
 */
private void prepare() {
    for (PlanFragment fragment : fragments) {
        fragmentExecParamsMap.put(fragment.getFragmentId(), new FragmentExecParams(fragment));
    }
    // Wire up data-stream edges: a fragment with a DataStreamSink is an input
    // of the fragment it streams to.
    for (PlanFragment fragment : fragments) {
        if (!(fragment.getSink() instanceof DataStreamSink)) {
            continue;
        }
        FragmentExecParams params = fragmentExecParamsMap.get(fragment.getDestFragment().getFragmentId());
        params.inputFragments.add(fragment.getFragmentId());
    }
    coordAddress = new TNetworkAddress(localIP, Config.rpc_port);
    // Snapshot the backend set once so scheduling sees a consistent view.
    this.idToBackend = Env.getCurrentSystemInfo().getIdToBackend();
    if (LOG.isDebugEnabled()) {
        LOG.debug("idToBackend size={}", idToBackend.size());
        for (Map.Entry<Long, Backend> entry : idToBackend.entrySet()) {
            Long backendID = entry.getKey();
            Backend backend = entry.getValue();
            LOG.debug("backend: {}-{}-{}", backendID, backend.getHost(), backend.getBePort());
        }
    }
}
// Thin wrappers around the coordinator state lock.
private void lock() {
    lock.lock();
}

private void unlock() {
    lock.unlock();
}
/**
 * Logs (at debug level) a compact trace of every fragment's instance placement
 * and scan-range assignment for this query.
 */
private void traceInstance() {
    if (LOG.isDebugEnabled()) {
        // Build the whole trace line up front so the log entry is atomic.
        StringBuilder sb = new StringBuilder();
        int idx = 0;
        sb.append("query id=").append(DebugUtil.printId(queryId)).append(",");
        sb.append("fragment=[");
        for (Map.Entry<PlanFragmentId, FragmentExecParams> entry : fragmentExecParamsMap.entrySet()) {
            if (idx++ != 0) {
                sb.append(",");
            }
            sb.append(entry.getKey());
            entry.getValue().appendTo(sb);
        }
        sb.append("]");
        LOG.debug(sb.toString());
    }
}
    /**
     * Entry point of query/load scheduling: prepares fragment parameters, computes
     * scan-range assignment and instance placement, registers instances, sets up the
     * result receiver (queries) or load bookkeeping (loads), then dispatches the
     * fragments to the backends.
     */
    public void exec() throws Exception {
        if (LOG.isDebugEnabled() && !scanNodes.isEmpty()) {
            LOG.debug("debug: in Coordinator::exec. query id: {}, planNode: {}",
                    DebugUtil.printId(queryId), scanNodes.get(0).treeToThrift());
        }
        if (LOG.isDebugEnabled() && !fragments.isEmpty()) {
            LOG.debug("debug: in Coordinator::exec. query id: {}, fragment: {}",
                    DebugUtil.printId(queryId), fragments.get(0).toThrift());
        }
        // Build scheduling state, assign scan ranges, then place instances on hosts.
        prepare();
        computeScanRangeAssignment();
        computeFragmentExecParams();
        traceInstance();
        QeProcessorImpl.INSTANCE.registerInstances(queryId, instanceIds.size());
        // The first fragment in the list is the topmost one; its sink decides
        // whether this is a result-returning query or a load.
        PlanFragmentId topId = fragments.get(0).getFragmentId();
        FragmentExecParams topParams = fragmentExecParamsMap.get(topId);
        DataSink topDataSink = topParams.fragment.getSink();
        this.timeoutDeadline = System.currentTimeMillis() + queryOptions.getExecutionTimeout() * 1000L;
        if (topDataSink instanceof ResultSink || topDataSink instanceof ResultFileSink) {
            // Query path: results flow back through the first instance of the top fragment.
            TNetworkAddress execBeAddr = topParams.instanceExecParams.get(0).host;
            receiver = new ResultReceiver(queryId, topParams.instanceExecParams.get(0).instanceId,
                    addressToBackendID.get(execBeAddr), toBrpcHost(execBeAddr), this.timeoutDeadline);
            if (LOG.isDebugEnabled()) {
                LOG.debug("dispatch query job: {} to {}", DebugUtil.printId(queryId),
                        topParams.instanceExecParams.get(0).host);
            }
            // Outfile via broker needs the broker address resolved on the sink.
            if (topDataSink instanceof ResultFileSink
                    && ((ResultFileSink) topDataSink).getStorageType() == StorageBackend.StorageType.BROKER) {
                ResultFileSink topResultFileSink = (ResultFileSink) topDataSink;
                FsBroker broker = Env.getCurrentEnv().getBrokerMgr()
                        .getBroker(topResultFileSink.getBrokerName(), execBeAddr.getHostname());
                topResultFileSink.setBrokerAddr(broker.host, broker.port);
            }
        } else {
            // Load path: enable progress reporting and initialize load job state.
            this.queryOptions.setIsReportSuccess(true);
            deltaUrls = Lists.newArrayList();
            loadCounters = Maps.newHashMap();
            List<Long> relatedBackendIds = Lists.newArrayList(addressToBackendID.values());
            Env.getCurrentEnv().getLoadManager().initJobProgress(jobId, queryId, instanceIds,
                    relatedBackendIds);
            Env.getCurrentEnv().getProgressManager().addTotalScanNums(String.valueOf(jobId), scanRangeNum);
            LOG.info("dispatch load job: {} to {}", DebugUtil.printId(queryId), addressToBackendID.keySet());
        }
        executionProfile.markInstances(instanceIds);
        if (!isPointQuery) {
            if (enablePipelineEngine) {
                sendPipelineCtx();
            } else {
                sendFragment();
            }
        } else {
            // Point query short-circuit: no fragments are sent; the single tablet's
            // candidate backends are handed to the point executor instead.
            OlapScanNode planRoot = (OlapScanNode) fragments.get(0).getPlanRoot();
            Preconditions.checkState(planRoot.getScanTabletIds().size() == 1);
            pointExec.setCandidateBackends(planRoot.getScanBackendIds());
            pointExec.setTabletId(planRoot.getScanTabletIds().get(0));
        }
    }
/**
* The logic for sending query plan fragments is as follows:
* First, plan fragments are dependent. According to the order in "fragments" list,
* it must be ensured that on the BE side, the next fragment instance can be executed
* only after the previous fragment instance is ready,
* <p>
* In the previous logic, we will send fragment instances in sequence through RPC,
* and will wait for the RPC of the previous fragment instance to return successfully
* before sending the next one. But for some complex queries, this may lead to too many RPCs.
* <p>
* The optimized logic is as follows:
* 1. If the number of fragment instance is <= 2, the original logic is still used
* to complete the sending of fragments through at most 2 RPCs.
* 2. If the number of fragment instance is >= 3, first group all fragments by BE,
* and send all fragment instances to the corresponding BE node through the FIRST rpc,
* but these fragment instances will only perform the preparation phase but will not be actually executed.
* After that, the execution logic of all fragment instances is started through the SECOND RPC.
* <p>
* After optimization, a query on a BE node will only send two RPCs at most.
* Thereby reducing the "send fragment timeout" error caused by too many RPCs and BE unable to process in time.
*
* @throws TException
* @throws RpcException
* @throws UserException
*/
    private void sendFragment() throws TException, RpcException, UserException {
        lock();
        try {
            // Count how many instances land on each host; sent to BEs via setFragmentNumOnHost.
            Multiset<TNetworkAddress> hostCounter = HashMultiset.create();
            for (FragmentExecParams params : fragmentExecParamsMap.values()) {
                for (FInstanceExecParam fi : params.instanceExecParams) {
                    hostCounter.add(fi.host);
                }
            }
            int backendIdx = 0;
            int profileFragmentId = 0;
            long memoryLimit = queryOptions.getMemLimit();
            beToExecStates.clear();
            // With two or more fragments, use the two-phase (prepare, then start) protocol
            // described in the method javadoc above.
            boolean twoPhaseExecution = fragments.size() >= 2;
            for (PlanFragment fragment : fragments) {
                FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
                int instanceNum = params.instanceExecParams.size();
                Preconditions.checkState(instanceNum > 0);
                instanceTotalNum += instanceNum;
                List<TExecPlanFragmentParams> tParams = params.toThrift(backendIdx);
                // Colocate fragments run multiple instances on one BE: divide the memory
                // limit between them (capped by the configured penalty factor).
                if (colocateFragmentIds.contains(fragment.getFragmentId().asInt())) {
                    int rate = Math.min(Config.query_colocate_join_memory_limit_penalty_factor, instanceNum);
                    long newMemory = memoryLimit / rate;
                    for (TExecPlanFragmentParams tParam : tParams) {
                        tParam.query_options.setMemLimit(newMemory);
                    }
                }
                boolean needCheckBackendState = false;
                if (queryOptions.getQueryType() == TQueryType.LOAD && profileFragmentId == 0) {
                    // For loads, backends of the first fragment are tracked for health checks.
                    needCheckBackendState = true;
                }
                int instanceId = 0;
                for (TExecPlanFragmentParams tParam : tParams) {
                    BackendExecState execState =
                            new BackendExecState(fragment.getFragmentId(), instanceId++,
                                    profileFragmentId, tParam, this.addressToBackendID,
                                    executionProfile.getLoadChannelProfile());
                    tParam.setFragmentNumOnHost(hostCounter.count(execState.address));
                    tParam.setBackendId(execState.backend.getId());
                    tParam.setNeedWaitExecutionTrigger(twoPhaseExecution);
                    backendExecStates.add(execState);
                    if (needCheckBackendState) {
                        needCheckBackendExecStates.add(execState);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("add need check backend {} for fragment, {} job: {}",
                                    execState.backend.getId(), fragment.getFragmentId().asInt(), jobId);
                        }
                    }
                    // Group exec states by backend id so each BE receives one batched RPC per phase.
                    BackendExecStates states = beToExecStates.get(execState.backend.getId());
                    if (states == null) {
                        states = new BackendExecStates(execState.backend.getId(), execState.brpcAddress,
                                twoPhaseExecution);
                        beToExecStates.putIfAbsent(execState.backend.getId(), states);
                    }
                    states.addState(execState);
                    ++backendIdx;
                }
                profileFragmentId += 1;
            }
            // Phase one: send all fragments (prepare-only when twoPhaseExecution) per backend.
            List<Triple<BackendExecStates, BackendServiceProxy, Future<InternalService.PExecPlanFragmentResult>>>
                    futures = Lists.newArrayList();
            Context parentSpanContext = Context.current();
            for (BackendExecStates states : beToExecStates.values()) {
                Span span = Telemetry.getNoopSpan();
                if (ConnectContext.get() != null) {
                    span = ConnectContext.get().getTracer().spanBuilder("execRemoteFragmentsAsync")
                            .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                }
                states.scopedSpan = new ScopedSpan(span);
                states.unsetFields();
                BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                futures.add(ImmutableTriple.of(states, proxy, states.execRemoteFragmentsAsync(proxy)));
            }
            waitRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send fragments");
            if (twoPhaseExecution) {
                // Phase two: everything is prepared on the BEs; trigger actual execution.
                futures.clear();
                for (BackendExecStates states : beToExecStates.values()) {
                    Span span = Telemetry.getNoopSpan();
                    if (ConnectContext.get() != null) {
                        span = ConnectContext.get().getTracer().spanBuilder("execPlanFragmentStartAsync")
                                .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                    }
                    states.scopedSpan = new ScopedSpan(span);
                    BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                    futures.add(ImmutableTriple.of(states, proxy, states.execPlanFragmentStartAsync(proxy)));
                }
                waitRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send execution start");
            }
            attachInstanceProfileToFragmentProfile();
        } finally {
            unlock();
        }
    }
    /**
     * Pipeline-engine counterpart of sendFragment(): groups pipeline fragment params
     * by backend, dispatches them with at most two RPCs per backend (prepare, then
     * start), and attaches instance profiles afterwards.
     */
    private void sendPipelineCtx() throws TException, RpcException, UserException {
        lock();
        try {
            // Count how many instances land on each host; sent to BEs via setFragmentNumOnHost.
            Multiset<TNetworkAddress> hostCounter = HashMultiset.create();
            for (FragmentExecParams params : fragmentExecParamsMap.values()) {
                for (FInstanceExecParam fi : params.instanceExecParams) {
                    hostCounter.add(fi.host);
                }
            }
            int backendIdx = 0;
            int profileFragmentId = 0;
            beToPipelineExecCtxs.clear();
            // With two or more fragments, use the two-phase (prepare, then start) protocol.
            boolean twoPhaseExecution = fragments.size() >= 2;
            for (PlanFragment fragment : fragments) {
                FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
                int instanceNum = params.instanceExecParams.size();
                Preconditions.checkState(instanceNum > 0);
                Map<TNetworkAddress, TPipelineFragmentParams> tParams = params.toTPipelineParams(backendIdx);
                boolean needCheckBackendState = false;
                if (queryOptions.getQueryType() == TQueryType.LOAD && profileFragmentId == 0) {
                    // For loads, backends of the first fragment are tracked for health checks.
                    needCheckBackendState = true;
                }
                // Pre-create one runtime profile per instance, keyed by instance id.
                Map<TUniqueId, RuntimeProfile> fragmentInstancesMap = new HashMap<TUniqueId, RuntimeProfile>();
                for (Map.Entry<TNetworkAddress, TPipelineFragmentParams> entry : tParams.entrySet()) {
                    for (TPipelineInstanceParams instanceParam : entry.getValue().local_params) {
                        String name = "Instance " + DebugUtil.printId(instanceParam.fragment_instance_id)
                                + " (host=" + entry.getKey() + ")";
                        fragmentInstancesMap.put(instanceParam.fragment_instance_id, new RuntimeProfile(name));
                    }
                }
                for (Map.Entry<TNetworkAddress, TPipelineFragmentParams> entry : tParams.entrySet()) {
                    Long backendId = this.addressToBackendID.get(entry.getKey());
                    PipelineExecContext pipelineExecContext = new PipelineExecContext(fragment.getFragmentId(),
                            profileFragmentId, entry.getValue(), backendId, fragmentInstancesMap,
                            executionProfile.getLoadChannelProfile());
                    entry.getValue().setFragmentNumOnHost(hostCounter.count(pipelineExecContext.address));
                    entry.getValue().setBackendId(pipelineExecContext.backend.getId());
                    entry.getValue().setNeedWaitExecutionTrigger(twoPhaseExecution);
                    entry.getValue().setFragmentId(fragment.getFragmentId().asInt());
                    pipelineExecContexts.put(Pair.of(fragment.getFragmentId().asInt(), backendId), pipelineExecContext);
                    if (needCheckBackendState) {
                        needCheckPipelineExecContexts.add(pipelineExecContext);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("add need check backend {} for fragment, {} job: {}",
                                    pipelineExecContext.backend.getId(), fragment.getFragmentId().asInt(), jobId);
                        }
                    }
                    // Group contexts by backend id so each BE receives one batched RPC per phase.
                    PipelineExecContexts ctxs = beToPipelineExecCtxs.get(pipelineExecContext.backend.getId());
                    if (ctxs == null) {
                        ctxs = new PipelineExecContexts(pipelineExecContext.backend.getId(),
                                pipelineExecContext.brpcAddress, twoPhaseExecution,
                                entry.getValue().getFragmentNumOnHost());
                        beToPipelineExecCtxs.putIfAbsent(pipelineExecContext.backend.getId(), ctxs);
                    }
                    ctxs.addContext(pipelineExecContext);
                    ++backendIdx;
                }
                profileFragmentId += 1;
            }
            // Phase one: send all fragments (prepare-only when twoPhaseExecution) per backend.
            List<Triple<PipelineExecContexts, BackendServiceProxy, Future<InternalService.PExecPlanFragmentResult>>>
                    futures = Lists.newArrayList();
            Context parentSpanContext = Context.current();
            for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
                Span span = Telemetry.getNoopSpan();
                if (ConnectContext.get() != null) {
                    span = ConnectContext.get().getTracer().spanBuilder("execRemoteFragmentsAsync")
                            .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                }
                ctxs.scopedSpan = new ScopedSpan(span);
                ctxs.unsetFields();
                BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                futures.add(ImmutableTriple.of(ctxs, proxy, ctxs.execRemoteFragmentsAsync(proxy)));
            }
            waitPipelineRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send fragments");
            if (twoPhaseExecution) {
                // Phase two: everything is prepared on the BEs; trigger actual execution.
                futures.clear();
                for (PipelineExecContexts ctxs : beToPipelineExecCtxs.values()) {
                    Span span = Telemetry.getNoopSpan();
                    if (ConnectContext.get() != null) {
                        span = ConnectContext.get().getTracer().spanBuilder("execPlanFragmentStartAsync")
                                .setParent(parentSpanContext).setSpanKind(SpanKind.CLIENT).startSpan();
                    }
                    ctxs.scopedSpan = new ScopedSpan(span);
                    BackendServiceProxy proxy = BackendServiceProxy.getInstance();
                    futures.add(ImmutableTriple.of(ctxs, proxy, ctxs.execPlanFragmentStartAsync(proxy)));
                }
                waitPipelineRpc(futures, this.timeoutDeadline - System.currentTimeMillis(), "send execution start");
            }
            attachInstanceProfileToFragmentProfile();
        } finally {
            unlock();
        }
    }
    /**
     * Waits for each batched exec-fragment RPC to complete within the remaining
     * time budget. Any non-OK result cancels the query and throws; RPC-level
     * failures bump the per-host failure metric, and thrift RPC errors also
     * blacklist the backend.
     *
     * @param leftTimeMs time left until the query deadline, in milliseconds
     * @param operation  phase name used in error messages (e.g. "send fragments")
     */
    private void waitRpc(List<Triple<BackendExecStates, BackendServiceProxy, Future<PExecPlanFragmentResult>>> futures,
            long leftTimeMs,
            String operation) throws RpcException, UserException {
        if (leftTimeMs <= 0) {
            throw new UserException("timeout before waiting for " + operation + " RPC. Elapse(sec): " + (
                    (System.currentTimeMillis() - timeoutDeadline) / 1000 + queryOptions.getExecutionTimeout()));
        }
        // A single wait is additionally capped by the configured per-RPC timeout.
        long timeoutMs = Math.min(leftTimeMs, Config.remote_fragment_exec_timeout_ms);
        for (Triple<BackendExecStates, BackendServiceProxy, Future<PExecPlanFragmentResult>> triple : futures) {
            TStatusCode code;
            String errMsg = null;
            Exception exception = null;
            Span span = triple.getLeft().scopedSpan.getSpan();
            try {
                PExecPlanFragmentResult result = triple.getRight().get(timeoutMs, TimeUnit.MILLISECONDS);
                code = TStatusCode.findByValue(result.getStatus().getStatusCode());
                if (code != TStatusCode.OK) {
                    if (!result.getStatus().getErrorMsgsList().isEmpty()) {
                        errMsg = result.getStatus().getErrorMsgsList().get(0);
                    } else {
                        errMsg = operation + " failed. backend id: " + triple.getLeft().beId;
                    }
                }
            } catch (ExecutionException e) {
                exception = e;
                code = TStatusCode.THRIFT_RPC_ERROR;
                // Drop the cached proxy so a later call re-establishes the connection.
                triple.getMiddle().removeProxy(triple.getLeft().brpcAddr);
            } catch (InterruptedException e) {
                exception = e;
                code = TStatusCode.INTERNAL_ERROR;
            } catch (TimeoutException e) {
                exception = e;
                errMsg = "timeout when waiting for " + operation + " RPC. Wait(sec): " + timeoutMs / 1000;
                code = TStatusCode.TIMEOUT;
            }
            try {
                if (code != TStatusCode.OK) {
                    if (exception != null && errMsg == null) {
                        errMsg = operation + " failed. " + exception.getMessage();
                    }
                    queryStatus.setStatus(errMsg);
                    cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
                    switch (code) {
                        case TIMEOUT:
                            MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                    .increase(1L);
                            throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                        case THRIFT_RPC_ERROR:
                            MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
                                    .increase(1L);
                            SimpleScheduler.addToBlacklist(triple.getLeft().beId, errMsg);
                            throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
                        default:
                            throw new UserException(errMsg, exception);
                    }
                }
            } catch (Exception e) {
                // Record the failure on the tracing span before rethrowing.
                span.recordException(e);
                throw e;
            } finally {
                triple.getLeft().scopedSpan.endSpan();
            }
        }
    }
private void waitPipelineRpc(List<Triple<PipelineExecContexts, BackendServiceProxy,
Future<PExecPlanFragmentResult>>> futures, long leftTimeMs,
String operation) throws RpcException, UserException {
if (leftTimeMs <= 0) {
throw new UserException("timeout before waiting for " + operation + " RPC. Elapse(sec): " + (
(System.currentTimeMillis() - timeoutDeadline) / 1000 + queryOptions.query_timeout));
}
long timeoutMs = Math.min(leftTimeMs, Config.remote_fragment_exec_timeout_ms);
for (Triple<PipelineExecContexts, BackendServiceProxy, Future<PExecPlanFragmentResult>> triple : futures) {
TStatusCode code;
String errMsg = null;
Exception exception = null;
Span span = triple.getLeft().scopedSpan.getSpan();
try {
PExecPlanFragmentResult result = triple.getRight().get(timeoutMs, TimeUnit.MILLISECONDS);
code = TStatusCode.findByValue(result.getStatus().getStatusCode());
if (code != TStatusCode.OK) {
if (!result.getStatus().getErrorMsgsList().isEmpty()) {
errMsg = result.getStatus().getErrorMsgsList().get(0);
} else {
errMsg = operation + " failed. backend id: " + triple.getLeft().beId;
}
}
} catch (ExecutionException e) {
exception = e;
code = TStatusCode.THRIFT_RPC_ERROR;
triple.getMiddle().removeProxy(triple.getLeft().brpcAddr);
} catch (InterruptedException e) {
exception = e;
code = TStatusCode.INTERNAL_ERROR;
} catch (TimeoutException e) {
exception = e;
errMsg = "timeout when waiting for " + operation + " RPC. Wait(sec): " + timeoutMs / 1000;
code = TStatusCode.TIMEOUT;
}
try {
if (code != TStatusCode.OK) {
if (exception != null && errMsg == null) {
errMsg = operation + " failed. " + exception.getMessage();
}
queryStatus.setStatus(errMsg);
cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
switch (code) {
case TIMEOUT:
MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
.increase(1L);
throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
case THRIFT_RPC_ERROR:
MetricRepo.BE_COUNTER_QUERY_RPC_FAILED.getOrAdd(triple.getLeft().brpcAddr.hostname)
.increase(1L);
SimpleScheduler.addToBlacklist(triple.getLeft().beId, errMsg);
throw new RpcException(triple.getLeft().brpcAddr.hostname, errMsg, exception);
default:
throw new UserException(errMsg, exception);
}
}
} catch (Exception e) {
span.recordException(e);
throw e;
} finally {
triple.getLeft().scopedSpan.endSpan();
}
}
}
    /** Returns the files produced by an export/outfile job, or null if none were reported yet. */
    public List<String> getExportFiles() {
        return exportFiles;
    }
void updateExportFiles(List<String> files) {
lock.lock();
try {
if (exportFiles == null) {
exportFiles = Lists.newArrayList();
}
exportFiles.addAll(files);
} finally {
lock.unlock();
}
}
    /** Thread-safely appends delta URLs reported by backends during a load. */
    void updateDeltas(List<String> urls) {
        lock.lock();
        try {
            deltaUrls.addAll(urls);
        } finally {
            lock.unlock();
        }
    }
private void updateLoadCounters(Map<String, String> newLoadCounters) {
lock.lock();
try {
long numRowsNormal = 0L;
String value = this.loadCounters.get(LoadEtlTask.DPP_NORMAL_ALL);
if (value != null) {
numRowsNormal = Long.parseLong(value);
}
long numRowsAbnormal = 0L;
value = this.loadCounters.get(LoadEtlTask.DPP_ABNORMAL_ALL);
if (value != null) {
numRowsAbnormal = Long.parseLong(value);
}
long numRowsUnselected = 0L;
value = this.loadCounters.get(LoadJob.UNSELECTED_ROWS);
if (value != null) {
numRowsUnselected = Long.parseLong(value);
}
value = newLoadCounters.get(LoadEtlTask.DPP_NORMAL_ALL);
if (value != null) {
numRowsNormal += Long.parseLong(value);
}
value = newLoadCounters.get(LoadEtlTask.DPP_ABNORMAL_ALL);
if (value != null) {
numRowsAbnormal += Long.parseLong(value);
}
value = newLoadCounters.get(LoadJob.UNSELECTED_ROWS);
if (value != null) {
numRowsUnselected += Long.parseLong(value);
}
this.loadCounters.put(LoadEtlTask.DPP_NORMAL_ALL, "" + numRowsNormal);
this.loadCounters.put(LoadEtlTask.DPP_ABNORMAL_ALL, "" + numRowsAbnormal);
this.loadCounters.put(LoadJob.UNSELECTED_ROWS, "" + numRowsUnselected);
} finally {
lock.unlock();
}
}
    /** Thread-safely appends tablet commit infos reported by backends for the transaction. */
    private void updateCommitInfos(List<TTabletCommitInfo> commitInfos) {
        lock.lock();
        try {
            this.commitInfos.addAll(commitInfos);
        } finally {
            lock.unlock();
        }
    }
    /**
     * Thread-safely appends reported error tablets, capping the total at
     * Config.max_error_tablet_of_broker_load to bound memory usage.
     */
    private void updateErrorTabletInfos(List<TErrorTabletInfo> errorTabletInfos) {
        lock.lock();
        try {
            if (this.errorTabletInfos.size() <= Config.max_error_tablet_of_broker_load) {
                // Only take as many new entries as fit under the cap.
                this.errorTabletInfos.addAll(errorTabletInfos.stream().limit(Config.max_error_tablet_of_broker_load
                        - this.errorTabletInfos.size()).collect(Collectors.toList()));
            }
        } finally {
            lock.unlock();
        }
    }
    /**
     * Records the first non-OK status reported by an instance and triggers
     * cancellation; later errors are ignored since queryStatus keeps only the
     * first failure.
     */
    private void updateStatus(Status status, TUniqueId instanceId) {
        lock.lock();
        try {
            // All results were already returned; cancelled reports from lingering
            // instances are expected and ignored.
            if (returnedAllResults && status.isCancelled()) {
                return;
            }
            // Nothing to record for a successful report.
            if (status.ok()) {
                return;
            }
            // A failure is already recorded; keep the first one.
            if (!queryStatus.ok()) {
                return;
            }
            queryStatus.setStatus(status);
            LOG.warn("one instance report fail throw updateStatus(), need cancel. job id: {},"
                            + " query id: {}, instance id: {}, error message: {}",
                    jobId, DebugUtil.printId(queryId), instanceId != null ? DebugUtil.printId(instanceId) : "NaN",
                    status.getErrorMsg());
            if (status.getErrorCode() == TStatusCode.TIMEOUT) {
                cancelInternal(Types.PPlanFragmentCancelReason.TIMEOUT);
            } else {
                cancelInternal(Types.PPlanFragmentCancelReason.INTERNAL_ERROR);
            }
        } finally {
            lock.unlock();
        }
    }
    /**
     * Fetches the next row batch from the result receiver (or the point-query
     * executor), propagating any failure recorded in queryStatus. On EOS, may
     * proactively cancel remaining instances once a row limit is satisfied.
     */
    public RowBatch getNext() throws Exception {
        if (receiver == null) {
            throw new UserException("There is no receiver.");
        }
        RowBatch resultBatch;
        Status status = new Status();
        if (!isPointQuery) {
            resultBatch = receiver.getNext(status);
        } else {
            resultBatch = pointExec.getNext(status);
        }
        if (!status.ok()) {
            LOG.warn("get next fail, need cancel. query id: {}", DebugUtil.printId(queryId));
        }
        updateStatus(status, null /* no instance id */);
        // Read queryStatus under the lock and work on a copy afterwards.
        Status copyStatus = null;
        lock();
        try {
            copyStatus = new Status(queryStatus);
        } finally {
            unlock();
        }
        if (!copyStatus.ok()) {
            if (Strings.isNullOrEmpty(copyStatus.getErrorMsg())) {
                copyStatus.rewriteErrorMsg();
            }
            if (copyStatus.isRpcError()) {
                throw new RpcException(null, copyStatus.getErrorMsg());
            } else {
                String errMsg = copyStatus.getErrorMsg();
                LOG.warn("query failed: {}", errMsg);
                // Truncate the message from "host" onward — presumably to avoid
                // exposing internal host details to the client; TODO confirm.
                int hostIndex = errMsg.indexOf("host");
                if (hostIndex != -1) {
                    errMsg = errMsg.substring(0, hostIndex);
                }
                throw new UserException(errMsg);
            }
        }
        if (resultBatch.isEos()) {
            this.returnedAllResults = true;
            // Once the limit is reached, cancel the remaining instances early —
            // but never for block queries or single-instance plans.
            Long numLimitRows = fragments.get(0).getPlanRoot().getLimit();
            boolean hasLimit = numLimitRows > 0;
            if (!isBlockQuery && instanceIds.size() > 1 && hasLimit && numReceivedRows >= numLimitRows) {
                LOG.debug("no block query, return num >= limit rows, need cancel");
                cancelInternal(Types.PPlanFragmentCancelReason.LIMIT_REACH);
            }
            // Dry-run queries report the row count from query statistics instead.
            if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().dryRunQuery) {
                numReceivedRows = 0;
                numReceivedRows += resultBatch.getQueryStatistics().getReturnedRows();
            }
        } else if (resultBatch.getBatch() != null) {
            numReceivedRows += resultBatch.getBatch().getRowsSize();
        }
        return resultBatch;
    }
    /** Cancels the query with the default USER_CANCEL reason. */
    public void cancel() {
        cancel(Types.PPlanFragmentCancelReason.USER_CANCEL);
    }
public void cancel(Types.PPlanFragmentCancelReason cancelReason) {
lock();
try {
if (!queryStatus.ok()) {
return;
} else {
queryStatus.setStatus(Status.CANCELLED);
}
LOG.warn("cancel execution of query, this is outside invoke");
cancelInternal(cancelReason);
} finally {
unlock();
}
}
    /**
     * Cancels the result receiver (if any) and either the point-query executor or
     * all remote fragment instances. Callers are expected to hold the lock.
     */
    private void cancelInternal(Types.PPlanFragmentCancelReason cancelReason) {
        if (null != receiver) {
            receiver.cancel();
        }
        if (null != pointExec) {
            // Point queries dispatch no remote fragments; nothing more to cancel.
            pointExec.cancel();
            return;
        }
        cancelRemoteFragmentsAsync(cancelReason);
        executionProfile.onCancel();
    }
private void cancelRemoteFragmentsAsync(Types.PPlanFragmentCancelReason cancelReason) {
if (enablePipelineEngine) {
for (PipelineExecContext ctx : pipelineExecContexts.values()) {
ctx.cancelFragmentInstance(cancelReason);
}
} else {
for (BackendExecState backendExecState : backendExecStates) {
backendExecState.cancelFragmentInstance(cancelReason);
}
}
}
    /**
     * Computes per-fragment execution parameters: places instances on hosts,
     * assigns each instance a unique id derived from the query id, wires runtime
     * filter addresses, and fills in the sender counts and data-stream
     * destinations between producer and consumer fragments.
     */
    private void computeFragmentExecParams() throws Exception {
        // Decide which hosts each fragment's instances run on.
        computeFragmentHosts();
        // Assign unique instance ids: queryId.lo + running counter + 1.
        instanceIds.clear();
        for (FragmentExecParams params : fragmentExecParamsMap.values()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("fragment {} has instances {}",
                        params.fragment.getFragmentId(), params.instanceExecParams.size());
            }
            for (int j = 0; j < params.instanceExecParams.size(); ++j) {
                TUniqueId instanceId = new TUniqueId();
                instanceId.setHi(queryId.hi);
                instanceId.setLo(queryId.lo + instanceIds.size() + 1);
                params.instanceExecParams.get(j).instanceId = instanceId;
                instanceIds.add(instanceId);
            }
        }
        computeMultiCastFragmentParams();
        assignRuntimeFilterAddr();
        // Wire each producer fragment's sink to the instances of its destination fragment.
        for (FragmentExecParams params : fragmentExecParamsMap.values()) {
            if (params.fragment instanceof MultiCastPlanFragment) {
                continue;
            }
            PlanFragment destFragment = params.fragment.getDestFragment();
            if (destFragment == null) {
                // root plan fragment
                continue;
            }
            FragmentExecParams destParams = fragmentExecParamsMap.get(destFragment.getFragmentId());
            DataSink sink = params.fragment.getSink();
            PlanNodeId exchId = sink.getExchNodeId();
            PlanNode exchNode = PlanNode.findPlanNodeFromPlanNodeId(destFragment.getPlanRoot(), exchId);
            Preconditions.checkState(exchNode != null, "exchNode is null");
            Preconditions.checkState(exchNode instanceof ExchangeNode,
                    "exchNode is not ExchangeNode" + exchNode.getId().toString());
            // Accumulate the number of senders feeding each exchange node.
            if (destParams.perExchNumSenders.get(exchId.asInt()) == null) {
                destParams.perExchNumSenders.put(exchId.asInt(), params.instanceExecParams.size());
            } else {
                destParams.perExchNumSenders.put(exchId.asInt(),
                        params.instanceExecParams.size() + destParams.perExchNumSenders.get(exchId.asInt()));
            }
            if (sink.getOutputPartition() != null
                    && sink.getOutputPartition().isBucketShuffleHashPartition()) {
                // Bucket shuffle join: one destination per bucket, in bucket-seq order.
                // Buckets with no owning instance get a dummy 0.0.0.0 destination.
                Preconditions.checkState(bucketShuffleJoinController
                        .isBucketShuffleJoin(destFragment.getFragmentId().asInt()), "Sink is"
                        + "Bucket Shuffle Partition, The destFragment must have bucket shuffle join node ");
                int bucketSeq = 0;
                int bucketNum = bucketShuffleJoinController.getFragmentBucketNum(destFragment.getFragmentId());
                // Single-instance destination with no bucket assignment: treat as one bucket.
                if (destParams.instanceExecParams.size() == 1 && (bucketNum == 0
                        || destParams.instanceExecParams.get(0).bucketSeqSet.isEmpty())) {
                    bucketNum = 1;
                    destParams.instanceExecParams.get(0).bucketSeqSet.add(0);
                }
                TNetworkAddress dummyServer = new TNetworkAddress("0.0.0.0", 0);
                while (bucketSeq < bucketNum) {
                    TPlanFragmentDestination dest = new TPlanFragmentDestination();
                    dest.fragment_instance_id = new TUniqueId(-1, -1);
                    dest.server = dummyServer;
                    dest.setBrpcServer(dummyServer);
                    for (FInstanceExecParam instanceExecParams : destParams.instanceExecParams) {
                        if (instanceExecParams.bucketSeqSet.contains(bucketSeq)) {
                            dest.fragment_instance_id = instanceExecParams.instanceId;
                            dest.server = toRpcHost(instanceExecParams.host);
                            dest.setBrpcServer(toBrpcHost(instanceExecParams.host));
                            break;
                        }
                    }
                    bucketSeq++;
                    params.destinations.add(dest);
                }
            } else {
                if (enablePipelineEngine && enableShareHashTableForBroadcastJoin
                        && ((ExchangeNode) exchNode).isRightChildOfBroadcastHashJoin()) {
                    // Broadcast join with shared hash table: send to only one instance
                    // per host; the chosen instance builds the hash table and the other
                    // instances on that host share it.
                    Map<TNetworkAddress, FInstanceExecParam> destHosts = new HashMap<>();
                    destParams.instanceExecParams.forEach(param -> {
                        if (destHosts.containsKey(param.host)) {
                            destHosts.get(param.host).instancesSharingHashTable.add(param.instanceId);
                        } else {
                            destHosts.put(param.host, param);
                            param.buildHashTableForBroadcastJoin = true;
                            TPlanFragmentDestination dest = new TPlanFragmentDestination();
                            dest.fragment_instance_id = param.instanceId;
                            try {
                                dest.server = toRpcHost(param.host);
                                dest.setBrpcServer(toBrpcHost(param.host));
                            } catch (Exception e) {
                                throw new RuntimeException(e);
                            }
                            params.destinations.add(dest);
                        }
                    });
                } else {
                    // Default: one destination per destination instance.
                    for (int j = 0; j < destParams.instanceExecParams.size(); ++j) {
                        TPlanFragmentDestination dest = new TPlanFragmentDestination();
                        dest.fragment_instance_id = destParams.instanceExecParams.get(j).instanceId;
                        dest.server = toRpcHost(destParams.instanceExecParams.get(j).host);
                        dest.setBrpcServer(toBrpcHost(destParams.instanceExecParams.get(j).host));
                        params.destinations.add(dest);
                    }
                }
            }
        }
    }
private TNetworkAddress toRpcHost(TNetworkAddress host) throws Exception {
Backend backend = Env.getCurrentSystemInfo().getBackendWithBePort(
host.getHostname(), host.getPort());
if (backend == null) {
throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
}
TNetworkAddress dest = new TNetworkAddress(backend.getHost(), backend.getBeRpcPort());
return dest;
}
private TNetworkAddress toBrpcHost(TNetworkAddress host) throws Exception {
Backend backend = Env.getCurrentSystemInfo().getBackendWithBePort(
host.getHostname(), host.getPort());
if (backend == null) {
throw new UserException(SystemInfoService.NO_BACKEND_LOAD_AVAILABLE_MSG);
}
if (backend.getBrpcPort() < 0) {
return null;
}
return new TNetworkAddress(backend.getHost(), backend.getBrpcPort());
}
private boolean containsUnionNode(PlanNode node) {
if (node instanceof UnionNode) {
return true;
}
for (PlanNode child : node.getChildren()) {
if (child instanceof ExchangeNode) {
continue;
} else if (child instanceof UnionNode) {
return true;
} else {
return containsUnionNode(child);
}
}
return false;
}
private boolean containsIntersectNode(PlanNode node) {
if (node instanceof IntersectNode) {
return true;
}
for (PlanNode child : node.getChildren()) {
if (child instanceof ExchangeNode) {
continue;
} else if (child instanceof IntersectNode) {
return true;
} else {
return containsIntersectNode(child);
}
}
return false;
}
private boolean containsExceptNode(PlanNode node) {
if (node instanceof ExceptNode) {
return true;
}
for (PlanNode child : node.getChildren()) {
if (child instanceof ExchangeNode) {
continue;
} else if (child instanceof ExceptNode) {
return true;
} else {
return containsExceptNode(child);
}
}
return false;
}
private boolean containsSetOperationNode(PlanNode node) {
if (node instanceof SetOperationNode) {
return true;
}
for (PlanNode child : node.getChildren()) {
if (child instanceof ExchangeNode) {
continue;
} else if (child instanceof SetOperationNode) {
return true;
} else {
return containsSetOperationNode(child);
}
}
return false;
}
private void computeFragmentHosts() throws Exception {
for (int i = fragments.size() - 1; i >= 0; --i) {
PlanFragment fragment = fragments.get(i);
FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
if (fragment.getDataPartition() == DataPartition.UNPARTITIONED) {
Reference<Long> backendIdRef = new Reference<Long>();
TNetworkAddress execHostport;
if (((ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()) || (isAllExternalScan
&& Config.prefer_compute_node_for_external_table)) && !addressToBackendID.isEmpty()) {
execHostport = SimpleScheduler.getHostByCurrentBackend(addressToBackendID);
} else {
execHostport = SimpleScheduler.getHost(this.idToBackend, backendIdRef);
}
if (execHostport == null) {
LOG.warn("DataPartition UNPARTITIONED, no scanNode Backend available");
throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
}
if (backendIdRef.getRef() != null) {
this.addressToBackendID.put(execHostport, backendIdRef.getRef());
}
FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport,
0, params);
params.instanceExecParams.add(instanceParam);
continue;
}
Pair<PlanNode, PlanNode> pairNodes = findLeftmostNode(fragment.getPlanRoot());
PlanNode fatherNode = pairNodes.first;
PlanNode leftMostNode = pairNodes.second;
/*
* Case A:
* if the left most is ScanNode, which means there is no child fragment,
* we should assign fragment instances on every scan node hosts.
* Case B:
* if not, there should be exchange nodes to collect all data from child fragments(input fragments),
* so we should assign fragment instances corresponding to the child fragments' host
*/
if (!(leftMostNode instanceof ScanNode)) {
int inputFragmentIndex = 0;
int maxParallelism = 0;
int childrenCount = (fatherNode != null) ? fatherNode.getChildren().size() : 1;
for (int j = 0; j < childrenCount; j++) {
int currentChildFragmentParallelism
= fragmentExecParamsMap.get(fragment.getChild(j).getFragmentId()).instanceExecParams.size();
if (currentChildFragmentParallelism > maxParallelism) {
maxParallelism = currentChildFragmentParallelism;
inputFragmentIndex = j;
}
}
PlanFragmentId inputFragmentId = fragment.getChild(inputFragmentIndex).getFragmentId();
int exchangeInstances = -1;
if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable() != null) {
exchangeInstances = ConnectContext.get().getSessionVariable().getExchangeInstanceParallel();
}
if (leftMostNode.getNumInstances() == 1) {
exchangeInstances = 1;
}
if (exchangeInstances > 0 && fragmentExecParamsMap.get(inputFragmentId)
.instanceExecParams.size() > exchangeInstances) {
Set<TNetworkAddress> hostSet = Sets.newHashSet();
for (FInstanceExecParam execParams :
fragmentExecParamsMap.get(inputFragmentId).instanceExecParams) {
hostSet.add(execParams.host);
}
List<TNetworkAddress> hosts = Lists.newArrayList(hostSet);
Collections.shuffle(hosts, instanceRandom);
for (int index = 0; index < exchangeInstances; index++) {
FInstanceExecParam instanceParam = new FInstanceExecParam(null,
hosts.get(index % hosts.size()), 0, params);
params.instanceExecParams.add(instanceParam);
}
} else {
for (FInstanceExecParam execParams
: fragmentExecParamsMap.get(inputFragmentId).instanceExecParams) {
FInstanceExecParam instanceParam = new FInstanceExecParam(null, execParams.host, 0, params);
params.instanceExecParams.add(instanceParam);
}
}
Collections.shuffle(params.instanceExecParams, instanceRandom);
continue;
}
int parallelExecInstanceNum = fragment.getParallelExecNum();
if ((isColocateFragment(fragment, fragment.getPlanRoot())
&& fragmentIdToSeqToAddressMap.containsKey(fragment.getFragmentId())
&& fragmentIdToSeqToAddressMap.get(fragment.getFragmentId()).size() > 0)) {
computeColocateJoinInstanceParam(fragment.getFragmentId(), parallelExecInstanceNum, params);
} else if (bucketShuffleJoinController.isBucketShuffleJoin(fragment.getFragmentId().asInt())) {
bucketShuffleJoinController.computeInstanceParam(fragment.getFragmentId(),
parallelExecInstanceNum, params);
} else {
for (Entry<TNetworkAddress, Map<Integer, List<TScanRangeParams>>> entry : fragmentExecParamsMap.get(
fragment.getFragmentId()).scanRangeAssignment.entrySet()) {
TNetworkAddress key = entry.getKey();
Map<Integer, List<TScanRangeParams>> value = entry.getValue();
for (Integer planNodeId : value.keySet()) {
List<TScanRangeParams> perNodeScanRanges = value.get(planNodeId);
List<List<TScanRangeParams>> perInstanceScanRanges = Lists.newArrayList();
List<Boolean> sharedScanOpts = Lists.newArrayList();
Optional<ScanNode> node = scanNodes.stream().filter(scanNode -> {
return scanNode.getId().asInt() == planNodeId;
}).findFirst();
if (!enablePipelineEngine || perNodeScanRanges.size() > parallelExecInstanceNum
|| (node.isPresent() && node.get().getShouldColoScan())
|| (node.isPresent() && node.get() instanceof FileScanNode)
|| Config.disable_shared_scan) {
int expectedInstanceNum = 1;
if (parallelExecInstanceNum > 1) {
expectedInstanceNum = Math.min(perNodeScanRanges.size(), parallelExecInstanceNum);
}
perInstanceScanRanges = ListUtil.splitBySize(perNodeScanRanges,
expectedInstanceNum);
sharedScanOpts = Collections.nCopies(perInstanceScanRanges.size(), false);
} else {
int expectedInstanceNum = Math.min(parallelExecInstanceNum,
leftMostNode.getNumInstances());
expectedInstanceNum = Math.max(expectedInstanceNum, 1);
perInstanceScanRanges = Collections.nCopies(expectedInstanceNum, perNodeScanRanges);
sharedScanOpts = Collections.nCopies(perInstanceScanRanges.size(), true);
}
LOG.debug("scan range number per instance is: {}", perInstanceScanRanges.size());
for (int j = 0; j < perInstanceScanRanges.size(); j++) {
List<TScanRangeParams> scanRangeParams = perInstanceScanRanges.get(j);
boolean sharedScan = sharedScanOpts.get(j);
FInstanceExecParam instanceParam = new FInstanceExecParam(null, key, 0, params);
instanceParam.perNodeScanRanges.put(planNodeId, scanRangeParams);
instanceParam.perNodeSharedScans.put(planNodeId, sharedScan);
params.instanceExecParams.add(instanceParam);
}
}
}
}
if (params.instanceExecParams.isEmpty()) {
Reference<Long> backendIdRef = new Reference<Long>();
TNetworkAddress execHostport;
if (ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()
&& !addressToBackendID.isEmpty()) {
execHostport = SimpleScheduler.getHostByCurrentBackend(addressToBackendID);
} else {
execHostport = SimpleScheduler.getHost(this.idToBackend, backendIdRef);
}
if (execHostport == null) {
throw new UserException(SystemInfoService.NO_SCAN_NODE_BACKEND_AVAILABLE_MSG);
}
if (backendIdRef.getRef() != null) {
this.addressToBackendID.put(execHostport, backendIdRef.getRef());
}
FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport, 0, params);
params.instanceExecParams.add(instanceParam);
}
}
}
/**
 * Wires up runtime-filter routing: for every filter, record the fragment
 * instances it targets and how many instances build it, then designate the
 * first instance of the root fragment as the merge point.
 */
private void assignRuntimeFilterAddr() throws Exception {
    for (PlanFragment fragment : fragments) {
        FragmentExecParams execParams = fragmentExecParamsMap.get(fragment.getFragmentId());
        for (RuntimeFilterId filterId : fragment.getTargetRuntimeFilterIds()) {
            List<FRuntimeFilterTargetParam> targets
                    = ridToTargetParam.computeIfAbsent(filterId, unused -> new ArrayList<>());
            for (final FInstanceExecParam instance : execParams.instanceExecParams) {
                targets.add(new FRuntimeFilterTargetParam(instance.instanceId, toBrpcHost(instance.host)));
            }
        }
        for (RuntimeFilterId filterId : fragment.getBuilderRuntimeFilterIds()) {
            ridToBuilderNum.merge(filterId, execParams.instanceExecParams.size(), Integer::sum);
        }
    }
    // The first instance of the top-most fragment merges all runtime filters.
    FInstanceExecParam mergeInstance
            = fragmentExecParamsMap.get(fragments.get(0).getFragmentId()).instanceExecParams.get(0);
    runtimeFilterMergeAddr = toBrpcHost(mergeInstance.host);
    runtimeFilterMergeInstanceId = mergeInstance.instanceId;
}
/**
 * Decides whether the given fragment executes as a colocate plan.
 * A positive answer is cached in {@code colocateFragmentIds}.
 */
private boolean isColocateFragment(PlanFragment planFragment, PlanNode node) {
    // Colocate planning can be switched off per session.
    ConnectContext ctx = ConnectContext.get();
    if (ctx != null && ctx.getSessionVariable().isDisableColocatePlan()) {
        return false;
    }
    // Fast path: this fragment was already classified as colocate.
    if (colocateFragmentIds.contains(node.getFragmentId().asInt())) {
        return true;
    }
    if (planFragment.hasColocatePlanNode()) {
        colocateFragmentIds.add(planFragment.getId().asInt());
        return true;
    }
    return false;
}
/**
 * Walks the left-most child chain of {@code plan} until a leaf or an
 * ExchangeNode is reached.
 *
 * @return a pair of (parent of the left-most node, left-most node); the
 *         parent is null when {@code plan} itself is already the left-most node
 */
private Pair<PlanNode, PlanNode> findLeftmostNode(PlanNode plan) {
    PlanNode parent = null;
    PlanNode current = plan;
    while (!current.getChildren().isEmpty() && !(current instanceof ExchangeNode)) {
        parent = current;
        current = current.getChild(0);
    }
    return Pair.of(parent, current);
}
/**
 * Returns the value mapped to {@code key}, inserting {@code defaultVal}
 * first when no mapping exists yet.
 */
private <K, V> V findOrInsert(Map<K, V> m, final K key, final V defaultVal) {
    // putIfAbsent returns the previous mapping, or null if defaultVal was inserted.
    V previous = m.putIfAbsent(key, defaultVal);
    return previous != null ? previous : defaultVal;
}
/**
 * Returns the scan-range list mapped to {@code key}, inserting
 * {@code defaultVal} first when no mapping exists yet.
 */
private List<TScanRangeParams> findOrInsert(Map<Integer, List<TScanRangeParams>> m, Integer key,
        ArrayList<TScanRangeParams> defaultVal) {
    // putIfAbsent returns the previous mapping, or null if defaultVal was inserted.
    List<TScanRangeParams> previous = m.putIfAbsent(key, defaultVal);
    return previous != null ? previous : defaultVal;
}
/**
 * Builds fragment instances for a colocate fragment. Buckets are grouped by
 * the backend address they were pinned to, then the buckets of each address
 * are split across at most {@code parallelExecInstanceNum} instances; each
 * instance receives the scan ranges of its buckets, and the fragment's
 * scanRangeAssignment is filled in alongside.
 */
private void computeColocateJoinInstanceParam(PlanFragmentId fragmentId,
        int parallelExecInstanceNum, FragmentExecParams params) {
    Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(fragmentId);
    BucketSeqToScanRange bucketSeqToScanRange = fragmentIdTobucketSeqToScanRangeMap.get(fragmentId);
    Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.get(fragmentId);
    // address -> list of (bucket seq, scan ranges of this fragment's scan nodes in that bucket)
    Map<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressToScanRanges
            = Maps.newHashMap();
    for (Map.Entry<Integer, Map<Integer, List<TScanRangeParams>>> scanRanges : bucketSeqToScanRange.entrySet()) {
        TNetworkAddress address = bucketSeqToAddress.get(scanRanges.getKey());
        Map<Integer, List<TScanRangeParams>> nodeScanRanges = scanRanges.getValue();
        // Keep only the scan nodes that belong to this fragment.
        Map<Integer, List<TScanRangeParams>> filteredNodeScanRanges = Maps.newHashMap();
        for (Integer scanNodeId : nodeScanRanges.keySet()) {
            if (scanNodeIds.contains(scanNodeId)) {
                filteredNodeScanRanges.put(scanNodeId, nodeScanRanges.get(scanNodeId));
            }
        }
        Pair<Integer, Map<Integer, List<TScanRangeParams>>> filteredScanRanges
                = Pair.of(scanRanges.getKey(), filteredNodeScanRanges);
        if (!addressToScanRanges.containsKey(address)) {
            addressToScanRanges.put(address, Lists.newArrayList());
        }
        addressToScanRanges.get(address).add(filteredScanRanges);
    }
    FragmentScanRangeAssignment assignment = params.scanRangeAssignment;
    for (Map.Entry<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressScanRange
            : addressToScanRanges.entrySet()) {
        List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> scanRange = addressScanRange.getValue();
        Map<Integer, List<TScanRangeParams>> range
                = findOrInsert(assignment, addressScanRange.getKey(), new HashMap<>());
        // One instance per bucket, capped by the configured parallelism.
        int expectedInstanceNum = 1;
        if (parallelExecInstanceNum > 1) {
            expectedInstanceNum = Math.min(scanRange.size(), parallelExecInstanceNum);
        }
        List<List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> perInstanceScanRanges
                = ListUtil.splitBySize(scanRange, expectedInstanceNum);
        for (List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> perInstanceScanRange
                : perInstanceScanRanges) {
            FInstanceExecParam instanceParam = new FInstanceExecParam(null, addressScanRange.getKey(), 0, params);
            for (Pair<Integer, Map<Integer, List<TScanRangeParams>>> nodeScanRangeMap : perInstanceScanRange) {
                instanceParam.bucketSeqSet.add(nodeScanRangeMap.first);
                for (Map.Entry<Integer, List<TScanRangeParams>> nodeScanRange
                        : nodeScanRangeMap.second.entrySet()) {
                    // Mirror the ranges into both the host-level assignment and
                    // the instance's own per-node map.
                    if (!instanceParam.perNodeScanRanges.containsKey(nodeScanRange.getKey())) {
                        range.put(nodeScanRange.getKey(), Lists.newArrayList());
                        instanceParam.perNodeScanRanges.put(nodeScanRange.getKey(), Lists.newArrayList());
                    }
                    range.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                    instanceParam.perNodeScanRanges.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                }
            }
            params.instanceExecParams.add(instanceParam);
        }
    }
}
/**
 * Counts how many scan-range replicas each host serves across all scan nodes.
 * Used by the schedulers as a tie-breaker when balancing assignments.
 */
private Map<TNetworkAddress, Long> getReplicaNumPerHostForOlapTable() {
    Map<TNetworkAddress, Long> replicaNumPerHost = Maps.newHashMap();
    for (ScanNode scanNode : scanNodes) {
        List<TScanRangeLocations> locationsList = scanNode.getScanRangeLocations(0);
        // Some scan nodes may report no locations; computeScanRangeAssignment()
        // already guards against this, so do the same here to avoid an NPE.
        if (locationsList == null) {
            continue;
        }
        for (TScanRangeLocations locations : locationsList) {
            for (TScanRangeLocation location : locations.locations) {
                replicaNumPerHost.merge(location.server, 1L, Long::sum);
            }
        }
    }
    return replicaNumPerHost;
}
/**
 * Assigns every scan range of every scan node to a backend host.
 * Point queries resolve locations lazily and skip assignment entirely.
 * Colocate and bucket-shuffle fragments use bucket-based assignment; all
 * other fragments are balanced by the round-robin scheduler.
 */
private void computeScanRangeAssignment() throws Exception {
    if (isPointQuery) {
        // Point query: the single OLAP scan node evaluates its locations lazily.
        List<TScanRangeLocations> locations = ((OlapScanNode) scanNodes.get(0)).lazyEvaluateRangeLocations();
        Preconditions.checkNotNull(locations);
        return;
    }
    Map<TNetworkAddress, Long> assignedBytesPerHost = Maps.newHashMap();
    Map<TNetworkAddress, Long> replicaNumPerHost = getReplicaNumPerHostForOlapTable();
    // Randomize node order so assignment does not systematically favor
    // the same hosts from query to query.
    Collections.shuffle(scanNodes);
    for (ScanNode scanNode : scanNodes) {
        if (!(scanNode instanceof ExternalScanNode)) {
            isAllExternalScan = false;
        }
        List<TScanRangeLocations> locations;
        locations = scanNode.getScanRangeLocations(0);
        if (locations == null) {
            // Node produced no scan ranges; nothing to assign.
            continue;
        }
        Collections.shuffle(locations);
        Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.computeIfAbsent(scanNode.getFragmentId(),
                k -> Sets.newHashSet());
        scanNodeIds.add(scanNode.getId().asInt());
        if (scanNode instanceof FileQueryScanNode) {
            fileScanRangeParamsMap.put(
                    scanNode.getId().asInt(), ((FileQueryScanNode) scanNode).getFileScanRangeParams());
        }
        FragmentScanRangeAssignment assignment
                = fragmentExecParamsMap.get(scanNode.getFragmentId()).scanRangeAssignment;
        boolean fragmentContainsColocateJoin = isColocateFragment(scanNode.getFragment(),
                scanNode.getFragment().getPlanRoot());
        boolean fragmentContainsBucketShuffleJoin = bucketShuffleJoinController
                .isBucketShuffleJoin(scanNode.getFragmentId().asInt(), scanNode.getFragment().getPlanRoot());
        // Note: a fragment can be both colocate and bucket-shuffle; in that case
        // both bucket-based assignments are performed.
        if (fragmentContainsColocateJoin) {
            computeScanRangeAssignmentByColocate((OlapScanNode) scanNode, assignedBytesPerHost, replicaNumPerHost);
        }
        if (fragmentContainsBucketShuffleJoin) {
            bucketShuffleJoinController.computeScanRangeAssignmentByBucket((OlapScanNode) scanNode,
                    idToBackend, addressToBackendID, replicaNumPerHost);
        }
        if (!(fragmentContainsColocateJoin || fragmentContainsBucketShuffleJoin)) {
            computeScanRangeAssignmentByScheduler(scanNode, locations, assignment, assignedBytesPerHost,
                    replicaNumPerHost);
        }
    }
}
/**
 * Bucket-based assignment for colocate fragments: each bucket sequence is
 * pinned to one backend address, and all scan ranges of that bucket are
 * grouped under (bucket seq, scan node id).
 */
private void computeScanRangeAssignmentByColocate(
        final OlapScanNode scanNode, Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
    if (!fragmentIdToSeqToAddressMap.containsKey(scanNode.getFragmentId())) {
        // First scan node seen for this fragment: initialize its bucket maps.
        fragmentIdToSeqToAddressMap.put(scanNode.getFragmentId(), new HashMap<>());
        fragmentIdTobucketSeqToScanRangeMap.put(scanNode.getFragmentId(), new BucketSeqToScanRange());
    }
    Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(scanNode.getFragmentId());
    BucketSeqToScanRange bucketSeqToScanRange = fragmentIdTobucketSeqToScanRangeMap.get(scanNode.getFragmentId());
    for (Integer bucketSeq : scanNode.bucketSeq2locations.keySet()) {
        List<TScanRangeLocations> locations = scanNode.bucketSeq2locations.get(bucketSeq);
        if (!bucketSeqToAddress.containsKey(bucketSeq)) {
            // Pin this bucket to a backend once; later scan nodes reuse the address.
            getExecHostPortForFragmentIDAndBucketSeq(locations.get(0),
                    scanNode.getFragmentId(), bucketSeq, assignedBytesPerHost, replicaNumPerHost);
        }
        for (TScanRangeLocations location : locations) {
            Map<Integer, List<TScanRangeParams>> scanRanges =
                    findOrInsert(bucketSeqToScanRange, bucketSeq, new HashMap<>());
            List<TScanRangeParams> scanRangeParamsList =
                    findOrInsert(scanRanges, scanNode.getId().asInt(), new ArrayList<>());
            TScanRangeParams scanRangeParams = new TScanRangeParams();
            scanRangeParams.scan_range = location.scan_range;
            scanRangeParamsList.add(scanRangeParams);
            updateScanRangeNumByScanRange(scanRangeParams);
        }
    }
}
/**
 * Chooses a backend for one bucket of a colocate fragment and records its
 * address under (fragment id, bucket seq).
 */
private void getExecHostPortForFragmentIDAndBucketSeq(TScanRangeLocations seqLocation,
        PlanFragmentId fragmentId, Integer bucketSeq, Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost)
        throws Exception {
    Reference<Long> chosenBackendId = new Reference<Long>();
    selectBackendsByRoundRobin(seqLocation, assignedBytesPerHost, replicaNumPerHost, chosenBackendId);
    Backend chosenBackend = this.idToBackend.get(chosenBackendId.getRef());
    TNetworkAddress execAddress = new TNetworkAddress(chosenBackend.getHost(), chosenBackend.getBePort());
    this.addressToBackendID.put(execAddress, chosenBackendId.getRef());
    this.fragmentIdToSeqToAddressMap.get(fragmentId).put(bucketSeq, execAddress);
}
/**
 * Selects a replica for the given scan-range locations. When local replica
 * selection is enabled, replicas on this FE's host are preferred; when none
 * is usable, the non-local replicas are tried only if fallback is allowed.
 */
public TScanRangeLocation selectBackendsByRoundRobin(TScanRangeLocations seqLocation,
        Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost,
        Reference<Long> backendIdRef) throws UserException {
    // No locality preference: pick among all replicas directly.
    if (!Config.enable_local_replica_selection) {
        return selectBackendsByRoundRobin(seqLocation.getLocations(), assignedBytesPerHost, replicaNumPerHost,
                backendIdRef);
    }
    // Partition replicas into local (same host as this FE) and non-local.
    long localBeId = Env.getCurrentSystemInfo().getBackendIdByHost(FrontendOptions.getLocalHostAddress());
    List<TScanRangeLocation> localReplicas = new ArrayList<>();
    List<TScanRangeLocation> remoteReplicas = new ArrayList<>();
    for (final TScanRangeLocation candidate : seqLocation.getLocations()) {
        (candidate.backend_id == localBeId ? localReplicas : remoteReplicas).add(candidate);
    }
    try {
        return selectBackendsByRoundRobin(localReplicas, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
    } catch (UserException ue) {
        if (!Config.enable_local_replica_selection_fallback) {
            throw ue;
        }
        return selectBackendsByRoundRobin(remoteReplicas, assignedBytesPerHost, replicaNumPerHost, backendIdRef);
    }
}
/**
 * Picks the replica whose host currently has the fewest assigned units, with
 * ties broken by the smaller remaining replica count. Side effects: every
 * candidate host's remaining replica count is decremented, and the winner's
 * assigned counter is incremented.
 *
 * NOTE(review): despite the map name, the winner is charged a fixed step of 1,
 * not actual bytes — confirm whether byte-based weighting was intended.
 */
public TScanRangeLocation selectBackendsByRoundRobin(List<TScanRangeLocation> locations,
        Map<TNetworkAddress, Long> assignedBytesPerHost, Map<TNetworkAddress, Long> replicaNumPerHost,
        Reference<Long> backendIdRef) throws UserException {
    Long minAssignedBytes = Long.MAX_VALUE;
    Long minReplicaNum = Long.MAX_VALUE;
    TScanRangeLocation minLocation = null;
    Long step = 1L;
    for (final TScanRangeLocation location : locations) {
        Long assignedBytes = findOrInsert(assignedBytesPerHost, location.server, 0L);
        if (assignedBytes < minAssignedBytes || (assignedBytes.equals(minAssignedBytes)
                && replicaNumPerHost.get(location.server) < minReplicaNum)) {
            minAssignedBytes = assignedBytes;
            minReplicaNum = replicaNumPerHost.get(location.server);
            minLocation = location;
        }
    }
    // Consume one pending replica from every candidate host.
    for (TScanRangeLocation location : locations) {
        replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) - 1);
    }
    // SimpleScheduler may substitute another location if the preferred backend
    // is unavailable; charge the step to whichever host was actually chosen.
    TScanRangeLocation location = SimpleScheduler.getLocation(minLocation, locations,
            this.idToBackend, backendIdRef);
    assignedBytesPerHost.put(location.server, assignedBytesPerHost.get(location.server) + step);
    return location;
}
/**
 * Assigns each scan range of a non-colocate, non-bucket-shuffle scan node to
 * the least-loaded replica host and records the result in {@code assignment}.
 */
private void computeScanRangeAssignmentByScheduler(
        final ScanNode scanNode,
        final List<TScanRangeLocations> locations,
        FragmentScanRangeAssignment assignment,
        Map<TNetworkAddress, Long> assignedBytesPerHost,
        Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
    for (TScanRangeLocations scanRangeLocations : locations) {
        Reference<Long> chosenBackendId = new Reference<Long>();
        TScanRangeLocation chosenLocation = selectBackendsByRoundRobin(scanRangeLocations,
                assignedBytesPerHost, replicaNumPerHost, chosenBackendId);
        Backend chosenBackend = this.idToBackend.get(chosenBackendId.getRef());
        TNetworkAddress execAddress = new TNetworkAddress(chosenBackend.getHost(), chosenBackend.getBePort());
        this.addressToBackendID.put(execAddress, chosenBackendId.getRef());
        // Group the ranges by host first, then by scan-node id.
        Map<Integer, List<TScanRangeParams>> rangesByNode = findOrInsert(assignment, execAddress,
                new HashMap<Integer, List<TScanRangeParams>>());
        List<TScanRangeParams> rangeList = findOrInsert(rangesByNode, scanNode.getId().asInt(),
                new ArrayList<TScanRangeParams>());
        TScanRangeParams rangeParams = new TScanRangeParams();
        rangeParams.scan_range = scanRangeLocations.scan_range;
        rangeParams.setVolumeId(chosenLocation.volume_id);
        rangeList.add(rangeParams);
        updateScanRangeNumByScanRange(rangeParams);
    }
}
/**
 * Accumulates {@code scanRangeNum} according to the concrete payload carried
 * by the scan range: broker and file ranges count their inner range lists,
 * an OLAP (palo) range counts as one.
 */
private void updateScanRangeNumByScanRange(TScanRangeParams param) {
    TScanRange scanRange = param.getScanRange();
    if (scanRange == null) {
        return;
    }
    TBrokerScanRange brokerRange = scanRange.getBrokerScanRange();
    if (brokerRange != null) {
        scanRangeNum += brokerRange.getRanges().size();
    }
    TExternalScanRange externalRange = scanRange.getExtScanRange();
    if (externalRange != null) {
        TFileScanRange fileRange = externalRange.getFileScanRange();
        if (fileRange != null) {
            scanRangeNum += fileRange.getRanges().size();
        }
    }
    if (scanRange.getPaloScanRange() != null) {
        scanRangeNum += 1;
    }
}
/**
 * Handles an execution-status report from one backend instance.
 * Updates the instance profile, propagates failures into the query status,
 * and, once the instance is done, merges its load-related results into the
 * coordinator state. Finally forwards load progress to the job managers.
 */
public void updateFragmentExecStatus(TReportExecStatusParams params) {
    if (enablePipelineEngine) {
        PipelineExecContext ctx = pipelineExecContexts.get(Pair.of(params.getFragmentId(), params.getBackendId()));
        // Mirror the unknown-backend guard of the non-pipeline branch: a stale
        // or misrouted report must not crash the coordinator with an NPE.
        if (ctx == null) {
            LOG.warn("unknown pipeline exec context, fragment id: {}, backend id: {}",
                    params.getFragmentId(), params.getBackendId());
            return;
        }
        if (!ctx.updateProfile(params)) {
            return;
        }
        if (LOG.isDebugEnabled()) {
            StringBuilder builder = new StringBuilder();
            ctx.printProfile(builder);
            LOG.debug("profile for query_id={} instance_id={}\n{}",
                    DebugUtil.printId(queryId),
                    DebugUtil.printId(params.getFragmentInstanceId()),
                    builder.toString());
        }
        updateStatusIfInstanceFailed(params);
        if (ctx.fragmentInstancesMap.get(params.fragment_instance_id).getIsDone()) {
            finishInstance(params);
        }
    } else {
        if (params.backend_num >= backendExecStates.size()) {
            LOG.warn("unknown backend number: {}, expected less than: {}",
                    params.backend_num, backendExecStates.size());
            return;
        }
        BackendExecState execState = backendExecStates.get(params.backend_num);
        if (!execState.updateProfile(params)) {
            return;
        }
        if (LOG.isDebugEnabled()) {
            StringBuilder builder = new StringBuilder();
            execState.printProfile(builder);
            LOG.debug("profile for query_id={} instance_id={}\n{}",
                    DebugUtil.printId(queryId),
                    DebugUtil.printId(params.getFragmentInstanceId()),
                    builder.toString());
        }
        updateStatusIfInstanceFailed(params);
        if (execState.done) {
            finishInstance(params);
        }
    }
    if (params.isSetLoadedRows() && jobId != -1) {
        Env.getCurrentEnv().getLoadManager().updateJobProgress(
                jobId, params.getBackendId(), params.getQueryId(), params.getFragmentInstanceId(),
                params.getLoadedRows(), params.getLoadedBytes(), params.isDone());
        Env.getCurrentEnv().getProgressManager().updateProgress(String.valueOf(jobId),
                params.getQueryId(), params.getFragmentInstanceId(), params.getFinishedScanRanges());
    }
}

// Propagates a failed instance status into the overall query status. A cancel
// that arrives after all results were returned is deliberately ignored.
private void updateStatusIfInstanceFailed(TReportExecStatusParams params) {
    Status status = new Status(params.status);
    if (!(returnedAllResults && status.isCancelled()) && !status.ok()) {
        LOG.warn("one instance report fail, query_id={} instance_id={}, error message: {}",
                DebugUtil.printId(queryId), DebugUtil.printId(params.getFragmentInstanceId()),
                status.getErrorMsg());
        updateStatus(status, params.getFragmentInstanceId());
    }
}

// Merges the load-related results of a finished instance into coordinator
// state and marks the instance done in the execution profile.
private void finishInstance(TReportExecStatusParams params) {
    if (params.isSetDeltaUrls()) {
        updateDeltas(params.getDeltaUrls());
    }
    if (params.isSetLoadCounters()) {
        updateLoadCounters(params.getLoadCounters());
    }
    if (params.isSetTrackingUrl()) {
        trackingUrl = params.getTrackingUrl();
    }
    if (params.isSetExportFiles()) {
        updateExportFiles(params.getExportFiles());
    }
    if (params.isSetCommitInfos()) {
        updateCommitInfos(params.getCommitInfos());
    }
    if (params.isSetErrorTabletInfos()) {
        updateErrorTabletInfos(params.getErrorTabletInfos());
    }
    executionProfile.markOneInstanceDone(params.getFragmentInstanceId());
}
/*
 * Waiting the coordinator finish executing.
 * return false if waiting timeout.
 * return true otherwise.
 * NOTICE: return true does not mean that coordinator executed success,
 * the caller should check queryStatus for result.
 *
 * We divide the entire waiting process into multiple rounds,
 * with a maximum of 30 seconds per round. And after each round of waiting,
 * check the status of the BE. If the BE status is abnormal, the wait is ended
 * and the result is returned. Otherwise, continue to the next round of waiting.
 * This method mainly avoids the problem that the Coordinator waits for a long time
 * after some BE can no longer return the result due to some exception, such as BE is down.
 */
public boolean join(int timeoutS) {
    // Wait in rounds of at most 30s so dead backends are detected promptly.
    final long fixedMaxWaitTime = 30;
    long leftTimeoutS = timeoutS;
    while (leftTimeoutS > 0) {
        long waitTime = Math.min(leftTimeoutS, fixedMaxWaitTime);
        boolean awaitRes = false;
        try {
            awaitRes = executionProfile.awaitAllInstancesDone(waitTime);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of silently swallowing it,
            // so callers further up the stack can still observe the interrupt.
            Thread.currentThread().interrupt();
        }
        if (awaitRes) {
            return true;
        }
        if (!checkBackendState()) {
            // A backend is down; stop waiting and let the caller inspect queryStatus.
            return true;
        }
        leftTimeoutS -= waitTime;
    }
    return false;
}
/*
 * Check the state of backends in needCheckBackendExecStates.
 * return true if all of them are OK. Otherwise, return false.
 */
private boolean checkBackendState() {
    if (enablePipelineEngine) {
        for (PipelineExecContext ctx : needCheckPipelineExecContexts) {
            if (!ctx.isBackendStateHealthy()) {
                queryStatus = new Status(TStatusCode.INTERNAL_ERROR,
                        "backend " + ctx.backend.getId() + " is down");
                return false;
            }
        }
        return true;
    }
    for (BackendExecState backendExecState : needCheckBackendExecStates) {
        if (!backendExecState.isBackendStateHealthy()) {
            queryStatus = new Status(TStatusCode.INTERNAL_ERROR,
                    "backend " + backendExecState.backend.getId() + " is down");
            return false;
        }
    }
    return true;
}
// Returns true once every fragment instance has reported completion.
public boolean isDone() {
    return executionProfile.isAllInstancesDone();
}
// Per-fragment assignment: host -> (scan node id -> scan ranges given to that
// node on that host).
class FragmentScanRangeAssignment
        extends HashMap<TNetworkAddress, Map<Integer, List<TScanRangeParams>>> {
}
// Bucket mapping: bucket sequence -> (scan node id -> scan ranges of that bucket).
class BucketSeqToScanRange extends HashMap<Integer, Map<Integer, List<TScanRangeParams>>> {
}
/**
 * Bookkeeping for bucket-shuffle joins: detects which fragments perform a
 * bucket-shuffle join, pins each bucket sequence to a backend (balancing the
 * bucket count per backend), records the scan ranges per bucket, and finally
 * builds the fragment instances from that bucket layout.
 */
class BucketShuffleJoinController {
    // fragment id -> (bucket seq -> (scan node id -> scan ranges))
    private final Map<PlanFragmentId, BucketSeqToScanRange> fragmentIdBucketSeqToScanRangeMap = Maps.newHashMap();
    // fragment id -> (bucket seq -> backend address the bucket is pinned to)
    private final Map<PlanFragmentId, Map<Integer, TNetworkAddress>> fragmentIdToSeqToAddressMap
            = Maps.newHashMap();
    // fragment id -> (backend id -> number of buckets already assigned to it)
    private final Map<PlanFragmentId, Map<Long, Integer>> fragmentIdToBuckendIdBucketCountMap = Maps.newHashMap();
    // fragment id -> total bucket number of the fragment
    private final Map<PlanFragmentId, Integer> fragmentIdToBucketNumMap = Maps.newHashMap();
    // fragments already classified as bucket-shuffle joins
    private final Set<Integer> bucketShuffleFragmentIds = new HashSet<>();
    // fragment id -> ids of the scan nodes belonging to that fragment (shared with the coordinator)
    private final Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds;

    public BucketShuffleJoinController(Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds) {
        this.fragmentIdToScanNodeIds = fragmentIdToScanNodeIds;
    }

    // Recursively checks whether the given fragment contains a bucket-shuffle
    // hash join; positive results are cached in bucketShuffleFragmentIds.
    private boolean isBucketShuffleJoin(int fragmentId, PlanNode node) {
        if (ConnectContext.get() != null) {
            // Disabled only when both the session switch and the Nereids planner are off.
            if (!ConnectContext.get().getSessionVariable().isEnableBucketShuffleJoin()
                    && !ConnectContext.get().getSessionVariable().isEnableNereidsPlanner()) {
                return false;
            }
        }
        // Only consider nodes that belong to the fragment in question.
        if (fragmentId != node.getFragmentId().asInt()) {
            return false;
        }
        if (bucketShuffleFragmentIds.contains(fragmentId)) {
            return true;
        }
        if (node instanceof HashJoinNode) {
            HashJoinNode joinNode = (HashJoinNode) node;
            if (joinNode.isBucketShuffle()) {
                bucketShuffleFragmentIds.add(joinNode.getFragmentId().asInt());
                return true;
            }
        }
        for (PlanNode childNode : node.getChildren()) {
            if (isBucketShuffleJoin(fragmentId, childNode)) {
                return true;
            }
        }
        return false;
    }

    // Cache-only variant: reports whether the fragment was already classified.
    private boolean isBucketShuffleJoin(int fragmentId) {
        return bucketShuffleFragmentIds.contains(fragmentId);
    }

    private int getFragmentBucketNum(PlanFragmentId fragmentId) {
        return fragmentIdToBucketNumMap.get(fragmentId);
    }

    // Pins one bucket to a backend, preferring the replica backend that
    // currently holds the fewest buckets (ties broken by the smaller remaining
    // replica count), then updates the per-backend bucket counters.
    private void getExecHostPortForFragmentIDAndBucketSeq(TScanRangeLocations seqLocation,
            PlanFragmentId fragmentId, Integer bucketSeq, ImmutableMap<Long, Backend> idToBackend,
            Map<TNetworkAddress, Long> addressToBackendID,
            Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
        Map<Long, Integer> buckendIdToBucketCountMap = fragmentIdToBuckendIdBucketCountMap.get(fragmentId);
        int maxBucketNum = Integer.MAX_VALUE;
        long buckendId = Long.MAX_VALUE;
        Long minReplicaNum = Long.MAX_VALUE;
        for (TScanRangeLocation location : seqLocation.locations) {
            if (buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0) < maxBucketNum) {
                maxBucketNum = buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0);
                buckendId = location.backend_id;
                minReplicaNum = replicaNumPerHost.get(location.server);
            } else if (buckendIdToBucketCountMap.getOrDefault(location.backend_id, 0) == maxBucketNum
                    && replicaNumPerHost.get(location.server) < minReplicaNum) {
                buckendId = location.backend_id;
                minReplicaNum = replicaNumPerHost.get(location.server);
            }
        }
        Reference<Long> backendIdRef = new Reference<>();
        // SimpleScheduler may substitute another backend when the preferred one
        // is unavailable; count the bucket against whichever was actually used.
        TNetworkAddress execHostPort = SimpleScheduler.getHost(buckendId,
                seqLocation.locations, idToBackend, backendIdRef);
        if (backendIdRef.getRef() != buckendId) {
            buckendIdToBucketCountMap.put(backendIdRef.getRef(),
                    buckendIdToBucketCountMap.getOrDefault(backendIdRef.getRef(), 0) + 1);
        } else {
            buckendIdToBucketCountMap.put(buckendId, buckendIdToBucketCountMap.getOrDefault(buckendId, 0) + 1);
        }
        // Consume one pending replica from every candidate host.
        for (TScanRangeLocation location : seqLocation.locations) {
            replicaNumPerHost.put(location.server, replicaNumPerHost.get(location.server) - 1);
        }
        addressToBackendID.put(execHostPort, backendIdRef.getRef());
        this.fragmentIdToSeqToAddressMap.get(fragmentId).put(bucketSeq, execHostPort);
    }

    // Bucket-based scan-range assignment for a bucket-shuffle fragment: each
    // bucket is pinned to one backend, and all scan ranges of that bucket are
    // grouped under (bucket seq, scan node id).
    private void computeScanRangeAssignmentByBucket(
            final OlapScanNode scanNode, ImmutableMap<Long, Backend> idToBackend,
            Map<TNetworkAddress, Long> addressToBackendID,
            Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
        if (!fragmentIdToSeqToAddressMap.containsKey(scanNode.getFragmentId())) {
            // First scan node seen for this fragment: record the bucket number
            // and initialize the per-fragment maps.
            int bucketNum = 0;
            if (scanNode.getOlapTable().isColocateTable()) {
                bucketNum = scanNode.getOlapTable().getDefaultDistributionInfo().getBucketNum();
            } else {
                bucketNum = (int) (scanNode.getTotalTabletsNum());
            }
            fragmentIdToBucketNumMap.put(scanNode.getFragmentId(), bucketNum);
            fragmentIdToSeqToAddressMap.put(scanNode.getFragmentId(), new HashMap<>());
            fragmentIdBucketSeqToScanRangeMap.put(scanNode.getFragmentId(), new BucketSeqToScanRange());
            fragmentIdToBuckendIdBucketCountMap.put(scanNode.getFragmentId(), new HashMap<>());
        }
        Map<Integer, TNetworkAddress> bucketSeqToAddress
                = fragmentIdToSeqToAddressMap.get(scanNode.getFragmentId());
        BucketSeqToScanRange bucketSeqToScanRange = fragmentIdBucketSeqToScanRangeMap.get(scanNode.getFragmentId());
        for (Integer bucketSeq : scanNode.bucketSeq2locations.keySet()) {
            List<TScanRangeLocations> locations = scanNode.bucketSeq2locations.get(bucketSeq);
            if (!bucketSeqToAddress.containsKey(bucketSeq)) {
                getExecHostPortForFragmentIDAndBucketSeq(locations.get(0), scanNode.getFragmentId(),
                        bucketSeq, idToBackend, addressToBackendID, replicaNumPerHost);
            }
            for (TScanRangeLocations location : locations) {
                Map<Integer, List<TScanRangeParams>> scanRanges =
                        findOrInsert(bucketSeqToScanRange, bucketSeq, new HashMap<>());
                List<TScanRangeParams> scanRangeParamsList =
                        findOrInsert(scanRanges, scanNode.getId().asInt(), new ArrayList<>());
                TScanRangeParams scanRangeParams = new TScanRangeParams();
                scanRangeParams.scan_range = location.scan_range;
                scanRangeParamsList.add(scanRangeParams);
                updateScanRangeNumByScanRange(scanRangeParams);
            }
        }
    }

    // Builds fragment instances from the bucket layout: buckets are grouped by
    // backend address, then split across at most parallelExecInstanceNum
    // instances per address; each instance gets the scan ranges of its buckets,
    // and the fragment's scanRangeAssignment is filled in alongside.
    private void computeInstanceParam(PlanFragmentId fragmentId,
            int parallelExecInstanceNum, FragmentExecParams params) {
        Map<Integer, TNetworkAddress> bucketSeqToAddress = fragmentIdToSeqToAddressMap.get(fragmentId);
        BucketSeqToScanRange bucketSeqToScanRange = fragmentIdBucketSeqToScanRangeMap.get(fragmentId);
        Set<Integer> scanNodeIds = fragmentIdToScanNodeIds.get(fragmentId);
        // address -> list of (bucket seq, scan ranges of this fragment's scan nodes)
        Map<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressToScanRanges
                = Maps.newHashMap();
        for (Map.Entry<Integer, Map<Integer, List<TScanRangeParams>>> scanRanges
                : bucketSeqToScanRange.entrySet()) {
            TNetworkAddress address = bucketSeqToAddress.get(scanRanges.getKey());
            Map<Integer, List<TScanRangeParams>> nodeScanRanges = scanRanges.getValue();
            // Keep only the scan nodes that belong to this fragment.
            Map<Integer, List<TScanRangeParams>> filteredNodeScanRanges = Maps.newHashMap();
            for (Integer scanNodeId : nodeScanRanges.keySet()) {
                if (scanNodeIds.contains(scanNodeId)) {
                    filteredNodeScanRanges.put(scanNodeId, nodeScanRanges.get(scanNodeId));
                }
            }
            Pair<Integer, Map<Integer, List<TScanRangeParams>>> filteredScanRanges
                    = Pair.of(scanRanges.getKey(), filteredNodeScanRanges);
            if (!addressToScanRanges.containsKey(address)) {
                addressToScanRanges.put(address, Lists.newArrayList());
            }
            addressToScanRanges.get(address).add(filteredScanRanges);
        }
        FragmentScanRangeAssignment assignment = params.scanRangeAssignment;
        for (Map.Entry<TNetworkAddress, List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> addressScanRange
                : addressToScanRanges.entrySet()) {
            List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> scanRange = addressScanRange.getValue();
            Map<Integer, List<TScanRangeParams>> range
                    = findOrInsert(assignment, addressScanRange.getKey(), new HashMap<>());
            // One instance per bucket, capped by the configured parallelism.
            int expectedInstanceNum = 1;
            if (parallelExecInstanceNum > 1) {
                expectedInstanceNum = Math.min(scanRange.size(), parallelExecInstanceNum);
            }
            List<List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>>> perInstanceScanRanges
                    = ListUtil.splitBySize(scanRange, expectedInstanceNum);
            for (List<Pair<Integer, Map<Integer, List<TScanRangeParams>>>> perInstanceScanRange
                    : perInstanceScanRanges) {
                FInstanceExecParam instanceParam = new FInstanceExecParam(
                        null, addressScanRange.getKey(), 0, params);
                for (Pair<Integer, Map<Integer, List<TScanRangeParams>>> nodeScanRangeMap : perInstanceScanRange) {
                    instanceParam.addBucketSeq(nodeScanRangeMap.first);
                    for (Map.Entry<Integer, List<TScanRangeParams>> nodeScanRange
                            : nodeScanRangeMap.second.entrySet()) {
                        // Mirror the ranges into both the host-level assignment
                        // and the instance's own per-node map.
                        if (!instanceParam.perNodeScanRanges.containsKey(nodeScanRange.getKey())) {
                            range.put(nodeScanRange.getKey(), Lists.newArrayList());
                            instanceParam.perNodeScanRanges.put(nodeScanRange.getKey(), Lists.newArrayList());
                        }
                        range.get(nodeScanRange.getKey()).addAll(nodeScanRange.getValue());
                        instanceParam.perNodeScanRanges.get(nodeScanRange.getKey())
                                .addAll(nodeScanRange.getValue());
                    }
                }
                params.instanceExecParams.add(instanceParam);
            }
        }
    }
}
// Colocate-fragment bookkeeping: fragment id -> (bucket seq -> scan ranges).
private final Map<PlanFragmentId, BucketSeqToScanRange> fragmentIdTobucketSeqToScanRangeMap = Maps.newHashMap();
// Colocate-fragment bookkeeping: fragment id -> (bucket seq -> backend address).
private final Map<PlanFragmentId, Map<Integer, TNetworkAddress>> fragmentIdToSeqToAddressMap = Maps.newHashMap();
// fragment id -> ids of the scan nodes belonging to that fragment.
private final Map<PlanFragmentId, Set<Integer>> fragmentIdToScanNodeIds = Maps.newHashMap();
// Fragments already classified as colocate plans.
private final Set<Integer> colocateFragmentIds = new HashSet<>();
private final BucketShuffleJoinController bucketShuffleJoinController
        = new BucketShuffleJoinController(fragmentIdToScanNodeIds);
public class BackendExecState {
TExecPlanFragmentParams rpcParams;
PlanFragmentId fragmentId;
// True once the exec-plan RPC has been sent for this instance.
boolean initiated;
// True once the instance reported completion; guards late profile updates.
volatile boolean done;
boolean hasCanceled;
int profileFragmentId;
RuntimeProfile instanceProfile;
RuntimeProfile loadChannelProfile;
TNetworkAddress brpcAddress;
TNetworkAddress address;
Backend backend;
// Last missing-heartbeat time of the backend, captured at construction.
long lastMissingHeartbeatTime = -1;
TUniqueId instanceId;
public BackendExecState(PlanFragmentId fragmentId, int instanceId, int profileFragmentId,
TExecPlanFragmentParams rpcParams, Map<TNetworkAddress, Long> addressToBackendID,
RuntimeProfile loadChannelProfile) {
this.profileFragmentId = profileFragmentId;
this.fragmentId = fragmentId;
this.rpcParams = rpcParams;
this.initiated = false;
this.done = false;
FInstanceExecParam fi = fragmentExecParamsMap.get(fragmentId).instanceExecParams.get(instanceId);
this.instanceId = fi.instanceId;
this.address = fi.host;
this.backend = idToBackend.get(addressToBackendID.get(address));
this.brpcAddress = new TNetworkAddress(backend.getHost(), backend.getBrpcPort());
String name = "Instance " + DebugUtil.printId(fi.instanceId) + " (host=" + address + ")";
this.loadChannelProfile = loadChannelProfile;
this.instanceProfile = new RuntimeProfile(name);
this.hasCanceled = false;
this.lastMissingHeartbeatTime = backend.getLastMissingHeartbeatTime();
}
/**
* Some information common to all Fragments does not need to be sent repeatedly.
* Therefore, when we confirm that a certain BE has accepted the information,
* we will delete the information in the subsequent Fragment to avoid repeated sending.
* This information can be obtained from the cache of BE.
*/
public void unsetFields() {
this.rpcParams.unsetDescTbl();
this.rpcParams.unsetFileScanParams();
this.rpcParams.unsetCoord();
this.rpcParams.unsetQueryGlobals();
this.rpcParams.unsetResourceInfo();
this.rpcParams.setIsSimplifiedParam(true);
}
public synchronized boolean updateProfile(TReportExecStatusParams params) {
if (this.done) {
return false;
}
if (params.isSetProfile()) {
instanceProfile.update(params.profile);
}
if (params.isSetLoadChannelProfile()) {
loadChannelProfile.update(params.loadChannelProfile);
}
this.done = params.done;
if (statsErrorEstimator != null) {
statsErrorEstimator.updateExactReturnedRows(params);
}
return true;
}
public synchronized void printProfile(StringBuilder builder) {
this.instanceProfile.computeTimeInProfile();
this.instanceProfile.prettyPrint(builder, "");
}
public synchronized boolean cancelFragmentInstance(Types.PPlanFragmentCancelReason cancelReason) {
if (LOG.isDebugEnabled()) {
LOG.debug("cancelRemoteFragments initiated={} done={} hasCanceled={} backend: {},"
+ " fragment instance id={}, reason: {}",
this.initiated, this.done, this.hasCanceled, backend.getId(),
DebugUtil.printId(fragmentInstanceId()), cancelReason.name());
}
try {
if (!this.initiated) {
return false;
}
if (this.done) {
return false;
}
if (this.hasCanceled) {
return false;
}
Span span = ConnectContext.get() != null
? ConnectContext.get().getTracer().spanBuilder("cancelPlanFragmentAsync")
.setParent(Context.current()).setSpanKind(SpanKind.CLIENT).startSpan()
: Telemetry.getNoopSpan();
try (Scope scope = span.makeCurrent()) {
BackendServiceProxy.getInstance().cancelPlanFragmentAsync(brpcAddress,
fragmentInstanceId(), cancelReason);
} catch (RpcException e) {
span.recordException(e);
LOG.warn("cancel plan fragment get a exception, address={}:{}", brpcAddress.getHostname(),
brpcAddress.getPort());
SimpleScheduler.addToBlacklist(addressToBackendID.get(brpcAddress), e.getMessage());
} finally {
span.end();
}
this.hasCanceled = true;
} catch (Exception e) {
LOG.warn("catch a exception", e);
return false;
}
return true;
}
public synchronized boolean computeTimeInProfile(int maxFragmentId) {
if (this.profileFragmentId < 0 || this.profileFragmentId > maxFragmentId) {
LOG.warn("profileFragmentId {} should be in [0, {})", profileFragmentId, maxFragmentId);
return false;
}
instanceProfile.computeTimeInProfile();
return true;
}
public boolean isBackendStateHealthy() {
if (backend.getLastMissingHeartbeatTime() > lastMissingHeartbeatTime && !backend.isAlive()) {
LOG.warn("backend {} is down while joining the coordinator. job id: {}",
backend.getId(), jobId);
return false;
}
return true;
}
public FragmentInstanceInfo buildFragmentInstanceInfo() {
return new QueryStatisticsItem.FragmentInstanceInfo.Builder().instanceId(fragmentInstanceId())
.fragmentId(String.valueOf(fragmentId)).address(this.address).build();
}
private TUniqueId fragmentInstanceId() {
return this.rpcParams.params.getFragmentInstanceId();
}
}
public class PipelineExecContext {
TPipelineFragmentParams rpcParams;
PlanFragmentId fragmentId;
boolean initiated;
volatile boolean done;
boolean hasCanceled;
Map<TUniqueId, RuntimeProfile> fragmentInstancesMap;
RuntimeProfile loadChannelProfile;
int cancelProgress = 0;
int profileFragmentId;
TNetworkAddress brpcAddress;
TNetworkAddress address;
Backend backend;
long lastMissingHeartbeatTime = -1;
long profileReportProgress = 0;
private final int numInstances;
public PipelineExecContext(PlanFragmentId fragmentId, int profileFragmentId,
TPipelineFragmentParams rpcParams, Long backendId,
Map<TUniqueId, RuntimeProfile> fragmentInstancesMap,
RuntimeProfile loadChannelProfile) {
this.profileFragmentId = profileFragmentId;
this.fragmentId = fragmentId;
this.rpcParams = rpcParams;
this.numInstances = rpcParams.local_params.size();
this.fragmentInstancesMap = fragmentInstancesMap;
this.loadChannelProfile = loadChannelProfile;
this.initiated = false;
this.done = false;
this.backend = idToBackend.get(backendId);
this.address = new TNetworkAddress(backend.getHost(), backend.getBePort());
this.brpcAddress = new TNetworkAddress(backend.getHost(), backend.getBrpcPort());
this.hasCanceled = false;
this.lastMissingHeartbeatTime = backend.getLastMissingHeartbeatTime();
}
/**
* Some information common to all Fragments does not need to be sent repeatedly.
* Therefore, when we confirm that a certain BE has accepted the information,
* we will delete the information in the subsequent Fragment to avoid repeated sending.
* This information can be obtained from the cache of BE.
*/
public void unsetFields() {
this.rpcParams.unsetDescTbl();
this.rpcParams.unsetFileScanParams();
this.rpcParams.unsetCoord();
this.rpcParams.unsetQueryGlobals();
this.rpcParams.unsetResourceInfo();
this.rpcParams.setIsSimplifiedParam(true);
}
public synchronized boolean updateProfile(TReportExecStatusParams params) {
RuntimeProfile profile = fragmentInstancesMap.get(params.fragment_instance_id);
if (params.done && profile.getIsDone()) {
return false;
}
if (params.isSetProfile()) {
profile.update(params.profile);
}
if (params.isSetLoadChannelProfile()) {
loadChannelProfile.update(params.loadChannelProfile);
}
if (params.done) {
profile.setIsDone(true);
profileReportProgress++;
}
if (profileReportProgress == numInstances) {
this.done = true;
}
return true;
}
public synchronized void printProfile(StringBuilder builder) {
this.fragmentInstancesMap.values().stream().forEach(p -> {
p.computeTimeInProfile();
p.prettyPrint(builder, "");
});
}
public synchronized boolean cancelFragmentInstance(Types.PPlanFragmentCancelReason cancelReason) {
if (!this.initiated) {
return false;
}
if (this.done) {
return false;
}
if (this.hasCanceled) {
return false;
}
for (TPipelineInstanceParams localParam : rpcParams.local_params) {
if (LOG.isDebugEnabled()) {
LOG.debug("cancelRemoteFragments initiated={} done={} hasCanceled={} backend: {},"
+ " fragment instance id={}, reason: {}",
this.initiated, this.done, this.hasCanceled, backend.getId(),
DebugUtil.printId(localParam.fragment_instance_id), cancelReason.name());
}
RuntimeProfile profile = fragmentInstancesMap.get(localParam.fragment_instance_id);
if (profile.getIsDone() || profile.getIsCancel()) {
continue;
}
this.hasCanceled = true;
try {
Span span = ConnectContext.get() != null
? ConnectContext.get().getTracer().spanBuilder("cancelPlanFragmentAsync")
.setParent(Context.current()).setSpanKind(SpanKind.CLIENT).startSpan()
: Telemetry.getNoopSpan();
try (Scope scope = span.makeCurrent()) {
BackendServiceProxy.getInstance().cancelPlanFragmentAsync(brpcAddress,
localParam.fragment_instance_id, cancelReason);
} catch (RpcException e) {
span.recordException(e);
LOG.warn("cancel plan fragment get a exception, address={}:{}", brpcAddress.getHostname(),
brpcAddress.getPort());
SimpleScheduler.addToBlacklist(addressToBackendID.get(brpcAddress), e.getMessage());
} finally {
span.end();
}
} catch (Exception e) {
LOG.warn("catch a exception", e);
return false;
}
}
if (!this.hasCanceled) {
return false;
}
for (int i = 0; i < this.numInstances; i++) {
fragmentInstancesMap.get(rpcParams.local_params.get(i).fragment_instance_id).setIsCancel(true);
}
cancelProgress = numInstances;
return true;
}
public synchronized boolean computeTimeInProfile(int maxFragmentId) {
if (this.profileFragmentId < 0 || this.profileFragmentId > maxFragmentId) {
LOG.warn("profileFragmentId {} should be in [0, {})", profileFragmentId, maxFragmentId);
return false;
}
return true;
}
public boolean isBackendStateHealthy() {
if (backend.getLastMissingHeartbeatTime() > lastMissingHeartbeatTime && !backend.isAlive()) {
LOG.warn("backend {} is down while joining the coordinator. job id: {}",
backend.getId(), jobId);
return false;
}
return true;
}
public List<QueryStatisticsItem.FragmentInstanceInfo> buildFragmentInstanceInfo() {
return this.rpcParams.local_params.stream().map(it -> new FragmentInstanceInfo.Builder()
.instanceId(it.fragment_instance_id).fragmentId(String.valueOf(fragmentId))
.address(this.address).build()).collect(Collectors.toList());
}
}
/**
* A set of BackendExecState for same Backend
*/
public class BackendExecStates {
long beId;
TNetworkAddress brpcAddr;
List<BackendExecState> states = Lists.newArrayList();
boolean twoPhaseExecution = false;
ScopedSpan scopedSpan = new ScopedSpan();
public BackendExecStates(long beId, TNetworkAddress brpcAddr, boolean twoPhaseExecution) {
this.beId = beId;
this.brpcAddr = brpcAddr;
this.twoPhaseExecution = twoPhaseExecution;
}
public void addState(BackendExecState state) {
this.states.add(state);
}
/**
* The BackendExecState in states are all send to the same BE.
* So only the first BackendExecState need to carry some common fields, such as DescriptorTbl,
* the other BackendExecState does not need those fields. Unset them to reduce size.
*/
public void unsetFields() {
boolean first = true;
for (BackendExecState state : states) {
if (first) {
first = false;
continue;
}
state.unsetFields();
}
}
public Future<InternalService.PExecPlanFragmentResult> execRemoteFragmentsAsync(BackendServiceProxy proxy)
throws TException {
try {
TExecPlanFragmentParamsList paramsList = new TExecPlanFragmentParamsList();
for (BackendExecState state : states) {
state.initiated = true;
paramsList.addToParamsList(state.rpcParams);
}
return proxy.execPlanFragmentsAsync(brpcAddr, paramsList, twoPhaseExecution);
} catch (RpcException e) {
return futureWithException(e);
}
}
public Future<InternalService.PExecPlanFragmentResult> execPlanFragmentStartAsync(BackendServiceProxy proxy)
throws TException {
try {
PExecPlanFragmentStartRequest.Builder builder = PExecPlanFragmentStartRequest.newBuilder();
PUniqueId qid = PUniqueId.newBuilder().setHi(queryId.hi).setLo(queryId.lo).build();
builder.setQueryId(qid);
return proxy.execPlanFragmentStartAsync(brpcAddr, builder.build());
} catch (RpcException e) {
return futureWithException(e);
}
}
@NotNull
private Future<PExecPlanFragmentResult> futureWithException(RpcException e) {
return new Future<PExecPlanFragmentResult>() {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return true;
}
@Override
public PExecPlanFragmentResult get() {
PExecPlanFragmentResult result = PExecPlanFragmentResult.newBuilder().setStatus(
Types.PStatus.newBuilder().addErrorMsgs(e.getMessage())
.setStatusCode(TStatusCode.THRIFT_RPC_ERROR.getValue()).build()).build();
return result;
}
@Override
public PExecPlanFragmentResult get(long timeout, TimeUnit unit) {
return get();
}
};
}
}
public class PipelineExecContexts {
long beId;
TNetworkAddress brpcAddr;
List<PipelineExecContext> ctxs = Lists.newArrayList();
boolean twoPhaseExecution = false;
ScopedSpan scopedSpan = new ScopedSpan();
int instanceNumber;
public PipelineExecContexts(long beId, TNetworkAddress brpcAddr, boolean twoPhaseExecution,
int instanceNumber) {
this.beId = beId;
this.brpcAddr = brpcAddr;
this.twoPhaseExecution = twoPhaseExecution;
this.instanceNumber = instanceNumber;
}
public void addContext(PipelineExecContext ctx) {
this.ctxs.add(ctx);
}
public int getInstanceNumber() {
return instanceNumber;
}
/**
* The BackendExecState in states are all send to the same BE.
* So only the first BackendExecState need to carry some common fields, such as DescriptorTbl,
* the other BackendExecState does not need those fields. Unset them to reduce size.
*/
public void unsetFields() {
boolean first = true;
for (PipelineExecContext ctx : ctxs) {
if (first) {
first = false;
continue;
}
ctx.unsetFields();
}
}
public Future<InternalService.PExecPlanFragmentResult> execRemoteFragmentsAsync(BackendServiceProxy proxy)
throws TException {
try {
TPipelineFragmentParamsList paramsList = new TPipelineFragmentParamsList();
for (PipelineExecContext cts : ctxs) {
cts.initiated = true;
paramsList.addToParamsList(cts.rpcParams);
}
return proxy.execPlanFragmentsAsync(brpcAddr, paramsList, twoPhaseExecution);
} catch (RpcException e) {
return futureWithException(e);
}
}
public Future<InternalService.PExecPlanFragmentResult> execPlanFragmentStartAsync(BackendServiceProxy proxy)
throws TException {
try {
PExecPlanFragmentStartRequest.Builder builder = PExecPlanFragmentStartRequest.newBuilder();
PUniqueId qid = PUniqueId.newBuilder().setHi(queryId.hi).setLo(queryId.lo).build();
builder.setQueryId(qid);
return proxy.execPlanFragmentStartAsync(brpcAddr, builder.build());
} catch (RpcException e) {
return futureWithException(e);
}
}
@NotNull
private Future<PExecPlanFragmentResult> futureWithException(RpcException e) {
return new Future<PExecPlanFragmentResult>() {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return true;
}
@Override
public PExecPlanFragmentResult get() {
PExecPlanFragmentResult result = PExecPlanFragmentResult.newBuilder().setStatus(
Types.PStatus.newBuilder().addErrorMsgs(e.getMessage())
.setStatusCode(TStatusCode.THRIFT_RPC_ERROR.getValue()).build()).build();
return result;
}
@Override
public PExecPlanFragmentResult get(long timeout, TimeUnit unit) {
return get();
}
};
}
}
protected class FragmentExecParams {
public PlanFragment fragment;
public List<TPlanFragmentDestination> destinations = Lists.newArrayList();
public Map<Integer, Integer> perExchNumSenders = Maps.newHashMap();
public List<PlanFragmentId> inputFragments = Lists.newArrayList();
public List<FInstanceExecParam> instanceExecParams = Lists.newArrayList();
public FragmentScanRangeAssignment scanRangeAssignment = new FragmentScanRangeAssignment();
public FragmentExecParams(PlanFragment fragment) {
this.fragment = fragment;
}
List<TExecPlanFragmentParams> toThrift(int backendNum) {
List<TExecPlanFragmentParams> paramsList = Lists.newArrayList();
for (int i = 0; i < instanceExecParams.size(); ++i) {
final FInstanceExecParam instanceExecParam = instanceExecParams.get(i);
TExecPlanFragmentParams params = new TExecPlanFragmentParams();
params.setProtocolVersion(PaloInternalServiceVersion.V1);
params.setFragment(fragment.toThrift());
params.setDescTbl(descTable);
params.setParams(new TPlanFragmentExecParams());
params.setBuildHashTableForBroadcastJoin(instanceExecParam.buildHashTableForBroadcastJoin);
params.params.setQueryId(queryId);
params.params.setFragmentInstanceId(instanceExecParam.instanceId);
Map<Integer, List<TScanRangeParams>> scanRanges = instanceExecParam.perNodeScanRanges;
if (scanRanges == null) {
scanRanges = Maps.newHashMap();
}
params.params.setPerNodeScanRanges(scanRanges);
params.params.setPerExchNumSenders(perExchNumSenders);
params.params.setDestinations(destinations);
params.params.setSenderId(i);
params.params.setNumSenders(instanceExecParams.size());
params.setCoord(coordAddress);
params.setBackendNum(backendNum++);
params.setQueryGlobals(queryGlobals);
params.setQueryOptions(queryOptions);
params.query_options.setEnablePipelineEngine(false);
params.params.setSendQueryStatisticsWithEveryBatch(
fragment.isTransferQueryStatisticsWithEveryBatch());
params.params.setRuntimeFilterParams(new TRuntimeFilterParams());
params.params.runtime_filter_params.setRuntimeFilterMergeAddr(runtimeFilterMergeAddr);
if (instanceExecParam.instanceId.equals(runtimeFilterMergeInstanceId)) {
for (RuntimeFilter rf : assignedRuntimeFilters) {
if (!ridToTargetParam.containsKey(rf.getFilterId())) {
continue;
}
List<FRuntimeFilterTargetParam> fParams = ridToTargetParam.get(rf.getFilterId());
rf.computeUseRemoteRfOpt();
if (rf.getUseRemoteRfOpt()) {
Map<TNetworkAddress, TRuntimeFilterTargetParamsV2> targetParamsV2 = new HashMap<>();
for (FRuntimeFilterTargetParam targetParam : fParams) {
if (targetParamsV2.containsKey(targetParam.targetFragmentInstanceAddr)) {
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
.add(targetParam.targetFragmentInstanceId);
} else {
targetParamsV2.put(targetParam.targetFragmentInstanceAddr,
new TRuntimeFilterTargetParamsV2());
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_addr
= targetParam.targetFragmentInstanceAddr;
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
= new ArrayList<>();
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
.add(targetParam.targetFragmentInstanceId);
}
}
params.params.runtime_filter_params.putToRidToTargetParamv2(rf.getFilterId().asInt(),
new ArrayList<TRuntimeFilterTargetParamsV2>(targetParamsV2.values()));
} else {
List<TRuntimeFilterTargetParams> targetParams = Lists.newArrayList();
for (FRuntimeFilterTargetParam targetParam : fParams) {
targetParams.add(new TRuntimeFilterTargetParams(targetParam.targetFragmentInstanceId,
targetParam.targetFragmentInstanceAddr));
}
params.params.runtime_filter_params.putToRidToTargetParam(rf.getFilterId().asInt(),
targetParams);
}
}
for (Map.Entry<RuntimeFilterId, Integer> entry : ridToBuilderNum.entrySet()) {
params.params.runtime_filter_params.putToRuntimeFilterBuilderNum(
entry.getKey().asInt(), entry.getValue());
}
for (RuntimeFilter rf : assignedRuntimeFilters) {
params.params.runtime_filter_params.putToRidToRuntimeFilter(
rf.getFilterId().asInt(), rf.toThrift());
}
}
params.setFileScanParams(fileScanRangeParamsMap);
paramsList.add(params);
}
return paramsList;
}
Map<TNetworkAddress, TPipelineFragmentParams> toTPipelineParams(int backendNum) {
long memLimit = queryOptions.getMemLimit();
if (colocateFragmentIds.contains(fragment.getFragmentId().asInt())) {
int rate = Math.min(Config.query_colocate_join_memory_limit_penalty_factor, instanceExecParams.size());
memLimit = queryOptions.getMemLimit() / rate;
}
Map<TNetworkAddress, TPipelineFragmentParams> res = new HashMap();
for (int i = 0; i < instanceExecParams.size(); ++i) {
final FInstanceExecParam instanceExecParam = instanceExecParams.get(i);
if (!res.containsKey(instanceExecParam.host)) {
TPipelineFragmentParams params = new TPipelineFragmentParams();
params.setProtocolVersion(PaloInternalServiceVersion.V1);
params.setDescTbl(descTable);
params.setQueryId(queryId);
params.setPerExchNumSenders(perExchNumSenders);
params.setDestinations(destinations);
params.setNumSenders(instanceExecParams.size());
params.setCoord(coordAddress);
params.setQueryGlobals(queryGlobals);
params.setQueryOptions(queryOptions);
params.query_options.setEnablePipelineEngine(true);
params.query_options.setMemLimit(memLimit);
params.setSendQueryStatisticsWithEveryBatch(
fragment.isTransferQueryStatisticsWithEveryBatch());
params.setFragment(fragment.toThrift());
params.setLocalParams(Lists.newArrayList());
if (tWorkloadGroups != null) {
params.setWorkloadGroups(tWorkloadGroups);
}
params.setFileScanParams(fileScanRangeParamsMap);
res.put(instanceExecParam.host, params);
}
TPipelineFragmentParams params = res.get(instanceExecParam.host);
TPipelineInstanceParams localParams = new TPipelineInstanceParams();
localParams.setBuildHashTableForBroadcastJoin(instanceExecParam.buildHashTableForBroadcastJoin);
localParams.setFragmentInstanceId(instanceExecParam.instanceId);
Map<Integer, List<TScanRangeParams>> scanRanges = instanceExecParam.perNodeScanRanges;
Map<Integer, Boolean> perNodeSharedScans = instanceExecParam.perNodeSharedScans;
if (scanRanges == null) {
scanRanges = Maps.newHashMap();
perNodeSharedScans = Maps.newHashMap();
}
localParams.setPerNodeScanRanges(scanRanges);
localParams.setPerNodeSharedScans(perNodeSharedScans);
localParams.setSenderId(i);
localParams.setBackendNum(backendNum++);
localParams.setRuntimeFilterParams(new TRuntimeFilterParams());
localParams.runtime_filter_params.setRuntimeFilterMergeAddr(runtimeFilterMergeAddr);
if (instanceExecParam.instanceId.equals(runtimeFilterMergeInstanceId)) {
for (RuntimeFilter rf : assignedRuntimeFilters) {
if (!ridToTargetParam.containsKey(rf.getFilterId())) {
continue;
}
List<FRuntimeFilterTargetParam> fParams = ridToTargetParam.get(rf.getFilterId());
rf.computeUseRemoteRfOpt();
if (rf.getUseRemoteRfOpt()) {
Map<TNetworkAddress, TRuntimeFilterTargetParamsV2> targetParamsV2 = new HashMap<>();
for (FRuntimeFilterTargetParam targetParam : fParams) {
if (targetParamsV2.containsKey(targetParam.targetFragmentInstanceAddr)) {
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
.add(targetParam.targetFragmentInstanceId);
} else {
targetParamsV2.put(targetParam.targetFragmentInstanceAddr,
new TRuntimeFilterTargetParamsV2());
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_addr
= targetParam.targetFragmentInstanceAddr;
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
= new ArrayList<>();
targetParamsV2.get(targetParam.targetFragmentInstanceAddr)
.target_fragment_instance_ids
.add(targetParam.targetFragmentInstanceId);
}
}
localParams.runtime_filter_params.putToRidToTargetParamv2(rf.getFilterId().asInt(),
new ArrayList<TRuntimeFilterTargetParamsV2>(targetParamsV2.values()));
} else {
List<TRuntimeFilterTargetParams> targetParams = Lists.newArrayList();
for (FRuntimeFilterTargetParam targetParam : fParams) {
targetParams.add(new TRuntimeFilterTargetParams(targetParam.targetFragmentInstanceId,
targetParam.targetFragmentInstanceAddr));
}
localParams.runtime_filter_params.putToRidToTargetParam(rf.getFilterId().asInt(),
targetParams);
}
}
for (Map.Entry<RuntimeFilterId, Integer> entry : ridToBuilderNum.entrySet()) {
localParams.runtime_filter_params.putToRuntimeFilterBuilderNum(
entry.getKey().asInt(), entry.getValue());
}
for (RuntimeFilter rf : assignedRuntimeFilters) {
localParams.runtime_filter_params.putToRidToRuntimeFilter(
rf.getFilterId().asInt(), rf.toThrift());
}
}
params.getLocalParams().add(localParams);
}
return res;
}
public void appendScanRange(StringBuilder sb, List<TScanRangeParams> params) {
sb.append("range=[");
int idx = 0;
for (TScanRangeParams range : params) {
TPaloScanRange paloScanRange = range.getScanRange().getPaloScanRange();
if (paloScanRange != null) {
if (idx++ != 0) {
sb.append(",");
}
sb.append("{tid=").append(paloScanRange.getTabletId())
.append(",ver=").append(paloScanRange.getVersion()).append("}");
}
TEsScanRange esScanRange = range.getScanRange().getEsScanRange();
if (esScanRange != null) {
sb.append("{ index=").append(esScanRange.getIndex())
.append(", shardid=").append(esScanRange.getShardId())
.append("}");
}
}
sb.append("]");
}
public void appendTo(StringBuilder sb) {
sb.append("{plan=");
fragment.getPlanRoot().appendTrace(sb);
sb.append(",instance=[");
for (int i = 0; i < instanceExecParams.size(); ++i) {
if (i != 0) {
sb.append(",");
}
TNetworkAddress address = instanceExecParams.get(i).host;
Map<Integer, List<TScanRangeParams>> scanRanges =
scanRangeAssignment.get(address);
sb.append("{");
sb.append("id=").append(DebugUtil.printId(instanceExecParams.get(i).instanceId));
sb.append(",host=").append(instanceExecParams.get(i).host);
if (scanRanges == null) {
sb.append("}");
continue;
}
sb.append(",range=[");
int eIdx = 0;
for (Map.Entry<Integer, List<TScanRangeParams>> entry : scanRanges.entrySet()) {
if (eIdx++ != 0) {
sb.append(",");
}
sb.append("id").append(entry.getKey()).append(",");
appendScanRange(sb, entry.getValue());
}
sb.append("]");
sb.append("}");
}
sb.append("]");
sb.append("}");
}
} |